hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
719dfe0474b8ecd5fc17e742fde14046441220ad
| 37
|
py
|
Python
|
jesse/exchanges/__init__.py
|
noenfugler/jesse
|
217a3168620a755c1a9576d9deb27105db7dccf8
|
[
"MIT"
] | 3,999
|
2018-11-09T10:38:51.000Z
|
2022-03-31T12:29:12.000Z
|
jesse/exchanges/__init__.py
|
noenfugler/jesse
|
217a3168620a755c1a9576d9deb27105db7dccf8
|
[
"MIT"
] | 172
|
2020-04-16T16:19:08.000Z
|
2022-03-28T13:28:55.000Z
|
jesse/exchanges/__init__.py
|
noenfugler/jesse
|
217a3168620a755c1a9576d9deb27105db7dccf8
|
[
"MIT"
] | 495
|
2019-03-01T21:48:53.000Z
|
2022-03-30T15:35:19.000Z
|
from .sandbox.Sandbox import Sandbox
| 18.5
| 36
| 0.837838
| 5
| 37
| 6.2
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 37
| 1
| 37
| 37
| 0.939394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
71b6541ce558469361644bfb59d7e23175c355db
| 79
|
py
|
Python
|
double_speed.py
|
astrofitz/tokyo
|
78f1aa9b2ce78d453e403b8d00d685ecda5f3c51
|
[
"BSD-3-Clause"
] | 16
|
2015-01-13T21:22:35.000Z
|
2020-01-20T23:44:28.000Z
|
double_speed.py
|
astrofitz/tokyo
|
78f1aa9b2ce78d453e403b8d00d685ecda5f3c51
|
[
"BSD-3-Clause"
] | null | null | null |
double_speed.py
|
astrofitz/tokyo
|
78f1aa9b2ce78d453e403b8d00d685ecda5f3c51
|
[
"BSD-3-Clause"
] | 3
|
2016-06-18T13:55:15.000Z
|
2021-09-30T18:51:02.000Z
|
#!/usr/bin/env python
import double_speed # test runs automatically on import
| 19.75
| 55
| 0.78481
| 12
| 79
| 5.083333
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139241
| 79
| 3
| 56
| 26.333333
| 0.897059
| 0.683544
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
71b799647d72fd3f2368a9d4748b53b9cb835706
| 163
|
py
|
Python
|
0_PythonFundamental/1_05_sequencelist.py
|
hnwarid/DQLabAcademy
|
e03d82f97536ae103b6abc65db0ae16520fb68c7
|
[
"MIT"
] | null | null | null |
0_PythonFundamental/1_05_sequencelist.py
|
hnwarid/DQLabAcademy
|
e03d82f97536ae103b6abc65db0ae16520fb68c7
|
[
"MIT"
] | null | null | null |
0_PythonFundamental/1_05_sequencelist.py
|
hnwarid/DQLabAcademy
|
e03d82f97536ae103b6abc65db0ae16520fb68c7
|
[
"MIT"
] | null | null | null |
contoh_list = [1, 'dua', 3, 4.0, 5]
print(contoh_list[0])
print(contoh_list[3])
contoh_list = [1, 'dua', 3, 4.0, 5]
contoh_list[3] = 'empat'
print(contoh_list[3])
| 23.285714
| 35
| 0.650307
| 32
| 163
| 3.125
| 0.3125
| 0.6
| 0.45
| 0.28
| 0.36
| 0.36
| 0.36
| 0.36
| 0
| 0
| 0
| 0.097902
| 0.122699
| 163
| 6
| 36
| 27.166667
| 0.601399
| 0
| 0
| 0.666667
| 0
| 0
| 0.067485
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
e0c1afa1540a5ded49f6b3dd3da6d5ea56258a74
| 4,741
|
py
|
Python
|
RL/model.py
|
namabilly/iLOCuS
|
761fe4162a9fb551f43d887c3ae9d448c3cc8c14
|
[
"MIT"
] | 7
|
2020-05-28T02:16:22.000Z
|
2021-12-20T12:20:47.000Z
|
RL/model.py
|
namabilly/iLOCuS
|
761fe4162a9fb551f43d887c3ae9d448c3cc8c14
|
[
"MIT"
] | null | null | null |
RL/model.py
|
namabilly/iLOCuS
|
761fe4162a9fb551f43d887c3ae9d448c3cc8c14
|
[
"MIT"
] | 2
|
2020-05-18T03:44:34.000Z
|
2020-06-08T12:58:55.000Z
|
import tensorflow as tf
from keras.models import Model
from keras.layers import (Activation, Convolution2D, Dense, Flatten, Input, Dropout, Conv2DTranspose,
Lambda, Concatenate, Reshape, LeakyReLU, ReLU)
# def create_model(look_back_steps, input_shape, num_actions, model_name='q_network'):
# with tf.name_scope(model_name):
# input_img = Input(shape = (look_back_steps + 3,) + input_shape)
# # input_loc = Input(shape = [1] )
# # input_loc = Lambda(lambda x: expand_dims(x, axis=1))(input_loc)
# # print(input_loc.shape)
# # Input shape = (batch, look_back_steps + 5, 84, 84)
# # input_loc = input_img[:,-1,0,0]
#
# # embeddings = []
# # for i in range(look_back_steps + 4):
# # ch_i = Lambda(lambda x: x[:,i,:,:])(input_img)
# # embeddings.append(embedding(ch_i, input_shape, 128, 'embed_'+str(i)))
#
# # embed_feat = Concatenate(axis=1)(embeddings)
# deconv1 = Conv2DTranspose(32, (5, 5), strides=(2, 2),
# input_shape=[look_back_steps + 4,input_shape[0],input_shape[1]],
# data_format='channels_first')(input_img)
# deconv1 = LeakyReLU(alpha=0.2)(deconv1)
# deconv2 = Conv2DTranspose(128, (5, 5), strides=(2, 2),
# input_shape=[look_back_steps + 4, input_shape[0], input_shape[1]],
# data_format='channels_first')(deconv1)
# deconv2 = LeakyReLU(alpha=0.2)(deconv2)
# conv1 = Convolution2D(64, (5,5), data_format='channels_first', strides=(2,2), padding='valid')(deconv2)
# conv1 = LeakyReLU(alpha=0.2)(conv1)
# # (batch, 32, 5, 5)
#
# conv2 = Convolution2D(128, (3,3), data_format='channels_first', strides=(1,1), padding='valid')(conv1)
# conv2 = LeakyReLU(alpha=0.2)(conv2)
# # (batch, 128, 3, 3)
#
# flat = Flatten()(conv2)
# full = Dense(250)(flat)
# # full = LeakyReLU(alpha=0.2)(full)
# #
# # embed_feat = Concatenate(axis=1)([full, input_loc])
# # print(embed_feat.shape)
# # full = Dense(num_actions)(embed_feat) # output layer has node number = num_actions
# out = LeakyReLU(alpha=0.2)(full)
# model = Model(input = input_img, output = out)
# return model
def create_model(look_back_steps, input_shape, num_actions, model_name='q_network'):
with tf.name_scope(model_name):
input_img = Input(shape=(look_back_steps + 3,) + input_shape)
# Input shape = (batch, look_back_steps + 5, 84, 84)
# embeddings = []
# for i in range(look_back_steps + 4):
# ch_i = Lambda(lambda x: x[:,i,:,:])(input_img)
# embeddings.append(embedding(ch_i, input_shape, 128, 'embed_'+str(i)))
# embed_feat = Concatenate(axis=1)(embeddings)
# deconv1 = Conv2DTranspose(32, (5, 5), strides=(2, 2),
# input_shape=[look_back_steps + 4, input_shape[0], input_shape[1]],
# data_format='channels_first')(input_img)
# deconv1 = LeakyReLU(alpha=0.2)(deconv1)
# deconv2 = Conv2DTranspose(128, (5, 5), strides=(2, 2),
# input_shape=[look_back_steps + 4, input_shape[0], input_shape[1]],
# data_format='channels_first')(deconv1)
# deconv2 = LeakyReLU(alpha=0.2)(deconv2)
# conv1 = Convolution2D(64, (5, 5), data_format='channels_first', strides=(2, 2), padding='valid')(deconv2)
# conv1 = LeakyReLU(alpha=0.2)(conv1)
# # (batch, 32, 5, 5)
#
# conv2 = Convolution2D(128, (3, 3), data_format='channels_first', strides=(1, 1), padding='valid')(conv1)
# conv2 = LeakyReLU(alpha=0.2)(conv2)
# (batch, 128, 3, 3)
flat = Flatten()(input_img)
full = Dense(1280)(flat)
full = LeakyReLU(alpha=0.2)(full)
full = Dense(2560)(full)
full = LeakyReLU(alpha=0.2)(full)
out = Dense(25*num_actions)(full) # output layer has node number = num_actions
# out = LeakyReLU(alpha=0.2)(full)
model = Model(input=input_img, output=out)
return model
def embedding(input_placeholder, input_shape, embedding_dim, layer_name):
with tf.name_scope(layer_name):
# input_placeholder shape: (batch, 1, 15, 15)
reshaped = Reshape(target_shape=(1,)+input_shape)(input_placeholder)
conv1 = Convolution2D(4, (3,3), data_format='channels_first', strides=(1,1), padding='valid')(reshaped)
conv1 = LeakyReLU(alpha=0.1)(conv1)
# conv1 shape: (batch, 4, 7, 7)
return conv1
| 50.43617
| 115
| 0.581945
| 593
| 4,741
| 4.460371
| 0.158516
| 0.094518
| 0.079395
| 0.078639
| 0.757656
| 0.748204
| 0.73913
| 0.717958
| 0.717958
| 0.717958
| 0
| 0.061236
| 0.269774
| 4,741
| 93
| 116
| 50.978495
| 0.702773
| 0.697743
| 0
| 0.095238
| 0
| 0
| 0.020787
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.095238
| false
| 0
| 0.142857
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e0d379c75ed381a84be78bd4567f368f6816e331
| 80
|
py
|
Python
|
ConfigEnv/__init__.py
|
Nydareld/ConfigEnv
|
c4894d235d2f005b65fe2e5153d5acee2c7a65e4
|
[
"MIT"
] | null | null | null |
ConfigEnv/__init__.py
|
Nydareld/ConfigEnv
|
c4894d235d2f005b65fe2e5153d5acee2c7a65e4
|
[
"MIT"
] | 3
|
2018-09-12T13:04:56.000Z
|
2018-09-24T15:09:31.000Z
|
ConfigEnv/__init__.py
|
Nydareld/ConfigEnv
|
c4894d235d2f005b65fe2e5153d5acee2c7a65e4
|
[
"MIT"
] | null | null | null |
from .Config import Config
from .FileFormatException import FileFormatException
| 26.666667
| 52
| 0.875
| 8
| 80
| 8.75
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 80
| 2
| 53
| 40
| 0.972222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e0da801f48ca1480dad3542d71af51e5dccc22f7
| 68
|
py
|
Python
|
models/stylegan2/__init__.py
|
Aitical/ADspeech2face
|
2e811ff8cc7333729f4b77d1b1067296253e8e38
|
[
"MIT"
] | 1
|
2022-01-27T14:19:04.000Z
|
2022-01-27T14:19:04.000Z
|
models/stylegan2/__init__.py
|
Aitical/ADspeech2face
|
2e811ff8cc7333729f4b77d1b1067296253e8e38
|
[
"MIT"
] | null | null | null |
models/stylegan2/__init__.py
|
Aitical/ADspeech2face
|
2e811ff8cc7333729f4b77d1b1067296253e8e38
|
[
"MIT"
] | null | null | null |
from .model import Generator, Discriminator, EqualLinear, StyledConv
| 68
| 68
| 0.852941
| 7
| 68
| 8.285714
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 68
| 1
| 68
| 68
| 0.935484
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e0e8b526de362112514227685ae4d9a26a9a2423
| 817
|
py
|
Python
|
objectModel/Python/cdm/utilities/symbol_set.py
|
aaron-emde/CDM
|
9472e9c7694821ac4a9bbe608557d2e65aabc73e
|
[
"CC-BY-4.0",
"MIT"
] | null | null | null |
objectModel/Python/cdm/utilities/symbol_set.py
|
aaron-emde/CDM
|
9472e9c7694821ac4a9bbe608557d2e65aabc73e
|
[
"CC-BY-4.0",
"MIT"
] | 3
|
2021-05-11T23:57:12.000Z
|
2021-08-04T05:03:05.000Z
|
objectModel/Python/cdm/utilities/symbol_set.py
|
aaron-emde/CDM
|
9472e9c7694821ac4a9bbe608557d2e65aabc73e
|
[
"CC-BY-4.0",
"MIT"
] | null | null | null |
# ----------------------------------------------------------------------
# Copyright (c) Microsoft Corporation.
# All rights reserved.
# ----------------------------------------------------------------------
# TODO: Consider just inheriting from Python's set type -MPL
class SymbolSet:
def __init__(self):
self._symbol_set_collection = set()
@property
def size(self):
return len(self._symbol_set_collection)
def add(self, new_symbol):
self._symbol_set_collection.add(new_symbol)
def merge(self, sym_set):
if sym_set is not None:
self._symbol_set_collection = self._symbol_set_collection.union(sym_set)
def copy(self):
return self._symbol_set_collection.copy()
def __iter__(self):
return iter(self._symbol_set_collection)
| 28.172414
| 84
| 0.567931
| 89
| 817
| 4.831461
| 0.438202
| 0.162791
| 0.211628
| 0.374419
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.188494
| 817
| 28
| 85
| 29.178571
| 0.648567
| 0.315789
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035714
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.2
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
461623f42e3e08d7dbd58a3be3826f30ec7c47c1
| 96
|
py
|
Python
|
Codewars/CorrectTheMistakesOfTheCharacterRecognitionSoftware.py
|
SelvorWhim/competitive
|
b9daaf21920d6f7669dc0c525e903949f4e33b62
|
[
"Unlicense"
] | null | null | null |
Codewars/CorrectTheMistakesOfTheCharacterRecognitionSoftware.py
|
SelvorWhim/competitive
|
b9daaf21920d6f7669dc0c525e903949f4e33b62
|
[
"Unlicense"
] | null | null | null |
Codewars/CorrectTheMistakesOfTheCharacterRecognitionSoftware.py
|
SelvorWhim/competitive
|
b9daaf21920d6f7669dc0c525e903949f4e33b62
|
[
"Unlicense"
] | null | null | null |
fix = {'5':'S', '0':'O', '1':'I'}
def correct(s):
return "".join(fix.get(c, c) for c in s)
| 19.2
| 44
| 0.46875
| 20
| 96
| 2.25
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038961
| 0.197917
| 96
| 4
| 45
| 24
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
4616c0fc536476a8c3ef78110e7722ed54a7dd53
| 37
|
py
|
Python
|
hrm_api/ideas/factories/generators/__init__.py
|
unknowncoder05/HRM
|
2a0ad62373fdaefafe533727b2d586d8f6327e87
|
[
"MIT"
] | null | null | null |
hrm_api/ideas/factories/generators/__init__.py
|
unknowncoder05/HRM
|
2a0ad62373fdaefafe533727b2d586d8f6327e87
|
[
"MIT"
] | null | null | null |
hrm_api/ideas/factories/generators/__init__.py
|
unknowncoder05/HRM
|
2a0ad62373fdaefafe533727b2d586d8f6327e87
|
[
"MIT"
] | null | null | null |
from .full_idea import idea_generator
| 37
| 37
| 0.891892
| 6
| 37
| 5.166667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081081
| 37
| 1
| 37
| 37
| 0.911765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
1cd9230c6f186974f61e9420f80ad9931b9775f9
| 2,367
|
py
|
Python
|
tests/elements/cmake.py
|
gtristan/buildstream-plugins
|
96206318b2cade5329a64b0f15b362ed57222086
|
[
"Apache-2.0"
] | null | null | null |
tests/elements/cmake.py
|
gtristan/buildstream-plugins
|
96206318b2cade5329a64b0f15b362ed57222086
|
[
"Apache-2.0"
] | null | null | null |
tests/elements/cmake.py
|
gtristan/buildstream-plugins
|
96206318b2cade5329a64b0f15b362ed57222086
|
[
"Apache-2.0"
] | null | null | null |
# Pylint doesn't play well with fixtures and dependency injection from pytest
# pylint: disable=redefined-outer-name
import os
import pytest
from buildstream._testing.runcli import cli_integration as cli # pylint: disable=unused-import
from buildstream._testing.integration import integration_cache # pylint: disable=unused-import
from buildstream._testing.integration import assert_contains
from buildstream._testing._utils.site import HAVE_SANDBOX
pytestmark = pytest.mark.integration
DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "cmake")
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_cmake_build(cli, datafiles):
project = str(datafiles)
checkout = os.path.join(cli.directory, "checkout")
element_name = "cmakehello.bst"
result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
result = cli.run(
project=project,
args=["artifact", "checkout", element_name, "--directory", checkout],
)
assert result.exit_code == 0
assert_contains(checkout, ["/usr", "/usr/bin", "/usr/bin/hello"])
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_cmake_confroot_build(cli, datafiles):
project = str(datafiles)
checkout = os.path.join(cli.directory, "checkout")
element_name = "cmakeconfroothello.bst"
result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
result = cli.run(
project=project,
args=["artifact", "checkout", element_name, "--directory", checkout],
)
assert result.exit_code == 0
assert_contains(checkout, ["/usr", "/usr/bin", "/usr/bin/hello"])
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_cmake_run(cli, datafiles):
project = str(datafiles)
element_name = "cmakehello.bst"
result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
result = cli.run(project=project, args=["shell", element_name, "/usr/bin/hello"])
assert result.exit_code == 0
assert (
result.output
== """Hello World!
This is hello.
"""
)
| 31.986486
| 95
| 0.715674
| 304
| 2,367
| 5.427632
| 0.253289
| 0.06
| 0.043636
| 0.069091
| 0.746061
| 0.727273
| 0.710909
| 0.710909
| 0.710909
| 0.633333
| 0
| 0.003005
| 0.156316
| 2,367
| 73
| 96
| 32.424658
| 0.823235
| 0.072666
| 0
| 0.588235
| 0
| 0
| 0.165297
| 0.010046
| 0
| 0
| 0
| 0
| 0.196078
| 1
| 0.058824
| false
| 0
| 0.117647
| 0
| 0.176471
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
1ced5beda187c06484e155a6fc3d30aeb0da289b
| 746
|
py
|
Python
|
src/study/190128_deco.py
|
jaeorin/Python
|
366f84b96cfa0ae7dabf7fdfd48c18535997e2f4
|
[
"MIT"
] | null | null | null |
src/study/190128_deco.py
|
jaeorin/Python
|
366f84b96cfa0ae7dabf7fdfd48c18535997e2f4
|
[
"MIT"
] | null | null | null |
src/study/190128_deco.py
|
jaeorin/Python
|
366f84b96cfa0ae7dabf7fdfd48c18535997e2f4
|
[
"MIT"
] | null | null | null |
import datetime
def iot_function1():
print("==========================")
print(datetime.datetime.now())
print("iot function1 start")
print(datetime.datetime.now())
def iot_function2():
print("==========================")
print(datetime.datetime.now())
print("iot function2 start")
print(datetime.datetime.now())
def iot_function3():
print("==========================")
print(datetime.datetime.now())
print("iot function3 start")
print(datetime.datetime.now())
def iot_function4():
print("==========================")
print(datetime.datetime.now())
print("iot function4 start")
print(datetime.datetime.now())
iot_function1()
iot_function2()
iot_function3()
iot_function4()
| 20.722222
| 39
| 0.576408
| 74
| 746
| 5.702703
| 0.148649
| 0.246446
| 0.398104
| 0.454976
| 0.668246
| 0.599526
| 0.599526
| 0
| 0
| 0
| 0
| 0.018927
| 0.150134
| 746
| 35
| 40
| 21.314286
| 0.646688
| 0
| 0
| 0.48
| 0
| 0
| 0.241287
| 0.13941
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16
| true
| 0
| 0.04
| 0
| 0.2
| 0.64
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
1cf09d0c61807312af38132e2c8f6110b6f6275b
| 70
|
py
|
Python
|
generator/__init__.py
|
milogert/bggrss
|
37d32eb65747b3831346f93a4f6aabd666d81d78
|
[
"MIT"
] | null | null | null |
generator/__init__.py
|
milogert/bggrss
|
37d32eb65747b3831346f93a4f6aabd666d81d78
|
[
"MIT"
] | 106
|
2019-10-10T13:45:24.000Z
|
2021-07-14T20:06:31.000Z
|
generator/__init__.py
|
milogert/bggrss
|
37d32eb65747b3831346f93a4f6aabd666d81d78
|
[
"MIT"
] | null | null | null |
from generator import generator, renderer, bgg
print(dir(generator))
| 17.5
| 46
| 0.8
| 9
| 70
| 6.222222
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 70
| 3
| 47
| 23.333333
| 0.903226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
1cf5d81fb72c6f948feb6d434533668969314651
| 131
|
py
|
Python
|
src/django_cloudtask/urls.py
|
adamchainz/django-cloudtask
|
e9663c66cd96e7a633d119157fd9a7fa2ca0072a
|
[
"BSD-3-Clause"
] | 22
|
2020-12-27T14:32:38.000Z
|
2022-02-06T20:33:14.000Z
|
src/django_cloudtask/urls.py
|
adamchainz/django-cloudtask
|
e9663c66cd96e7a633d119157fd9a7fa2ca0072a
|
[
"BSD-3-Clause"
] | 2
|
2020-11-03T00:45:12.000Z
|
2020-12-28T23:36:05.000Z
|
src/django_cloudtask/urls.py
|
adamchainz/django-cloudtask
|
e9663c66cd96e7a633d119157fd9a7fa2ca0072a
|
[
"BSD-3-Clause"
] | 2
|
2020-12-27T11:15:20.000Z
|
2021-06-04T13:50:21.000Z
|
from django.urls import path
from .views import execute_task
urlpatterns = [path("execute/", execute_task, name="task-execute")]
| 21.833333
| 67
| 0.763359
| 18
| 131
| 5.444444
| 0.555556
| 0.22449
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114504
| 131
| 5
| 68
| 26.2
| 0.844828
| 0
| 0
| 0
| 0
| 0
| 0.152672
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e8017f9ba45ded6769d20b0867c9bff100ecc5f3
| 2,972
|
py
|
Python
|
bspump/ipc/datagram.py
|
thatch/BitSwanPump
|
98a5b8d09f9b59d5361611cee0bd45e7b4c69e3f
|
[
"BSD-3-Clause"
] | null | null | null |
bspump/ipc/datagram.py
|
thatch/BitSwanPump
|
98a5b8d09f9b59d5361611cee0bd45e7b4c69e3f
|
[
"BSD-3-Clause"
] | null | null | null |
bspump/ipc/datagram.py
|
thatch/BitSwanPump
|
98a5b8d09f9b59d5361611cee0bd45e7b4c69e3f
|
[
"BSD-3-Clause"
] | null | null | null |
import asyncio
import logging
import socket
from ..abc.source import Source
from ..abc.sink import Sink
#
L = logging.getLogger(__name__)
#
class DatagramSource(Source):
ConfigDefaults = {
'address': '127.0.0.1:8888', # IPv4, IPv6 or unix socket path
'max_packet_size': 64 * 1024,
}
def __init__(self, app, pipeline, id=None, config=None):
super().__init__(app, pipeline, id=id, config=config)
self.Loop = app.Loop
# Create a UDP socket
self.Address = str(self.Config['address'])
if ":" in self.Address:
host, port = self.Address.rsplit(":", maxsplit=1)
(family, socktype, proto, canonname, sockaddr) = socket.getaddrinfo(host, port)[0]
self.Socket = socket.socket(family, socket.SOCK_DGRAM)
self.Socket.setblocking(False)
self.Socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.Socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
self.Socket.bind(sockaddr)
else:
self.Socket = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
self.Socket.setblocking(False)
self.Socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.Socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
self.Socket.bind(self.Address)
self.MaxPacketSize = int(self.Config['max_packet_size'])
async def main(self):
task = asyncio.ensure_future(self._receive(), loop=self.Loop)
await self.stopped()
task.cancel()
await task
self.Socket.close()
async def _receive(self):
while True:
try:
await self.Pipeline.ready()
event = await self.Loop.sock_recv(self.Socket, self.MaxPacketSize)
await self.Pipeline.ready()
await self.process(event)
except asyncio.CancelledError:
break
except Exception:
L.exception(f"Error in datagram source.")
raise
class DatagramSink(Sink):
ConfigDefaults = {
'address': '127.0.0.1:8888', # IPv4, IPv6 or unix socket path
'max_packet_size': 64 * 1024,
}
def __init__(self, app, pipeline, id=None, config=None):
super().__init__(app, pipeline, id=id, config=config)
self.Loop = app.Loop
# Create a UDP socket
self.Address = str(self.Config['address'])
if ":" in self.Address:
host, port = self.Address.rsplit(":", maxsplit=1)
(family, socktype, proto, canonname, sockaddr) = socket.getaddrinfo(host, port)[0]
self.Socket = socket.socket(family, socket.SOCK_DGRAM)
self.Socket.setblocking(False)
self.Socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.Socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
self.Socket.connect(sockaddr)
else:
self.Socket = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
self.Socket.setblocking(False)
self.Socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.Socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
self.Socket.connect(self.Address)
self.MaxPacketSize = int(self.Config['max_packet_size'])
def process(self, context, event):
self.Socket.send(event)
| 26.774775
| 85
| 0.721063
| 409
| 2,972
| 5.107579
| 0.239609
| 0.110101
| 0.076592
| 0.099569
| 0.741024
| 0.741024
| 0.741024
| 0.741024
| 0.741024
| 0.741024
| 0
| 0.018927
| 0.146703
| 2,972
| 110
| 86
| 27.018182
| 0.804811
| 0.033984
| 0
| 0.567568
| 0
| 0
| 0.050628
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040541
| false
| 0
| 0.067568
| 0
| 0.162162
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e8055095c827b1ba8ad9fa7faf22a6eaefca2e09
| 190
|
py
|
Python
|
prettyqt/core/threadpool.py
|
phil65/PrettyQt
|
26327670c46caa039c9bd15cb17a35ef5ad72e6c
|
[
"MIT"
] | 7
|
2019-05-01T01:34:36.000Z
|
2022-03-08T02:24:14.000Z
|
prettyqt/core/threadpool.py
|
phil65/PrettyQt
|
26327670c46caa039c9bd15cb17a35ef5ad72e6c
|
[
"MIT"
] | 141
|
2019-04-16T11:22:01.000Z
|
2021-04-14T15:12:36.000Z
|
prettyqt/core/threadpool.py
|
phil65/PrettyQt
|
26327670c46caa039c9bd15cb17a35ef5ad72e6c
|
[
"MIT"
] | 5
|
2019-04-17T11:48:19.000Z
|
2021-11-21T10:30:19.000Z
|
from __future__ import annotations
from prettyqt import core
from prettyqt.qt import QtCore
QtCore.QThreadPool.__bases__ = (core.Object,)
class ThreadPool(QtCore.QThreadPool):
pass
| 15.833333
| 45
| 0.794737
| 23
| 190
| 6.217391
| 0.608696
| 0.167832
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142105
| 190
| 11
| 46
| 17.272727
| 0.877301
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.166667
| 0.5
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
e82210520086ae774935b41c09d05a4cf9606970
| 138
|
py
|
Python
|
omnipod/records/__init__.py
|
mattprintz/omnipod
|
39aa16385ad7628821f77bfdd8b3bf629a1e389b
|
[
"MIT"
] | null | null | null |
omnipod/records/__init__.py
|
mattprintz/omnipod
|
39aa16385ad7628821f77bfdd8b3bf629a1e389b
|
[
"MIT"
] | null | null | null |
omnipod/records/__init__.py
|
mattprintz/omnipod
|
39aa16385ad7628821f77bfdd8b3bf629a1e389b
|
[
"MIT"
] | null | null | null |
from .generic import IBFRecord, EEPromRecord
from .profiles import Profile
from .programs import BasalPrograms
from .log_records import *
| 27.6
| 44
| 0.833333
| 17
| 138
| 6.705882
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123188
| 138
| 4
| 45
| 34.5
| 0.942149
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e8291e3d7e61068f49c8e9547e1b3c0d971ac309
| 180
|
py
|
Python
|
src/mbed_tools/cli/__init__.py
|
rwalton-arm/mbed-tools
|
131605540f4829116f977695a47dc10b3ac96450
|
[
"Apache-2.0"
] | null | null | null |
src/mbed_tools/cli/__init__.py
|
rwalton-arm/mbed-tools
|
131605540f4829116f977695a47dc10b3ac96450
|
[
"Apache-2.0"
] | null | null | null |
src/mbed_tools/cli/__init__.py
|
rwalton-arm/mbed-tools
|
131605540f4829116f977695a47dc10b3ac96450
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright (C) 2020 Arm Mbed. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
"""mbed_tools command line interface."""
from mbed_tools.cli.main import cli, LOGGER
| 22.5
| 51
| 0.738889
| 27
| 180
| 4.851852
| 0.851852
| 0.137405
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038462
| 0.133333
| 180
| 7
| 52
| 25.714286
| 0.801282
| 0.672222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1c20d29499c60a956b35798816d9ad8b15b4a2bf
| 1,354
|
py
|
Python
|
tests/utils/test_anglecalculations.py
|
mfbehrens99/sailsim
|
800d71ec966345b0819a28921e14deb141bb3a35
|
[
"MIT"
] | 2
|
2021-01-13T13:31:41.000Z
|
2022-03-10T10:17:29.000Z
|
tests/utils/test_anglecalculations.py
|
mfbehrens99/sailsim
|
800d71ec966345b0819a28921e14deb141bb3a35
|
[
"MIT"
] | 37
|
2021-01-08T07:49:01.000Z
|
2022-02-08T22:22:50.000Z
|
tests/utils/test_anglecalculations.py
|
mfbehrens99/sailsim
|
800d71ec966345b0819a28921e14deb141bb3a35
|
[
"MIT"
] | 1
|
2021-01-03T15:07:29.000Z
|
2021-01-03T15:07:29.000Z
|
"""Test module sailsim.utils.anglecalculations."""
from pytest import approx
from math import pi
from sailsim.utils.anglecalculations import angleKeepInterval, directionKeepInterval
def testAngleKeepInterval():
    """angleKeepInterval must map an angle shifted by full turns back to itself."""
    # Same nine cases as before: base angles 0, 1, -1, each offset by
    # 0 and +/- one full turn (2*pi).
    for base in (0, 1, -1):
        for turns in (0, 2, -2):
            assert angleKeepInterval(base + turns * pi) == approx(base)
def testDirectionKeepInterval():
assert directionKeepInterval( 0 + 0 * pi) == approx(0)
assert directionKeepInterval( 0 + 2 * pi) == approx(0)
assert directionKeepInterval( 0 - 2 * pi) == approx(0)
assert directionKeepInterval( 1 + 0 * pi) == approx(1)
assert directionKeepInterval( 1 + 2 * pi) == approx(1)
assert directionKeepInterval( 1 - 2 * pi) == approx(1)
assert directionKeepInterval( 3 + 0 * pi) == approx(3)
assert directionKeepInterval( 3 + 2 * pi) == approx(3)
assert directionKeepInterval( 3 - 2 * pi) == approx(3)
assert directionKeepInterval(-1 + 0 * pi) == approx(-1 + 2 * pi)
| 42.3125
| 84
| 0.661004
| 159
| 1,354
| 5.628931
| 0.132075
| 0.169832
| 0.12067
| 0.134078
| 0.70838
| 0.70838
| 0.703911
| 0.689385
| 0.689385
| 0.689385
| 0
| 0.053953
| 0.206056
| 1,354
| 31
| 85
| 43.677419
| 0.778605
| 0.032496
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.791667
| 1
| 0.083333
| true
| 0
| 0.125
| 0
| 0.208333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
1c68302ee3e8356fbf69ab8d745eb2e9280c9dad
| 712
|
py
|
Python
|
pycontrast/networks/SGCN/skeleton_meta.py
|
hongfz16/HCMoCo
|
140968c66b72034ee2dff610a69be464d8e5866b
|
[
"MIT"
] | 28
|
2022-03-22T05:23:05.000Z
|
2022-03-29T07:45:23.000Z
|
pycontrast/networks/SGCN/skeleton_meta.py
|
hongfz16/HCMoCo
|
140968c66b72034ee2dff610a69be464d8e5866b
|
[
"MIT"
] | 1
|
2022-03-29T17:23:56.000Z
|
2022-03-30T02:35:41.000Z
|
pycontrast/networks/SGCN/skeleton_meta.py
|
hongfz16/HCMoCo
|
140968c66b72034ee2dff610a69be464d8e5866b
|
[
"MIT"
] | null | null | null |
import numpy as np
class mpii_skeleton:
    """Parent-joint table for the 16-joint MPII human-pose skeleton.

    ``parents_data[i]`` is the index of joint *i*'s parent; ``-1`` marks
    the root joint.
    """

    parents_data = [1, 2, 6, 6, 3, 4, -1, 6, 7, 8, 11, 12, 8, 8, 13, 14]

    # @staticmethod fixes instance calls (the original bare defs raised
    # TypeError when called on an instance) while keeping the existing
    # class-level call sites (mpii_skeleton.parents()) unchanged.
    @staticmethod
    def parents():
        """Return the list of parent indices (root marked with -1)."""
        return mpii_skeleton.parents_data

    @staticmethod
    def num_joints():
        """Return the number of joints in the skeleton."""
        return len(mpii_skeleton.parents_data)
class coco_reduce_skeleton:
    """Parent-joint table for a reduced 13-joint COCO skeleton.

    ``parents_data[i]`` is the index of joint *i*'s parent; ``-1`` marks
    the root joint.
    """

    # Reduce: 0 - r ankle, 1 - r knee, 2 - r hip, 3 - l hip, 4 - l knee,
    # 5 - l ankle, 6 - head top,
    # 7 - r wrist, 8 - r elbow, 9 - r shoulder, 10 - l shoulder,
    # 11 - l elbow, 12 - l wrist
    parents_data = [1, 2, 9, 10, 3, 4, -1, 8, 9, 6, 6, 10, 11]

    # @staticmethod fixes instance calls (the original bare defs raised
    # TypeError when called on an instance) while keeping the existing
    # class-level call sites unchanged.
    @staticmethod
    def parents():
        """Return the list of parent indices (root marked with -1)."""
        return coco_reduce_skeleton.parents_data

    @staticmethod
    def num_joints():
        """Return the number of joints in the skeleton."""
        return len(coco_reduce_skeleton.parents_data)
| 29.666667
| 72
| 0.574438
| 118
| 712
| 3.322034
| 0.347458
| 0.168367
| 0.242347
| 0.17602
| 0.303571
| 0.204082
| 0.204082
| 0.204082
| 0
| 0
| 0
| 0.104839
| 0.303371
| 712
| 23
| 73
| 30.956522
| 0.685484
| 0.285112
| 0
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.307692
| false
| 0
| 0.076923
| 0.307692
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
98e1b0358fc99041b0229d9e2e9dfb62cf70c3a7
| 23
|
py
|
Python
|
buildmimic/bigquery/mimic-iv-dummy-file2.py
|
briangow/mimic-iv
|
cd8288d4c20becc474a8661827013213d4e6447b
|
[
"MIT"
] | null | null | null |
buildmimic/bigquery/mimic-iv-dummy-file2.py
|
briangow/mimic-iv
|
cd8288d4c20becc474a8661827013213d4e6447b
|
[
"MIT"
] | 61
|
2021-04-29T17:14:40.000Z
|
2021-05-14T14:11:12.000Z
|
buildmimic/bigquery/mimic-iv-dummy-file2.py
|
briangow/mimic-iv
|
cd8288d4c20becc474a8661827013213d4e6447b
|
[
"MIT"
] | null | null | null |
# MIMIC-IV dummy file 2
| 23
| 23
| 0.73913
| 5
| 23
| 3.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.052632
| 0.173913
| 23
| 1
| 23
| 23
| 0.842105
| 0.913043
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
98e65d4e640969e84299b942c4a4c5c0b5b4a5e0
| 28,690
|
py
|
Python
|
Quartz/QuartzCore/_metadata.py
|
EnjoyLifeFund/macHighSierra-py36-pkgs
|
5668b5785296b314ea1321057420bcd077dba9ea
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null |
Quartz/QuartzCore/_metadata.py
|
EnjoyLifeFund/macHighSierra-py36-pkgs
|
5668b5785296b314ea1321057420bcd077dba9ea
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null |
Quartz/QuartzCore/_metadata.py
|
EnjoyLifeFund/macHighSierra-py36-pkgs
|
5668b5785296b314ea1321057420bcd077dba9ea
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null |
# This file is generated by objective.metadata
#
# Last update: Sat Jul 22 14:57:27 2017
# NOTE(review): machine-generated PyObjC metadata module for
# Quartz.QuartzCore — do not hand-edit; regenerate with objective.metadata.
import objc, sys

# Select the 32- or 64-bit variant of an Objective-C type encoding.
# The width is fixed once at import time from sys.maxsize.
if sys.maxsize > 2 ** 32:
    def sel32or64(a, b): return b
else:
    def sel32or64(a, b): return a

# Select the little- or big-endian variant of an encoded value,
# based on the host byte order.
if sys.byteorder == 'little':
    def littleOrBig(a, b): return a
else:
    def littleOrBig(a, b): return b

# Registry of structure types, populated by misc.update(...) below.
misc = {
}
misc.update({'CATransform3D': objc.createStructType('CATransform3D', sel32or64(b'{CATransform3D=ffffffffffffffff}', b'{CATransform3D=dddddddddddddddd}'), ['m11', 'm12', 'm13', 'm14', 'm21', 'm22', 'm23', 'm24', 'm31', 'm32', 'm33', 'm34', 'm41', 'm42', 'm43', 'm44'])})
constants = '''$CIDetectorAccuracy$CIDetectorAccuracyHigh$CIDetectorAccuracyLow$CIDetectorAspectRatio$CIDetectorEyeBlink$CIDetectorFocalLength$CIDetectorImageOrientation$CIDetectorMaxFeatureCount$CIDetectorMinFeatureSize$CIDetectorNumberOfAngles$CIDetectorReturnSubFeatures$CIDetectorSmile$CIDetectorTracking$CIDetectorTypeFace$CIDetectorTypeQRCode$CIDetectorTypeRectangle$CIDetectorTypeText$CIFeatureTypeFace$CIFeatureTypeQRCode$CIFeatureTypeRectangle$CIFeatureTypeText$kCAAlignmentCenter$kCAAlignmentJustified$kCAAlignmentLeft$kCAAlignmentNatural$kCAAlignmentRight$kCAAnimationCubic$kCAAnimationCubicPaced$kCAAnimationDiscrete$kCAAnimationLinear$kCAAnimationPaced$kCAAnimationRotateAuto$kCAAnimationRotateAutoReverse$kCAEmitterBehaviorAlignToMotion$kCAEmitterBehaviorAttractor$kCAEmitterBehaviorColorOverLife$kCAEmitterBehaviorDrag$kCAEmitterBehaviorLight$kCAEmitterBehaviorSimpleAttractor$kCAEmitterBehaviorValueOverLife$kCAEmitterBehaviorWave$kCAEmitterLayerAdditive$kCAEmitterLayerBackToFront$kCAEmitterLayerCircle$kCAEmitterLayerCuboid$kCAEmitterLayerLine$kCAEmitterLayerOldestFirst$kCAEmitterLayerOldestLast$kCAEmitterLayerOutline$kCAEmitterLayerPoint$kCAEmitterLayerPoints$kCAEmitterLayerRectangle$kCAEmitterLayerSphere$kCAEmitterLayerSurface$kCAEmitterLayerUnordered$kCAEmitterLayerVolume$kCAFillModeBackwards$kCAFillModeBoth$kCAFillModeForwards$kCAFillModeFrozen$kCAFillModeRemoved$kCAFillRuleEvenOdd$kCAFillRuleNonZero$kCAFilterLinear$kCAFilterNearest$kCAFilterTrilinear$kCAGradientLayerAxial$kCAGravityBottom$kCAGravityBottomLeft$kCAGravityBottomRight$kCAGravityCenter$kCAGravityLeft$kCAGravityResize$kCAGravityResizeAspect$kCAGravityResizeAspectFill$kCAGravityRight$kCAGravityTop$kCAGravityTopLeft$kCAGravityTopRight$kCALineCapButt$kCALineCapRound$kCALineCapSquare$kCALineJoinBevel$kCALineJoinMiter$kCALineJoinRound$kCAMediaTimingFunctionDefault$kCAMediaTimingFunctionEaseIn$kCAMediaTimingFunctionEaseInEaseOut$kCAMediaTimingFunctionEaseOut$kCAMediaTimingFunctionLinear$kCAOnOrderIn$kCAO
nOrderOut$kCARendererColorSpace$kCAScrollBoth$kCAScrollHorizontally$kCAScrollNone$kCAScrollVertically$kCATransactionAnimationDuration$kCATransactionAnimationTimingFunction$kCATransactionCompletionBlock$kCATransactionDisableActions$kCATransition$kCATransitionFade$kCATransitionFromBottom$kCATransitionFromLeft$kCATransitionFromRight$kCATransitionFromTop$kCATransitionMoveIn$kCATransitionPush$kCATransitionReveal$kCATruncationEnd$kCATruncationMiddle$kCATruncationNone$kCATruncationStart$kCAValueFunctionRotateX$kCAValueFunctionRotateY$kCAValueFunctionRotateZ$kCAValueFunctionScale$kCAValueFunctionScaleX$kCAValueFunctionScaleY$kCAValueFunctionScaleZ$kCAValueFunctionTranslate$kCAValueFunctionTranslateX$kCAValueFunctionTranslateY$kCAValueFunctionTranslateZ$kCIActiveKeys$kCIApplyOptionColorSpace$kCIApplyOptionDefinition$kCIApplyOptionExtent$kCIApplyOptionUserInfo$kCIAttributeClass$kCIAttributeDefault$kCIAttributeDescription$kCIAttributeDisplayName$kCIAttributeFilterAvailable_Mac$kCIAttributeFilterAvailable_iOS$kCIAttributeFilterCategories$kCIAttributeFilterDisplayName$kCIAttributeFilterName$kCIAttributeIdentity$kCIAttributeMax$kCIAttributeMin$kCIAttributeName$kCIAttributeReferenceDocumentation$kCIAttributeSliderMax$kCIAttributeSliderMin$kCIAttributeType$kCIAttributeTypeAngle$kCIAttributeTypeBoolean$kCIAttributeTypeColor$kCIAttributeTypeCount$kCIAttributeTypeDistance$kCIAttributeTypeGradient$kCIAttributeTypeImage$kCIAttributeTypeInteger$kCIAttributeTypeOffset$kCIAttributeTypeOpaqueColor$kCIAttributeTypePosition$kCIAttributeTypePosition3$kCIAttributeTypeRectangle$kCIAttributeTypeScalar$kCIAttributeTypeTime$kCIAttributeTypeTransform$kCICategoryBlur$kCICategoryBuiltIn$kCICategoryColorAdjustment$kCICategoryColorEffect$kCICategoryCompositeOperation$kCICategoryDistortionEffect$kCICategoryFilterGenerator$kCICategoryGenerator$kCICategoryGeometryAdjustment$kCICategoryGradient$kCICategoryHalftoneEffect$kCICategoryHighDynamicRange$kCICategoryInterlaced$kCICategoryNonSquarePixels$kCICategory
Reduction$kCICategorySharpen$kCICategoryStillImage$kCICategoryStylize$kCICategoryTileEffect$kCICategoryTransition$kCICategoryVideo$kCIContextCacheIntermediates$kCIContextHighQualityDownsample$kCIContextOutputColorSpace$kCIContextOutputPremultiplied$kCIContextPriorityRequestLow$kCIContextUseSoftwareRenderer$kCIContextWorkingColorSpace$kCIContextWorkingFormat$kCIFilterGeneratorExportedKey$kCIFilterGeneratorExportedKeyName$kCIFilterGeneratorExportedKeyTargetObject$kCIFormatA16@i$kCIFormatA8@i$kCIFormatABGR8@i$kCIFormatARGB8@i$kCIFormatAf@i$kCIFormatAh@i$kCIFormatBGRA8@i$kCIFormatL16@i$kCIFormatL8@i$kCIFormatLA16@i$kCIFormatLA8@i$kCIFormatLAf@i$kCIFormatLAh@i$kCIFormatLf@i$kCIFormatLh@i$kCIFormatR16@i$kCIFormatR8@i$kCIFormatRG16@i$kCIFormatRG8@i$kCIFormatRGBA16@i$kCIFormatRGBA8@i$kCIFormatRGBAf@i$kCIFormatRGBAh@i$kCIFormatRGf@i$kCIFormatRGh@i$kCIFormatRf@i$kCIFormatRh@i$kCIImageApplyOrientationProperty$kCIImageAutoAdjustCrop$kCIImageAutoAdjustEnhance$kCIImageAutoAdjustFeatures$kCIImageAutoAdjustLevel$kCIImageAutoAdjustRedEye$kCIImageAuxiliaryDepth$kCIImageAuxiliaryDisparity$kCIImageColorSpace$kCIImageNearestSampling$kCIImageProperties$kCIImageProviderTileSize$kCIImageProviderUserInfo$kCIImageRepresentationAVDepthData$kCIImageRepresentationDepthImage$kCIImageRepresentationDisparityImage$kCIImageTextureFormat$kCIImageTextureTarget$kCIInputAllowDraftModeKey$kCIInputAngleKey$kCIInputAspectRatioKey$kCIInputBackgroundImageKey$kCIInputBaselineExposureKey$kCIInputBiasKey$kCIInputBoostKey$kCIInputBoostShadowAmountKey$kCIInputBrightnessKey$kCIInputCenterKey$kCIInputColorKey$kCIInputColorNoiseReductionAmountKey$kCIInputContrastKey$kCIInputDecoderVersionKey$kCIInputDepthImageKey$kCIInputDisableGamutMapKey$kCIInputDisparityImageKey$kCIInputEVKey$kCIInputEnableChromaticNoiseTrackingKey$kCIInputEnableSharpeningKey$kCIInputEnableVendorLensCorrectionKey$kCIInputExtentKey$kCIInputGradientImageKey$kCIInputIgnoreImageOrientationKey$kCIInputImageKey$kCIInputImageOrientationKey$kCIInputInten
sityKey$kCIInputLinearSpaceFilter$kCIInputLuminanceNoiseReductionAmountKey$kCIInputMaskImageKey$kCIInputMoireAmountKey$kCIInputNeutralChromaticityXKey$kCIInputNeutralChromaticityYKey$kCIInputNeutralLocationKey$kCIInputNeutralTemperatureKey$kCIInputNeutralTintKey$kCIInputNoiseReductionAmountKey$kCIInputNoiseReductionContrastAmountKey$kCIInputNoiseReductionDetailAmountKey$kCIInputNoiseReductionSharpnessAmountKey$kCIInputRadiusKey$kCIInputRefractionKey$kCIInputSaturationKey$kCIInputScaleFactorKey$kCIInputScaleKey$kCIInputShadingImageKey$kCIInputSharpnessKey$kCIInputTargetImageKey$kCIInputTimeKey$kCIInputTransformKey$kCIInputVersionKey$kCIInputWeightsKey$kCIInputWidthKey$kCIOutputImageKey$kCIOutputNativeSizeKey$kCISamplerAffineMatrix$kCISamplerColorSpace$kCISamplerFilterLinear$kCISamplerFilterMode$kCISamplerFilterNearest$kCISamplerWrapBlack$kCISamplerWrapClamp$kCISamplerWrapMode$kCISupportedDecoderVersionsKey$kCIUIParameterSet$kCIUISetAdvanced$kCIUISetBasic$kCIUISetDevelopment$kCIUISetIntermediate$'''
constants = constants + '$CATransform3DIdentity@%s$'%(sel32or64('{CATransform3D=ffffffffffffffff}', '{CATransform3D=dddddddddddddddd}'),)
enums = '''$CA_WARN_DEPRECATED@1$CIDataMatrixCodeECCVersion000@0$CIDataMatrixCodeECCVersion050@50$CIDataMatrixCodeECCVersion080@80$CIDataMatrixCodeECCVersion100@100$CIDataMatrixCodeECCVersion140@140$CIDataMatrixCodeECCVersion200@200$CIQRCodeErrorCorrectionLevelH@72$CIQRCodeErrorCorrectionLevelL@76$CIQRCodeErrorCorrectionLevelM@77$CIQRCodeErrorCorrectionLevelQ@81$CIRenderDestinationAlphaNone@0$CIRenderDestinationAlphaPremultiplied@1$CIRenderDestinationAlphaUnpremultiplied@2$kCAConstraintHeight@7$kCAConstraintMaxX@2$kCAConstraintMaxY@6$kCAConstraintMidX@1$kCAConstraintMidY@5$kCAConstraintMinX@0$kCAConstraintMinY@4$kCAConstraintWidth@3$kCALayerBottomEdge@4$kCALayerHeightSizable@16$kCALayerLeftEdge@1$kCALayerMaxXMargin@4$kCALayerMaxXMaxYCorner@8$kCALayerMaxXMinYCorner@2$kCALayerMaxYMargin@32$kCALayerMinXMargin@1$kCALayerMinXMaxYCorner@4$kCALayerMinXMinYCorner@1$kCALayerMinYMargin@8$kCALayerNotSizable@0$kCALayerRightEdge@2$kCALayerTopEdge@8$kCALayerWidthSizable@2$'''
misc.update({})
functions={'CATransform3DIsAffine': (sel32or64(b'B{CATransform3D=ffffffffffffffff}', b'B{CATransform3D=dddddddddddddddd}'),), 'CATransform3DInvert': (sel32or64(b'{CATransform3D=ffffffffffffffff}{CATransform3D=ffffffffffffffff}', b'{CATransform3D=dddddddddddddddd}{CATransform3D=dddddddddddddddd}'),), 'CATransform3DIsIdentity': (sel32or64(b'B{CATransform3D=ffffffffffffffff}', b'B{CATransform3D=dddddddddddddddd}'),), 'CATransform3DMakeScale': (sel32or64(b'{CATransform3D=ffffffffffffffff}fff', b'{CATransform3D=dddddddddddddddd}ddd'),), 'CATransform3DTranslate': (sel32or64(b'{CATransform3D=ffffffffffffffff}{CATransform3D=ffffffffffffffff}fff', b'{CATransform3D=dddddddddddddddd}{CATransform3D=dddddddddddddddd}ddd'),), 'CATransform3DEqualToTransform': (sel32or64(b'B{CATransform3D=ffffffffffffffff}{CATransform3D=ffffffffffffffff}', b'B{CATransform3D=dddddddddddddddd}{CATransform3D=dddddddddddddddd}'),), 'CATransform3DRotate': (sel32or64(b'{CATransform3D=ffffffffffffffff}{CATransform3D=ffffffffffffffff}ffff', b'{CATransform3D=dddddddddddddddd}{CATransform3D=dddddddddddddddd}dddd'),), 'CACurrentMediaTime': (b'd',), 'CATransform3DMakeRotation': (sel32or64(b'{CATransform3D=ffffffffffffffff}ffff', b'{CATransform3D=dddddddddddddddd}dddd'),), 'CATransform3DConcat': (sel32or64(b'{CATransform3D=ffffffffffffffff}{CATransform3D=ffffffffffffffff}{CATransform3D=ffffffffffffffff}', b'{CATransform3D=dddddddddddddddd}{CATransform3D=dddddddddddddddd}{CATransform3D=dddddddddddddddd}'),), 'CATransform3DScale': (sel32or64(b'{CATransform3D=ffffffffffffffff}{CATransform3D=ffffffffffffffff}fff', b'{CATransform3D=dddddddddddddddd}{CATransform3D=dddddddddddddddd}ddd'),), 'CATransform3DMakeTranslation': (sel32or64(b'{CATransform3D=ffffffffffffffff}fff', b'{CATransform3D=dddddddddddddddd}ddd'),), 'CATransform3DGetAffineTransform': (sel32or64(b'{CGAffineTransform=ffffff}{CATransform3D=ffffffffffffffff}', b'{CGAffineTransform=dddddd}{CATransform3D=dddddddddddddddd}'),), 
'CATransform3DMakeAffineTransform': (sel32or64(b'{CATransform3D=ffffffffffffffff}{CGAffineTransform=ffffff}', b'{CATransform3D=dddddddddddddddd}{CGAffineTransform=dddddd}'),)}
r = objc.registerMetaDataForSelector
objc._updatingMetadata(True)
try:
r(b'CAAnimation', b'isRemovedOnCompletion', {'retval': {'type': b'Z'}})
r(b'CAAnimation', b'setEnabled:', {'arguments': {2: {'type': 'Z'}}})
r(b'CAAnimation', b'setRemovedOnCompletion:', {'arguments': {2: {'type': b'Z'}}})
r(b'CAAnimation', b'shouldArchiveValueForKey:', {'retval': {'type': b'Z'}})
r(b'CAEmitterBehavior', b'isEnabled', {'retval': {'type': b'Z'}})
r(b'CAEmitterBehavior', b'setEnabled:', {'arguments': {2: {'type': b'Z'}}})
r(b'CAEmitterCell', b'isEnabled', {'retval': {'type': b'Z'}})
r(b'CAEmitterCell', b'setEnabled:', {'arguments': {2: {'type': b'Z'}}})
r(b'CAEmitterCell', b'shouldArchiveValueForKey:', {'retval': {'type': b'Z'}})
r(b'CAEmitterLayer', b'preservesDepth', {'retval': {'type': b'Z'}})
r(b'CAEmitterLayer', b'setPreservesDepth:', {'arguments': {2: {'type': b'Z'}}})
r(b'CALayer', b'containsPoint:', {'retval': {'type': b'Z'}})
r(b'CALayer', b'contentsAreFlipped', {'retval': {'type': b'Z'}})
r(b'CALayer', b'drawsAsynchronously', {'retval': {'type': b'Z'}})
r(b'CALayer', b'isDoubleSided', {'retval': {'type': b'Z'}})
r(b'CALayer', b'isGeometryFlipped', {'retval': {'type': b'Z'}})
r(b'CALayer', b'isHidden', {'retval': {'type': b'Z'}})
r(b'CALayer', b'isOpaque', {'retval': {'type': b'Z'}})
r(b'CALayer', b'masksToBounds', {'retval': {'type': b'Z'}})
r(b'CALayer', b'needsDisplay', {'retval': {'type': b'Z'}})
r(b'CALayer', b'needsDisplayForKey:', {'retval': {'type': b'Z'}})
r(b'CALayer', b'needsDisplayOnBoundsChange', {'retval': {'type': b'Z'}})
r(b'CALayer', b'needsLayout', {'retval': {'type': b'Z'}})
r(b'CALayer', b'setDoubleSided:', {'arguments': {2: {'type': b'Z'}}})
r(b'CALayer', b'setDrawsAsynchronously:', {'arguments': {2: {'type': b'Z'}}})
r(b'CALayer', b'setGeometryFlipped:', {'arguments': {2: {'type': b'Z'}}})
r(b'CALayer', b'setHidden:', {'arguments': {2: {'type': b'Z'}}})
r(b'CALayer', b'setMasksToBounds:', {'arguments': {2: {'type': b'Z'}}})
r(b'CALayer', b'setNeedsDisplayOnBoundsChange:', {'arguments': {2: {'type': b'Z'}}})
r(b'CALayer', b'setOpaque:', {'arguments': {2: {'type': b'Z'}}})
r(b'CALayer', b'setShouldRasterize:', {'arguments': {2: {'type': b'Z'}}})
r(b'CALayer', b'setWantsExtendedDynamicRangeContent:', {'arguments': {2: {'type': 'Z'}}})
r(b'CALayer', b'shouldArchiveValueForKey:', {'retval': {'type': b'Z'}})
r(b'CALayer', b'shouldRasterize', {'retval': {'type': b'Z'}})
r(b'CALayer', b'wantsExtendedDynamicRangeContent', {'retval': {'type': 'Z'}})
r(b'CAMetalLayer', b'allowsNextDrawableTimeout', {'retval': {'type': 'Z'}})
r(b'CAMetalLayer', b'displaySyncEnabled', {'retval': {'type': 'Z'}})
r(b'CAMetalLayer', b'framebufferOnly', {'retval': {'type': 'Z'}})
r(b'CAMetalLayer', b'presentsWithTransaction', {'retval': {'type': 'Z'}})
r(b'CAMetalLayer', b'setAllowsNextDrawableTimeout:', {'arguments': {2: {'type': 'Z'}}})
r(b'CAMetalLayer', b'setDisplaySyncEnabled:', {'arguments': {2: {'type': 'Z'}}})
r(b'CAMetalLayer', b'setFramebufferOnly:', {'arguments': {2: {'type': 'Z'}}})
r(b'CAMetalLayer', b'setPresentsWithTransaction:', {'arguments': {2: {'type': 'Z'}}})
r(b'CAMetalLayer', b'setWantsExtendedDynamicRangeContent:', {'arguments': {2: {'type': 'Z'}}})
r(b'CAMetalLayer', b'wantsExtendedDynamicRangeContent', {'retval': {'type': 'Z'}})
r(b'CAOpenGLLayer', b'canDrawInCGLContext:pixelFormat:forLayerTime:displayTime:', {'retval': {'type': b'Z'}, 'arguments': {5: {'type': sel32or64(b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssLLLssss}QQ}', b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssIIIssss}QQ}'), 'type_modifier': b'n'}}})
r(b'CAOpenGLLayer', b'drawInCGLContext:pixelFormat:forLayerTime:displayTime:', {'arguments': {5: {'type': sel32or64(b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssLLLssss}QQ}', b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssIIIssss}QQ}'), 'type_modifier': b'n'}}})
r(b'CAOpenGLLayer', b'isAsynchronous', {'retval': {'type': b'Z'}})
r(b'CAOpenGLLayer', b'setAsynchronous:', {'arguments': {2: {'type': b'Z'}}})
r(b'CAOpenGLLayer', b'setWantsExtendedDynamicRangeContent:', {'arguments': {2: {'type': 'Z'}}})
r(b'CAOpenGLLayer', b'wantsExtendedDynamicRangeContent', {'retval': {'type': 'Z'}})
r(b'CAPropertyAnimation', b'isAdditive', {'retval': {'type': b'Z'}})
r(b'CAPropertyAnimation', b'isCumulative', {'retval': {'type': b'Z'}})
r(b'CAPropertyAnimation', b'setAdditive:', {'arguments': {2: {'type': b'Z'}}})
r(b'CAPropertyAnimation', b'setCumulative:', {'arguments': {2: {'type': b'Z'}}})
r(b'CARenderer', b'beginFrameAtTime:timeStamp:', {'arguments': {3: {'type': sel32or64(b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssLLLssss}QQ}', b'^{_CVTimeStamp=IiqQdq{CVSMPTETime=ssIIIssss}QQ}'), 'type_modifier': b'n'}}})
r(b'CARenderer', b'rendererWithCGLContext:options:', {'arguments': {2: {'type': '^{_CGLContextObject=}'}}})
r(b'CAReplicatorLayer', b'preservesDepth', {'retval': {'type': b'Z'}})
r(b'CAReplicatorLayer', b'setPreservesDepth:', {'arguments': {2: {'type': b'Z'}}})
r(b'CATextLayer', b'allowsFontSubpixelQuantization', {'retval': {'type': 'Z'}})
r(b'CATextLayer', b'font', {'retval': {'type': b'@'}})
r(b'CATextLayer', b'isWrapped', {'retval': {'type': b'Z'}})
r(b'CATextLayer', b'setAllowsFontSubpixelQuantization:', {'arguments': {2: {'type': 'Z'}}})
r(b'CATextLayer', b'setFont:', {'arguments': {2: {'type': b'@'}}})
r(b'CATextLayer', b'setWrapped:', {'arguments': {2: {'type': b'Z'}}})
r(b'CATransaction', b'completionBlock', {'retval': {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}})
r(b'CATransaction', b'disableActions', {'retval': {'type': b'Z'}})
r(b'CATransaction', b'setCompletionBlock:', {'arguments': {2: {'callable': {'retval': {'type': b'v'}, 'arguments': {0: {'type': b'^v'}}}}}})
r(b'CATransaction', b'setDisableActions:', {'arguments': {2: {'type': b'Z'}}})
r(b'CIAztecCodeDescriptor', b'isCompact', {'retval': {'type': 'Z'}})
r(b'CIColor', b'components', {'retval': {'c_array_of_variable_length': True}})
r(b'CIContext', b'createCGImage:fromRect:format:colorSpace:deferred:', {'retval': {'already_cfretained': True}, 'arguments': {6: {'type': 'Z'}}})
r(b'CIContext', b'createCGLayerWithSize:info:', {'retval': {'already_cfretained': True}})
r(b'CIContext', b'prepareRender:fromRect:toDestination:atPoint:error:', {'retval': {'type': 'Z'}, 'arguments': {6: {'type_modifier': b'o'}}})
r(b'CIContext', b'render:toBitmap:rowBytes:bounds:format:colorSpace:', {'arguments': {3: {'type_modifier': b'o', 'c_array_of_variable_length': True}}})
r(b'CIContext', b'startTaskToClear:error:', {'arguments': {3: {'type_modifier': b'o'}}})
r(b'CIContext', b'startTaskToRender:fromRect:toDestination:atPoint:error:', {'arguments': {6: {'type_modifier': b'o'}}})
r(b'CIContext', b'startTaskToRender:toDestination:error:', {'arguments': {4: {'type_modifier': b'o'}}})
r(b'CIFaceFeature', b'hasFaceAngle', {'retval': {'type': b'Z'}})
r(b'CIFaceFeature', b'hasLeftEyePosition', {'retval': {'type': b'Z'}})
r(b'CIFaceFeature', b'hasMouthPosition', {'retval': {'type': b'Z'}})
r(b'CIFaceFeature', b'hasRightEyePosition', {'retval': {'type': b'Z'}})
r(b'CIFaceFeature', b'hasSmile', {'retval': {'type': b'Z'}})
r(b'CIFaceFeature', b'hasTrackingFrameCount', {'retval': {'type': b'Z'}})
r(b'CIFaceFeature', b'hasTrackingID', {'retval': {'type': b'Z'}})
r(b'CIFaceFeature', b'leftEyeClosed', {'retval': {'type': b'Z'}})
r(b'CIFaceFeature', b'rightEyeClosed', {'retval': {'type': b'Z'}})
r(b'CIFilter', b'apply:', {'c_array_delimited_by_null': True, 'variadic': True})
r(b'CIFilter', b'filterArrayFromSerializedXMP:inputImageExtent:error:', {'arguments': {4: {'type_modifier': b'o'}}})
r(b'CIFilter', b'filterWithName:keysAndValues:', {'c_array_delimited_by_null': True, 'variadic': True})
r(b'CIFilter', b'isEnabled', {'retval': {'type': b'Z'}})
r(b'CIFilter', b'setEnabled:', {'arguments': {2: {'type': b'Z'}}})
r(b'CIFilterGenerator', b'writeToURL:atomically:', {'retval': {'type': b'Z'}, 'arguments': {3: {'type': b'Z'}}})
r(b'CIFilterShape', b'transformBy:interior:', {'arguments': {3: {'type': b'Z'}}})
r(b'CIImage', b'imageWithTexture:size:flipped:colorSpace:', {'arguments': {4: {'type': b'Z'}}})
r(b'CIImage', b'imageWithTexture:size:flipped:options:', {'arguments': {4: {'type': b'Z'}}})
r(b'CIImage', b'initWithTexture:size:flipped:colorSpace:', {'arguments': {4: {'type': b'Z'}}})
r(b'CIImage', b'initWithTexture:size:flipped:options:', {'arguments': {4: {'type': b'Z'}}})
r(b'CIImage', b'writeHEIFRepresentationOfImage:toURL:format:colorSpace:options:error:', {'retval': {'type': 'Z'}, 'arguments': {7: {'type_modifier': b'o'}}})
r(b'CIContext', b'writeHEIFRepresentationOfImage:toURL:format:colorSpace:options:error:', {'retval': {'type': 'Z'}, 'arguments': {7: {'type_modifier': b'o'}}})
r(b'CIImage', b'writeJPEGRepresentationOfImage:toURL:colorSpace:options:error:', {'retval': {'type': 'Z'}, 'arguments': {6: {'type_modifier': b'o'}}})
r(b'CIContext', b'writeJPEGRepresentationOfImage:toURL:colorSpace:options:error:', {'retval': {'type': 'Z'}, 'arguments': {6: {'type_modifier': b'o'}}})
r(b'CIImage', b'writePNGRepresentationOfImage:toURL:format:colorSpace:options:error:', {'retval': {'type': 'Z'}, 'arguments': {7: {'type_modifier': b'o'}}})
r(b'CIContext', b'writePNGRepresentationOfImage:toURL:format:colorSpace:options:error:', {'retval': {'type': 'Z'}, 'arguments': {7: {'type_modifier': b'o'}}})
r(b'CIImageProcessorKernel', b'applyWithExtent:inputs:arguments:error:', {'arguments': {5: {'type_modifier': b'o'}}})
r(b'CIImageProcessorKernel', b'processWithInputs:arguments:output:error:', {'retval': {'type': 'Z'}, 'arguments': {5: {'type_modifier': b'o'}}})
r(b'CIKernel', b'applyWithExtent:roiCallback:arguments:', {'arguments': {3: {'callable': {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'i'}, 2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}}}})
r(b'CIKernel', b'kernelWithFunctionName:fromMetalLibraryData:error:', {'arguments': {4: {'type_modifier': b'o'}}})
r(b'CIKernel', b'kernelWithFunctionName:fromMetalLibraryData:outputPixelFormat:error:', {'arguments': {5: {'type_modifier': b'o'}}})
r(b'CIKernel', b'setROISelector:', {'arguments': {2: {'sel_of_type': sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}@:i{CGRect={CGPoint=ff}{CGSize=ff}}@', b'{CGRect={CGPoint=dd}{CGSize=dd}}@:i{CGRect={CGPoint=dd}{CGSize=dd}}@')}}})
r(b'CIPDF417CodeDescriptor', b'isCompact', {'retval': {'type': 'Z'}})
r(b'CIPlugIn', b'loadPlugIn:allowExecutableCode:', {'arguments': {3: {'type': b'Z'}}})
r(b'CIPlugIn', b'loadPlugIn:allowNonExecutable:', {'arguments': {3: {'type': b'Z'}}})
r(b'CIRenderDestination', b'blendsInDestinationColorSpace', {'retval': {'type': 'Z'}})
r(b'CIRenderDestination', b'initWithWidth:height:pixelFormat:commandBuffer:mtlTextureProvider:', {'arguments': {6: {'callable': {'retval': {'type': b'@'}, 'arguments': {0: {'type': b'^v'}}}}}})
r(b'CIRenderDestination', b'isClamped', {'retval': {'type': 'Z'}})
r(b'CIRenderDestination', b'isDithered', {'retval': {'type': 'Z'}})
r(b'CIRenderDestination', b'isFlipped', {'retval': {'type': 'Z'}})
r(b'CIRenderDestination', b'setBlendsInDestinationColorSpace:', {'arguments': {2: {'type': 'Z'}}})
r(b'CIRenderDestination', b'setClamped:', {'arguments': {2: {'type': 'Z'}}})
r(b'CIRenderDestination', b'setDithered:', {'arguments': {2: {'type': 'Z'}}})
r(b'CIRenderDestination', b'setFlipped:', {'arguments': {2: {'type': 'Z'}}})
r(b'CIRenderTask', b'waitUntilCompletedAndReturnError:', {'arguments': {2: {'type_modifier': b'o'}}})
r(b'CISampler', b'initWithImage:keysAndValues:', {'c_array_delimited_by_null': True, 'variadic': True})
r(b'CISampler', b'samplerWithImage:keysAndValues:', {'c_array_delimited_by_null': True, 'variadic': True})
r(b'CIVector', b'initWithValues:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}})
r(b'CIVector', b'vectorWithValues:count:', {'arguments': {2: {'type_modifier': b'n', 'c_array_length_in_arg': 3}}})
r(b'CIWarpKernel', b'applyWithExtent:roiCallback:inputImage:arguments:', {'arguments': {3: {'callable': {'retval': {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}, 'arguments': {0: {'type': b'^v'}, 1: {'type': b'i'}, 2: {'type': sel32or64(b'{_NSRect={_NSPoint=ff}{_NSSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}}}}}})
r(b'NSObject', b'actionForLayer:forKey:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}})
r(b'NSObject', b'animationDidStart:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}})
r(b'NSObject', b'animationDidStop:finished:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'Z'}}})
r(b'NSObject', b'autoreverses', {'required': True, 'retval': {'type': b'Z'}})
r(b'NSObject', b'baseAddress', {'retval': {'type': '^v', 'c_array_of_variable_length': True}})
r(b'NSObject', b'beginTime', {'required': True, 'retval': {'type': b'd'}})
r(b'NSObject', b'bytesPerRow', {'retval': {'type': 'L'}})
r(b'NSObject', b'displayLayer:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}})
r(b'NSObject', b'drawLayer:inContext:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'^{CGContext=}'}}})
r(b'NSObject', b'duration', {'required': True, 'retval': {'type': b'd'}})
r(b'NSObject', b'fillMode', {'required': True, 'retval': {'type': b'@'}})
r(b'NSObject', b'filterWithName:', {'required': True, 'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}}})
r(b'NSObject', b'format', {'retval': {'type': sel32or64(b'i', b'q')}})
r(b'NSObject', b'invalidateLayoutOfLayer:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}})
r(b'NSObject', b'layoutSublayersOfLayer:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}})
r(b'NSObject', b'load:', {'required': True, 'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'^v'}}})
r(b'NSObject', b'preferredSizeOfLayer:', {'retval': {'type': sel32or64(b'{CGSize=ff}', b'{CGSize=dd}')}, 'arguments': {2: {'type': b'@'}}})
r(b'NSObject', b'provideImageData:bytesPerRow:origin::size::userInfo:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'^v', 'type_modifier': b'o', 'c_array_of_variable_length': True}, 3: {'type': sel32or64(b'L', b'Q')}, 4: {'type': sel32or64(b'L', b'Q')}, 5: {'type': sel32or64(b'L', b'Q')}, 6: {'type': sel32or64(b'L', b'Q')}, 7: {'type': sel32or64(b'L', b'Q')}, 8: {'type': b'@'}}})
r(b'NSObject', b'region', {'retval': {'type': sel32or64(b'{CGRect={CGPoint=ff}{CGSize=ff}}', b'{CGRect={CGPoint=dd}{CGSize=dd}}')}})
r(b'NSObject', b'repeatCount', {'required': True, 'retval': {'type': b'f'}})
r(b'NSObject', b'repeatDuration', {'required': True, 'retval': {'type': b'd'}})
r(b'NSObject', b'runActionForKey:object:arguments:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}})
r(b'NSObject', b'setAutoreverses:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'Z'}}})
r(b'NSObject', b'setBeginTime:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'd'}}})
r(b'NSObject', b'setDuration:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'd'}}})
r(b'NSObject', b'setFillMode:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}})
r(b'NSObject', b'setRepeatCount:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'f'}}})
r(b'NSObject', b'setRepeatDuration:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'd'}}})
r(b'NSObject', b'setSpeed:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'f'}}})
r(b'NSObject', b'setTimeOffset:', {'required': True, 'retval': {'type': b'v'}, 'arguments': {2: {'type': b'd'}}})
r(b'NSObject', b'speed', {'required': True, 'retval': {'type': b'f'}})
r(b'NSObject', b'timeOffset', {'required': True, 'retval': {'type': b'd'}})
finally:
objc._updatingMetadata(False)
protocols={'CAAnimationDelegate': objc.informal_protocol('CAAnimationDelegate', [objc.selector(None, b'animationDidStart:', b'v@:@', isRequired=False), objc.selector(None, b'animationDidStop:finished:', b'v@:@Z', isRequired=False)]), 'CALayerDelegate': objc.informal_protocol('CALayerDelegate', [objc.selector(None, b'drawLayer:inContext:', b'v@:@^{CGContext=}', isRequired=False), objc.selector(None, b'actionForLayer:forKey:', b'@@:@@', isRequired=False), objc.selector(None, b'displayLayer:', b'v@:@', isRequired=False), objc.selector(None, b'layoutSublayersOfLayer:', b'v@:@', isRequired=False)]), 'CIImageProvider': objc.informal_protocol('CIImageProvider', [objc.selector(None, b'provideImageData:bytesPerRow:origin::size::userInfo:', sel32or64(b'v@:^vLLLLL@', b'v@:^vQQQQQ@'), isRequired=False)]), 'CALayoutManager': objc.informal_protocol('CALayoutManager', [objc.selector(None, b'preferredSizeOfLayer:', sel32or64(b'{CGSize=ff}@:@', b'{CGSize=dd}@:@'), isRequired=False), objc.selector(None, b'layoutSublayersOfLayer:', b'v@:@', isRequired=False), objc.selector(None, b'invalidateLayoutOfLayer:', b'v@:@', isRequired=False)])}
expressions = {}
# END OF FILE
| 148.65285
| 7,012
| 0.727292
| 2,873
| 28,690
| 7.229377
| 0.249217
| 0.015407
| 0.013577
| 0.022244
| 0.422244
| 0.402648
| 0.353683
| 0.307896
| 0.230669
| 0.180982
| 0
| 0.016967
| 0.069397
| 28,690
| 192
| 7,013
| 149.427083
| 0.760965
| 0.003276
| 0
| 0.01087
| 1
| 0.021739
| 0.694659
| 0.476828
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021739
| false
| 0
| 0.005435
| 0.021739
| 0.027174
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c705d9f52c3b02a03f542d80f95e1f26757cd546
| 270
|
py
|
Python
|
main/tasks.py
|
nmota/public-contracts
|
b809df82147e5e4fa746416c0f9d51db2c6db05a
|
[
"BSD-3-Clause"
] | null | null | null |
main/tasks.py
|
nmota/public-contracts
|
b809df82147e5e4fa746416c0f9d51db2c6db05a
|
[
"BSD-3-Clause"
] | null | null | null |
main/tasks.py
|
nmota/public-contracts
|
b809df82147e5e4fa746416c0f9d51db2c6db05a
|
[
"BSD-3-Clause"
] | null | null | null |
import django_rq
from django_rq import job
import contracts.tasks
import law.tasks
import deputies.tasks
@job
def update():
    """Enqueue each app's recurring update job on the default RQ queue."""
    tasks = (
        law.tasks.update,
        contracts.tasks.update,
        deputies.tasks.recompute_analysis,
    )
    for task in tasks:
        django_rq.enqueue(task)
| 19.285714
| 56
| 0.796296
| 39
| 270
| 5.358974
| 0.358974
| 0.191388
| 0.200957
| 0.301435
| 0.248804
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122222
| 270
| 13
| 57
| 20.769231
| 0.881857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| true
| 0
| 0.5
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c70f9bc9923fae8735cd90b24e72f574dc9bfd66
| 5,186
|
py
|
Python
|
tools/Vitis-AI-Quantizer/vai_q_pytorch/nndct_shared/quantization/commander.py
|
hito0512/Vitis-AI
|
996459fb96cb077ed2f7e789d515893b1cccbc95
|
[
"Apache-2.0"
] | 1
|
2022-02-17T22:13:23.000Z
|
2022-02-17T22:13:23.000Z
|
tools/Vitis-AI-Quantizer/vai_q_pytorch/nndct_shared/quantization/commander.py
|
hito0512/Vitis-AI
|
996459fb96cb077ed2f7e789d515893b1cccbc95
|
[
"Apache-2.0"
] | null | null | null |
tools/Vitis-AI-Quantizer/vai_q_pytorch/nndct_shared/quantization/commander.py
|
hito0512/Vitis-AI
|
996459fb96cb077ed2f7e789d515893b1cccbc95
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import math
from nndct_shared.utils import BaseCommander
from nndct_shared.base import NNDCT_OP
from nndct_shared import nndct_graph as graph_utils
class QuantConfigerCommander(BaseCommander):
  """Collects the "soft fuse" rules used to build quantization groups.

  ``create_commands`` returns a name -> callable mapping; each rule takes
  ``(graph, quant_groups)`` and merges nodes of one op type into their
  neighbours' quantization group via ``graph_utils.group_up``.
  """

  def create_commands(self):
    """Define all soft-fuse rules and return them as a dict.

    NOTE: the final ``return locals()`` collects every inner function into
    the command table, so no temporary local variables may be introduced
    in this scope.
    """
    # def SoftFuseClamp(graph, quant_groups):
    #   return graph_utils.group_up(graph, quant_groups, NNDCT_OP.CLAMP)

    def SoftFuseBatchSpaceNdToConv(graph, quant_groups):
      # Fuse BATCH_TO_SPACE_ND only when followed by CONV2D.
      return graph_utils.group_up(graph, quant_groups,
                                  NNDCT_OP.BATCH_TO_SPACE_ND, NNDCT_OP.CONV2D)

    def SoftFuseConvToSpaceBatchNd(graph, quant_groups):
      # Fuse CONV2D only when followed by SPACE_TO_BATCH_ND.
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.CONV2D,
                                  NNDCT_OP.SPACE_TO_BATCH_ND)

    def SoftFuseHardtanh(graph, quant_groups):
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.HARDTANH)

    def SoftFuseRelu(graph, quant_groups):
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.RELU)

    def SoftFuseLeakyRelu(graph, quant_groups):
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.LEAKY_RELU)

    def SoftFuseRelu6(graph, quant_groups):
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.RELU6)

    def SoftFuseReluk(graph, quant_groups):
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.RELUK)

    def SoftFuseChannelScale(graph, quant_groups):
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.CHANNEL_SCALE)

    def SoftFuseFlatten(graph, quant_groups):
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.FLATTEN)

    def SoftFuseSqueeze(graph, quant_groups):
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.SQUEEZE)

    def SoftFusePixelShuffle(graph, quant_groups):
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.PIXEL_SHUFFLE)

    def SoftFuseReshape(graph, quant_groups):
      # RESHAPE nodes that head their own group are glued onto the group of
      # a suitable parent, so the reshape shares its input's quantization.

      def is_reshape_parent(node):
        # SHAPE producers never anchor a reshape group; a MULTIPLY defers
        # to its own parent instead.
        if node.op.type == NNDCT_OP.SHAPE:
          return False
        elif node.op.type in [NNDCT_OP.MULTIPLY]:
          for p in graph.parents(node.name):
            # NOTE(review): only the first parent is consulted here —
            # presumably intentional; confirm if MULTIPLY can have several.
            return is_reshape_parent(p)
        else:
          return True

      for n in graph.nodes:
        if not n.in_quant_part:
          continue
        for p in graph.parents(n.name):
          if is_reshape_parent(p):
            if quant_groups[
                n.name][0] == n.name and n.op.type == NNDCT_OP.RESHAPE:
              start_node = quant_groups[p.name][0]
              # The return value was bound to an unused local ('groups') in
              # the original; grouping is presumably applied to quant_groups
              # in place — confirm against glue_group_members.
              graph_utils.glue_group_members(graph, quant_groups,
                                             start_node, n.name)
      return quant_groups

    def SoftFuseSplit(graph, quant_groups):
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.SPLIT)

    def SoftFuseStrideSlice(graph, quant_groups):
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.STRIDED_SLICE)

    def SoftFuseTranspose(graph, quant_groups):
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.TRANSPOSE)

    def SoftFuseTile(graph, quant_groups):
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.TILE)

    def SoftFuseUpSampling(graph, quant_groups):
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.UP_SAMPLING)

    def SoftFuseDropout(graph, quant_groups):
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.DROPOUT)

    def SoftFuseContiguous(graph, quant_groups):
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.CONTIGUOUS)

    def SoftFuseChunk(graph, quant_groups):
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.CHUNK)

    def SoftFusePermute(graph, quant_groups):
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.PERMUTE)

    def SoftFuseDivide(graph, quant_groups):
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.DIV)

    def SoftFuseExp(graph, quant_groups):
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.EXP)

    def SoftFuseExpand(graph, quant_groups):
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.EXPAND)

    def SoftFuseInplaceCopy(graph, quant_groups):
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.INPLACE_COPY)

    def SoftFuseRepeat(graph, quant_groups):
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.REPEAT)

    # def SoftFuseSelect(graph, quant_groups):
    #   return graph_utils.group_up(graph, quant_groups, NNDCT_OP.SELECT)

    def SoftFuseUnsqueeze(graph, quant_groups):
      return graph_utils.group_up(graph, quant_groups, NNDCT_OP.UNSQUEEZE)

    return locals()
| 36.780142
| 78
| 0.722329
| 693
| 5,186
| 5.142857
| 0.242424
| 0.188272
| 0.260382
| 0.17284
| 0.497194
| 0.487093
| 0.487093
| 0.487093
| 0.487093
| 0.487093
| 0
| 0.003373
| 0.199576
| 5,186
| 140
| 79
| 37.042857
| 0.855216
| 0.14732
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.358025
| false
| 0
| 0.049383
| 0.320988
| 0.802469
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
c71504ccff0df55650d28a540ed98e6a02a69b48
| 522
|
py
|
Python
|
pylxd/models/__init__.py
|
AdamIsrael/pylxd
|
d5d47a4d1185b4956e997d70e09d649ea73ba26b
|
[
"Apache-2.0"
] | null | null | null |
pylxd/models/__init__.py
|
AdamIsrael/pylxd
|
d5d47a4d1185b4956e997d70e09d649ea73ba26b
|
[
"Apache-2.0"
] | null | null | null |
pylxd/models/__init__.py
|
AdamIsrael/pylxd
|
d5d47a4d1185b4956e997d70e09d649ea73ba26b
|
[
"Apache-2.0"
] | null | null | null |
from pylxd.models.cluster import (Cluster, ClusterMember) # NOQA
from pylxd.models.certificate import Certificate # NOQA
from pylxd.models.container import Container, Snapshot # NOQA
from pylxd.models.image import Image # NOQA
from pylxd.models.network import Network # NOQA
from pylxd.models.operation import Operation # NOQA
from pylxd.models.profile import Profile # NOQA
from pylxd.models.storage_pool import ( # NOQA
StoragePool, # NOQA
StorageResources, # NOQA
StorageVolume, # NOQA
) # NOQA
| 40.153846
| 65
| 0.764368
| 65
| 522
| 6.123077
| 0.292308
| 0.180905
| 0.301508
| 0.334171
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164751
| 522
| 12
| 66
| 43.5
| 0.912844
| 0.113027
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c763243bff5f87a536413fe4f3fe2a9034e6aafb
| 601
|
py
|
Python
|
src/counter.py
|
lifelongjourney/pipeline
|
93cc50fec3bdd77a5bbbbdd3d332dc00b0ab3020
|
[
"Apache-2.0"
] | 1
|
2021-12-11T11:00:38.000Z
|
2021-12-11T11:00:38.000Z
|
src/counter.py
|
lifelongjourney/pipeline
|
93cc50fec3bdd77a5bbbbdd3d332dc00b0ab3020
|
[
"Apache-2.0"
] | null | null | null |
src/counter.py
|
lifelongjourney/pipeline
|
93cc50fec3bdd77a5bbbbdd3d332dc00b0ab3020
|
[
"Apache-2.0"
] | null | null | null |
from multiprocessing import Value
class AtomicCounter(object):
    """An integer counter whose updates are atomic across processes.

    The count lives in a ``multiprocessing.Value('i', ...)`` (a shared C
    int); every read and write happens under that value's lock.
    """

    def __init__(self, init_value=0):
        # Shared C-int slot; get_lock() guards all access below.
        self._count = Value('i', init_value)

    def increase(self, incr=1):
        """Atomically add ``incr`` and return the new count."""
        with self._count.get_lock():
            self._count.value += incr
            return self._count.value

    def decrease(self, decr=1):
        """Atomically subtract ``decr`` and return the new count."""
        return self.increase(-decr)

    @property
    def value(self):
        """The current count, read under the lock."""
        with self._count.get_lock():
            return self._count.value

    @property
    def lock(self):
        """The underlying lock guarding the shared count."""
        return self._count.get_lock()
| 23.115385
| 42
| 0.592346
| 77
| 601
| 4.363636
| 0.298701
| 0.208333
| 0.214286
| 0.166667
| 0.410714
| 0.357143
| 0.184524
| 0.184524
| 0.184524
| 0
| 0
| 0.007109
| 0.297837
| 601
| 25
| 43
| 24.04
| 0.7891
| 0
| 0
| 0.421053
| 0
| 0
| 0.001664
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.263158
| false
| 0
| 0.052632
| 0.052632
| 0.578947
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
c77fd6f3f56c4caed16a281df0654ae13b5775af
| 80
|
py
|
Python
|
python/testData/paramInfo/StarredParamAndArg.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/paramInfo/StarredParamAndArg.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/paramInfo/StarredParamAndArg.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
def foo(a, b, *c):
    pass
x = (5,6)
foo(<arg1>1, <arg2>2, <arg3>4, <arg4>*x)
# NOTE(review): IDE parameter-info test fixture — the <argN> markers are
# caret positions consumed by the test harness, so this file is deliberately
# not valid Python; keep the markers and their offsets unchanged.
| 10
| 40
| 0.4875
| 18
| 80
| 2.166667
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0.2125
| 80
| 7
| 41
| 11.428571
| 0.47619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.25
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
c7852ac36a7aaaa4fd8614c9117a8d39993e7df0
| 171
|
py
|
Python
|
examples/hookiocli.py
|
Marak/hook.io-sdk-python
|
722b04eb0832ef712d5dcd491899996088e1aa8b
|
[
"Unlicense"
] | 1
|
2021-06-15T11:52:44.000Z
|
2021-06-15T11:52:44.000Z
|
examples/hookiocli.py
|
Marak/hook.io-sdk-python
|
722b04eb0832ef712d5dcd491899996088e1aa8b
|
[
"Unlicense"
] | null | null | null |
examples/hookiocli.py
|
Marak/hook.io-sdk-python
|
722b04eb0832ef712d5dcd491899996088e1aa8b
|
[
"Unlicense"
] | null | null | null |
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Resolve the 'hookiocli' console-script entry point from the installed
    # 'hookio' distribution and run it with the raw argv, propagating its
    # return value as the process exit status.
    cli = load_entry_point('hookio', 'console_scripts', 'hookiocli')
    sys.exit(cli(sys.argv))
| 28.5
| 83
| 0.736842
| 24
| 171
| 4.708333
| 0.75
| 0.159292
| 0.247788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134503
| 171
| 5
| 84
| 34.2
| 0.756757
| 0
| 0
| 0
| 0
| 0
| 0.230303
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.5
| null | null | 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
c7e7310619d963b59af9cac2a3fe9f2a78da2b18
| 29,314
|
py
|
Python
|
test/geometry/test_conversions.py
|
aardvarkkrill/kornia
|
e36ca3d15883a1dbbb0e7413719c0965a4b63cee
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
test/geometry/test_conversions.py
|
aardvarkkrill/kornia
|
e36ca3d15883a1dbbb0e7413719c0965a4b63cee
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
test/geometry/test_conversions.py
|
aardvarkkrill/kornia
|
e36ca3d15883a1dbbb0e7413719c0965a4b63cee
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-05-15T03:22:24.000Z
|
2021-05-15T03:22:24.000Z
|
from typing import Optional
import pytest
import numpy as np
import kornia
from kornia.testing import tensor_to_gradcheck_var, create_eye_batch
import torch
from torch.autograd import gradcheck
from torch.testing import assert_allclose
# based on:
# https://github.com/ceres-solver/ceres-solver/blob/master/internal/ceres/rotation_test.cc#L271
class TestAngleAxisToQuaternion:
    """Tests for kornia.angle_axis_to_quaternion.

    Reference values follow ceres-solver's rotation tests (link at the top
    of the file). In this convention the scalar component comes first:
    zero rotation maps to (1, 0, 0, 0).
    """

    def test_smoke(self, device, dtype):
        # A single (3,) angle-axis maps to a single (4,) quaternion.
        angle_axis = torch.zeros(3)
        quaternion = kornia.angle_axis_to_quaternion(angle_axis)
        assert quaternion.shape == (4,)

    @pytest.mark.parametrize("batch_size", (1, 3, 8))
    def test_smoke_batch(self, batch_size, device, dtype):
        # Batched input (B, 3) -> batched output (B, 4).
        angle_axis = torch.zeros(batch_size, 3, device=device, dtype=dtype)
        quaternion = kornia.angle_axis_to_quaternion(angle_axis)
        assert quaternion.shape == (batch_size, 4)

    def test_zero_angle(self, device, dtype):
        # Zero rotation -> identity quaternion.
        angle_axis = torch.tensor([0., 0., 0.], device=device, dtype=dtype)
        expected = torch.tensor([1., 0., 0., 0.], device=device, dtype=dtype)
        quaternion = kornia.angle_axis_to_quaternion(angle_axis)
        assert_allclose(quaternion, expected, atol=1e-4, rtol=1e-4)

    def test_small_angle(self, device, dtype):
        # Small angle: expected components use the half-angle theta / 2.
        theta = 1e-2
        angle_axis = torch.tensor([theta, 0., 0.], device=device, dtype=dtype)
        expected = torch.tensor([np.cos(theta / 2), np.sin(theta / 2), 0., 0.], device=device, dtype=dtype)
        quaternion = kornia.angle_axis_to_quaternion(angle_axis)
        assert_allclose(quaternion, expected, atol=1e-4, rtol=1e-4)

    def test_x_rotation(self, device, dtype):
        # 90-degree rotation about x: w = x = sqrt(2) / 2.
        half_sqrt2 = 0.5 * np.sqrt(2)
        angle_axis = torch.tensor([kornia.pi / 2, 0., 0.], device=device, dtype=dtype)
        expected = torch.tensor([half_sqrt2, half_sqrt2, 0., 0.], device=device, dtype=dtype)
        quaternion = kornia.angle_axis_to_quaternion(angle_axis)
        assert_allclose(quaternion, expected, atol=1e-4, rtol=1e-4)

    def test_gradcheck(self, device, dtype):
        # eps keeps the input away from the exact-zero point.
        eps = 1e-12
        angle_axis = torch.tensor([0., 0., 0.], device=device, dtype=dtype) + eps
        angle_axis = tensor_to_gradcheck_var(angle_axis)
        # evaluate function gradient
        assert gradcheck(kornia.angle_axis_to_quaternion, (angle_axis,),
                         raise_exception=True)
class TestRotationMatrixToQuaternion:
    """Tests for kornia.rotation_matrix_to_quaternion.

    In this function's convention the identity matrix maps to
    (0, 0, 0, 1), i.e. the scalar component is last.
    """

    @pytest.mark.parametrize("batch_size", (1, 3, 8))
    def test_smoke_batch(self, batch_size, device, dtype):
        # Batched (B, 3, 3) matrices -> batched (B, 4) quaternions.
        matrix = torch.zeros(batch_size, 3, 3, device=device, dtype=dtype)
        quaternion = kornia.rotation_matrix_to_quaternion(matrix)
        assert quaternion.shape == (batch_size, 4)

    def test_identity(self, device, dtype):
        # Identity matrix -> identity quaternion (scalar-last).
        matrix = torch.tensor([
            [1., 0., 0.],
            [0., 1., 0.],
            [0., 0., 1.],
        ], device=device, dtype=dtype)
        expected = torch.tensor(
            [0., 0., 0., 1.], device=device, dtype=dtype)
        quaternion = kornia.rotation_matrix_to_quaternion(matrix)
        assert_allclose(quaternion, expected, atol=1e-4, rtol=1e-4)

    def test_rot_x_45(self, device, dtype):
        # NOTE(review): despite the name this matrix is a 90-degree rotation
        # about x; the expected quaternion uses the half-angle pi / 4.
        matrix = torch.tensor([
            [1., 0., 0.],
            [0., 0., -1.],
            [0., 1., 0.],
        ], device=device, dtype=dtype)
        pi_half2 = torch.cos(kornia.pi / 4).to(device=device, dtype=dtype)
        expected = torch.tensor(
            [pi_half2, 0., 0., pi_half2], device=device, dtype=dtype)
        quaternion = kornia.rotation_matrix_to_quaternion(matrix)
        assert_allclose(quaternion, expected, atol=1e-4, rtol=1e-4)

    def test_back_and_forth(self, device, dtype):
        # matrix -> quaternion -> matrix must round-trip.
        matrix = torch.tensor([
            [1., 0., 0.],
            [0., 0., -1.],
            [0., 1., 0.],
        ], device=device, dtype=dtype)
        quaternion = kornia.rotation_matrix_to_quaternion(matrix)
        matrix_hat = kornia.quaternion_to_rotation_matrix(quaternion)
        assert_allclose(matrix, matrix_hat)

    def test_corner_case(self, device, dtype):
        # Regression check against precomputed values.
        matrix = torch.tensor([
            [-0.7799533010, -0.5432914495, 0.3106555045],
            [0.0492402576, -0.5481169224, -0.8349509239],
            [0.6238971353, -0.6359263659, 0.4542570710]
        ], device=device, dtype=dtype)
        quaternion_true = torch.tensor([0.280136495828629, -0.440902262926102,
                                        0.834015488624573, 0.177614107728004], device=device, dtype=dtype)
        quaternion = kornia.rotation_matrix_to_quaternion(matrix)
        # Removed the stray torch.set_printoptions(precision=10) call: it
        # mutated global printing state and had no effect on the assertion.
        assert_allclose(quaternion_true, quaternion)

    def test_gradcheck(self, device, dtype):
        matrix = torch.eye(3, device=device, dtype=dtype)
        matrix = tensor_to_gradcheck_var(matrix)
        # evaluate function gradient
        assert gradcheck(kornia.rotation_matrix_to_quaternion, (matrix,),
                         raise_exception=True)

    @pytest.mark.skip(reason="turn off all jit for a while")
    def test_jit(self, device, dtype):
        # Fixed copy-paste: this class must script rotation_matrix_to_quaternion;
        # the old code scripted quaternion_log_to_exp and fed it a 3-vector.
        op = kornia.rotation_matrix_to_quaternion
        op_script = torch.jit.script(op)
        matrix = torch.eye(3, device=device, dtype=dtype)
        actual = op_script(matrix)
        expected = op(matrix)
        assert_allclose(actual, expected, atol=1e-4, rtol=1e-4)
class TestQuaternionToRotationMatrix:
    """Tests for kornia.quaternion_to_rotation_matrix.

    Scalar-last convention: (0, 0, 0, 1) is the identity quaternion, and a
    single 1 in any of the first three slots is a 180-degree rotation about
    that axis.
    """

    @pytest.mark.parametrize("batch_size", (1, 3, 8))
    def test_smoke_batch(self, batch_size, device, dtype):
        # Batched (B, 4) quaternions -> batched (B, 3, 3) matrices.
        quaternion = torch.zeros(batch_size, 4, device=device, dtype=dtype)
        matrix = kornia.quaternion_to_rotation_matrix(quaternion)
        assert matrix.shape == (batch_size, 3, 3)

    def test_unit_quaternion(self, device, dtype):
        # Identity quaternion -> identity matrix.
        quaternion = torch.tensor([0., 0., 0., 1.], device=device, dtype=dtype)
        expected = torch.tensor([
            [1., 0., 0.],
            [0., 1., 0.],
            [0., 0., 1.],
        ], device=device, dtype=dtype)
        matrix = kornia.quaternion_to_rotation_matrix(quaternion)
        assert_allclose(matrix, expected, atol=1e-4, rtol=1e-4)

    def test_x_rotation(self, device, dtype):
        # 180 degrees about x: y and z axes are negated.
        quaternion = torch.tensor([1., 0., 0., 0.], device=device, dtype=dtype)
        expected = torch.tensor([
            [1., 0., 0.],
            [0., -1., 0.],
            [0., 0., -1.],
        ], device=device, dtype=dtype)
        matrix = kornia.quaternion_to_rotation_matrix(quaternion)
        assert_allclose(matrix, expected, atol=1e-4, rtol=1e-4)

    def test_y_rotation(self, device, dtype):
        # 180 degrees about y.
        quaternion = torch.tensor([0., 1., 0., 0.], device=device, dtype=dtype)
        expected = torch.tensor([
            [-1., 0., 0.],
            [0., 1., 0.],
            [0., 0., -1.],
        ], device=device, dtype=dtype)
        matrix = kornia.quaternion_to_rotation_matrix(quaternion)
        assert_allclose(matrix, expected, atol=1e-4, rtol=1e-4)

    def test_z_rotation(self, device, dtype):
        # 180 degrees about z.
        quaternion = torch.tensor([0., 0., 1., 0.], device=device, dtype=dtype)
        expected = torch.tensor([
            [-1., 0., 0.],
            [0., -1., 0.],
            [0., 0., 1.],
        ], device=device, dtype=dtype)
        matrix = kornia.quaternion_to_rotation_matrix(quaternion)
        assert_allclose(matrix, expected, atol=1e-4, rtol=1e-4)

    def test_gradcheck(self, device, dtype):
        quaternion = torch.tensor([0., 0., 0., 1.], device=device, dtype=dtype)
        quaternion = tensor_to_gradcheck_var(quaternion)
        # evaluate function gradient
        assert gradcheck(kornia.quaternion_to_rotation_matrix, (quaternion,),
                         raise_exception=True)

    @pytest.mark.skip(reason="turn off all jit for a while")
    def test_jit(self, device, dtype):
        # Scripted op must match the eager op.
        @torch.jit.script
        def op_script(input):
            return kornia.quaternion_to_rotation_matrix(input)

        quaternion = torch.tensor([0., 0., 1., 0.], device=device, dtype=dtype)
        actual = op_script(quaternion)
        expected = kornia.quaternion_to_rotation_matrix(quaternion)
        assert_allclose(actual, expected, atol=1e-4, rtol=1e-4)
class TestQuaternionLogToExp:
    """Tests for kornia.quaternion_log_to_exp: (B, 3) log map -> (B, 4) quaternion."""

    @pytest.mark.parametrize("batch_size", (1, 3, 8))
    def test_smoke_batch(self, batch_size, device, dtype):
        quaternion_log = torch.zeros(batch_size, 3, device=device, dtype=dtype)
        quaternion_exp = kornia.quaternion_log_to_exp(quaternion_log)
        assert quaternion_exp.shape == (batch_size, 4)

    def test_unit_quaternion(self, device, dtype):
        # Zero log vector -> identity quaternion (scalar-last).
        quaternion_log = torch.tensor([0., 0., 0.], device=device, dtype=dtype)
        expected = torch.tensor([0., 0., 0., 1.], device=device, dtype=dtype)
        assert_allclose(kornia.quaternion_log_to_exp(quaternion_log), expected)

    def test_pi_quaternion(self, device, dtype):
        # Unit log vector (1, 0, 0) -> (sin 1, 0, 0, cos 1).
        one = torch.tensor(1., device=device, dtype=dtype)
        quaternion_log = torch.tensor([1., 0., 0.], device=device, dtype=dtype)
        expected = torch.tensor([torch.sin(one), 0., 0., torch.cos(one)], device=device, dtype=dtype)
        assert_allclose(kornia.quaternion_log_to_exp(quaternion_log), expected)

    def test_back_and_forth(self, device, dtype):
        # exp followed by log must round-trip.
        quaternion_log = torch.tensor([0., 0., 0.], device=device, dtype=dtype)
        quaternion_exp = kornia.quaternion_log_to_exp(quaternion_log)
        quaternion_log_hat = kornia.quaternion_exp_to_log(quaternion_exp)
        assert_allclose(quaternion_log, quaternion_log_hat)

    def test_gradcheck(self, device, dtype):
        quaternion = torch.tensor([0., 0., 1.], device=device, dtype=dtype)
        quaternion = tensor_to_gradcheck_var(quaternion)
        # evaluate function gradient
        assert gradcheck(kornia.quaternion_log_to_exp, (quaternion,),
                         raise_exception=True)

    @pytest.mark.skip(reason="turn off all jit for a while")
    def test_jit(self, device, dtype):
        # Scripted op must match the eager op.
        op = kornia.quaternion_log_to_exp
        op_script = torch.jit.script(op)
        quaternion = torch.tensor([0., 0., 1.], device=device, dtype=dtype)
        actual = op_script(quaternion)
        expected = op(quaternion)
        assert_allclose(actual, expected, atol=1e-4, rtol=1e-4)
class TestQuaternionExpToLog:
    """Tests for kornia.quaternion_exp_to_log: (B, 4) quaternion -> (B, 3) log map."""

    @pytest.mark.parametrize("batch_size", (1, 3, 8))
    def test_smoke_batch(self, batch_size, device, dtype):
        quaternion_exp = torch.zeros(batch_size, 4, device=device, dtype=dtype)
        quaternion_log = kornia.quaternion_exp_to_log(quaternion_exp)
        assert quaternion_log.shape == (batch_size, 3)

    def test_unit_quaternion(self, device, dtype):
        # Identity quaternion (scalar-last) -> zero log vector.
        quaternion_exp = torch.tensor([0., 0., 0., 1.], device=device, dtype=dtype)
        expected = torch.tensor([0., 0., 0.], device=device, dtype=dtype)
        assert_allclose(kornia.quaternion_exp_to_log(quaternion_exp), expected, atol=1e-4, rtol=1e-4)

    def test_pi_quaternion(self, device, dtype):
        # Pure-x quaternion (1, 0, 0, 0) -> log vector (pi / 2, 0, 0).
        quaternion_exp = torch.tensor([1., 0., 0., 0.], device=device, dtype=dtype)
        expected = torch.tensor([kornia.pi / 2, 0., 0.], device=device, dtype=dtype)
        assert_allclose(kornia.quaternion_exp_to_log(quaternion_exp), expected, atol=1e-4, rtol=1e-4)

    def test_back_and_forth(self, device, dtype):
        # log followed by exp must round-trip.
        quaternion_exp = torch.tensor([1., 0., 0., 0.], device=device, dtype=dtype)
        quaternion_log = kornia.quaternion_exp_to_log(quaternion_exp)
        quaternion_exp_hat = kornia.quaternion_log_to_exp(quaternion_log)
        assert_allclose(quaternion_exp, quaternion_exp_hat, atol=1e-4, rtol=1e-4)

    def test_gradcheck(self, device, dtype):
        quaternion = torch.tensor([1., 0., 0., 0.], device=device, dtype=dtype)
        quaternion = tensor_to_gradcheck_var(quaternion)
        # evaluate function gradient
        assert gradcheck(kornia.quaternion_exp_to_log, (quaternion,),
                         raise_exception=True)

    @pytest.mark.skip(reason="turn off all jit for a while")
    def test_jit(self, device, dtype):
        # Scripted op must match the eager op.
        op = kornia.quaternion_exp_to_log
        op_script = torch.jit.script(op)
        quaternion = torch.tensor([0., 0., 1., 0.], device=device, dtype=dtype)
        actual = op_script(quaternion)
        expected = op(quaternion)
        assert_allclose(actual, expected, atol=1e-4, rtol=1e-4)
class TestQuaternionToAngleAxis:
    """Tests for kornia.quaternion_to_angle_axis.

    Scalar-first convention here: (1, 0, 0, 0) is the identity quaternion.
    """

    def test_smoke(self, device, dtype):
        # A single (4,) quaternion maps to a single (3,) angle-axis.
        quaternion = torch.zeros(4, device=device, dtype=dtype)
        angle_axis = kornia.quaternion_to_angle_axis(quaternion)
        assert angle_axis.shape == (3,)

    @pytest.mark.parametrize("batch_size", (1, 3, 8))
    def test_smoke_batch(self, batch_size, device, dtype):
        # Batched (B, 4) -> (B, 3).
        quaternion = torch.zeros(batch_size, 4, device=device, dtype=dtype)
        angle_axis = kornia.quaternion_to_angle_axis(quaternion)
        assert angle_axis.shape == (batch_size, 3)

    def test_unit_quaternion(self, device, dtype):
        # Identity quaternion -> zero rotation.
        quaternion = torch.tensor([1., 0., 0., 0.], device=device, dtype=dtype)
        expected = torch.tensor([0., 0., 0.], device=device, dtype=dtype)
        angle_axis = kornia.quaternion_to_angle_axis(quaternion)
        assert_allclose(angle_axis, expected, atol=1e-4, rtol=1e-4)

    def test_y_rotation(self, device, dtype):
        # w = 0, y = 1 -> 180-degree (pi) rotation about y.
        quaternion = torch.tensor([0., 0., 1., 0.], device=device, dtype=dtype)
        expected = torch.tensor([0., kornia.pi, 0.], device=device, dtype=dtype)
        angle_axis = kornia.quaternion_to_angle_axis(quaternion)
        assert_allclose(angle_axis, expected, atol=1e-4, rtol=1e-4)

    def test_z_rotation(self, device, dtype):
        # w = cos(pi/6), z = sin(pi/6) -> rotation of pi / 3 about z.
        quaternion = torch.tensor([np.sqrt(3) / 2, 0., 0., 0.5], device=device, dtype=dtype)
        expected = torch.tensor([0., 0., kornia.pi / 3], device=device, dtype=dtype)
        angle_axis = kornia.quaternion_to_angle_axis(quaternion)
        assert_allclose(angle_axis, expected, atol=1e-4, rtol=1e-4)

    def test_small_angle(self, device, dtype):
        # Small angle: input quaternion built from the half-angle theta / 2.
        theta = 1e-2
        quaternion = torch.tensor([np.cos(theta / 2), np.sin(theta / 2), 0., 0.], device=device, dtype=dtype)
        expected = torch.tensor([theta, 0., 0.], device=device, dtype=dtype)
        angle_axis = kornia.quaternion_to_angle_axis(quaternion)
        assert_allclose(angle_axis, expected, atol=1e-4, rtol=1e-4)

    def test_gradcheck(self, device, dtype):
        # eps keeps the input away from the exact identity point.
        eps = 1e-12
        quaternion = torch.tensor([1., 0., 0., 0.], device=device, dtype=dtype) + eps
        quaternion = tensor_to_gradcheck_var(quaternion)
        # evaluate function gradient
        assert gradcheck(kornia.quaternion_to_angle_axis, (quaternion,),
                         raise_exception=True)
def test_pi():
    # Compare against numpy's full-precision constant instead of the old
    # truncated literal 3.141592.
    assert_allclose(kornia.pi, np.pi)
@pytest.mark.parametrize("batch_shape", [
    (2, 3), (1, 2, 3), (2, 3, 3), (5, 5, 3), ])
def test_rad2deg(batch_shape, device, dtype):
    """rad2deg -> deg2rad must round-trip and rad2deg must be differentiable."""
    # generate input data in [0, pi)
    x_rad = kornia.pi * torch.rand(batch_shape, device=device, dtype=dtype)

    # convert radians/degrees
    x_deg = kornia.rad2deg(x_rad)
    x_deg_to_rad = kornia.deg2rad(x_deg)

    # compute error
    assert_allclose(x_rad, x_deg_to_rad)

    # evaluate function gradient
    assert gradcheck(kornia.rad2deg, (tensor_to_gradcheck_var(x_rad),),
                     raise_exception=True)
@pytest.mark.parametrize("batch_shape", [
    (2, 3), (1, 2, 3), (2, 3, 3), (5, 5, 3), ])
def test_deg2rad(batch_shape, device, dtype):
    """deg2rad -> rad2deg must round-trip and deg2rad must be differentiable."""
    # generate input data in [0, 180)
    x_deg = 180. * torch.rand(batch_shape, device=device, dtype=dtype)

    # convert radians/degrees and back
    x_rad = kornia.deg2rad(x_deg)
    x_rad_to_deg = kornia.rad2deg(x_rad)
    assert_allclose(x_deg, x_rad_to_deg, atol=1e-4, rtol=1e-4)

    assert gradcheck(kornia.deg2rad, (tensor_to_gradcheck_var(x_deg),),
                     raise_exception=True)
class TestPolCartConversions:
    """Round-trip tests for kornia.pol2cart / kornia.cart2pol."""

    def test_smoke(self, device, dtype):
        # Both conversions must accept tensor inputs and return something.
        x = torch.ones(1, 1, 1, 1, device=device, dtype=dtype)
        assert kornia.pol2cart(x, x) is not None
        assert kornia.cart2pol(x, x) is not None

    @pytest.mark.parametrize("batch_shape", [
        (2, 3), (1, 2, 3), (2, 3, 3), (5, 5, 3), ])
    def test_pol2cart(self, batch_shape, device, dtype):
        # generate input data directly on the target device (previously the
        # tensors were created on CPU and then moved with .to(device)).
        rho = torch.rand(batch_shape, device=device, dtype=dtype)
        phi = kornia.pi * torch.rand(batch_shape, device=device, dtype=dtype)

        # convert pol -> cart -> pol; the trailing 0 is cart2pol's third
        # argument (presumably an epsilon — confirm against the signature).
        x_pol2cart, y_pol2cart = kornia.pol2cart(rho, phi)
        rho_pol2cart, phi_pol2cart = kornia.cart2pol(x_pol2cart, y_pol2cart, 0)
        assert_allclose(rho, rho_pol2cart)
        assert_allclose(phi, phi_pol2cart)

        assert gradcheck(kornia.pol2cart, (tensor_to_gradcheck_var(rho),
                                           tensor_to_gradcheck_var(phi), ), raise_exception=True)

    @pytest.mark.parametrize("batch_shape", [
        (2, 3), (1, 2, 3), (2, 3, 3), (5, 5, 3), ])
    def test_cart2pol(self, batch_shape, device, dtype):
        # generate input data directly on the target device.
        x = torch.rand(batch_shape, device=device, dtype=dtype)
        y = torch.rand(batch_shape, device=device, dtype=dtype)

        # convert cart -> pol -> cart.
        rho_cart2pol, phi_cart2pol = kornia.cart2pol(x, y, 0)
        x_cart2pol, y_cart2pol = kornia.pol2cart(rho_cart2pol, phi_cart2pol)
        assert_allclose(x, x_cart2pol)
        assert_allclose(y, y_cart2pol)

        assert gradcheck(kornia.cart2pol, (tensor_to_gradcheck_var(x),
                                           tensor_to_gradcheck_var(y), ), raise_exception=True)
class TestConvertPointsToHomogeneous:
    """Tests for kornia.convert_points_to_homogeneous (appends w = 1)."""

    def test_convert_points(self, device, dtype):
        # generate input data
        points_h = torch.tensor([
            [1., 2., 1.],
            [0., 1., 2.],
            [2., 1., 0.],
            [-1., -2., -1.],
            [0., 1., -2.],
        ], device=device, dtype=dtype)
        expected = torch.tensor([
            [1., 2., 1., 1.],
            [0., 1., 2., 1.],
            [2., 1., 0., 1.],
            [-1., -2., -1., 1.],
            [0., 1., -2., 1.],
        ], device=device, dtype=dtype)
        # to homogeneous: every point gains a trailing 1.
        points = kornia.convert_points_to_homogeneous(points_h)
        assert_allclose(points, expected, atol=1e-4, rtol=1e-4)

    def test_convert_points_batch(self, device, dtype):
        # generate batched input data
        points_h = torch.tensor([[
            [2., 1., 0.],
        ], [
            [0., 1., 2.],
        ], [
            [0., 1., -2.],
        ]], device=device, dtype=dtype)
        expected = torch.tensor([[
            [2., 1., 0., 1.],
        ], [
            [0., 1., 2., 1.],
        ], [
            [0., 1., -2., 1.],
        ]], device=device, dtype=dtype)
        # to homogeneous (batched input).
        points = kornia.convert_points_to_homogeneous(points_h)
        assert_allclose(points, expected, atol=1e-4, rtol=1e-4)

    @pytest.mark.parametrize("batch_shape", [
        (2, 3), (1, 2, 3), (2, 3, 3), (5, 5, 3), ])
    def test_gradcheck(self, batch_shape, device, dtype):
        points_h = torch.rand(batch_shape, device=device, dtype=dtype)
        # evaluate function gradient
        points_h = tensor_to_gradcheck_var(points_h)  # to var
        assert gradcheck(kornia.convert_points_to_homogeneous, (points_h,),
                         raise_exception=True)

    @pytest.mark.skip(reason="turn off all jit for a while")
    def test_jit(self, device, dtype):
        # Scripted op must match the eager op.
        op = kornia.convert_points_to_homogeneous
        op_script = torch.jit.script(op)
        points_h = torch.zeros(1, 2, 3, device=device, dtype=dtype)
        actual = op_script(points_h)
        expected = op(points_h)
        assert_allclose(actual, expected, atol=1e-4, rtol=1e-4)
class TestConvertAtoH:
    """Tests for convert_affinematrix_to_homography: (B, 2, 3) -> (B, 3, 3)."""

    def test_convert_points(self, device, dtype):
        # generate input data: a (1, 2, 3) identity affine matrix.
        A = torch.tensor([
            [1., 0., 0.],
            [0., 1., 0.],
        ], device=device, dtype=dtype).view(1, 2, 3)
        expected = torch.tensor([
            [1., 0., 0.],
            [0., 1., 0.],
            [0., 0., 1.],
        ], device=device, dtype=dtype).view(1, 3, 3)
        # to homography: the [0, 0, 1] row is appended.
        H = kornia.geometry.conversions.convert_affinematrix_to_homography(A)
        assert_allclose(H, expected)

    @pytest.mark.parametrize("batch_shape", [
        (10, 2, 3), (16, 2, 3)])
    def test_gradcheck(self, batch_shape, device, dtype):
        points_h = torch.rand(batch_shape, device=device, dtype=dtype)
        # evaluate function gradient
        points_h = tensor_to_gradcheck_var(points_h)  # to var
        assert gradcheck(kornia.convert_affinematrix_to_homography, (points_h,),
                         raise_exception=True)

    @pytest.mark.skip(reason="turn off all jit for a while")
    def test_jit(self, device, dtype):
        # Scripted op must match the eager op.
        op = kornia.convert_affinematrix_to_homography
        op_script = torch.jit.script(op)
        points_h = torch.zeros(1, 2, 3, device=device, dtype=dtype)
        actual = op_script(points_h)
        expected = op(points_h)
        assert_allclose(actual, expected, atol=1e-4, rtol=1e-4)
class TestConvertPointsFromHomogeneous:
    """Tests for kornia.convert_points_from_homogeneous (divide by w, drop it)."""

    @pytest.mark.parametrize("batch_shape", [
        (2, 3), (1, 2, 3), (2, 3, 3), (5, 5, 3), ])
    def test_cardinality(self, device, dtype, batch_shape):
        # Output keeps the leading dims and drops the homogeneous coordinate:
        # (..., 3) -> (..., 2). The old assertion compared points.shape with
        # points.shape[:-1] + (2,), which only checked the last dimension.
        points_h = torch.rand(batch_shape, device=device, dtype=dtype)
        points = kornia.convert_points_from_homogeneous(points_h)
        assert points.shape == points_h.shape[:-1] + (2,)

    def test_points(self, device, dtype):
        # generate input data; rows with w == 0 are expected back unscaled
        # (see [2., 1., 0.] -> [2., 1.] below).
        points_h = torch.tensor([
            [1., 2., 1.],
            [0., 1., 2.],
            [2., 1., 0.],
            [-1., -2., -1.],
            [0., 1., -2.],
        ], device=device, dtype=dtype)
        expected = torch.tensor([
            [1., 2.],
            [0., 0.5],
            [2., 1.],
            [1., 2.],
            [0., -0.5],
        ], device=device, dtype=dtype)
        # to euclidean
        points = kornia.convert_points_from_homogeneous(points_h)
        assert_allclose(points, expected, atol=1e-4, rtol=1e-4)

    def test_points_batch(self, device, dtype):
        # generate batched input data
        points_h = torch.tensor([[
            [2., 1., 0.],
        ], [
            [0., 1., 2.],
        ], [
            [0., 1., -2.],
        ]], device=device, dtype=dtype)
        expected = torch.tensor([[
            [2., 1.],
        ], [
            [0., 0.5],
        ], [
            [0., -0.5],
        ]], device=device, dtype=dtype)
        # to euclidean
        points = kornia.convert_points_from_homogeneous(points_h)
        assert_allclose(points, expected, atol=1e-4, rtol=1e-4)

    def test_gradcheck(self, device, dtype):
        points_h = torch.ones(1, 10, 3, device=device, dtype=dtype)
        # evaluate function gradient
        points_h = tensor_to_gradcheck_var(points_h)  # to var
        assert gradcheck(kornia.convert_points_from_homogeneous, (points_h,),
                         raise_exception=True)

    @pytest.mark.skip("RuntimeError: Jacobian mismatch for output 0 with respect to input 0,")
    def test_gradcheck_zvec_zeros(self, device, dtype):
        # generate input data with small / zero w to probe the unstable region.
        points_h = torch.tensor([
            [1., 2., 0.],
            [0., 1., 0.1],
            [2., 1., 0.1],
        ], device=device, dtype=dtype)
        # evaluate function gradient
        points_h = tensor_to_gradcheck_var(points_h)  # to var
        assert gradcheck(kornia.convert_points_from_homogeneous, (points_h,),
                         raise_exception=True)

    def test_jit(self, device, dtype):
        # Scripted op must match the eager op (this jit test is not skipped).
        op = kornia.convert_points_from_homogeneous
        op_script = torch.jit.script(op)
        points_h = torch.zeros(1, 2, 3, device=device, dtype=dtype)
        actual = op_script(points_h)
        expected = op(points_h)
        assert_allclose(actual, expected, atol=1e-4, rtol=1e-4)
@pytest.mark.parametrize("batch_size", [1, 2, 5])
def test_angle_axis_to_rotation_matrix(batch_size, device, dtype):
    """Rotation matrices from angle-axis are orthogonal and pass gradcheck."""
    angle_axis = torch.rand(batch_size, 3, device=device, dtype=dtype)
    identity = create_eye_batch(batch_size, 3, device=device, dtype=dtype)
    rotmat = kornia.angle_axis_to_rotation_matrix(angle_axis)
    # R @ R^T must be the identity for a proper rotation matrix
    should_be_identity = torch.matmul(rotmat, rotmat.transpose(1, 2))
    assert_allclose(should_be_identity, identity, atol=1e-4, rtol=1e-4)
    # check the function gradient
    angle_axis = tensor_to_gradcheck_var(angle_axis)
    assert gradcheck(kornia.angle_axis_to_rotation_matrix, (angle_axis,),
                     raise_exception=True)
# NOTE(review): the two blocks below are disabled tests kept alive as
# module-level string literals (dead code). They reference an API that may
# have drifted (e.g. torch.stack does not accept device/dtype kwargs) —
# either re-enable and fix them, or delete them.
'''@pytest.mark.parametrize("batch_size", [1, 2, 5])
def test_rotation_matrix_to_angle_axis_gradcheck(batch_size, device_type):
# generate input data
rmat = torch.rand(batch_size, 3, 3).to(torch.device(device_type))
# evaluate function gradient
rmat = tensor_to_gradcheck_var(rmat) # to var
assert gradcheck(kornia.rotation_matrix_to_angle_axis,
(rmat,), raise_exception=True)'''
'''def test_rotation_matrix_to_angle_axis(device_type):
device = torch.device(device_type)
rmat_1 = torch.tensor([[-0.30382753, -0.95095137, -0.05814062],
[-0.71581715, 0.26812278, -0.64476041],
[0.62872461, -0.15427791, -0.76217038]])
rvec_1 = torch.tensor([1.50485376, -2.10737739, 0.7214174])
rmat_2 = torch.tensor([[0.6027768, -0.79275544, -0.09054801],
[-0.67915707, -0.56931658, 0.46327563],
[-0.41881476, -0.21775548, -0.88157628]])
rvec_2 = torch.tensor([-2.44916812, 1.18053411, 0.4085298])
rmat = torch.stack([rmat_2, rmat_1], dim=0, device=device, dtype=dtype)
rvec = torch.stack([rvec_2, rvec_1], dim=0, device=device, dtype=dtype)
assert_allclose(kornia.rotation_matrix_to_angle_axis(rmat), rvec)'''
class TestNormalizePixelCoordinates:
    """Tests for kornia.normalize_pixel_coordinates."""

    def test_tensor_bhw2(self, device, dtype):
        # A pixel-space meshgrid normalized by the op must equal the grid
        # created directly with normalized coordinates.
        height, width = 3, 4
        pixel_grid = kornia.utils.create_meshgrid(
            height, width, normalized_coordinates=False,
            device=device).to(dtype=dtype)
        expected = kornia.utils.create_meshgrid(
            height, width, normalized_coordinates=True,
            device=device).to(dtype=dtype)
        normalized = kornia.normalize_pixel_coordinates(
            pixel_grid, height, width)
        assert_allclose(normalized, expected, atol=1e-4, rtol=1e-4)

    def test_list(self, device, dtype):
        # Same check with the grid flattened to an (N, 2) list of points.
        height, width = 3, 4
        pixel_grid = kornia.utils.create_meshgrid(
            height, width, normalized_coordinates=False,
            device=device).to(dtype=dtype).contiguous().view(-1, 2)
        expected = kornia.utils.create_meshgrid(
            height, width, normalized_coordinates=True,
            device=device).to(dtype=dtype).contiguous().view(-1, 2)
        normalized = kornia.normalize_pixel_coordinates(
            pixel_grid, height, width)
        assert_allclose(normalized, expected, atol=1e-4, rtol=1e-4)

    @pytest.mark.skip(reason="turn off all jit for a while")
    def test_jit(self, device, dtype):
        # The scripted op must match the eager implementation.
        op = kornia.normalize_pixel_coordinates
        scripted = torch.jit.script(op)
        height, width = 3, 4
        grid = kornia.utils.create_meshgrid(
            height, width, normalized_coordinates=True,
            device=device).to(dtype=dtype)
        assert_allclose(scripted(grid, height, width),
                        op(grid, height, width), atol=1e-4, rtol=1e-4)
class TestDenormalizePixelCoordinates:
    """Tests for kornia.denormalize_pixel_coordinates."""

    def test_tensor_bhw2(self, device, dtype):
        # A normalized meshgrid denormalized by the op must equal the grid
        # created directly in pixel coordinates.
        height, width = 3, 4
        norm_grid = kornia.utils.create_meshgrid(
            height, width, normalized_coordinates=True,
            device=device).to(dtype=dtype)
        expected = kornia.utils.create_meshgrid(
            height, width, normalized_coordinates=False,
            device=device).to(dtype=dtype)
        denormalized = kornia.denormalize_pixel_coordinates(
            norm_grid, height, width)
        assert_allclose(denormalized, expected, atol=1e-4, rtol=1e-4)

    def test_list(self, device, dtype):
        # Same check with the grid flattened to an (N, 2) list of points.
        height, width = 3, 4
        norm_grid = kornia.utils.create_meshgrid(
            height, width, normalized_coordinates=True,
            device=device).to(dtype=dtype).contiguous().view(-1, 2)
        expected = kornia.utils.create_meshgrid(
            height, width, normalized_coordinates=False,
            device=device).to(dtype=dtype).contiguous().view(-1, 2)
        denormalized = kornia.denormalize_pixel_coordinates(
            norm_grid, height, width)
        assert_allclose(denormalized, expected, atol=1e-4, rtol=1e-4)

    @pytest.mark.skip(reason="turn off all jit for a while")
    def test_jit(self, device, dtype):
        # The scripted op must match the eager implementation.
        op = kornia.denormalize_pixel_coordinates
        scripted = torch.jit.script(op)
        height, width = 3, 4
        grid = kornia.utils.create_meshgrid(
            height, width, normalized_coordinates=True,
            device=device).to(dtype=dtype)
        assert_allclose(scripted(grid, height, width),
                        op(grid, height, width), atol=1e-4, rtol=1e-4)
| 39.774763
| 109
| 0.628505
| 3,780
| 29,314
| 4.685185
| 0.060317
| 0.091304
| 0.079673
| 0.103106
| 0.834783
| 0.795991
| 0.754602
| 0.735008
| 0.698814
| 0.683625
| 0
| 0.049195
| 0.241386
| 29,314
| 736
| 110
| 39.828804
| 0.74719
| 0.029269
| 0
| 0.694656
| 0
| 0
| 0.016214
| 0
| 0
| 0
| 0
| 0
| 0.141221
| 1
| 0.125954
| false
| 0
| 0.015267
| 0.001908
| 0.166031
| 0.001908
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
1be073c3ebc3f4f79057a2c497d86d3890899968
| 66
|
py
|
Python
|
Chapter 02/ch2_40.py
|
bpbpublications/TEST-YOUR-SKILLS-IN-PYTHON-LANGUAGE
|
f6a4194684515495d00aa38347a725dd08f39a0c
|
[
"MIT"
] | null | null | null |
Chapter 02/ch2_40.py
|
bpbpublications/TEST-YOUR-SKILLS-IN-PYTHON-LANGUAGE
|
f6a4194684515495d00aa38347a725dd08f39a0c
|
[
"MIT"
] | null | null | null |
Chapter 02/ch2_40.py
|
bpbpublications/TEST-YOUR-SKILLS-IN-PYTHON-LANGUAGE
|
f6a4194684515495d00aa38347a725dd08f39a0c
|
[
"MIT"
] | null | null | null |
import math
print(math.e)
# using print() to print the result
| 16.5
| 36
| 0.69697
| 11
| 66
| 4.181818
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.212121
| 66
| 4
| 36
| 16.5
| 0.884615
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
1be22ac75a6adaf06ea62fc8b265976233de9447
| 17,216
|
py
|
Python
|
test/test_chapter3.py
|
laikuaut/lang_100_knock
|
359d68cd28cd453f1fe484c56b6381927f513c21
|
[
"MIT"
] | null | null | null |
test/test_chapter3.py
|
laikuaut/lang_100_knock
|
359d68cd28cd453f1fe484c56b6381927f513c21
|
[
"MIT"
] | null | null | null |
test/test_chapter3.py
|
laikuaut/lang_100_knock
|
359d68cd28cd453f1fe484c56b6381927f513c21
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8
import unittest
import glob
from util import util
from nlp100.chapter3.Q020 import *
from nlp100.chapter3.Q021 import *
from nlp100.chapter3.Q022 import *
from nlp100.chapter3.Q023 import *
from nlp100.chapter3.Q024 import *
from nlp100.chapter3.Q025 import *
from nlp100.chapter3.Q026 import *
from nlp100.chapter3.Q027 import *
from nlp100.chapter3.Q028 import *
from nlp100.chapter3.Q029 import *
class Test_Chapter3(unittest.TestCase):
    """Tests for NLP 100-knock chapter 3 solutions (Q_020 .. Q_029).

    Each Q_0xx() processes the Japanese Wikipedia article for the United
    Kingdom; the expected values below were extracted from that article.
    The fixture strings are data and must not be edited.
    """

    def test_Q_020(self):
        # Q_020 rewrites data/Britain.txt; delete any stale output first so
        # the test observes a fresh write.
        util.exe_cmd('rm data/Britain.txt')
        Q_020()
        with open('data/Britain.txt', 'r') as result_f, \
                open('test/data/Britain.json', 'r') as current_f:
            current_data = json.loads(current_f.readline())
            result = result_f.read().rstrip()
            self.assertEqual(result, current_data['text'])

    def test_Q_021(self):
        # Q_021 must return the article's category lines verbatim.
        current = ['[[Category:イギリス|*]]',
                   '[[Category:英連邦王国|*]]',
                   '[[Category:G8加盟国]]',
                   '[[Category:欧州連合加盟国]]',
                   '[[Category:海洋国家]]',
                   '[[Category:君主国]]',
                   '[[Category:島国|くれいとふりてん]]',
                   '[[Category:1801年に設立された州・地域]]']
        result = Q_021()
        self.assertEqual(result, current)

    def test_Q_022(self):
        # Q_022 must return the category names with the markup stripped.
        current = [
            'イギリス|*',
            '英連邦王国|*',
            'G8加盟国',
            '欧州連合加盟国',
            '海洋国家',
            '君主国',
            '島国|くれいとふりてん',
            '1801年に設立された州・地域']
        self.assertEqual(Q_022(), current)

    def test_Q_023(self):
        # Q_023 must return (section name, level) pairs in article order.
        current = [('国名',1),
                   ('歴史',1),
                   ('地理',1),
                   ('気候',2),
                   ('政治',1),
                   ('外交と軍事',1),
                   ('地方行政区分',1),
                   ('主要都市',2),
                   ('科学技術',1),
                   ('経済',1),
                   ('鉱業',2),
                   ('農業',2),
                   ('貿易',2),
                   ('通貨',2),
                   ('企業',2),
                   ('交通',1),
                   ('道路',2),
                   ('鉄道',2),
                   ('海運',2),
                   ('航空',2),
                   ('通信',1),
                   ('国民',1),
                   ('言語',2),
                   ('宗教',2),
                   (' 婚姻 ',2),
                   ('教育',2),
                   ('文化',1),
                   ('食文化',2),
                   ('文学',2),
                   (' 哲学 ',2),
                   ('音楽',2),
                   ('イギリスのポピュラー音楽',3),
                   ('映画',2),
                   ('コメディ',2),
                   ('国花',2),
                   ('世界遺産',2),
                   ('祝祭日',2),
                   ('スポーツ',1),
                   ('サッカー',2),
                   ('競馬',2),
                   ('モータースポーツ',2),
                   ('脚注',1),
                   ('関連項目',1),
                   ('外部リンク',1)]
        self.assertEqual(Q_023(), current)

    def test_Q_024(self):
        # Q_024 must return all media file names referenced by the article.
        current = [
            "Royal Coat of Arms of the United Kingdom.svg",
            "Battle of Waterloo 1815.PNG",
            "The British Empire.png",
            "Uk topo en.jpg",
            "BenNevis2005.jpg",
            "Elizabeth II greets NASA GSFC employees, May 8, 2007 edit.jpg",
            "Palace of Westminster, London - Feb 2007.jpg",
            "David Cameron and Barack Obama at the G20 Summit in Toronto.jpg",
            "Soldiers Trooping the Colour, 16th June 2007.jpg",
            "Scotland Parliament Holyrood.jpg",
            "London.bankofengland.arp.jpg",
            "City of London skyline from London City Hall - Oct 2008.jpg",
            "Oil platform in the North SeaPros.jpg",
            "Eurostar at St Pancras Jan 2008.jpg",
            "Heathrow T5.jpg",
            "Anglospeak.svg",
            "CHANDOS3.jpg",
            "The Fabs.JPG",
            "Wembley Stadium, illuminated.jpg"
        ]
        self.assertEqual(Q_024(), current)

    def test_Q_025(self):
        # Q_025 must return the raw infobox template as a field->value dict,
        # wiki markup still intact.
        current = {
            '略名' : 'イギリス\n',
            '日本語国名' : 'グレートブリテン及び北アイルランド連合王国\n',
            '公式国名' : '{{lang|en|United Kingdom of Great Britain and Northern Ireland}}<ref>英語以外での正式国名:<br/>\n' \
                '*{{lang|gd|An Rìoghachd Aonaichte na Breatainn Mhòr agus Eirinn mu Thuath}}([[スコットランド・ゲール語]])<br/>\n' \
                '*{{lang|cy|Teyrnas Gyfunol Prydain Fawr a Gogledd Iwerddon}}([[ウェールズ語]])<br/>\n' \
                '*{{lang|ga|Ríocht Aontaithe na Breataine Móire agus Tuaisceart na hÉireann}}([[アイルランド語]])<br/>\n' \
                '*{{lang|kw|An Rywvaneth Unys a Vreten Veur hag Iwerdhon Glédh}}([[コーンウォール語]])<br/>\n' \
                '*{{lang|sco|Unitit Kinrick o Great Breetain an Northren Ireland}}([[スコットランド語]])<br/>\n' \
                '**{{lang|sco|Claught Kängrick o Docht Brätain an Norlin Airlann}}、{{lang|sco|Unitet Kängdom o Great Brittain an Norlin Airlann}}(アルスター・スコットランド語)</ref>\n',
            '国旗画像' : 'Flag of the United Kingdom.svg\n',
            '国章画像' : '[[ファイル:Royal Coat of Arms of the United Kingdom.svg|85px|イギリスの国章]]\n',
            '国章リンク' : '([[イギリスの国章|国章]])\n',
            '標語' : '{{lang|fr|Dieu et mon droit}}<br/>([[フランス語]]:神と私の権利)\n',
            '国歌' : '[[女王陛下万歳|神よ女王陛下を守り給え]]\n',
            '位置画像' : 'Location_UK_EU_Europe_001.svg\n',
            '公用語' : '[[英語]](事実上)\n',
            '首都' : '[[ロンドン]]\n',
            '最大都市' : 'ロンドン\n',
            '元首等肩書' : '[[イギリスの君主|女王]]\n',
            '元首等氏名' : '[[エリザベス2世]]\n',
            '首相等肩書' : '[[イギリスの首相|首相]]\n',
            '首相等氏名' : '[[デーヴィッド・キャメロン]]\n',
            '面積順位' : '76\n',
            '面積大きさ' : '1 E11\n',
            '面積値' : '244,820\n',
            '水面積率' : '1.3%\n',
            '人口統計年' : '2011\n',
            '人口順位' : '22\n',
            '人口大きさ' : '1 E7\n',
            '人口値' : '63,181,775<ref>[http://esa.un.org/unpd/wpp/Excel-Data/population.htm United Nations Department of Economic and Social Affairs>Population Division>Data>Population>Total Population]</ref>\n',
            '人口密度値' : '246\n',
            'GDP統計年元' : '2012\n',
            'GDP値元' : '1兆5478億<ref name="imf-statistics-gdp">[http://www.imf.org/external/pubs/ft/weo/2012/02/weodata/weorept.aspx?pr.x=70&pr.y=13&sy=2010&ey=2012&scsm=1&ssd=1&sort=country&ds=.&br=1&c=112&s=NGDP%2CNGDPD%2CPPPGDP%2CPPPPC&grp=0&a= IMF>Data and Statistics>World Economic Outlook Databases>By Countrise>United Kingdom]</ref>\n',
            'GDP統計年MER' : '2012\n',
            'GDP順位MER' : '5\n',
            'GDP値MER' : '2兆4337億<ref name="imf-statistics-gdp" />\n',
            'GDP統計年' : '2012\n',
            'GDP順位' : '6\n',
            'GDP値' : '2兆3162億<ref name="imf-statistics-gdp" />\n',
            'GDP/人' : '36,727<ref name="imf-statistics-gdp" />\n',
            '建国形態' : '建国\n',
            '確立形態1' : '[[イングランド王国]]/[[スコットランド王国]]<br />(両国とも[[連合法 (1707年)|1707年連合法]]まで)\n',
            '確立年月日1' : '[[927年]]/[[843年]]\n',
            '確立形態2' : '[[グレートブリテン王国]]建国<br />([[連合法 (1707年)|1707年連合法]])\n',
            '確立年月日2' : '[[1707年]]\n',
            '確立形態3' : '[[グレートブリテン及びアイルランド連合王国]]建国<br />([[連合法 (1800年)|1800年連合法]])\n',
            '確立年月日3' : '[[1801年]]\n',
            '確立形態4' : "現在の国号「'''グレートブリテン及び北アイルランド連合王国'''」に変更\n",
            '確立年月日4' : '[[1927年]]\n',
            '通貨' : '[[スターリング・ポンド|UKポンド]] (£)\n',
            '通貨コード' : 'GBP\n',
            '時間帯' : '±0\n',
            '夏時間' : '+1\n',
            'ISO 3166-1' : 'GB / GBR\n',
            'ccTLD' : '[[.uk]] / [[.gb]]<ref>使用は.ukに比べ圧倒的少数。</ref>\n',
            '国際電話番号' : '44\n',
            '注記' : '<references />\n'
        }
        result = Q_025()
        # compare field by field so a mismatch reports the offending key
        for key in result.keys():
            self.assertEqual(result[key], current[key])

    def test_Q_026(self):
        # Q_026 is Q_025 with bold markup ('''...''') removed; only the
        # 確立形態4 value differs from the Q_025 fixture.
        current = {
            '略名' : 'イギリス\n',
            '日本語国名' : 'グレートブリテン及び北アイルランド連合王国\n',
            '公式国名' : '{{lang|en|United Kingdom of Great Britain and Northern Ireland}}<ref>英語以外での正式国名:<br/>\n' \
                '*{{lang|gd|An Rìoghachd Aonaichte na Breatainn Mhòr agus Eirinn mu Thuath}}([[スコットランド・ゲール語]])<br/>\n' \
                '*{{lang|cy|Teyrnas Gyfunol Prydain Fawr a Gogledd Iwerddon}}([[ウェールズ語]])<br/>\n' \
                '*{{lang|ga|Ríocht Aontaithe na Breataine Móire agus Tuaisceart na hÉireann}}([[アイルランド語]])<br/>\n' \
                '*{{lang|kw|An Rywvaneth Unys a Vreten Veur hag Iwerdhon Glédh}}([[コーンウォール語]])<br/>\n' \
                '*{{lang|sco|Unitit Kinrick o Great Breetain an Northren Ireland}}([[スコットランド語]])<br/>\n' \
                '**{{lang|sco|Claught Kängrick o Docht Brätain an Norlin Airlann}}、{{lang|sco|Unitet Kängdom o Great Brittain an Norlin Airlann}}(アルスター・スコットランド語)</ref>\n',
            '国旗画像' : 'Flag of the United Kingdom.svg\n',
            '国章画像' : '[[ファイル:Royal Coat of Arms of the United Kingdom.svg|85px|イギリスの国章]]\n',
            '国章リンク' : '([[イギリスの国章|国章]])\n',
            '標語' : '{{lang|fr|Dieu et mon droit}}<br/>([[フランス語]]:神と私の権利)\n',
            '国歌' : '[[女王陛下万歳|神よ女王陛下を守り給え]]\n',
            '位置画像' : 'Location_UK_EU_Europe_001.svg\n',
            '公用語' : '[[英語]](事実上)\n',
            '首都' : '[[ロンドン]]\n',
            '最大都市' : 'ロンドン\n',
            '元首等肩書' : '[[イギリスの君主|女王]]\n',
            '元首等氏名' : '[[エリザベス2世]]\n',
            '首相等肩書' : '[[イギリスの首相|首相]]\n',
            '首相等氏名' : '[[デーヴィッド・キャメロン]]\n',
            '面積順位' : '76\n',
            '面積大きさ' : '1 E11\n',
            '面積値' : '244,820\n',
            '水面積率' : '1.3%\n',
            '人口統計年' : '2011\n',
            '人口順位' : '22\n',
            '人口大きさ' : '1 E7\n',
            '人口値' : '63,181,775<ref>[http://esa.un.org/unpd/wpp/Excel-Data/population.htm United Nations Department of Economic and Social Affairs>Population Division>Data>Population>Total Population]</ref>\n',
            '人口密度値' : '246\n',
            'GDP統計年元' : '2012\n',
            'GDP値元' : '1兆5478億<ref name="imf-statistics-gdp">[http://www.imf.org/external/pubs/ft/weo/2012/02/weodata/weorept.aspx?pr.x=70&pr.y=13&sy=2010&ey=2012&scsm=1&ssd=1&sort=country&ds=.&br=1&c=112&s=NGDP%2CNGDPD%2CPPPGDP%2CPPPPC&grp=0&a= IMF>Data and Statistics>World Economic Outlook Databases>By Countrise>United Kingdom]</ref>\n',
            'GDP統計年MER' : '2012\n',
            'GDP順位MER' : '5\n',
            'GDP値MER' : '2兆4337億<ref name="imf-statistics-gdp" />\n',
            'GDP統計年' : '2012\n',
            'GDP順位' : '6\n',
            'GDP値' : '2兆3162億<ref name="imf-statistics-gdp" />\n',
            'GDP/人' : '36,727<ref name="imf-statistics-gdp" />\n',
            '建国形態' : '建国\n',
            '確立形態1' : '[[イングランド王国]]/[[スコットランド王国]]<br />(両国とも[[連合法 (1707年)|1707年連合法]]まで)\n',
            '確立年月日1' : '[[927年]]/[[843年]]\n',
            '確立形態2' : '[[グレートブリテン王国]]建国<br />([[連合法 (1707年)|1707年連合法]])\n',
            '確立年月日2' : '[[1707年]]\n',
            '確立形態3' : '[[グレートブリテン及びアイルランド連合王国]]建国<br />([[連合法 (1800年)|1800年連合法]])\n',
            '確立年月日3' : '[[1801年]]\n',
            '確立形態4' : '現在の国号「グレートブリテン及び北アイルランド連合王国」に変更\n',
            '確立年月日4' : '[[1927年]]\n',
            '通貨' : '[[スターリング・ポンド|UKポンド]] (£)\n',
            '通貨コード' : 'GBP\n',
            '時間帯' : '±0\n',
            '夏時間' : '+1\n',
            'ISO 3166-1' : 'GB / GBR\n',
            'ccTLD' : '[[.uk]] / [[.gb]]<ref>使用は.ukに比べ圧倒的少数。</ref>\n',
            '国際電話番号' : '44\n',
            '注記' : '<references />\n'
        }
        result = Q_026()
        # compare field by field so a mismatch reports the offending key
        for key in result.keys():
            self.assertEqual(result[key], current[key])

    def test_Q_027(self):
        # Q_027 additionally strips internal wiki links ([[...]]).
        current = {
            '略名' : 'イギリス\n',
            '日本語国名' : 'グレートブリテン及び北アイルランド連合王国\n',
            '公式国名' : '{{lang|en|United Kingdom of Great Britain and Northern Ireland}}<ref>英語以外での正式国名:<br/>\n' \
                '*{{lang|gd|An Rìoghachd Aonaichte na Breatainn Mhòr agus Eirinn mu Thuath}}(スコットランド・ゲール語)<br/>\n' \
                '*{{lang|cy|Teyrnas Gyfunol Prydain Fawr a Gogledd Iwerddon}}(ウェールズ語)<br/>\n' \
                '*{{lang|ga|Ríocht Aontaithe na Breataine Móire agus Tuaisceart na hÉireann}}(アイルランド語)<br/>\n' \
                '*{{lang|kw|An Rywvaneth Unys a Vreten Veur hag Iwerdhon Glédh}}(コーンウォール語)<br/>\n' \
                '*{{lang|sco|Unitit Kinrick o Great Breetain an Northren Ireland}}(スコットランド語)<br/>\n' \
                '**{{lang|sco|Claught Kängrick o Docht Brätain an Norlin Airlann}}、{{lang|sco|Unitet Kängdom o Great Brittain an Norlin Airlann}}(アルスター・スコットランド語)</ref>\n',
            '国旗画像' : 'Flag of the United Kingdom.svg\n',
            '国章画像' : '[[ファイル:Royal Coat of Arms of the United Kingdom.svg|85px|イギリスの国章]]\n',
            '国章リンク' : '(国章)\n',
            '標語' : '{{lang|fr|Dieu et mon droit}}<br/>(フランス語:神と私の権利)\n',
            '国歌' : '神よ女王陛下を守り給え\n',
            '位置画像' : 'Location_UK_EU_Europe_001.svg\n',
            '公用語' : '英語(事実上)\n',
            '首都' : 'ロンドン\n',
            '最大都市' : 'ロンドン\n',
            '元首等肩書' : '女王\n',
            '元首等氏名' : 'エリザベス2世\n',
            '首相等肩書' : '首相\n',
            '首相等氏名' : 'デーヴィッド・キャメロン\n',
            '面積順位' : '76\n',
            '面積大きさ' : '1 E11\n',
            '面積値' : '244,820\n',
            '水面積率' : '1.3%\n',
            '人口統計年' : '2011\n',
            '人口順位' : '22\n',
            '人口大きさ' : '1 E7\n',
            '人口値' : '63,181,775<ref>[http://esa.un.org/unpd/wpp/Excel-Data/population.htm United Nations Department of Economic and Social Affairs>Population Division>Data>Population>Total Population]</ref>\n',
            '人口密度値' : '246\n',
            'GDP統計年元' : '2012\n',
            'GDP値元' : '1兆5478億<ref name="imf-statistics-gdp">[http://www.imf.org/external/pubs/ft/weo/2012/02/weodata/weorept.aspx?pr.x=70&pr.y=13&sy=2010&ey=2012&scsm=1&ssd=1&sort=country&ds=.&br=1&c=112&s=NGDP%2CNGDPD%2CPPPGDP%2CPPPPC&grp=0&a= IMF>Data and Statistics>World Economic Outlook Databases>By Countrise>United Kingdom]</ref>\n',
            'GDP統計年MER' : '2012\n',
            'GDP順位MER' : '5\n',
            'GDP値MER' : '2兆4337億<ref name="imf-statistics-gdp" />\n',
            'GDP統計年' : '2012\n',
            'GDP順位' : '6\n',
            'GDP値' : '2兆3162億<ref name="imf-statistics-gdp" />\n',
            'GDP/人' : '36,727<ref name="imf-statistics-gdp" />\n',
            '建国形態' : '建国\n',
            '確立形態1' : 'イングランド王国/スコットランド王国<br />(両国とも1707年連合法まで)\n',
            '確立年月日1' : '927年/843年\n',
            '確立形態2' : 'グレートブリテン王国建国<br />(1707年連合法)\n',
            '確立年月日2' : '1707年\n',
            '確立形態3' : 'グレートブリテン及びアイルランド連合王国建国<br />(1800年連合法)\n',
            '確立年月日3' : '1801年\n',
            '確立形態4' : '現在の国号「グレートブリテン及び北アイルランド連合王国」に変更\n',
            '確立年月日4' : '1927年\n',
            '通貨' : 'UKポンド (£)\n',
            '通貨コード' : 'GBP\n',
            '時間帯' : '±0\n',
            '夏時間' : '+1\n',
            'ISO 3166-1' : 'GB / GBR\n',
            'ccTLD' : '.uk / .gb<ref>使用は.ukに比べ圧倒的少数。</ref>\n',
            '国際電話番号' : '44\n',
            '注記' : '<references />\n'
        }
        result = Q_027()
        # compare field by field so a mismatch reports the offending key
        for key in result.keys():
            self.assertEqual(result[key], current[key])

    def test_Q_028(self):
        # Q_028 strips the remaining markup (templates, refs, <br/>),
        # leaving plain-text field values.
        current = {
            '略名' : 'イギリス\n',
            '日本語国名' : 'グレートブリテン及び北アイルランド連合王国\n',
            '公式国名' : 'United Kingdom of Great Britain and Northern Ireland\n',
            '国旗画像' : 'Flag of the United Kingdom.svg\n',
            '国章画像' : 'Royal Coat of Arms of the United Kingdom.svg\n',
            '国章リンク' : '(国章)\n',
            '標語' : 'Dieu et mon droit(フランス語:神と私の権利)\n',
            '国歌' : '神よ女王陛下を守り給え\n',
            '位置画像' : 'Location_UK_EU_Europe_001.svg\n',
            '公用語' : '英語(事実上)\n',
            '首都' : 'ロンドン\n',
            '最大都市' : 'ロンドン\n',
            '元首等肩書' : '女王\n',
            '元首等氏名' : 'エリザベス2世\n',
            '首相等肩書' : '首相\n',
            '首相等氏名' : 'デーヴィッド・キャメロン\n',
            '面積順位' : '76\n',
            '面積大きさ' : '1 E11\n',
            '面積値' : '244,820\n',
            '水面積率' : '1.3%\n',
            '人口統計年' : '2011\n',
            '人口順位' : '22\n',
            '人口大きさ' : '1 E7\n',
            '人口値' : '63,181,775\n',
            '人口密度値' : '246\n',
            'GDP統計年元' : '2012\n',
            'GDP値元' : '1兆5478億\n',
            'GDP統計年MER' : '2012\n',
            'GDP順位MER' : '5\n',
            'GDP値MER' : '2兆4337億\n',
            'GDP統計年' : '2012\n',
            'GDP順位' : '6\n',
            'GDP値' : '2兆3162億\n',
            'GDP/人' : '36,727\n',
            '建国形態' : '建国\n',
            '確立形態1' : 'イングランド王国/スコットランド王国(両国とも1707年連合法まで)\n',
            '確立年月日1' : '927年/843年\n',
            '確立形態2' : 'グレートブリテン王国建国(1707年連合法)\n',
            '確立年月日2' : '1707年\n',
            '確立形態3' : 'グレートブリテン及びアイルランド連合王国建国(1800年連合法)\n',
            '確立年月日3' : '1801年\n',
            '確立形態4' : '現在の国号「グレートブリテン及び北アイルランド連合王国」に変更\n',
            '確立年月日4' : '1927年\n',
            '通貨' : 'UKポンド (£)\n',
            '通貨コード' : 'GBP\n',
            '時間帯' : '±0\n',
            '夏時間' : '+1\n',
            'ISO 3166-1' : 'GB / GBR\n',
            'ccTLD' : '.uk / .gb\n',
            '国際電話番号' : '44\n',
            '注記' : '\n'
        }
        result = Q_028()
        # compare field by field so a mismatch reports the offending key
        for key in result.keys():
            self.assertEqual(result[key], current[key])

    def test_Q_029(self):
        # Q_029 must resolve the national flag image to its media URL.
        self.assertEqual(Q_029(), 'https://upload.wikimedia.org/wikipedia/en/a/ae/Flag_of_the_United_Kingdom.svg')
# Allow running this test module directly (python test_chapter3.py).
if __name__ == '__main__':
    unittest.main()
| 45.305263
| 341
| 0.475139
| 2,004
| 17,216
| 4.057884
| 0.226547
| 0.00664
| 0.015494
| 0.029513
| 0.78394
| 0.780005
| 0.776808
| 0.761313
| 0.74422
| 0.716675
| 0
| 0.069723
| 0.331029
| 17,216
| 379
| 342
| 45.424802
| 0.634801
| 0.001975
| 0
| 0.584022
| 0
| 0.041322
| 0.516123
| 0.12858
| 0
| 0
| 0
| 0
| 0.027548
| 1
| 0.027548
| false
| 0
| 0.035813
| 0
| 0.066116
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
40009c62c55e2f2c0a2ff3eadb52ec643b0da208
| 224
|
py
|
Python
|
diffstar/tests/test_stars.py
|
ArgonneCPAC/diffstar
|
4d15a5b2fd2faa86311c543a151fee73a14bd7f1
|
[
"BSD-3-Clause"
] | 2
|
2021-12-01T00:47:22.000Z
|
2021-12-01T03:15:35.000Z
|
diffstar/tests/test_stars.py
|
ArgonneCPAC/diffstar
|
4d15a5b2fd2faa86311c543a151fee73a14bd7f1
|
[
"BSD-3-Clause"
] | null | null | null |
diffstar/tests/test_stars.py
|
ArgonneCPAC/diffstar
|
4d15a5b2fd2faa86311c543a151fee73a14bd7f1
|
[
"BSD-3-Clause"
] | null | null | null |
"""
"""
from ..stars import DEFAULT_SFR_PARAMS, _SFR_PARAM_BOUNDS
def test_sfh_parameter_bounds():
    """Every default SFR parameter lies strictly inside its declared bounds."""
    for name, default in DEFAULT_SFR_PARAMS.items():
        lower, upper = _SFR_PARAM_BOUNDS[name][0], _SFR_PARAM_BOUNDS[name][1]
        assert lower < default < upper
| 24.888889
| 74
| 0.727679
| 34
| 224
| 4.323529
| 0.588235
| 0.163265
| 0.285714
| 0.231293
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010526
| 0.151786
| 224
| 8
| 75
| 28
| 0.763158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
40074593eb22707c49ed2db34cd19bd4bc406466
| 29
|
py
|
Python
|
.history/src/Simulador_20200707125832.py
|
eduardodut/Trabalho_final_estatistica_cd
|
fbedbbea6bdd7a79e1d62030cde0fab4e93fc338
|
[
"MIT"
] | null | null | null |
.history/src/Simulador_20200707125832.py
|
eduardodut/Trabalho_final_estatistica_cd
|
fbedbbea6bdd7a79e1d62030cde0fab4e93fc338
|
[
"MIT"
] | null | null | null |
.history/src/Simulador_20200707125832.py
|
eduardodut/Trabalho_final_estatistica_cd
|
fbedbbea6bdd7a79e1d62030cde0fab4e93fc338
|
[
"MIT"
] | null | null | null |
class Simulador:
    """Epidemic simulator placeholder; the implementation is pending."""
    pass
| 7.25
| 18
| 0.62069
| 3
| 29
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.275862
| 29
| 4
| 19
| 7.25
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
40217dae1512925ba0f386cb5d40b13d23dee81c
| 7,362
|
py
|
Python
|
tests/test_feap_base.py
|
basic-ph/feat
|
0660a34e5eeeab920d1ce8e139ab486e63bd419b
|
[
"MIT"
] | 2
|
2020-07-13T11:59:19.000Z
|
2020-07-13T12:02:05.000Z
|
tests/test_feap_base.py
|
basic-ph/feat
|
0660a34e5eeeab920d1ce8e139ab486e63bd419b
|
[
"MIT"
] | null | null | null |
tests/test_feap_base.py
|
basic-ph/feat
|
0660a34e5eeeab920d1ce8e139ab486e63bd419b
|
[
"MIT"
] | null | null | null |
"""This module contains tests regarding simple problems resolved using FEAP
software from University of California, Berkeley. Results obtained with
this program are used as validation comparing them with those obtained
using feat python code.
This file is used for testing the base module.
"""
import logging
import meshio
import numpy as np
import pytest
from scipy import sparse
from scipy.sparse import linalg
from feat import base
from feat import boundary as bc
from feat import vector
def test_feap_1():
    """Validate the feat base solver against FEAP example 1.

    Solves a small plane-strain triangle mesh and compares displacements
    and reactions with reference values produced by the FEAP program.
    """
    # LOGGING — one handler each for this module and the feat package.
    # NOTE(review): handlers accumulate if the test runs more than once in
    # a session (duplicate log lines) — confirm whether that is intended.
    main_log = logging.getLogger(__name__)
    main_log.setLevel(logging.DEBUG)
    main_handler = logging.StreamHandler()  # main_log handler
    main_handler.setLevel(logging.DEBUG)
    main_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')  # main_log formatter
    main_handler.setFormatter(main_formatter)
    main_log.addHandler(main_handler)
    feat_log = logging.getLogger("feat")
    feat_log.setLevel(logging.DEBUG)
    feat_handler = logging.StreamHandler()
    feat_handler.setLevel(logging.DEBUG)
    feat_formatter = logging.Formatter('%(name)s - %(levelname)s - %(message)s')
    feat_handler.setFormatter(feat_formatter)
    feat_log.addHandler(feat_handler)
    # SETTINGS
    mesh_path = "tests/data/msh/feap_1.msh"
    main_log.info("MESH FILE: %s", mesh_path)
    # DATA
    element_type = "triangle"
    load_condition = "plane strain"  # "plane stress" or "plane strain"
    thickness = 1
    main_log.info("LOAD CONDITION: %s", load_condition)
    main_log.info("THICKNESS: %s", thickness)
    # MATERIAL
    cheese = base.Material("cheese", 70, 0.3, load_condition)  # FIXME
    main_log.info("MATERIALS: TODO")
    # MESH — only the x/y columns of the point array are used (2D problem)
    mesh = meshio.read(mesh_path)
    elements = mesh.cells_dict[element_type]
    nodal_coord = mesh.points[:,:2]
    print(type(nodal_coord))
    print(nodal_coord)
    num_elements = elements.shape[0]
    num_nodes = nodal_coord.shape[0]
    material_map = mesh.cell_data_dict["gmsh:physical"][element_type] - 1  # element-material map
    main_log.info("MESH INFO: %d elements, %d nodes", num_elements, num_nodes)
    # BOUNDARY CONDITIONS INSTANCES — dof 0 is x, dof 1 is y
    left_side = bc.DirichletBC("left side", mesh, [0], 0.0)
    bl_corner = bc.DirichletBC("bottom left corner", mesh, [1], 0.0)
    right_side = bc.DirichletBC("right side", mesh, [0], 1.0)
    main_log.info("BOUNDARY CONDITIONS: TODO")
    # ASSEMBLY — K is (2*nodes, 2*nodes): two dofs per node
    E_material = base.compute_E_material(num_elements, material_map, mesh.field_data, cheese)
    K = np.zeros((num_nodes * 2, num_nodes * 2))
    R = np.zeros(num_nodes * 2)
    K = base.assembly(K, num_elements, elements, nodal_coord, material_map, E_material, thickness, element_type)
    main_log.debug("STIFFNESS MATRIX (K) BEFORE BC:\n %s\n", K)
    # constrained dof rows of K are saved now (before the BC overwrite)
    # so reactions can be recovered as K_rows @ D afterwards
    reaction_dof = bc.dirichlet_dof(left_side, bl_corner)
    K_rows = K[reaction_dof, :]
    # BOUNDARY CONDITIONS APPLICATION
    K, R = bc.apply_dirichlet(K, R, left_side, bl_corner, right_side)
    main_log.debug("STIFFNESS MATRIX (K) AFTER BC:\n %s\n", K)
    main_log.debug("LOAD VECTOR (R) BEFORE BC:\n %s\n", R)
    # SOLVER
    D = np.linalg.solve(K, R)
    main_log.info("DISPLACEMENTS VECTOR (D):\n %s\n", D)
    reactions = np.dot(K_rows, D)
    main_log.debug("REACTIONS (dirichlet dofs):\n %s\n", reactions)
    modulus = base.compute_modulus(nodal_coord, right_side, reactions, thickness)
    main_log.info("RESULTING ELASTIC MODULUS: %f", modulus)
    # dof 3 is NaN in the reference, so it is excluded from the comparison
    comparable_dofs = [0, 1, 2, 4, 5, 6, 7]
    D_true = np.array([
        0.0, 0.0,
        1.0, np.NaN,
        1.0, -4.28571429e-01,
        0.0, -4.28571429e-01,
    ])
    reactions_true = np.array([-3.84615385e+01, -3.84615385e+01, -7.10542736e-15])
    np.testing.assert_allclose(reactions_true, reactions)
    np.testing.assert_allclose(D_true[comparable_dofs], D[comparable_dofs])
@pytest.mark.parametrize(
    "poisson,D_true,reactions_true",
    [
        (
            0.3,
            np.array([0.0, 0.0, 7.28e-02, 2.76e-01, 0.0, 8.0e-03]),
            np.array([-0.64, 0.24, -0.4]),
        ),
        (
            0.0,
            np.array([0.0, 0.0, 1.04e-01, 2.3815385e-01, 0.0, 4.307692e-02]),
            np.array([-0.64, 0.24, -0.4]),
        )
    ],
)
def test_feap_2(poisson, D_true, reactions_true):
    """Validate the feat base solver against FEAP example 2.

    Parametrized over two Poisson ratios; expected displacements and
    reactions are FEAP reference values supplied by the parametrize table.
    """
    # LOGGING — same setup as test_feap_1.
    # NOTE(review): handlers accumulate across runs — confirm intended.
    main_log = logging.getLogger(__name__)
    main_log.setLevel(logging.DEBUG)
    main_handler = logging.StreamHandler()  # main_log handler
    main_handler.setLevel(logging.DEBUG)
    main_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')  # main_log formatter
    main_handler.setFormatter(main_formatter)
    main_log.addHandler(main_handler)
    feat_log = logging.getLogger("feat")
    feat_log.setLevel(logging.DEBUG)
    feat_handler = logging.StreamHandler()
    feat_handler.setLevel(logging.DEBUG)
    feat_formatter = logging.Formatter('%(name)s - %(levelname)s - %(message)s')
    feat_handler.setFormatter(feat_formatter)
    feat_log.addHandler(feat_handler)
    # SETTINGS
    mesh_path = "tests/data/msh/feap_2.msh"
    main_log.info("MESH FILE: %s", mesh_path)
    # DATA
    element_type = "triangle"
    load_condition = "plane strain"  # "plane stress" or "plane strain"
    thickness = 1
    main_log.info("LOAD CONDITION: %s", load_condition)
    main_log.info("THICKNESS: %s", thickness)
    # MATERIAL — Poisson ratio comes from the parametrization
    rubber = base.Material("rubber", 10, poisson, load_condition)  # FIXME
    main_log.info("MATERIALS: TODO")
    # MESH — only the x/y columns of the point array are used (2D problem)
    mesh = meshio.read(mesh_path)
    elements = mesh.cells_dict[element_type]
    nodal_coord = mesh.points[:,:2]
    num_elements = elements.shape[0]
    num_nodes = nodal_coord.shape[0]
    material_map = mesh.cell_data_dict["gmsh:physical"][element_type] - 1  # element-material map
    main_log.info("MESH INFO: %d elements, %d nodes", num_elements, num_nodes)
    # BOUNDARY CONDITIONS INSTANCES — Dirichlet supports plus a point load
    left_side = bc.DirichletBC("left side", mesh, [0], 0.0)
    b_corner = bc.DirichletBC("bottom corner", mesh, [1], 0.0)
    r_corner_x = bc.NeumannBC("right corner", mesh, [0], 0.4)
    r_corner_y = bc.NeumannBC("right corner", mesh, [1], 0.4)
    main_log.info("BOUNDARY CONDITIONS: TODO")
    # ASSEMBLY — K is (2*nodes, 2*nodes): two dofs per node
    E_material = base.compute_E_material(num_elements, material_map, mesh.field_data, rubber)
    main_log.debug("E array:\n %s\n", E_material)
    K = np.zeros((num_nodes * 2, num_nodes * 2))
    R = np.zeros(num_nodes * 2)
    K = base.assembly(K, num_elements, elements, nodal_coord, material_map, E_material, thickness, element_type)
    main_log.debug("STIFFNESS MATRIX (K) BEFORE BC:\n %s\n", K)
    # constrained dof rows of K are saved now (before the BC overwrite)
    # so reactions can be recovered as K_rows @ D afterwards
    reaction_dof = bc.dirichlet_dof(left_side, b_corner)
    K_rows = K[reaction_dof, :]
    # BOUNDARY CONDITIONS APPLICATION
    K, R = bc.apply_dirichlet(K, R, left_side, b_corner)
    R = bc.apply_neumann(R, r_corner_x, r_corner_y)
    main_log.debug("STIFFNESS MATRIX (K) AFTER BC:\n %s\n", K)
    main_log.debug("LOAD VECTOR (R) BEFORE BC:\n %s\n", R)
    # SOLVER
    D = np.linalg.solve(K, R)
    main_log.info("DISPLACEMENTS VECTOR (D):\n %s\n", D)
    reactions = np.dot(K_rows, D)
    main_log.debug("REACTIONS (dirichlet dofs):\n %s\n", reactions)
    np.testing.assert_allclose(D_true, D)
    np.testing.assert_allclose(reactions_true, reactions)
| 36.994975
| 116
| 0.678892
| 1,075
| 7,362
| 4.461395
| 0.170233
| 0.049625
| 0.034404
| 0.006255
| 0.77794
| 0.751251
| 0.748957
| 0.713511
| 0.707256
| 0.707256
| 0
| 0.031701
| 0.190166
| 7,362
| 199
| 117
| 36.994975
| 0.772727
| 0.106221
| 0
| 0.609929
| 0
| 0
| 0.160733
| 0.01207
| 0
| 0
| 0
| 0.005025
| 0.028369
| 1
| 0.014184
| false
| 0
| 0.06383
| 0
| 0.078014
| 0.014184
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
403c6843245ecd18b2594cd30a38fa251a9082ba
| 50
|
py
|
Python
|
nitorch/io/volumes/tiff/__init__.py
|
liamchalcroft/nitorch
|
0de179aff97244a82213c528f0d6393725c868c9
|
[
"MIT"
] | 46
|
2020-07-31T10:14:05.000Z
|
2022-03-24T12:51:46.000Z
|
nitorch/io/volumes/tiff/__init__.py
|
liamchalcroft/nitorch
|
0de179aff97244a82213c528f0d6393725c868c9
|
[
"MIT"
] | 36
|
2020-10-06T19:01:38.000Z
|
2022-02-03T18:07:35.000Z
|
nitorch/io/volumes/tiff/__init__.py
|
liamchalcroft/nitorch
|
0de179aff97244a82213c528f0d6393725c868c9
|
[
"MIT"
] | 6
|
2021-01-05T14:59:05.000Z
|
2021-11-18T18:26:45.000Z
|
from .array import TiffArray
from . import array
| 12.5
| 28
| 0.78
| 7
| 50
| 5.571429
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.18
| 50
| 3
| 29
| 16.666667
| 0.95122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
40572a47cd6ddc72fba3874e28ff4a458b198bc9
| 23,001
|
py
|
Python
|
partialView/test/test_partialView.py
|
robzenn92/EpTODocker
|
7e3f17bf2d914ee8aa5c7d6393cb65d48177bd71
|
[
"MIT"
] | null | null | null |
partialView/test/test_partialView.py
|
robzenn92/EpTODocker
|
7e3f17bf2d914ee8aa5c7d6393cb65d48177bd71
|
[
"MIT"
] | 26
|
2017-10-23T08:04:00.000Z
|
2021-06-10T18:46:22.000Z
|
partialView/test/test_partialView.py
|
robzenn92/EpTODocker
|
7e3f17bf2d914ee8aa5c7d6393cb65d48177bd71
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python2
import os
import json
import unittest
from partialView.partialView import PartialView, PodDescriptor
class TestPartialView(unittest.TestCase):
@classmethod
def setUpClass(cls):
pass
def setUp(self):
    """Create a fresh view plus five descriptors with distinct IPs."""
    self.partialView = PartialView("172.0.1.0")
    self.descriptors = []
    self.ips = ["172.0.1.1", "172.0.1.2", "172.0.1.3", "172.0.1.4", "172.0.1.5"]
    self.descriptors.extend(PodDescriptor(address) for address in self.ips)
# Limit should be equal to VIEW_LIMIT and shuffle_length should be equal to SHUFFLE_LENGTH
def test_set_up_ok(self):
self.assertEqual(self.partialView.limit, int(os.environ['VIEW_LIMIT']))
self.assertEqual(self.partialView.shuffle_length, int(os.environ['SHUFFLE_LENGTH']))
# Initial partialView should be empty
def test_initial_partial_view_empty(self):
self.assertEqual(self.partialView.size, 0)
self.assertTrue(self.partialView.is_empty())
# Method is_full should return false if partial view is not full
def test_initial_partial_view_should_not_be_full(self):
self.assertFalse(self.partialView.is_full())
# Method is_full should return true if partial view is full
def test_is_full_should_return_true_if_full(self):
for i in range(self.partialView.limit):
self.partialView.add_peer(self.descriptors[i])
self.assertTrue(self.partialView.is_full())
self.assertEqual(self.partialView.size, self.partialView.limit)
# Method add_peer should return false if peer already contained
def test_add_peer_should_return_false_if_peer_already_contained(self):
peer = PodDescriptor("A new IP")
self.partialView.add_peer(peer)
size = self.partialView.size
duplicated = PodDescriptor("A new IP")
success = self.partialView.add_peer(duplicated)
self.assertFalse(success)
self.assertEqual(self.partialView.size, size)
self.assertEqual(self.partialView.size, len(self.partialView.peer_list))
# Method add_peer should not allow to insert a self entry
def test_add_peer_should_not_allow_self_entry(self):
ip = "my ip"
p1 = PartialView(ip)
peer = PodDescriptor(ip)
size = self.partialView.size
success = p1.add_peer(peer)
self.assertFalse(success)
self.assertFalse(p1.contains_ip(ip))
self.assertEqual(p1.size, size)
# Method add_peer should allow to insert a self entry if forced
def test_add_peer_with_allow_self_should_allow_self_entry(self):
ip = "my ip"
p1 = PartialView(ip)
peer = PodDescriptor(ip)
size = self.partialView.size
success = p1.add_peer(peer, True)
self.assertTrue(success)
self.assertTrue(p1.contains_ip(ip))
self.assertEqual(p1.size, size + 1)
# Method add_peer should increment size if view is not full
def test_add_peer_should_increment_size_if_not_full(self):
size = self.partialView.size
peer = PodDescriptor("A new IP")
self.partialView.add_peer(peer)
self.assertTrue(self.partialView.contains(peer))
self.assertEqual(self.partialView.size, size + 1)
self.assertEqual(self.partialView.size, len(self.partialView.peer_list))
# Method add_peer should not increment size if view is full
def test_add_peer_should_not_increment_size_if_full(self):
peer = PodDescriptor("A new IP")
for i in range(self.partialView.limit):
self.partialView.add_peer(self.descriptors[i])
size = self.partialView.size
success = self.partialView.add_peer(peer)
self.assertFalse(success)
self.assertFalse(self.partialView.contains(peer))
self.assertEqual(self.partialView.size, size)
self.assertEqual(self.partialView.size, len(self.partialView.peer_list))
# Method add_peer_ip should return false if peer already contained
def test_add_peer_ip_should_return_false_if_peer_already_contained(self):
peer = "A new IP"
self.partialView.add_peer_ip(peer)
size = self.partialView.size
duplicated = "A new IP"
success = self.partialView.add_peer_ip(duplicated)
self.assertFalse(success)
self.assertEqual(self.partialView.size, size)
self.assertEqual(self.partialView.size, len(self.partialView.peer_list))
# Method add_peer_ip should not allow to insert a self entry
def test_add_peer_ip_should_not_allow_self_entry(self):
ip = "my ip"
p1 = PartialView(ip)
size = self.partialView.size
success = p1.add_peer_ip(ip)
self.assertFalse(success)
self.assertFalse(p1.contains_ip(ip))
self.assertEqual(p1.size, size)
# Method add_peer_ip should allow to insert a self entry if forced
def test_add_peer_ip_with_allow_self_should_allow_self_entry(self):
ip = "my ip"
p1 = PartialView(ip)
size = self.partialView.size
success = p1.add_peer_ip(ip, True)
self.assertTrue(success)
self.assertTrue(p1.contains_ip(ip))
self.assertEqual(p1.size, size + 1)
# Method add_peer_ip should increment size if view is not full
def test_add_peer_ip_should_increment_size_if_not_full(self):
size = self.partialView.size
peer = "A new IP"
self.partialView.add_peer_ip(peer)
self.assertTrue(self.partialView.contains_ip(peer))
self.assertEqual(self.partialView.size, size + 1)
self.assertEqual(self.partialView.size, len(self.partialView.peer_list))
# Method add_peer_ip should not increment size if view is full
def test_add_peer_ip_should_not_increment_size_if_full(self):
peer = "A new IP"
for i in range(self.partialView.limit):
self.partialView.add_peer(self.descriptors[i])
size = self.partialView.size
success = self.partialView.add_peer_ip(peer)
self.assertFalse(success)
self.assertFalse(self.partialView.contains_ip(peer))
self.assertEqual(self.partialView.size, size)
self.assertEqual(self.partialView.size, len(self.partialView.peer_list))
# Initial age should be zero
def test_initial_age_peer(self):
self.partialView.add_peer(PodDescriptor("172.0.1.5"))
self.assertEqual(self.partialView.peer_list[0].age, 0)
# Initial age should be zero
def test_initial_age_peer_ip(self):
self.partialView.add_peer_ip("172.0.1.5")
self.assertEqual(self.partialView.peer_list[0].age, 0)
# Method get_peer_ip_list should return a list of ips
def test_get_peer_ip_list_returns_ips(self):
for ip in self.ips:
self.partialView.add_peer_ip(ip)
self.assertEqual(self.partialView.get_peer_ip_list(), self.ips[:self.partialView.limit])
# Initial age should be zero
def test_partial_view_size_limit(self):
for ip in self.ips:
self.partialView.add_peer_ip(ip)
self.assertEqual(self.partialView.size, self.partialView.limit)
self.assertTrue(self.partialView.is_full())
for i in range(self.partialView.limit):
self.assertEqual(self.partialView.peer_list[i].ip, self.ips[i])
def test_contains_return_true_if_contained(self):
for descr in self.descriptors:
self.partialView.add_peer(descr)
for descr in self.descriptors[:self.partialView.size]:
self.assertTrue(self.partialView.contains(descr))
for descr in self.descriptors[self.partialView.size:]:
self.assertFalse(self.partialView.contains(descr))
def test_contains_ip_return_true_if_contained(self):
for ip in self.ips:
self.partialView.add_peer_ip(ip)
for ip in self.ips[:self.partialView.size]:
self.assertTrue(self.partialView.contains_ip(ip))
for ip in self.ips[self.partialView.size:]:
self.assertFalse(self.partialView.contains_ip(ip))
# Age should be incremented by one
def test_increment(self):
self.partialView.add_peer(PodDescriptor("172.0.1.5", 1))
self.partialView.add_peer(PodDescriptor("172.0.1.7", 3))
self.partialView.increment()
self.assertEqual(self.partialView.peer_list[0].age, 2)
self.assertEqual(self.partialView.peer_list[1].age, 4)
# Sort should sort view by peer's age
def test_sort(self):
self.partialView.add_peer(PodDescriptor("172.0.1.5", 2))
self.partialView.add_peer(PodDescriptor("172.0.1.4", 3))
self.partialView.add_peer(PodDescriptor("172.0.1.8", 1))
self.partialView.sort()
self.assertEqual(self.partialView.peer_list[0].ip, "172.0.1.8")
self.assertEqual(self.partialView.peer_list[1].ip, "172.0.1.5")
self.assertEqual(self.partialView.peer_list[2].ip, "172.0.1.4")
# Method sample_descriptors should return an empty list if view is empty
def test_sample_descriptors_should_return_empty_list_if_empty_view(self):
sample = self.partialView.sample_descriptors(3)
self.assertEqual(len(sample), 0)
self.assertEqual(sample, [])
self.assertTrue(isinstance(sample, list))
# Method sample_descriptors should return a list of size element if the view's size is less than the limit given as parameter
def test_sample_descriptors_should_return_less_than_limit_peers_if_size_less_than_limit(self):
self.partialView.add_peer(PodDescriptor("172.0.1.5", 2))
self.partialView.add_peer(PodDescriptor("172.0.1.4", 3))
size = self.partialView.size
sample = self.partialView.sample_descriptors(3)
self.assertEqual(len(sample), size)
# Method sample_descriptors should return a list of limit peers despite the size of the the view is greater then limit
def test_sample_descriptors_should_return_no_more_than_limit_peers(self):
self.partialView.add_peer(PodDescriptor("172.0.1.5", 2))
self.partialView.add_peer(PodDescriptor("172.0.1.4", 3))
self.partialView.add_peer(PodDescriptor("172.0.1.9", 4))
limit = 2
size = self.partialView.size
sample = self.partialView.sample_descriptors(limit)
self.assertNotEqual(limit, size)
self.assertEqual(len(sample), limit)
# Method sample_descriptors should return a list of 1 peer and avoid the peer given as parameter
def test_sample_descriptors_with_avoid_peer_more_than_limit(self):
to_avoid = PodDescriptor("172.0.1.9", 4)
self.partialView.add_peer(PodDescriptor("172.0.1.5", 2))
self.partialView.add_peer(PodDescriptor("172.0.1.4", 3))
self.partialView.add_peer(to_avoid)
limit = 1
sample = self.partialView.sample_descriptors(limit, to_avoid)
self.assertEqual(len(sample), limit)
self.assertFalse(to_avoid in sample)
# Method sample_descriptors should return a list of 2 peers and avoid the peer given as parameter
def test_sample_descriptors_with_avoid_peer_less_than_limit(self):
to_avoid = PodDescriptor("172.0.1.9", 4)
self.partialView.add_peer(PodDescriptor("172.0.1.5", 2))
self.partialView.add_peer(PodDescriptor("172.0.1.4", 3))
self.partialView.add_peer(to_avoid)
limit = 3
sample = self.partialView.sample_descriptors(limit, to_avoid)
self.assertEqual(len(sample), 2)
self.assertFalse(to_avoid in sample)
# Method sample_descriptors should return a list of 3 peers if the peer to avoid is not contained in the view
def test_sample_descriptors_with_avoid_peer_not_in_view(self):
to_avoid = PodDescriptor("172.0.1.9", 4)
self.partialView.add_peer(PodDescriptor("172.0.1.5", 2))
self.partialView.add_peer(PodDescriptor("172.0.1.4", 3))
self.partialView.add_peer(PodDescriptor("172.0.1.10", 8))
limit = 3
sample = self.partialView.sample_descriptors(limit, to_avoid)
self.assertEqual(len(sample), limit)
self.assertFalse(to_avoid in sample)
# Method sample_ips should return a list of 3 ips
def test_sample_ips_should_return_a_list_of_ips(self):
self.partialView.add_peer(PodDescriptor("172.0.1.5", 2))
self.partialView.add_peer(PodDescriptor("172.0.1.4", 3))
self.partialView.add_peer(PodDescriptor("172.0.1.10", 8))
limit = 3
sample = self.partialView.sample_ips(limit)
self.assertIn("172.0.1.5", sample)
self.assertIn("172.0.1.4", sample)
self.assertIn("172.0.1.10", sample)
# Method sample_ips should return a list of 3 ips loadable by epto
# def test_sample_ips_should_return_a_list_of_ips_loadable_by_epto(self):
# self.partialView.add_peer(PodDescriptor("172.0.1.5", 2))
# self.partialView.add_peer(PodDescriptor("172.0.1.4", 3))
# self.partialView.add_peer(PodDescriptor("172.0.1.10", 8))
# sample = json.dumps(self.partialView.sample_ips(2))
# # EpTO's code when EpTO invokes get_k_view()
# view = [ip.encode('ascii', 'ignore') for ip in json.loads(sample)]
# for destination in view:
# self.assertIsInstance(destination, str)
# self.assertIn(destination, self.partialView.get_peer_ip_list())
# Test the exchange of views.
# P1 plays the role of P while P2 plays the role of Q described in comments
def test_exchange_views(self):
p1 = PartialView("First IP", 4, 3)
p1.add_peer(PodDescriptor("172.0.1.6", 0))
p1.add_peer(PodDescriptor("172.0.1.3", 2))
p1.add_peer(PodDescriptor("172.0.1.5", 3))
p1.add_peer(PodDescriptor("Second IP", 5))
p2 = PartialView("Second IP", 4, 3)
p2.add_peer(PodDescriptor("172.0.1.3", 0))
p2.add_peer(PodDescriptor("172.0.1.5", 1))
p2.add_peer(PodDescriptor("172.0.1.2", 2))
p2.add_peer(PodDescriptor("172.0.1.1", 4))
########################
# P1 starts the exchange
########################
# 1) Increase by one the age of all neighbors
p1.increment()
# 2) Select neighbor Q with the highest age among all neighbors.
oldest = p1.get_oldest_peer()
# 3) Select l - 1 other random neighbors (meaning avoid oldest).
request = p1.select_neighbors_for_request(oldest)
# 4) Replace Q's entry with a new entry of age 0 and with P's address.
request.add_peer_ip(p1.ip, allow_self_ip=True)
self.assertTrue(request.is_full())
self.assertEqual(request.size, p1.shuffle_length)
################################################
# P2 receives neighbors and prepares a reply
################################################
reply = p2.select_neighbors_for_reply()
self.assertTrue(request.is_full())
self.assertEqual(request.size, p1.shuffle_length)
# Note that in p1 the oldest is p2
# p1 and p2 know two peers in common
# p2 does not have an entry with p1's ip
# p1.merge should:
# - Discard 172.0.1.3 and 172.0.1.5
# - Put in unknown list 172.0.1.2, 172.0.1.1
# 6) I remove the oldest peer from my view
p1.remove_peer(oldest)
p1.merge(request, reply)
self.assertTrue(p1.is_full())
for peer in reply.get_peer_list():
self.assertTrue(p1.contains(peer))
self.assertLessEqual(self.partialView.size, self.partialView.limit)
# Test the exchange of views.
# P1 plays the role of P while P2 plays the role of Q described in comments
# def test_exchange_views_2(self):
#
# p1 = PartialView("First IP", 4, 3)
# p1.add_peer(PodDescriptor("172.0.1.6", 0))
# p1.add_peer(PodDescriptor("172.0.1.3", 2))
# p1.add_peer(PodDescriptor("172.0.1.5", 3))
# p1.add_peer(PodDescriptor("Second IP", 5))
#
# p2 = PartialView("Second IP", 4, 3)
# p2.add_peer(PodDescriptor("172.0.1.3", 0))
# p2.add_peer(PodDescriptor("172.0.1.5", 1))
# p2.add_peer(PodDescriptor("172.0.1.2", 2))
# p2.add_peer(PodDescriptor("First IP", 4))
#
# ########################
# # P1 starts the exchange
# ########################
#
# # 1) Increase by one the age of all neighbors
# p1.increment()
# # 2) Select neighbor Q with the highest age among all neighbors.
# oldest = p1.get_oldest_peer()
# # 3) Select l - 1 other random neighbors (meaning avoid oldest).
# request = p1.select_neighbors_for_request(oldest)
# # 4) Replace Q's entry with a new entry of age 0 and with P's address.
# request.add_peer_ip(p1.ip, allow_self_ip=True)
#
# self.assertTrue(request.is_full())
# self.assertEqual(request.size, p1.shuffle_length)
#
# ################################################
# # P2 receives neighbors and prepares a reply
# ################################################
#
# reply = p2.select_neighbors_for_reply()
#
# self.assertTrue(request.is_full())
# self.assertEqual(request.size, p1.shuffle_length)
#
# # Note that in p1 the oldest is p2
# # p1 and p2 know two peers in common
# # p2 does have an entry with p1's ip
# # p1.merge should:
# # - Discard 172.0.1.3 and 172.0.1.5 because are well known
# # - Discard First IP because self ip is not allowed
#
# # 6) I remove the oldest peer from my view
# p1.remove_peer(oldest)
# p1.merge(request, reply)
#
# for peer in reply.get_peer_list():
# if peer != p1.ip:
# self.assertTrue(p1.contains(peer))
#
# self.assertLessEqual(self.partialView.size, self.partialView.limit)
# Method get_oldest_peer should return a PodDescriptor
def test_get_oldest_peer_should_return_none_if_empty_view(self):
oldest = self.partialView.get_oldest_peer()
self.assertEqual(oldest, None)
# Method get_oldest_peer should return a PodDescriptor
def test_get_oldest_peer_should_return_a_pod_descriptor(self):
self.partialView.add_peer(PodDescriptor("172.0.1.6", 2))
self.partialView.add_peer(PodDescriptor("172.0.1.4", 1))
self.partialView.add_peer(PodDescriptor("172.0.1.5", 4))
oldest = self.partialView.get_oldest_peer()
self.assertTrue(isinstance(oldest, PodDescriptor))
# Method get_oldest_peer should return the peer with the highest age
def test_get_oldest_peer(self):
self.partialView.add_peer(PodDescriptor("172.0.1.6", 2))
self.partialView.add_peer(PodDescriptor("172.0.1.4", 1))
self.partialView.add_peer(PodDescriptor("172.0.1.5", 4))
oldest = self.partialView.get_oldest_peer()
self.assertEqual(oldest.ip, "172.0.1.5")
self.assertEqual(oldest.age, 4)
def test_select_neighbors_for_request_should_return_a_non_full_view(self):
oldest = PodDescriptor("172.0.1.4", 1)
self.partialView.add_peer(PodDescriptor("172.0.1.6", 2))
self.partialView.add_peer(oldest)
self.partialView.add_peer(PodDescriptor("172.0.1.5", 4))
neighbors = self.partialView.select_neighbors_for_request(oldest)
self.assertFalse(neighbors.is_full())
self.assertEqual(neighbors.size, neighbors.shuffle_length - 1)
def test_select_neighbors_for_request_should_not_contain_oldest_peer(self):
oldest = PodDescriptor("172.0.1.4", 1)
self.partialView.add_peer(PodDescriptor("172.0.1.6", 2))
self.partialView.add_peer(oldest)
self.partialView.add_peer(PodDescriptor("172.0.1.5", 4))
neighbors = self.partialView.select_neighbors_for_request(oldest)
self.assertFalse(neighbors.contains(oldest))
def test_select_neighbors_for_request_and_add_peer_should_return_full_view(self):
oldest = PodDescriptor("172.0.1.4", 1)
self.partialView.add_peer(PodDescriptor("172.0.1.6", 2))
self.partialView.add_peer(oldest)
self.partialView.add_peer(PodDescriptor("172.0.1.5", 4))
neighbors = self.partialView.select_neighbors_for_request(oldest)
neighbors.add_peer_ip(self.partialView.ip, allow_self_ip=True)
self.assertEqual(neighbors.size, self.partialView.shuffle_length)
self.assertTrue(neighbors.is_full())
def test_select_neighbors_for_reply_should_return_a_full_view(self):
oldest = PodDescriptor("172.0.1.4", 1)
self.partialView.add_peer(PodDescriptor("172.0.1.6", 2))
self.partialView.add_peer(oldest)
self.partialView.add_peer(PodDescriptor("172.0.1.5", 4))
neighbors = self.partialView.select_neighbors_for_reply(oldest)
self.assertTrue(neighbors.is_full())
self.assertEqual(neighbors.size, neighbors.shuffle_length)
def test_select_neighbors_for_reply_should_contain_avoid_peer_if_size_eq_shuffle_length(self):
oldest = PodDescriptor("172.0.1.4", 1)
self.partialView.add_peer(PodDescriptor("172.0.1.6", 2))
self.partialView.add_peer(oldest)
neighbors = self.partialView.select_neighbors_for_reply(oldest)
self.assertTrue(neighbors.is_full())
self.assertEqual(neighbors.size, neighbors.shuffle_length)
def test_select_neighbors_for_reply_should_not_contain_oldest_peer(self):
oldest = PodDescriptor("172.0.1.4", 1)
self.partialView.add_peer(PodDescriptor("172.0.1.6", 2))
self.partialView.add_peer(oldest)
self.partialView.add_peer(PodDescriptor("172.0.1.5", 4))
neighbors = self.partialView.select_neighbors_for_reply(oldest)
self.assertFalse(neighbors.contains(oldest))
def test_empty_partial_view_to_json(self):
jsonized = self.partialView.to_json()
self.assertEqual(jsonized, {"ip": "172.0.1.0", "limit": 3, "shuffle_length": 2, "peer_list": [], "size": 0})
def test_unmarshal_partial_view(self):
for ip in self.ips:
self.partialView.add_peer_ip(ip)
jsonized = self.partialView.to_json()
partial_view = PartialView.from_dict(jsonized)
self.assertIsInstance(partial_view, PartialView)
for peer in partial_view.peer_list:
self.assertIsInstance(peer, PodDescriptor)
self.assertEqual(partial_view.ip, self.partialView.ip)
self.assertEqual(partial_view.size, 3)
self.assertEqual(partial_view.limit, 3)
for i in range(self.partialView.limit):
self.assertEqual(partial_view.peer_list[i].ip, self.descriptors[i].ip)
self.assertEqual(partial_view.peer_list[i].age, self.descriptors[i].age)
# Every time the view size is checked if it is equal to the actual size
def tearDown(self):
self.assertEqual(len(self.partialView.peer_list), self.partialView.size)
# Allow running this test module directly (e.g. `python test_partialView.py`).
if __name__ == '__main__':
    unittest.main()
| 45.277559
| 129
| 0.670536
| 3,190
| 23,001
| 4.647962
| 0.062696
| 0.167937
| 0.028327
| 0.097929
| 0.840494
| 0.805895
| 0.764686
| 0.728131
| 0.688609
| 0.659945
| 0
| 0.039392
| 0.21086
| 23,001
| 507
| 130
| 45.366864
| 0.777478
| 0.223773
| 0
| 0.52322
| 0
| 0
| 0.045879
| 0
| 0
| 0
| 0
| 0
| 0.306502
| 1
| 0.136223
| false
| 0.003096
| 0.012384
| 0
| 0.151703
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
40940b4893d075c559e3320262f58f18fcc38d2f
| 242
|
py
|
Python
|
core/views.py
|
menezesluiz/Django_Framework
|
a23319167b4e9e11ea96f39a74727e1a38e0ce9f
|
[
"MIT"
] | null | null | null |
core/views.py
|
menezesluiz/Django_Framework
|
a23319167b4e9e11ea96f39a74727e1a38e0ce9f
|
[
"MIT"
] | null | null | null |
core/views.py
|
menezesluiz/Django_Framework
|
a23319167b4e9e11ea96f39a74727e1a38e0ce9f
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
def index(request):
    """Render the home page with the course name in the template context.

    Args:
        request: the incoming HttpRequest.
    Returns:
        HttpResponse rendering ``index.html``.
    """
    context = {
        'curso': 'Programação Web com Django Framework'
    }
    # BUG FIX: `context` was built but never passed to render(), so the
    # template could not see the `curso` variable.
    return render(request, 'index.html', context)
def contato(request):
    """Render the static contact page.

    Args:
        request: the incoming HttpRequest.
    Returns:
        HttpResponse rendering ``contato.html``.
    """
    template_name = 'contato.html'
    return render(request, template_name)
| 22
| 55
| 0.681818
| 28
| 242
| 5.892857
| 0.607143
| 0.145455
| 0.230303
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.206612
| 242
| 10
| 56
| 24.2
| 0.859375
| 0
| 0
| 0
| 0
| 0
| 0.260331
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.125
| 0.125
| 0.625
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
409ec6bfb8dab2359666ec8ce7e81fb33589dafd
| 161
|
py
|
Python
|
projects/Gruul/gruul/__init__.py
|
sm047/detectron2
|
1036cce320ce0f2adbce7f143566462d3222bd5a
|
[
"Apache-2.0"
] | 5
|
2020-06-16T11:31:22.000Z
|
2021-11-08T03:07:47.000Z
|
projects/Gruul/gruul/__init__.py
|
fangchengji/detectron2
|
1036cce320ce0f2adbce7f143566462d3222bd5a
|
[
"Apache-2.0"
] | null | null | null |
projects/Gruul/gruul/__init__.py
|
fangchengji/detectron2
|
1036cce320ce0f2adbce7f143566462d3222bd5a
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# @Time : 27/5/20 3:33 PM
# @Author : fangcheng.ji
# @FileName: __init__.py
from .classification_network import ClassificationNetwork
| 26.833333
| 57
| 0.732919
| 22
| 161
| 5.136364
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065217
| 0.142857
| 161
| 6
| 57
| 26.833333
| 0.753623
| 0.590062
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
40b6f94e0e6eeefca3f101cdd081e7c9c3114b40
| 2,541
|
py
|
Python
|
DjangoAPI/MyApi/ImageProcessing_By_AWS.py
|
sni710/Django_api
|
a40d049586d9396c3b1bea4cd82177c573b24c17
|
[
"Apache-2.0"
] | 2
|
2020-08-27T11:26:35.000Z
|
2021-03-20T16:27:20.000Z
|
DjangoAPI/MyApi/ImageProcessing_By_AWS.py
|
ankit98040/Django-ML-Project
|
3e50f51e56aa34bb8a7ae31f4955a10e57176ea7
|
[
"Apache-2.0"
] | null | null | null |
DjangoAPI/MyApi/ImageProcessing_By_AWS.py
|
ankit98040/Django-ML-Project
|
3e50f51e56aa34bb8a7ae31f4955a10e57176ea7
|
[
"Apache-2.0"
] | null | null | null |
import boto3
import requests
import cv2
def ObjectDetection(imagePath, Service):
    """Detect labelled objects in an image via AWS Rekognition and display
    the result with bounding boxes until the user presses 'q'.

    Args:
        imagePath: path to the image file read from disk.
        Service: NOTE(review) — this argument is immediately overwritten by a
            freshly created Rekognition client below, so the value passed by
            the caller is never used.
    """
    # Credentials come from the local AWS "default" profile.
    session = boto3.Session(profile_name="default")
    Service = session.client("rekognition")
    image = open(imagePath, "rb").read()  # raw bytes sent to Rekognition
    # cv2.imread returns (height, width, channels); keep H/W to scale the
    # relative bounding-box coordinates returned by the API.
    imgH, imgW = cv2.imread(imagePath).shape[:2]
    MyImage = cv2.imread(imagePath)
    response = Service.detect_labels(Image = {"Bytes": image})
    #response = Service.recognize_celebrities(Image={"Bytes": image})
    for objects in response["Labels"]:
        # Only labels with concrete instances carry bounding boxes.
        if objects["Instances"]:
            for boxs in objects["Instances"]:
                objectName = objects["Name"]
                box = boxs["BoundingBox"]
                # BoundingBox values are fractions of image size; convert
                # to absolute pixel coordinates.
                x = int(imgW * box["Left"])
                y = int(imgH * box["Top"])
                w = int(imgW * box["Width"])
                h = int(imgH * box["Height"])
                print(x,y,w,h)
                MyImage = cv2.rectangle(MyImage, (x,y), (x+w, y+h), (0,200,13), 2)
                MyImage = cv2.putText(MyImage, objectName, (x,y-20), cv2.FONT_HERSHEY_SIMPLEX, 0.9, [0,0,255], 2)
    # Show the annotated image; loop until 'q' is pressed.
    while True:
        cv2.imshow("This is the image you selected", MyImage)
        if cv2.waitKey(1) == ord("q"):
            break
    #print(objects["Name"], "---", objects["Confidence"])
def Celebrities_Detection(imagePath, Service):
    """Recognize celebrities in an image via AWS Rekognition and display the
    result with bounding boxes until the user presses 'q'.

    Args:
        imagePath: path to the image file read from disk.
        Service: NOTE(review) — overwritten by a new Rekognition client just
            like in ObjectDetection; the caller's value is never used.
    """
    session = boto3.Session(profile_name="default")
    Service = session.client("rekognition")
    image = open(imagePath, "rb").read()  # raw bytes sent to Rekognition
    imgH, imgW = cv2.imread(imagePath).shape[:2]
    MyImage = cv2.imread(imagePath)
    #response = Service.detect_labels(Image = {"Bytes": image})
    response = Service.recognize_celebrities(Image={"Bytes": image})
    for objects in response["CelebrityFaces"]:
        CelName = objects["Name"]
        Face = objects["Face"]
        objectName = objects["Name"]
        box = Face["BoundingBox"]
        # BoundingBox values are fractions of image size; convert to pixels.
        x = int(imgW * box["Left"])
        y = int(imgH * box["Top"])
        w = int(imgW * box["Width"])
        h = int(imgH * box["Height"])
        print(x,y,w,h)
        MyImage = cv2.rectangle(MyImage, (x,y), (x+w, y+h), (0,200,13), 2)
        MyImage = cv2.putText(MyImage, CelName, (x,y), cv2.FONT_HERSHEY_SIMPLEX, 0.9, [0,0,255], 2)
    # Show the annotated image; loop until 'q' is pressed.
    while True:
        cv2.imshow("This is the image you selected", MyImage)
        if cv2.waitKey(1) == ord("q"):
            break
# Script entry: run both detectors on a hard-coded local image.
# NOTE(review): both calls run unconditionally on import (no __main__ guard),
# and the second argument is ignored by both functions (see notes above the
# parameter) — TODO confirm whether it was meant to select the service.
image = "/Users/ankit/Desktop/Projects/DJANGO/Django-ML-Project/DjangoAPI/MyApi/ima1.jpg"
ObjectDetection(image, "Object Detection")
Celebrities_Detection(image, "Object Detection")
| 33.88
| 113
| 0.593467
| 313
| 2,541
| 4.779553
| 0.290735
| 0.040107
| 0.048128
| 0.037433
| 0.712567
| 0.712567
| 0.712567
| 0.712567
| 0.712567
| 0.712567
| 0
| 0.028796
| 0.248327
| 2,541
| 74
| 114
| 34.337838
| 0.75445
| 0.077922
| 0
| 0.615385
| 0
| 0.019231
| 0.143285
| 0.03379
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038462
| false
| 0
| 0.057692
| 0
| 0.096154
| 0.038462
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
40c24dcd9a39e668e9455810445f2b4c4eb4133a
| 369
|
py
|
Python
|
backend/ecomm/comm_app/serializer.py
|
Aradhya-Tripathi/symmetrical-chainsaw
|
2e5f552b478c67ea34bd594b918620b3cf520881
|
[
"MIT"
] | null | null | null |
backend/ecomm/comm_app/serializer.py
|
Aradhya-Tripathi/symmetrical-chainsaw
|
2e5f552b478c67ea34bd594b918620b3cf520881
|
[
"MIT"
] | 12
|
2021-05-08T21:01:47.000Z
|
2021-05-14T23:07:03.000Z
|
backend/ecomm/comm_app/serializer.py
|
saxenabhishek/symmetrical-chainsaw
|
9b7f77b4d39867f410929d776d5b363518f71429
|
[
"MIT"
] | 1
|
2021-06-01T21:00:13.000Z
|
2021-06-01T21:00:13.000Z
|
from rest_framework import serializers
from django.contrib.auth.models import User
class CreateUserSerializer(serializers.ModelSerializer):
    """Serializer for creating a User: exposes username, email and password."""
    class Meta:
        model = User
        fields = ["username", "email", "password"]
class AuthUserSerializer(serializers.ModelSerializer):
    """Serializer for authenticating a User: exposes email and password only."""
    class Meta:
        model = User
        fields = ["email", "password"]
| 21.705882
| 56
| 0.688347
| 35
| 369
| 7.228571
| 0.571429
| 0.205534
| 0.245059
| 0.27668
| 0.395257
| 0.395257
| 0.395257
| 0
| 0
| 0
| 0
| 0
| 0.219512
| 369
| 16
| 57
| 23.0625
| 0.878472
| 0
| 0
| 0.4
| 0
| 0
| 0.092141
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.2
| 0.2
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
40cbe21ccd5453bf35ab6f553a773b46094496aa
| 1,755
|
py
|
Python
|
free_style/tf_play/bench/test/load.py
|
yudongqiu/gomoku
|
4a95f2a5008f31fed5cb92c6bd6d55f9669ddd06
|
[
"MIT"
] | 3
|
2018-06-12T09:03:41.000Z
|
2019-01-14T05:34:57.000Z
|
free_style/tf_play/bench/test/load.py
|
yudongqiu/gomoku
|
4a95f2a5008f31fed5cb92c6bd6d55f9669ddd06
|
[
"MIT"
] | null | null | null |
free_style/tf_play/bench/test/load.py
|
yudongqiu/gomoku
|
4a95f2a5008f31fed5cb92c6bd6d55f9669ddd06
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
import tflearn
import random
import numpy as np
# Random dummy input: 10 samples of shape (4, 4, 2).
X = np.random.random([10,4,4,2])
# NOTE(review): Y is built but never used below — TODO confirm it was meant
# for a fit/evaluate call; `import random` above is likewise unused here.
Y = [[0] for x in X]
g1 = tf.Graph()
#with g1.as_default():
# NOTE(review): `if True:` appears to be a placeholder for the commented-out
# `with g1.as_default():` above, kept so the indented body still runs in the
# default graph.
if True:
    # Residual-style conv network: input -> conv/BN/relu, then a two-conv
    # residual block whose output is added back to `net`.
    input_layer = tflearn.input_data(shape=[None, 4, 4, 2])
    net = tflearn.conv_2d(input_layer, 256, 3, activation=None)
    net = tflearn.batch_normalization(net)
    net = tflearn.activation(net, activation='relu')
    # block 2
    tmp = tflearn.conv_2d(net, 256, 3, activation=None)
    tmp = tflearn.batch_normalization(tmp)
    tmp = tflearn.activation(tmp, activation='relu')
    tmp = tflearn.conv_2d(tmp, 256, 3, activation=None)
    tmp = tflearn.batch_normalization(tmp)
    net = tflearn.activation(net + tmp, activation='relu')
    # Single tanh output head trained with SGD + mean-square loss.
    final = tflearn.fully_connected(net, 1, activation='tanh')
    sgd = tflearn.optimizers.SGD(learning_rate=0.01, lr_decay=0.95, decay_step=200000)
    regression = tflearn.regression(final, optimizer=sgd, loss='mean_square', metric='R2')
    m = tflearn.DNN(regression)
    # Restore previously-trained weights from checkpoint 'm1'.
    m.load('m1')
#
#
# tf.reset_default_graph()
# input_layer = tflearn.input_data(shape=[None, 4, 4, 2])
# net = tflearn.conv_2d(input_layer, 128, 3, activation=None)
# net = tflearn.batch_normalization(net)
# net = tflearn.activation(net, activation='relu')
# # block 2
# tmp = tflearn.conv_2d(net, 128, 3, activation=None)
# tmp = tflearn.batch_normalization(tmp)
# net = tflearn.activation(net + tmp, activation='relu')
# final = tflearn.fully_connected(net, 1, activation='tanh')
# sgd = tflearn.optimizers.SGD(learning_rate=0.01, lr_decay=0.95, decay_step=200000)
# regression = tflearn.regression(final, optimizer=sgd, loss='mean_square', metric='R2')
# m2 = tflearn.DNN(regression)
# Run inference on the dummy batch and print predictions.
print(m.predict( X ))
#
# m2.load('test2')
# print(m2.pridict(X))
| 34.411765
| 91
| 0.704274
| 257
| 1,755
| 4.696498
| 0.276265
| 0.06628
| 0.053853
| 0.076222
| 0.757249
| 0.757249
| 0.757249
| 0.757249
| 0.757249
| 0.714167
| 0
| 0.049202
| 0.14302
| 1,755
| 50
| 92
| 35.1
| 0.753324
| 0.404558
| 0
| 0.083333
| 0
| 0
| 0.030273
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0.041667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
40d541d5260375a555b2f6267cc6cb0899b4e4c4
| 126
|
py
|
Python
|
measurement/array-operations/vsquaresquare.py
|
quepas/performance-estimation-array-operations
|
b209ba5efebf5dee60ec5fca0fa711ca2e766e17
|
[
"MIT"
] | null | null | null |
measurement/array-operations/vsquaresquare.py
|
quepas/performance-estimation-array-operations
|
b209ba5efebf5dee60ec5fca0fa711ca2e766e17
|
[
"MIT"
] | null | null | null |
measurement/array-operations/vsquaresquare.py
|
quepas/performance-estimation-array-operations
|
b209ba5efebf5dee60ec5fca0fa711ca2e766e17
|
[
"MIT"
] | null | null | null |
import numpy as np
# Compute double element-wise square of vector
def vsquaresquare(V):
    """Compute the double element-wise square (i.e. fourth power) of vector V.

    Args:
        V: array-like of numbers.
    Returns:
        numpy array with each element of V raised to the fourth power.
    """
    R = np.power(np.power(V, 2), 2)
    # BUG FIX: the result was computed but never returned, so callers
    # always received None.
    return R
| 18
| 46
| 0.698413
| 22
| 126
| 4
| 0.772727
| 0.159091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019608
| 0.190476
| 126
| 6
| 47
| 21
| 0.843137
| 0.349206
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
905100c11b368587b755761b0305166ee0292372
| 8,019
|
py
|
Python
|
tests/integration/loading/test_loading_component.py
|
ohaibbq/dash-core-components
|
b7d1ebee327cf1b1938569a07cb5bf0dae4ecc54
|
[
"MIT"
] | 1
|
2020-08-15T07:04:25.000Z
|
2020-08-15T07:04:25.000Z
|
tests/integration/loading/test_loading_component.py
|
ohaibbq/dash-core-components
|
b7d1ebee327cf1b1938569a07cb5bf0dae4ecc54
|
[
"MIT"
] | null | null | null |
tests/integration/loading/test_loading_component.py
|
ohaibbq/dash-core-components
|
b7d1ebee327cf1b1938569a07cb5bf0dae4ecc54
|
[
"MIT"
] | null | null | null |
from multiprocessing import Lock
import dash
from dash.dependencies import Input, Output
import dash_core_components as dcc
import dash_html_components as html
def test_ldcp001_loading_component_initialization(dash_dcc):
    """Spinner shows while the initial callback is blocked, then content renders."""
    lock = Lock()
    app = dash.Dash(__name__)
    app.layout = html.Div(
        [dcc.Loading([html.Div(id="div-1")], className="loading")], id="root"
    )

    # Callback blocks on `lock`, keeping div-1 in the loading state until
    # the test releases it.
    @app.callback(Output("div-1", "children"), [Input("root", "n_clicks")])
    def updateDiv(children):
        with lock:
            return "content"

    # Hold the lock while the app starts so the spinner is observable.
    with lock:
        dash_dcc.start_server(app)
        dash_dcc.find_element(".loading .dash-spinner")
        # ensure inner component is also mounted
        dash_dcc.wait_for_text_to_equal("#div-1", "")

    # Lock released: the callback completes and the content appears.
    dash_dcc.wait_for_text_to_equal("#div-1", "content")
    assert not dash_dcc.get_logs()
def test_ldcp002_loading_component_action(dash_dcc):
    """Spinner shows on a callback triggered by user action, not just on load."""
    lock = Lock()
    app = dash.Dash(__name__)
    app.layout = html.Div(
        [dcc.Loading([html.Div(id="div-1")], className="loading")], id="root"
    )

    # First (n_clicks is None) render returns immediately; a click makes the
    # callback block on `lock`, so the loading state becomes observable.
    @app.callback(Output("div-1", "children"), [Input("root", "n_clicks")])
    def updateDiv(n_clicks):
        if n_clicks is not None:
            with lock:
                return "changed"
        return "content"

    with lock:
        dash_dcc.start_server(app)
        dash_dcc.wait_for_text_to_equal("#div-1", "content")
        dash_dcc.find_element("#root").click()
        dash_dcc.find_element(".loading .dash-spinner")
        # mounted but hidden, so looks like no text
        dash_dcc.wait_for_text_to_equal("#div-1", "")

    # Lock released: the click callback finishes and the text updates.
    dash_dcc.wait_for_text_to_equal("#div-1", "changed")
    assert not dash_dcc.get_logs()
def test_ldcp003_multiple_loading_components(dash_dcc):
    """Each Loading wrapper spins independently for its own pending callback.

    Fix: both callbacks were named ``updateDiv``, so the second ``def``
    shadowed the first in the function's scope (flake8 F811). Dash registers
    callbacks at decoration time, so it happened to work, but the duplicate
    name is misleading; each callback now has a distinct name.
    """
    lock = Lock()
    app = dash.Dash(__name__)
    app.layout = html.Div(
        [
            dcc.Loading([html.Button(id="btn-1")], className="loading-1"),
            dcc.Loading([html.Button(id="btn-2")], className="loading-2"),
        ],
        id="root",
    )

    @app.callback(Output("btn-1", "children"), [Input("btn-2", "n_clicks")])
    def update_btn1(n_clicks):
        # Block while the test holds the lock so the spinner stays visible.
        if n_clicks is not None:
            with lock:
                return "changed 1"
        return "content 1"

    @app.callback(Output("btn-2", "children"), [Input("btn-1", "n_clicks")])
    def update_btn2(n_clicks):
        if n_clicks is not None:
            with lock:
                return "changed 2"
        return "content 2"

    dash_dcc.start_server(app)
    dash_dcc.wait_for_text_to_equal("#btn-1", "content 1")
    dash_dcc.wait_for_text_to_equal("#btn-2", "content 2")

    with lock:
        # Clicking btn-1 fires btn-2's callback: only loading-2 spins.
        dash_dcc.find_element("#btn-1").click()
        dash_dcc.find_element(".loading-2 .dash-spinner")
        dash_dcc.wait_for_text_to_equal("#btn-2", "")
    dash_dcc.wait_for_text_to_equal("#btn-2", "changed 2")

    with lock:
        # Clicking btn-2 fires btn-1's callback: only loading-1 spins.
        dash_dcc.find_element("#btn-2").click()
        dash_dcc.find_element(".loading-1 .dash-spinner")
        dash_dcc.wait_for_text_to_equal("#btn-1", "")
    dash_dcc.wait_for_text_to_equal("#btn-1", "changed 1")
    assert not dash_dcc.get_logs()
def test_ldcp004_nested_loading_components(dash_dcc):
    """Nested Loading wrappers: each spinner tracks only its own subtree.

    Fix: both callbacks were named ``updateDiv`` — the second definition
    shadowed the first (flake8 F811). Dash registers callbacks at decoration
    time, so it worked by accident; the callbacks now have distinct names.
    """
    lock = Lock()
    app = dash.Dash(__name__)
    app.layout = html.Div(
        [
            dcc.Loading(
                [
                    html.Button(id="btn-1"),
                    dcc.Loading([html.Button(id="btn-2")], className="loading-2"),
                ],
                className="loading-1",
            )
        ],
        id="root",
    )

    @app.callback(Output("btn-1", "children"), [Input("btn-2", "n_clicks")])
    def update_btn1(n_clicks):
        # Block while the test holds the lock so the spinner stays visible.
        if n_clicks is not None:
            with lock:
                return "changed 1"
        return "content 1"

    @app.callback(Output("btn-2", "children"), [Input("btn-1", "n_clicks")])
    def update_btn2(n_clicks):
        if n_clicks is not None:
            with lock:
                return "changed 2"
        return "content 2"

    dash_dcc.start_server(app)
    dash_dcc.wait_for_text_to_equal("#btn-1", "content 1")
    dash_dcc.wait_for_text_to_equal("#btn-2", "content 2")

    with lock:
        # btn-1's click drives btn-2's callback: only the inner wrapper spins.
        dash_dcc.find_element("#btn-1").click()
        dash_dcc.find_element(".loading-2 .dash-spinner")
        dash_dcc.wait_for_text_to_equal("#btn-2", "")
    dash_dcc.wait_for_text_to_equal("#btn-2", "changed 2")

    with lock:
        # btn-2's click drives btn-1's callback: the outer wrapper spins.
        dash_dcc.find_element("#btn-2").click()
        dash_dcc.find_element(".loading-1 .dash-spinner")
        dash_dcc.wait_for_text_to_equal("#btn-1", "")
    dash_dcc.wait_for_text_to_equal("#btn-1", "changed 1")
    assert not dash_dcc.get_logs()
def test_ldcp005_dynamic_loading_component(dash_dcc):
    """A Loading component injected by a callback still shows its spinner."""
    hold = Lock()
    app = dash.Dash(__name__, suppress_callback_exceptions=True)
    app.layout = html.Div([html.Button(id="btn-1"), html.Div(id="div-1")])

    @app.callback(Output("div-1", "children"), [Input("btn-1", "n_clicks")])
    def inject_loading(n_clicks):
        # First invocation (page load) renders nothing.
        if n_clicks is None:
            return
        with hold:
            return html.Div(
                [
                    html.Button(id="btn-2"),
                    dcc.Loading([html.Button(id="btn-3")], className="loading-1"),
                ]
            )

    @app.callback(Output("btn-3", "children"), [Input("btn-2", "n_clicks")])
    def fill_dynamic(n_clicks):
        if n_clicks is None:
            return "content"
        with hold:
            return "changed"

    dash_dcc.start_server(app)
    dash_dcc.find_element("#btn-1")
    dash_dcc.wait_for_text_to_equal("#div-1", "")
    dash_dcc.find_element("#btn-1").click()
    dash_dcc.find_element("#div-1 #btn-2")
    dash_dcc.wait_for_text_to_equal("#btn-3", "content")

    with hold:
        dash_dcc.find_element("#btn-2").click()
        # The dynamically injected Loading must spin like a static one.
        dash_dcc.find_element(".loading-1 .dash-spinner")
        dash_dcc.wait_for_text_to_equal("#btn-3", "")
    dash_dcc.wait_for_text_to_equal("#btn-3", "changed")
    assert not dash_dcc.get_logs()
def test_ldcp006_children_identity(dash_dcc):
    """Loading keeps the same child DOM node across updates, only toggling visibility."""
    gate = Lock()
    app = dash.Dash(__name__)
    app.layout = html.Div(
        [
            html.Button("click", id="btn"),
            dcc.Loading(dcc.Graph(id="graph"), className="loading"),
        ]
    )

    @app.callback(Output("graph", "figure"), [Input("btn", "n_clicks")])
    def make_figure(n_clicks):
        with gate:
            xs = list(range(2, (n_clicks or 0) + 5))
            return {
                "data": [{"type": "bar", "x": xs, "y": xs}],
                "layout": {"width": 400, "height": 400},
            }

    def graph_visibility():
        return dash_dcc.driver.execute_script(
            "var gd_ = document.querySelector('.js-plotly-plot');"
            "return getComputedStyle(gd_).visibility;"
        )

    with gate:
        dash_dcc.start_server(app)
        dash_dcc.find_element(".loading .dash-spinner")
        dash_dcc.find_element("#graph .js-plotly-plot")
        # Tag the graph node so we can later prove it was not remounted.
        dash_dcc.driver.execute_script(
            "window.gd = document.querySelector('.js-plotly-plot');"
            "window.gd.__test__ = 'boo';"
        )
        assert graph_visibility() == "hidden"

    check_same_node = (
        "var gd_ = document.querySelector('.js-plotly-plot');"
        "return gd_ === window.gd && gd_.__test__ === 'boo';"
    )
    assert len(dash_dcc.find_elements(".js-plotly-plot .bars path")) == 3
    assert dash_dcc.driver.execute_script(check_same_node)
    assert graph_visibility() == "visible"

    with gate:
        dash_dcc.find_element("#btn").click()
        dash_dcc.find_element(".loading .dash-spinner")
        # While loading, the old bars remain: same node, merely hidden.
        assert len(dash_dcc.find_elements(".js-plotly-plot .bars path")) == 3
        assert dash_dcc.driver.execute_script(check_same_node)
        assert graph_visibility() == "hidden"

    assert len(dash_dcc.find_elements(".js-plotly-plot .bars path")) == 4
    assert dash_dcc.driver.execute_script(check_same_node)
    assert graph_visibility() == "visible"
| 28.537367
| 82
| 0.599701
| 1,065
| 8,019
| 4.240376
| 0.110798
| 0.102303
| 0.056023
| 0.065102
| 0.809123
| 0.780115
| 0.752879
| 0.735828
| 0.676484
| 0.643933
| 0
| 0.018288
| 0.249906
| 8,019
| 280
| 83
| 28.639286
| 0.732502
| 0.009976
| 0
| 0.651515
| 0
| 0
| 0.178553
| 0.020035
| 0
| 0
| 0
| 0
| 0.075758
| 1
| 0.080808
| false
| 0
| 0.025253
| 0.005051
| 0.191919
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
905fb6d02acdb52f8e03d76c7891f3630034af6d
| 88
|
py
|
Python
|
code/default/python27/1.0/lib/noarch/front_base/host_manager.py
|
wuyongwen/XX-Net
|
313aefd862b8f230f7c61dc29db1b2b93a17e6ab
|
[
"BSD-2-Clause"
] | null | null | null |
code/default/python27/1.0/lib/noarch/front_base/host_manager.py
|
wuyongwen/XX-Net
|
313aefd862b8f230f7c61dc29db1b2b93a17e6ab
|
[
"BSD-2-Clause"
] | null | null | null |
code/default/python27/1.0/lib/noarch/front_base/host_manager.py
|
wuyongwen/XX-Net
|
313aefd862b8f230f7c61dc29db1b2b93a17e6ab
|
[
"BSD-2-Clause"
] | null | null | null |
class HostManagerBase(object):
    """Minimal host-manager base: knows no SNI/host mapping for any IP."""

    def get_sni_host(self, ip):
        """Return an ``(sni, host)`` pair for *ip*; always empty strings here."""
        sni, host = "", ""
        return sni, host
| 12.571429
| 31
| 0.602273
| 10
| 88
| 5.1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.261364
| 88
| 6
| 32
| 14.666667
| 0.784615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
90983149a6e0eb9b57fd9a75d0a487b1fcd45c54
| 140
|
py
|
Python
|
client/asot_client/__init__.py
|
lun-4/asot
|
24d556af9695f7ac2f059bc7776fc59945a7ec0f
|
[
"BSD-3-Clause"
] | 1
|
2021-08-01T21:20:52.000Z
|
2021-08-01T21:20:52.000Z
|
client/asot_client/__init__.py
|
lun-4/asot
|
24d556af9695f7ac2f059bc7776fc59945a7ec0f
|
[
"BSD-3-Clause"
] | null | null | null |
client/asot_client/__init__.py
|
lun-4/asot
|
24d556af9695f7ac2f059bc7776fc59945a7ec0f
|
[
"BSD-3-Clause"
] | null | null | null |
# asot: Localhost tunneling
# Copyright 2021, Luna and asot contributors
# SPDX-License-Identifier: BSD-3-Clause
from .cli import main_cli
| 23.333333
| 44
| 0.785714
| 20
| 140
| 5.45
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041322
| 0.135714
| 140
| 5
| 45
| 28
| 0.859504
| 0.757143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
90caae54bed6781fd730bfab97ec5def8c409688
| 42,783
|
py
|
Python
|
vspk/v4_0/nulink.py
|
mohaimenhasan/vspk-python
|
4c7b297427048340b250cc3c74d9214dc0d4bde1
|
[
"BSD-3-Clause"
] | null | null | null |
vspk/v4_0/nulink.py
|
mohaimenhasan/vspk-python
|
4c7b297427048340b250cc3c74d9214dc0d4bde1
|
[
"BSD-3-Clause"
] | null | null | null |
vspk/v4_0/nulink.py
|
mohaimenhasan/vspk-python
|
4c7b297427048340b250cc3c74d9214dc0d4bde1
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .fetchers import NUDemarcationServicesFetcher
from .fetchers import NUMetadatasFetcher
from .fetchers import NUNextHopAddressFetcher
from .fetchers import NUGlobalMetadatasFetcher
from .fetchers import NUOverlayAddressPoolsFetcher
from bambou import NURESTObject
class NULink(NURESTObject):
""" Represents a Link in the VSD
Notes:
This object represents the link between a source and destination domain in service chaining
"""
__rest_name__ = "link"
__resource_name__ = "links"
## Constants
CONST_ASSOCIATED_DESTINATION_TYPE_AVATAR = "AVATAR"
CONST_ASSOCIATED_DESTINATION_TYPE_KEYSERVER_NOTIFICATION = "KEYSERVER_NOTIFICATION"
CONST_ASSOCIATED_DESTINATION_TYPE_MONITORING_PORT = "MONITORING_PORT"
CONST_ASSOCIATED_DESTINATION_TYPE_STATIC_ROUTE = "STATIC_ROUTE"
CONST_ASSOCIATED_DESTINATION_TYPE_METADATA_TAG = "METADATA_TAG"
CONST_ASSOCIATED_DESTINATION_TYPE_VCENTER_FETCH_DATACENTERS = "VCENTER_FETCH_DATACENTERS"
CONST_ASSOCIATED_DESTINATION_TYPE_INGRESS_ACL_TEMPLATE_ENTRY = "INGRESS_ACL_TEMPLATE_ENTRY"
CONST_ASSOCIATED_DESTINATION_TYPE_KEYSERVER_MONITOR = "KEYSERVER_MONITOR"
CONST_ASSOCIATED_DESTINATION_TYPE_EVPN_BGP_COMMUNITY_TAG_SEQ_NO = "EVPN_BGP_COMMUNITY_TAG_SEQ_NO"
CONST_ASSOCIATED_DESTINATION_TYPE_PERMITTED_ACTION = "PERMITTED_ACTION"
CONST_ASSOCIATED_DESTINATION_TYPE_INGRESS_ACL_TEMPLATE = "INGRESS_ACL_TEMPLATE"
CONST_ASSOCIATED_DESTINATION_TYPE_DOMAIN = "DOMAIN"
CONST_ASSOCIATED_DESTINATION_TYPE_VPORT_GATEWAY_RESPONSE = "VPORT_GATEWAY_RESPONSE"
CONST_ASSOCIATED_DESTINATION_TYPE_USER = "USER"
CONST_ASSOCIATED_DESTINATION_TYPE_NSGATEWAY_CONFIG = "NSGATEWAY_CONFIG"
CONST_ASSOCIATED_DESTINATION_TYPE_NEXT_HOP = "NEXT_HOP"
CONST_ASSOCIATED_DESTINATION_TYPE_INGRESS_ACL_ENTRY = "INGRESS_ACL_ENTRY"
CONST_ASSOCIATED_DESTINATION_TYPE_VPORT_MEDIATION_REQUEST = "VPORT_MEDIATION_REQUEST"
CONST_ASSOCIATED_DESTINATION_TYPE_INGRESS_EXT_SERVICE = "INGRESS_EXT_SERVICE"
CONST_ASSOCIATED_DESTINATION_TYPE_STATS_POLICY = "STATS_POLICY"
CONST_ASSOCIATED_DESTINATION_TYPE_GATEWAY_CONFIG_RESP = "GATEWAY_CONFIG_RESP"
CONST_ASSOCIATED_DESTINATION_TYPE_VSG_REDUNDANT_PORT = "VSG_REDUNDANT_PORT"
CONST_ASSOCIATED_DESTINATION_TYPE_IP_BINDING = "IP_BINDING"
CONST_ASSOCIATED_DESTINATION_TYPE_POLICY_GROUP = "POLICY_GROUP"
CONST_ASSOCIATED_DESTINATION_TYPE_POLICING_POLICY = "POLICING_POLICY"
CONST_ASSOCIATED_DESTINATION_TYPE_RTRD_SEQUENCENO = "RTRD_SEQUENCENO"
CONST_ASSOCIATED_DESTINATION_TYPE_EVPN_BGP_COMMUNITY_TAG_ENTRY = "EVPN_BGP_COMMUNITY_TAG_ENTRY"
CONST_ASSOCIATED_DESTINATION_TYPE_JOB = "JOB"
CONST_ASSOCIATED_DESTINATION_TYPE_KEYSERVER_MONITOR_ENCRYPTED_SEED = "KEYSERVER_MONITOR_ENCRYPTED_SEED"
CONST_ASSOCIATED_DESTINATION_TYPE_VMWARE_VCENTER_CLUSTER = "VMWARE_VCENTER_CLUSTER"
CONST_ASSOCIATED_DESTINATION_TYPE_CONTAINER_INTERFACE = "CONTAINER_INTERFACE"
CONST_ASSOCIATED_DESTINATION_TYPE_FLOATINGIP = "FLOATINGIP"
CONST_ASSOCIATED_DESTINATION_TYPE_INGRESS_ADV_FWD = "INGRESS_ADV_FWD"
CONST_ASSOCIATED_DESTINATION_TYPE_MULTI_NIC_VPORT = "MULTI_NIC_VPORT"
CONST_ASSOCIATED_DESTINATION_TYPE_BACK_HAUL_SERVICE_RESP = "BACK_HAUL_SERVICE_RESP"
CONST_ASSOCIATED_DESTINATION_TYPE_PORT_TEMPLATE = "PORT_TEMPLATE"
CONST_ASSOCIATED_DESTINATION_TYPE_EGRESS_ACL = "EGRESS_ACL"
CONST_ASSOCIATED_DESTINATION_TYPE_INFRASTRUCTURE_VSC_PROFILE = "INFRASTRUCTURE_VSC_PROFILE"
CONST_ASSOCIATED_DESTINATION_TYPE_HEALTH_REQ = "HEALTH_REQ"
CONST_ASSOCIATED_DESTINATION_TYPE_HOSTINTERFACE = "HOSTINTERFACE"
CONST_ASSOCIATED_DESTINATION_TYPE_ROUTING_POL_MED_RESPONSE = "ROUTING_POL_MED_RESPONSE"
CONST_ASSOCIATED_DESTINATION_TYPE_CHILD_ENTITY_POLICY_CHANGE = "CHILD_ENTITY_POLICY_CHANGE"
CONST_ASSOCIATED_DESTINATION_TYPE_INGRESS_ACL = "INGRESS_ACL"
CONST_ASSOCIATED_DESTINATION_TYPE_SHARED_RESOURCE = "SHARED_RESOURCE"
CONST_ASSOCIATED_DESTINATION_TYPE_SITE_RES = "SITE_RES"
CONST_ACCEPTANCE_CRITERIA_SUBNETS_ONLY = "SUBNETS_ONLY"
CONST_ASSOCIATED_DESTINATION_TYPE_INGRESS_ADV_FWD_ENTRY = "INGRESS_ADV_FWD_ENTRY"
CONST_ASSOCIATED_DESTINATION_TYPE_VM_RESYNC = "VM_RESYNC"
CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
CONST_ASSOCIATED_DESTINATION_TYPE_STATIC_ROUTE_RESP = "STATIC_ROUTE_RESP"
CONST_ASSOCIATED_DESTINATION_TYPE_EGRESS_QOS_MR = "EGRESS_QOS_MR"
CONST_ASSOCIATED_DESTINATION_TYPE_IKE_GATEWAY_CONNECTION = "IKE_GATEWAY_CONNECTION"
CONST_ASSOCIATED_DESTINATION_TYPE_CUSTOMER_VRF_SEQUENCENO = "CUSTOMER_VRF_SEQUENCENO"
CONST_ASSOCIATED_DESTINATION_TYPE_PATNATPOOL = "PATNATPOOL"
CONST_ASSOCIATED_DESTINATION_TYPE_PUBLIC_NETWORK = "PUBLIC_NETWORK"
CONST_ASSOCIATED_DESTINATION_TYPE_VPORTTAG = "VPORTTAG"
CONST_ASSOCIATED_DESTINATION_TYPE_NSPORT = "NSPORT"
CONST_ASSOCIATED_DESTINATION_TYPE_NSGATEWAY = "NSGATEWAY"
CONST_ASSOCIATED_DESTINATION_TYPE_REDUNDANT_GW_GRP = "REDUNDANT_GW_GRP"
CONST_ASSOCIATED_DESTINATION_TYPE_IKE_PSK = "IKE_PSK"
CONST_ASSOCIATED_DESTINATION_TYPE_GROUP = "GROUP"
CONST_ASSOCIATED_DESTINATION_TYPE_APPD_FLOW_FORWARDING_POLICY = "APPD_FLOW_FORWARDING_POLICY"
CONST_ASSOCIATED_DESTINATION_TYPE_DISKSTATS = "DISKSTATS"
CONST_ASSOCIATED_DESTINATION_TYPE_GATEWAY_SERVICE_CONFIG_RESP = "GATEWAY_SERVICE_CONFIG_RESP"
CONST_ASSOCIATED_DESTINATION_TYPE_NSGATEWAY_TEMPLATE = "NSGATEWAY_TEMPLATE"
CONST_ASSOCIATED_DESTINATION_TYPE_GATEWAY_STATE = "GATEWAY_STATE"
CONST_ASSOCIATED_DESTINATION_TYPE_INFRASTRUCTURE_GATEWAY_PROFILE = "INFRASTRUCTURE_GATEWAY_PROFILE"
CONST_ASSOCIATED_DESTINATION_TYPE_APPD_FLOW = "APPD_FLOW"
CONST_ASSOCIATED_DESTINATION_TYPE_SERVICE_GATEWAY_RESPONSE = "SERVICE_GATEWAY_RESPONSE"
CONST_ASSOCIATED_DESTINATION_TYPE_UNSUPPORTED = "UNSUPPORTED"
CONST_ASSOCIATED_DESTINATION_TYPE_GATEWAY_SECURITY = "GATEWAY_SECURITY"
CONST_ASSOCIATED_DESTINATION_TYPE_NEXT_HOP_RESP = "NEXT_HOP_RESP"
CONST_ASSOCIATED_DESTINATION_TYPE_SUBNET_TEMPLATE = "SUBNET_TEMPLATE"
CONST_ASSOCIATED_DESTINATION_TYPE_ACLENTRY_LOCATION = "ACLENTRY_LOCATION"
CONST_ASSOCIATED_DESTINATION_TYPE_ENTITY_METADATA_BINDING = "ENTITY_METADATA_BINDING"
CONST_ASSOCIATED_DESTINATION_TYPE_IKE_GATEWAY_CONFIG = "IKE_GATEWAY_CONFIG"
CONST_ASSOCIATED_DESTINATION_TYPE_VSP = "VSP"
CONST_ASSOCIATED_DESTINATION_TYPE_ZFB_REQUEST = "ZFB_REQUEST"
CONST_ASSOCIATED_DESTINATION_TYPE_VM_INTERFACE = "VM_INTERFACE"
CONST_ASSOCIATED_DESTINATION_TYPE_INFRASTRUCTURE_PORT_PROFILE = "INFRASTRUCTURE_PORT_PROFILE"
CONST_ASSOCIATED_DESTINATION_TYPE_PORT = "PORT"
CONST_ASSOCIATED_DESTINATION_TYPE_KEYSERVER_MONITOR_SEED = "KEYSERVER_MONITOR_SEED"
CONST_ASSOCIATED_DESTINATION_TYPE_QOS_PRIMITIVE = "QOS_PRIMITIVE"
CONST_ASSOCIATED_DESTINATION_TYPE_SYSTEM_CONFIG = "SYSTEM_CONFIG"
CONST_ASSOCIATED_DESTINATION_TYPE_FLOATINGIP_ACL_ENTRY = "FLOATINGIP_ACL_ENTRY"
CONST_TYPE_HUB_AND_SPOKE = "HUB_AND_SPOKE"
CONST_ASSOCIATED_DESTINATION_TYPE_MACRO_GROUP_MED = "MACRO_GROUP_MED"
CONST_ASSOCIATED_DESTINATION_TYPE_NSG_NOTIFICATION = "NSG_NOTIFICATION"
CONST_ASSOCIATED_DESTINATION_TYPE_LICENSE = "LICENSE"
CONST_ASSOCIATED_DESTINATION_TYPE_PATMAPPER = "PATMAPPER"
CONST_ASSOCIATED_DESTINATION_TYPE_KEYSERVER_MEMBER = "KEYSERVER_MEMBER"
CONST_ASSOCIATED_DESTINATION_TYPE_VLAN_CONFIG_RESPONSE = "VLAN_CONFIG_RESPONSE"
CONST_ASSOCIATED_DESTINATION_TYPE_GATEWAY_CONFIG = "GATEWAY_CONFIG"
CONST_ASSOCIATED_DESTINATION_TYPE_VM_DESCRIPTION = "VM_DESCRIPTION"
CONST_ASSOCIATED_DESTINATION_TYPE_SYSTEM_MONITORING = "SYSTEM_MONITORING"
CONST_ASSOCIATED_DESTINATION_TYPE_IKE_ENCRYPTION_PROFILE_REQUEST = "IKE_ENCRYPTION_PROFILE_REQUEST"
CONST_ASSOCIATED_DESTINATION_TYPE_LINK = "LINK"
CONST_ASSOCIATED_DESTINATION_TYPE_EXPORTIMPORT = "EXPORTIMPORT"
CONST_ASSOCIATED_DESTINATION_TYPE_GATEWAY_VPORT_CONFIG = "GATEWAY_VPORT_CONFIG"
CONST_ASSOCIATED_DESTINATION_TYPE_POLICY_GROUP_TEMPLATE = "POLICY_GROUP_TEMPLATE"
CONST_ASSOCIATED_DESTINATION_TYPE_RATE_LIMITER = "RATE_LIMITER"
CONST_ASSOCIATED_DESTINATION_TYPE_PORT_VLAN_CONFIG_RESPONSE = "PORT_VLAN_CONFIG_RESPONSE"
CONST_ASSOCIATED_DESTINATION_TYPE_VMWARE_VCENTER_VRS_CONFIG = "VMWARE_VCENTER_VRS_CONFIG"
CONST_ASSOCIATED_DESTINATION_TYPE_CONTAINER = "CONTAINER"
CONST_ASSOCIATED_DESTINATION_TYPE_SUBNET = "SUBNET"
CONST_ASSOCIATED_DESTINATION_TYPE_PERMISSION = "PERMISSION"
CONST_ASSOCIATED_DESTINATION_TYPE_VIRTUAL_IP = "VIRTUAL_IP"
CONST_ASSOCIATED_DESTINATION_TYPE_NSG_INFO = "NSG_INFO"
CONST_ASSOCIATED_DESTINATION_TYPE_BOOTSTRAP_ACTIVATION = "BOOTSTRAP_ACTIVATION"
CONST_ASSOCIATED_DESTINATION_TYPE_GATEWAY_VPORT_CONFIG_RESP = "GATEWAY_VPORT_CONFIG_RESP"
CONST_ASSOCIATED_DESTINATION_TYPE_FLOATINGIP_ACL = "FLOATINGIP_ACL"
CONST_ASSOCIATED_DESTINATION_TYPE_ZFB_AUTO_ASSIGNMENT = "ZFB_AUTO_ASSIGNMENT"
CONST_ASSOCIATED_DESTINATION_TYPE_ZONE = "ZONE"
CONST_ASSOCIATED_DESTINATION_TYPE_INFRASTRUCTURE_CONFIG = "INFRASTRUCTURE_CONFIG"
CONST_ASSOCIATED_DESTINATION_TYPE_VMWARE_VCENTER_EAM_CONFIG = "VMWARE_VCENTER_EAM_CONFIG"
CONST_ASSOCIATED_DESTINATION_TYPE_POLICY_DECISION = "POLICY_DECISION"
CONST_ASSOCIATED_DESTINATION_TYPE_SERVICES_GATEWAY_RESPONSE = "SERVICES_GATEWAY_RESPONSE"
CONST_ASSOCIATED_DESTINATION_TYPE_VLAN = "VLAN"
CONST_ASSOCIATED_DESTINATION_TYPE_VMWARE_REMOVE_CLUSTER_INSCOPE = "VMWARE_REMOVE_CLUSTER_INSCOPE"
CONST_ASSOCIATED_DESTINATION_TYPE_ADDRESS_RANGE_STATE = "ADDRESS_RANGE_STATE"
CONST_ASSOCIATED_DESTINATION_TYPE_SYSTEM_CONFIG_REQ = "SYSTEM_CONFIG_REQ"
CONST_ASSOCIATED_DESTINATION_TYPE_SUBNET_ENTRY = "SUBNET_ENTRY"
CONST_ASSOCIATED_DESTINATION_TYPE_VCENTER_FETCH_CLUSTERS = "VCENTER_FETCH_CLUSTERS"
CONST_ASSOCIATED_DESTINATION_TYPE_LIBVIRT_INTERFACE = "LIBVIRT_INTERFACE"
CONST_ASSOCIATED_DESTINATION_TYPE_GATEWAY_SECURED_DATA = "GATEWAY_SECURED_DATA"
CONST_ASSOCIATED_DESTINATION_TYPE_BGP_NEIGHBOR = "BGP_NEIGHBOR"
CONST_ASSOCIATED_DESTINATION_TYPE_VMWARE_VCENTER_DATACENTER = "VMWARE_VCENTER_DATACENTER"
CONST_ASSOCIATED_DESTINATION_TYPE_FETCH_HYPERVISOR_PROPERTIES = "FETCH_HYPERVISOR_PROPERTIES"
CONST_TYPE_OVERLAY_ADDRESS_TRANSLATION = "OVERLAY_ADDRESS_TRANSLATION"
CONST_ASSOCIATED_DESTINATION_TYPE_EGRESS_QOS_PRIMITIVE = "EGRESS_QOS_PRIMITIVE"
CONST_ASSOCIATED_DESTINATION_TYPE_ESI_SEQUENCENO = "ESI_SEQUENCENO"
CONST_ASSOCIATED_DESTINATION_TYPE_INGRESS_ADV_FWD_TEMPLATE = "INGRESS_ADV_FWD_TEMPLATE"
CONST_ASSOCIATED_DESTINATION_TYPE_IKE_GATEWAY_PROFILE = "IKE_GATEWAY_PROFILE"
CONST_ASSOCIATED_DESTINATION_TYPE_IKE_CERTIFICATE = "IKE_CERTIFICATE"
CONST_ASSOCIATED_DESTINATION_TYPE_VMWARE_VRS_REDEPLOYMENT_POLICY = "VMWARE_VRS_REDEPLOYMENT_POLICY"
CONST_ASSOCIATED_DESTINATION_TYPE_INGRESS_EXT_SERVICE_TEMPLATE_ENTRY = "INGRESS_EXT_SERVICE_TEMPLATE_ENTRY"
CONST_ASSOCIATED_DESTINATION_TYPE_BGP_NEIGHBOR_MED_RESPONSE = "BGP_NEIGHBOR_MED_RESPONSE"
CONST_ASSOCIATED_DESTINATION_TYPE_NSPORT_STATIC_CONFIG = "NSPORT_STATIC_CONFIG"
CONST_ASSOCIATED_DESTINATION_TYPE_BGP_PROFILE = "BGP_PROFILE"
CONST_ASSOCIATED_DESTINATION_TYPE_ENTERPRISE_SECURED_DATA = "ENTERPRISE_SECURED_DATA"
CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"
CONST_ASSOCIATED_DESTINATION_TYPE_L2DOMAIN_TEMPLATE = "L2DOMAIN_TEMPLATE"
CONST_ASSOCIATED_DESTINATION_TYPE_APPD_TIER = "APPD_TIER"
CONST_ASSOCIATED_DESTINATION_TYPE_GEO_VM_EVENT = "GEO_VM_EVENT"
CONST_ASSOCIATED_DESTINATION_TYPE_ENTERPRISE_SECURITY = "ENTERPRISE_SECURITY"
CONST_ASSOCIATED_DESTINATION_TYPE_DSCP_FORWARDING_CLASS_TABLE = "DSCP_FORWARDING_CLASS_TABLE"
CONST_ASSOCIATED_DESTINATION_TYPE_GATEWAY_TEMPLATE = "GATEWAY_TEMPLATE"
CONST_ASSOCIATED_DESTINATION_TYPE_VRS = "VRS"
CONST_ASSOCIATED_DESTINATION_TYPE_EGRESS_ACL_ENTRY = "EGRESS_ACL_ENTRY"
CONST_ASSOCIATED_DESTINATION_TYPE_VPORT = "VPORT"
CONST_ASSOCIATED_DESTINATION_TYPE_GROUPKEY_ENCRYPTION_PROFILE = "GROUPKEY_ENCRYPTION_PROFILE"
CONST_ASSOCIATED_DESTINATION_TYPE_APPD_APPLICATION = "APPD_APPLICATION"
CONST_ASSOCIATED_DESTINATION_TYPE_IKE_ENCRYPTION_PROFILE = "IKE_ENCRYPTION_PROFILE"
CONST_ASSOCIATED_DESTINATION_TYPE_RTRD_ENTITY = "RTRD_ENTITY"
CONST_ASSOCIATED_DESTINATION_TYPE_ZFB_GLOBAL = "ZFB_GLOBAL"
CONST_ASSOCIATED_DESTINATION_TYPE_RD_SEQUENCENO = "RD_SEQUENCENO"
CONST_ASSOCIATED_DESTINATION_TYPE_GATEWAY = "GATEWAY"
CONST_ASSOCIATED_DESTINATION_TYPE_DOMAIN_FLOATING_IP_ACL_TEMPLATE = "DOMAIN_FLOATING_IP_ACL_TEMPLATE"
CONST_ASSOCIATED_DESTINATION_TYPE_VMWARE_ADD_CLUSTER_INSCOPE = "VMWARE_ADD_CLUSTER_INSCOPE"
CONST_ASSOCIATED_DESTINATION_TYPE_VPORTTAGTEMPLATE = "VPORTTAGTEMPLATE"
CONST_ASSOCIATED_DESTINATION_TYPE_SITE = "SITE"
CONST_ASSOCIATED_DESTINATION_TYPE_GATEWAY_SECURITY_RESPONSE = "GATEWAY_SECURITY_RESPONSE"
CONST_ASSOCIATED_DESTINATION_TYPE_WAN_SERVICE = "WAN_SERVICE"
CONST_ASSOCIATED_DESTINATION_TYPE_GATEWAY_SECURITY_REQUEST = "GATEWAY_SECURITY_REQUEST"
CONST_ASSOCIATED_DESTINATION_TYPE_FLOATING_IP_ACL_TEMPLATE = "FLOATING_IP_ACL_TEMPLATE"
CONST_ASSOCIATED_DESTINATION_TYPE_APPD_FLOW_SECURITY_POLICY = "APPD_FLOW_SECURITY_POLICY"
CONST_TYPE_SERVICE_CHAINING = "SERVICE_CHAINING"
CONST_ASSOCIATED_DESTINATION_TYPE_BGPPEER = "BGPPEER"
CONST_ASSOCIATED_DESTINATION_TYPE_NSPORT_VLAN_CONFIG = "NSPORT_VLAN_CONFIG"
CONST_ASSOCIATED_DESTINATION_TYPE_SYSTEM_CONFIG_RESP = "SYSTEM_CONFIG_RESP"
CONST_ASSOCIATED_DESTINATION_TYPE_IKE_SUBNET = "IKE_SUBNET"
CONST_ASSOCIATED_DESTINATION_TYPE_LDAP_CONFIG = "LDAP_CONFIG"
CONST_ASSOCIATED_DESTINATION_TYPE_VPORT_TAG_BASE = "VPORT_TAG_BASE"
CONST_ASSOCIATED_DESTINATION_TYPE_MC_LIST = "MC_LIST"
CONST_ASSOCIATED_DESTINATION_TYPE_ENTERPRISE_CONFIG = "ENTERPRISE_CONFIG"
CONST_ASSOCIATED_DESTINATION_TYPE_DOMAIN_CONFIG_RESP = "DOMAIN_CONFIG_RESP"
CONST_ASSOCIATED_DESTINATION_TYPE_NODE_EXECUTION_ERROR = "NODE_EXECUTION_ERROR"
CONST_ASSOCIATED_DESTINATION_TYPE_STATSSERVER = "STATSSERVER"
CONST_ASSOCIATED_DESTINATION_TYPE_ALARM = "ALARM"
CONST_ASSOCIATED_DESTINATION_TYPE_NETWORK_LAYOUT = "NETWORK_LAYOUT"
CONST_ASSOCIATED_DESTINATION_TYPE_EVENT_LOG = "EVENT_LOG"
CONST_ASSOCIATED_DESTINATION_TYPE_APPLICATION = "APPLICATION"
CONST_ASSOCIATED_DESTINATION_TYPE_NETWORK_ELEMENT = "NETWORK_ELEMENT"
CONST_ASSOCIATED_DESTINATION_TYPE_VSD_COMPONENT = "VSD_COMPONENT"
CONST_ASSOCIATED_DESTINATION_TYPE_ZONE_TEMPLATE = "ZONE_TEMPLATE"
CONST_ASSOCIATED_DESTINATION_TYPE_APPD_SERVICE = "APPD_SERVICE"
CONST_ASSOCIATED_DESTINATION_TYPE_DSCP_FORWARDING_CLASS_MAPPING = "DSCP_FORWARDING_CLASS_MAPPING"
CONST_ASSOCIATED_DESTINATION_TYPE_PAT_IP_ENTRY = "PAT_IP_ENTRY"
CONST_ASSOCIATED_DESTINATION_TYPE_METADATA = "METADATA"
CONST_ASSOCIATED_DESTINATION_TYPE_DHCP_ALLOC_MESSAGE = "DHCP_ALLOC_MESSAGE"
CONST_ASSOCIATED_DESTINATION_TYPE_EGRESS_ACL_TEMPLATE = "EGRESS_ACL_TEMPLATE"
CONST_ASSOCIATED_DESTINATION_TYPE_AUTO_DISC_GATEWAY = "AUTO_DISC_GATEWAY"
CONST_ASSOCIATED_DESTINATION_TYPE_GEO_VM_REQ = "GEO_VM_REQ"
CONST_ASSOCIATED_DESTINATION_TYPE_GEO_VM_RES = "GEO_VM_RES"
CONST_ASSOCIATED_DESTINATION_TYPE_PORT_PUSH = "PORT_PUSH"
CONST_ASSOCIATED_DESTINATION_TYPE_VIRTUAL_MACHINE = "VIRTUAL_MACHINE"
CONST_ASSOCIATED_DESTINATION_TYPE_CONTAINER_RESYNC = "CONTAINER_RESYNC"
CONST_ASSOCIATED_DESTINATION_TYPE_BGP_PROFILE_MED_RESPONSE = "BGP_PROFILE_MED_RESPONSE"
CONST_ASSOCIATED_DESTINATION_TYPE_EAM_VRS_METRICS = "EAM_VRS_METRICS"
CONST_ASSOCIATED_DESTINATION_TYPE_VNID_SEQUENCENO = "VNID_SEQUENCENO"
CONST_ASSOCIATED_DESTINATION_TYPE_DHCP_OPTION = "DHCP_OPTION"
CONST_ASSOCIATED_DESTINATION_TYPE_PORT_RANGE_MED = "PORT_RANGE_MED"
CONST_ASSOCIATED_DESTINATION_TYPE_NSREDUNDANT_GW_GRP = "NSREDUNDANT_GW_GRP"
CONST_ASSOCIATED_DESTINATION_TYPE_ROUTING_POLICY = "ROUTING_POLICY"
CONST_ASSOCIATED_DESTINATION_TYPE_CERTIFICATE = "CERTIFICATE"
CONST_ASSOCIATED_DESTINATION_TYPE_GATEWAY_SECURITY_PROFILE_REQUEST = "GATEWAY_SECURITY_PROFILE_REQUEST"
CONST_ASSOCIATED_DESTINATION_TYPE_STATISTICS = "STATISTICS"
CONST_ASSOCIATED_DESTINATION_TYPE_INGRESS_ADV_FWD_TEMPLATE_ENTRY = "INGRESS_ADV_FWD_TEMPLATE_ENTRY"
CONST_ASSOCIATED_DESTINATION_TYPE_ENDPOINT = "ENDPOINT"
CONST_ASSOCIATED_DESTINATION_TYPE_ENTERPRISE_PERMISSION = "ENTERPRISE_PERMISSION"
CONST_ASSOCIATED_DESTINATION_TYPE_LINKED_DOMAIN_RESP = "LINKED_DOMAIN_RESP"
CONST_ASSOCIATED_DESTINATION_TYPE_PORTMAPPING = "PORTMAPPING"
CONST_ASSOCIATED_DESTINATION_TYPE_ENTERPRISE = "ENTERPRISE"
CONST_ASSOCIATED_DESTINATION_TYPE_VPORT_MIRROR = "VPORT_MIRROR"
CONST_ASSOCIATED_DESTINATION_TYPE_NSPORT_TEMPLATE = "NSPORT_TEMPLATE"
CONST_ASSOCIATED_DESTINATION_TYPE_HSC = "HSC"
CONST_ASSOCIATED_DESTINATION_TYPE_MIRROR_DESTINATION = "MIRROR_DESTINATION"
CONST_ASSOCIATED_DESTINATION_TYPE_DC_CONFIG = "DC_CONFIG"
CONST_ASSOCIATED_DESTINATION_TYPE_BOOTSTRAP = "BOOTSTRAP"
CONST_TYPE_BORDER_ROUTER = "BORDER_ROUTER"
CONST_ASSOCIATED_DESTINATION_TYPE_NETWORK_POLICY_GROUP = "NETWORK_POLICY_GROUP"
CONST_ASSOCIATED_DESTINATION_TYPE_VPRN_LABEL_SEQUENCENO = "VPRN_LABEL_SEQUENCENO"
CONST_ASSOCIATED_DESTINATION_TYPE_VPN_CONNECT = "VPN_CONNECT"
CONST_ASSOCIATED_DESTINATION_TYPE_UPLINK_RD = "UPLINK_RD"
CONST_ASSOCIATED_DESTINATION_TYPE_VLAN_TEMPLATE = "VLAN_TEMPLATE"
CONST_ASSOCIATED_DESTINATION_TYPE_PORT_MR = "PORT_MR"
CONST_ASSOCIATED_DESTINATION_TYPE_PATCONFIG_CONFIG_RESP = "PATCONFIG_CONFIG_RESP"
CONST_ASSOCIATED_DESTINATION_TYPE_NETWORK_MACRO_GROUP = "NETWORK_MACRO_GROUP"
CONST_ASSOCIATED_DESTINATION_TYPE_VMWARE_RELOAD_CONFIG = "VMWARE_RELOAD_CONFIG"
CONST_ASSOCIATED_DESTINATION_TYPE_MC_RANGE = "MC_RANGE"
CONST_ASSOCIATED_DESTINATION_TYPE_INGRESS_EXT_SERVICE_ENTRY = "INGRESS_EXT_SERVICE_ENTRY"
CONST_ASSOCIATED_DESTINATION_TYPE_DOMAIN_FLOATING_IP_ACL_TEMPLATE_ENTRY = "DOMAIN_FLOATING_IP_ACL_TEMPLATE_ENTRY"
CONST_ASSOCIATED_DESTINATION_TYPE_FLOATING_IP_ACL_TEMPLATE_ENTRY = "FLOATING_IP_ACL_TEMPLATE_ENTRY"
CONST_ASSOCIATED_DESTINATION_TYPE_APPD_EXTERNAL_APP_SERVICE = "APPD_EXTERNAL_APP_SERVICE"
CONST_ACCEPTANCE_CRITERIA_ALL = "ALL"
CONST_ASSOCIATED_DESTINATION_TYPE_GATEWAY_SERVICE_CONFIG = "GATEWAY_SERVICE_CONFIG"
CONST_ASSOCIATED_DESTINATION_TYPE_STATS_COLLECTOR = "STATS_COLLECTOR"
CONST_ASSOCIATED_DESTINATION_TYPE_L2DOMAIN_SHARED = "L2DOMAIN_SHARED"
CONST_ASSOCIATED_DESTINATION_TYPE_DOMAIN_CONFIG = "DOMAIN_CONFIG"
CONST_ASSOCIATED_DESTINATION_TYPE_PORT_VLAN_CONFIG = "PORT_VLAN_CONFIG"
CONST_ASSOCIATED_DESTINATION_TYPE_ADDRESS_RANGE = "ADDRESS_RANGE"
CONST_ASSOCIATED_DESTINATION_TYPE_BGP_DAMPENING_MED_RESPONSE = "BGP_DAMPENING_MED_RESPONSE"
CONST_ASSOCIATED_DESTINATION_TYPE_INGRESS_EXT_SERVICE_TEMPLATE = "INGRESS_EXT_SERVICE_TEMPLATE"
CONST_ASSOCIATED_DESTINATION_TYPE_LOCATION = "LOCATION"
CONST_ASSOCIATED_DESTINATION_TYPE_SITE_REQ = "SITE_REQ"
CONST_ASSOCIATED_DESTINATION_TYPE_STATS_TCA = "STATS_TCA"
CONST_ASSOCIATED_DESTINATION_TYPE_CONTAINER_DESCRIPTION = "CONTAINER_DESCRIPTION"
CONST_ASSOCIATED_DESTINATION_TYPE_SUBNET_MAC_ENTRY = "SUBNET_MAC_ENTRY"
CONST_ASSOCIATED_DESTINATION_TYPE_EGRESS_QOS_QUEUE_MR = "EGRESS_QOS_QUEUE_MR"
CONST_ASSOCIATED_DESTINATION_TYPE_NS_REDUNDANT_PORT = "NS_REDUNDANT_PORT"
CONST_ASSOCIATED_DESTINATION_TYPE_SERVICE_VRF_SEQUENCENO = "SERVICE_VRF_SEQUENCENO"
CONST_ASSOCIATED_DESTINATION_TYPE_ZFB_AUTO_ASSIGNMENT_VALUE = "ZFB_AUTO_ASSIGNMENT_VALUE"
CONST_ASSOCIATED_DESTINATION_TYPE_NATMAPENTRY = "NATMAPENTRY"
CONST_ASSOCIATED_DESTINATION_TYPE_LICENSE_STATUS = "LICENSE_STATUS"
CONST_ASSOCIATED_DESTINATION_TYPE_DHCP_CONFIG_RESP = "DHCP_CONFIG_RESP"
CONST_ASSOCIATED_DESTINATION_TYPE_VMWARE_VCENTER = "VMWARE_VCENTER"
CONST_ASSOCIATED_DESTINATION_TYPE_VMWARE_VCENTER_HYPERVISOR = "VMWARE_VCENTER_HYPERVISOR"
CONST_ASSOCIATED_DESTINATION_TYPE_VSD = "VSD"
CONST_ASSOCIATED_DESTINATION_TYPE_SHAPING_POLICY = "SHAPING_POLICY"
CONST_ASSOCIATED_DESTINATION_TYPE_BRIDGEINTERFACE = "BRIDGEINTERFACE"
CONST_ASSOCIATED_DESTINATION_TYPE_VSC = "VSC"
CONST_ASSOCIATED_DESTINATION_TYPE_ENTERPRISE_NETWORK = "ENTERPRISE_NETWORK"
CONST_ASSOCIATED_DESTINATION_TYPE_ENTERPRISE_PROFILE = "ENTERPRISE_PROFILE"
CONST_ASSOCIATED_DESTINATION_TYPE_BULKSTATISTICS = "BULKSTATISTICS"
CONST_ASSOCIATED_DESTINATION_TYPE_EXTERNAL_SERVICE = "EXTERNAL_SERVICE"
CONST_ASSOCIATED_DESTINATION_TYPE_KEYSERVER_MONITOR_SEK = "KEYSERVER_MONITOR_SEK"
CONST_ASSOCIATED_DESTINATION_TYPE_SUBNET_POOL_ENTRY = "SUBNET_POOL_ENTRY"
CONST_ASSOCIATED_DESTINATION_TYPE_IKE_GATEWAY_CONNECTION_REQUEST = "IKE_GATEWAY_CONNECTION_REQUEST"
CONST_ASSOCIATED_DESTINATION_TYPE_CLOUD_MGMT_SYSTEM = "CLOUD_MGMT_SYSTEM"
CONST_ASSOCIATED_DESTINATION_TYPE_GATEWAY_SECURITY_PROFILE_RESPONSE = "GATEWAY_SECURITY_PROFILE_RESPONSE"
CONST_ASSOCIATED_DESTINATION_TYPE_IKE_GATEWAY = "IKE_GATEWAY"
CONST_ASSOCIATED_DESTINATION_TYPE_VIRTUAL_MACHINE_REPORT = "VIRTUAL_MACHINE_REPORT"
CONST_ASSOCIATED_DESTINATION_TYPE_ENTERPRISE_CONFIG_RESP = "ENTERPRISE_CONFIG_RESP"
CONST_ASSOCIATED_DESTINATION_TYPE_DOMAIN_TEMPLATE = "DOMAIN_TEMPLATE"
CONST_ASSOCIATED_DESTINATION_TYPE_MC_CHANNEL_MAP = "MC_CHANNEL_MAP"
CONST_ASSOCIATED_DESTINATION_TYPE_VMWARE_VRS_ADDRESS_RANGE = "VMWARE_VRS_ADDRESS_RANGE"
CONST_ASSOCIATED_DESTINATION_TYPE_VMWARE_VCENTER_VRS_BASE_CONFIG = "VMWARE_VCENTER_VRS_BASE_CONFIG"
CONST_ASSOCIATED_DESTINATION_TYPE_EGRESS_ACL_TEMPLATE_ENTRY = "EGRESS_ACL_TEMPLATE_ENTRY"
CONST_ASSOCIATED_DESTINATION_TYPE_L2DOMAIN = "L2DOMAIN"
def __init__(self, **kwargs):
    """ Initializes a Link instance

        Notes:
            You can specify all attributes as keyword arguments when calling
            this method. A special argument named `data` lets you load the
            object from a Python dictionary instead.

        Examples:
            >>> link = NULink(id=u'xxxx-xxx-xxx-xxx', name=u'Link')
            >>> link = NULink(data=my_dict)
    """

    super(NULink, self).__init__()

    # Read/Write Attributes
    # Private backing fields for the properties defined below; each is
    # registered with the REST layer via expose_attribute() so it can be
    # (de)serialized under its remote (VSD API) name.
    self._last_updated_by = None
    self._acceptance_criteria = None
    self._read_only = None
    self._entity_scope = None
    self._associated_destination_id = None
    self._associated_destination_name = None
    self._associated_destination_type = None
    self._associated_source_id = None
    self._associated_source_name = None
    self._associated_source_type = None
    self._external_id = None
    self._type = None

    self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
    self.expose_attribute(local_name="acceptance_criteria", remote_name="acceptanceCriteria", attribute_type=str, is_required=False, is_unique=False, choices=[u'ALL', u'SUBNETS_ONLY'])
    self.expose_attribute(local_name="read_only", remote_name="readOnly", attribute_type=bool, is_required=False, is_unique=False)
    self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
    self.expose_attribute(local_name="associated_destination_id", remote_name="associatedDestinationID", attribute_type=str, is_required=False, is_unique=False)
    self.expose_attribute(local_name="associated_destination_name", remote_name="associatedDestinationName", attribute_type=str, is_required=False, is_unique=False)
    # NOTE: the enormous choices list below is generator-emitted from the VSD
    # entity catalogue; do not hand-edit individual entries.
    self.expose_attribute(local_name="associated_destination_type", remote_name="associatedDestinationType", attribute_type=str, is_required=False, is_unique=False, choices=[u'ACLENTRY_LOCATION', u'ADDRESS_RANGE', u'ADDRESS_RANGE_STATE', u'ALARM', u'APPD_APPLICATION', u'APPD_EXTERNAL_APP_SERVICE', u'APPD_FLOW', u'APPD_FLOW_FORWARDING_POLICY', u'APPD_FLOW_SECURITY_POLICY', u'APPD_SERVICE', u'APPD_TIER', u'APPLICATION', u'AUTO_DISC_GATEWAY', u'AVATAR', u'BACK_HAUL_SERVICE_RESP', u'BGP_DAMPENING_MED_RESPONSE', u'BGP_NEIGHBOR', u'BGP_NEIGHBOR_MED_RESPONSE', u'BGP_PROFILE', u'BGP_PROFILE_MED_RESPONSE', u'BGPPEER', u'BOOTSTRAP', u'BOOTSTRAP_ACTIVATION', u'BRIDGEINTERFACE', u'BULKSTATISTICS', u'CERTIFICATE', u'CHILD_ENTITY_POLICY_CHANGE', u'CLOUD_MGMT_SYSTEM', u'CONTAINER', u'CONTAINER_DESCRIPTION', u'CONTAINER_INTERFACE', u'CONTAINER_RESYNC', u'CUSTOMER_VRF_SEQUENCENO', u'DC_CONFIG', u'DHCP_ALLOC_MESSAGE', u'DHCP_CONFIG_RESP', u'DHCP_OPTION', u'DISKSTATS', u'DOMAIN', u'DOMAIN_CONFIG', u'DOMAIN_CONFIG_RESP', u'DOMAIN_FLOATING_IP_ACL_TEMPLATE', u'DOMAIN_FLOATING_IP_ACL_TEMPLATE_ENTRY', u'DOMAIN_TEMPLATE', u'DSCP_FORWARDING_CLASS_MAPPING', u'DSCP_FORWARDING_CLASS_TABLE', u'EAM_VRS_METRICS', u'EGRESS_ACL', u'EGRESS_ACL_ENTRY', u'EGRESS_ACL_TEMPLATE', u'EGRESS_ACL_TEMPLATE_ENTRY', u'EGRESS_QOS_MR', u'EGRESS_QOS_PRIMITIVE', u'EGRESS_QOS_QUEUE_MR', u'ENDPOINT', u'ENTERPRISE', u'ENTERPRISE_CONFIG', u'ENTERPRISE_CONFIG_RESP', u'ENTERPRISE_NETWORK', u'ENTERPRISE_PERMISSION', u'ENTERPRISE_PROFILE', u'ENTERPRISE_SECURED_DATA', u'ENTERPRISE_SECURITY', u'ENTITY_METADATA_BINDING', u'ESI_SEQUENCENO', u'EVENT_LOG', u'EVPN_BGP_COMMUNITY_TAG_ENTRY', u'EVPN_BGP_COMMUNITY_TAG_SEQ_NO', u'EXPORTIMPORT', u'EXTERNAL_SERVICE', u'FETCH_HYPERVISOR_PROPERTIES', u'FLOATING_IP_ACL_TEMPLATE', u'FLOATING_IP_ACL_TEMPLATE_ENTRY', u'FLOATINGIP', u'FLOATINGIP_ACL', u'FLOATINGIP_ACL_ENTRY', u'GATEWAY', u'GATEWAY_CONFIG', u'GATEWAY_CONFIG_RESP', u'GATEWAY_SECURED_DATA', u'GATEWAY_SECURITY', u'GATEWAY_SECURITY_PROFILE_REQUEST', u'GATEWAY_SECURITY_PROFILE_RESPONSE', u'GATEWAY_SECURITY_REQUEST', u'GATEWAY_SECURITY_RESPONSE', u'GATEWAY_SERVICE_CONFIG', u'GATEWAY_SERVICE_CONFIG_RESP', u'GATEWAY_STATE', u'GATEWAY_TEMPLATE', u'GATEWAY_VPORT_CONFIG', u'GATEWAY_VPORT_CONFIG_RESP', u'GEO_VM_EVENT', u'GEO_VM_REQ', u'GEO_VM_RES', u'GROUP', u'GROUPKEY_ENCRYPTION_PROFILE', u'HEALTH_REQ', u'HOSTINTERFACE', u'HSC', u'IKE_CERTIFICATE', u'IKE_ENCRYPTION_PROFILE', u'IKE_ENCRYPTION_PROFILE_REQUEST', u'IKE_GATEWAY', u'IKE_GATEWAY_CONFIG', u'IKE_GATEWAY_CONNECTION', u'IKE_GATEWAY_CONNECTION_REQUEST', u'IKE_GATEWAY_PROFILE', u'IKE_PSK', u'IKE_SUBNET', u'INFRASTRUCTURE_CONFIG', u'INFRASTRUCTURE_GATEWAY_PROFILE', u'INFRASTRUCTURE_PORT_PROFILE', u'INFRASTRUCTURE_VSC_PROFILE', u'INGRESS_ACL', u'INGRESS_ACL_ENTRY', u'INGRESS_ACL_TEMPLATE', u'INGRESS_ACL_TEMPLATE_ENTRY', u'INGRESS_ADV_FWD', u'INGRESS_ADV_FWD_ENTRY', u'INGRESS_ADV_FWD_TEMPLATE', u'INGRESS_ADV_FWD_TEMPLATE_ENTRY', u'INGRESS_EXT_SERVICE', u'INGRESS_EXT_SERVICE_ENTRY', u'INGRESS_EXT_SERVICE_TEMPLATE', u'INGRESS_EXT_SERVICE_TEMPLATE_ENTRY', u'IP_BINDING', u'JOB', u'KEYSERVER_MEMBER', u'KEYSERVER_MONITOR', u'KEYSERVER_MONITOR_ENCRYPTED_SEED', u'KEYSERVER_MONITOR_SEED', u'KEYSERVER_MONITOR_SEK', u'KEYSERVER_NOTIFICATION', u'L2DOMAIN', u'L2DOMAIN_SHARED', u'L2DOMAIN_TEMPLATE', u'LDAP_CONFIG', u'LIBVIRT_INTERFACE', u'LICENSE', u'LICENSE_STATUS', u'LINK', u'LINKED_DOMAIN_RESP', u'LOCATION', u'MACRO_GROUP_MED', u'MC_CHANNEL_MAP', u'MC_LIST', u'MC_RANGE', u'METADATA', u'METADATA_TAG', u'MIRROR_DESTINATION', u'MONITORING_PORT', u'MULTI_NIC_VPORT', u'NATMAPENTRY', u'NETWORK_ELEMENT', u'NETWORK_LAYOUT', u'NETWORK_MACRO_GROUP', u'NETWORK_POLICY_GROUP', u'NEXT_HOP', u'NEXT_HOP_RESP', u'NODE_EXECUTION_ERROR', u'NS_REDUNDANT_PORT', u'NSG_INFO', u'NSG_NOTIFICATION', u'NSGATEWAY', u'NSGATEWAY_CONFIG', u'NSGATEWAY_TEMPLATE', u'NSPORT', u'NSPORT_STATIC_CONFIG', u'NSPORT_TEMPLATE', u'NSPORT_VLAN_CONFIG', u'NSREDUNDANT_GW_GRP', u'PAT_IP_ENTRY', u'PATCONFIG_CONFIG_RESP', u'PATMAPPER', u'PATNATPOOL', u'PERMISSION', u'PERMITTED_ACTION', u'POLICING_POLICY', u'POLICY_DECISION', u'POLICY_GROUP', u'POLICY_GROUP_TEMPLATE', u'PORT', u'PORT_MR', u'PORT_PUSH', u'PORT_RANGE_MED', u'PORT_TEMPLATE', u'PORT_VLAN_CONFIG', u'PORT_VLAN_CONFIG_RESPONSE', u'PORTMAPPING', u'PUBLIC_NETWORK', u'QOS_PRIMITIVE', u'RATE_LIMITER', u'RD_SEQUENCENO', u'REDUNDANT_GW_GRP', u'ROUTING_POL_MED_RESPONSE', u'ROUTING_POLICY', u'RTRD_ENTITY', u'RTRD_SEQUENCENO', u'SERVICE_GATEWAY_RESPONSE', u'SERVICE_VRF_SEQUENCENO', u'SERVICES_GATEWAY_RESPONSE', u'SHAPING_POLICY', u'SHARED_RESOURCE', u'SITE', u'SITE_REQ', u'SITE_RES', u'STATIC_ROUTE', u'STATIC_ROUTE_RESP', u'STATISTICS', u'STATS_COLLECTOR', u'STATS_POLICY', u'STATS_TCA', u'STATSSERVER', u'SUBNET', u'SUBNET_ENTRY', u'SUBNET_MAC_ENTRY', u'SUBNET_POOL_ENTRY', u'SUBNET_TEMPLATE', u'SYSTEM_CONFIG', u'SYSTEM_CONFIG_REQ', u'SYSTEM_CONFIG_RESP', u'SYSTEM_MONITORING', u'UNSUPPORTED', u'UPLINK_RD', u'USER', u'VCENTER_FETCH_CLUSTERS', u'VCENTER_FETCH_DATACENTERS', u'VIRTUAL_IP', u'VIRTUAL_MACHINE', u'VIRTUAL_MACHINE_REPORT', u'VLAN', u'VLAN_CONFIG_RESPONSE', u'VLAN_TEMPLATE', u'VM_DESCRIPTION', u'VM_INTERFACE', u'VM_RESYNC', u'VMWARE_ADD_CLUSTER_INSCOPE', u'VMWARE_RELOAD_CONFIG', u'VMWARE_REMOVE_CLUSTER_INSCOPE', u'VMWARE_VCENTER', u'VMWARE_VCENTER_CLUSTER', u'VMWARE_VCENTER_DATACENTER', u'VMWARE_VCENTER_EAM_CONFIG', u'VMWARE_VCENTER_HYPERVISOR', u'VMWARE_VCENTER_VRS_BASE_CONFIG', u'VMWARE_VCENTER_VRS_CONFIG', u'VMWARE_VRS_ADDRESS_RANGE', u'VMWARE_VRS_REDEPLOYMENT_POLICY', u'VNID_SEQUENCENO', u'VPN_CONNECT', u'VPORT', u'VPORT_GATEWAY_RESPONSE', u'VPORT_MEDIATION_REQUEST', u'VPORT_MIRROR', u'VPORT_TAG_BASE', u'VPORTTAG', u'VPORTTAGTEMPLATE', u'VPRN_LABEL_SEQUENCENO', u'VRS', u'VSC', u'VSD', u'VSD_COMPONENT', u'VSG_REDUNDANT_PORT', u'VSP', u'WAN_SERVICE', u'ZFB_AUTO_ASSIGNMENT', u'ZFB_AUTO_ASSIGNMENT_VALUE', u'ZFB_GLOBAL', u'ZFB_REQUEST', u'ZONE', u'ZONE_TEMPLATE'])
    self.expose_attribute(local_name="associated_source_id", remote_name="associatedSourceID", attribute_type=str, is_required=False, is_unique=False)
    self.expose_attribute(local_name="associated_source_name", remote_name="associatedSourceName", attribute_type=str, is_required=False, is_unique=False)
    self.expose_attribute(local_name="associated_source_type", remote_name="associatedSourceType", attribute_type=str, is_required=False, is_unique=False)
    # external_id is the only attribute marked unique here.
    self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)
    self.expose_attribute(local_name="type", remote_name="type", attribute_type=str, is_required=False, is_unique=False, choices=[u'BORDER_ROUTER', u'HUB_AND_SPOKE', u'OVERLAY_ADDRESS_TRANSLATION', u'SERVICE_CHAINING'])

    # Fetchers
    # Child-object fetchers; each exposes lazy retrieval of the named
    # child collection with a "child" relationship to this Link.
    self.demarcation_services = NUDemarcationServicesFetcher.fetcher_with_object(parent_object=self, relationship="child")
    self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
    self.next_hop_address = NUNextHopAddressFetcher.fetcher_with_object(parent_object=self, relationship="child")
    self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
    self.overlay_address_pools = NUOverlayAddressPoolsFetcher.fetcher_with_object(parent_object=self, relationship="child")

    # Applies the keyword arguments (including the optional `data` dict)
    # to the attributes declared above.
    self._compute_args(**kwargs)
# Properties
# Accessor pair for the `lastUpdatedBy` VSD API attribute (registered in __init__).
@property
def last_updated_by(self):
    """ Get last_updated_by value.

        Notes:
            ID of the user who last updated the object.

            This attribute is named `lastUpdatedBy` in VSD API.
    """
    return self._last_updated_by

@last_updated_by.setter
def last_updated_by(self, value):
    """ Set last_updated_by value.

        Notes:
            ID of the user who last updated the object.

            This attribute is named `lastUpdatedBy` in VSD API.
    """
    self._last_updated_by = value
# Accessor pair for `acceptanceCriteria`; valid values (enforced at expose
# time, not in this setter) are ALL and SUBNETS_ONLY.
@property
def acceptance_criteria(self):
    """ Get acceptance_criteria value.

        Notes:
            A route filtering criteria enum. Defaults to ALL.

            This attribute is named `acceptanceCriteria` in VSD API.
    """
    return self._acceptance_criteria

@acceptance_criteria.setter
def acceptance_criteria(self, value):
    """ Set acceptance_criteria value.

        Notes:
            A route filtering criteria enum. Defaults to ALL.

            This attribute is named `acceptanceCriteria` in VSD API.
    """
    self._acceptance_criteria = value
# Accessor pair for the boolean `readOnly` VSD API attribute.
@property
def read_only(self):
    """ Get read_only value.

        Notes:
            This is set to true if a link has been created in the opposite direction

            This attribute is named `readOnly` in VSD API.
    """
    return self._read_only

@read_only.setter
def read_only(self, value):
    """ Set read_only value.

        Notes:
            This is set to true if a link has been created in the opposite direction

            This attribute is named `readOnly` in VSD API.
    """
    self._read_only = value
# Accessor pair for `entityScope`; declared choices are ENTERPRISE and GLOBAL.
@property
def entity_scope(self):
    """ Get entity_scope value.

        Notes:
            Specify if scope of entity is Data center or Enterprise level

            This attribute is named `entityScope` in VSD API.
    """
    return self._entity_scope

@entity_scope.setter
def entity_scope(self, value):
    """ Set entity_scope value.

        Notes:
            Specify if scope of entity is Data center or Enterprise level

            This attribute is named `entityScope` in VSD API.
    """
    self._entity_scope = value
# Accessor pair for `associatedDestinationID`.
@property
def associated_destination_id(self):
    """ Get associated_destination_id value.

        Notes:
            This is the ID of the domain receiving the routes from the source. This can only be set for links of type OVERLAY_ADDRESS_TRANSLATION.

            This attribute is named `associatedDestinationID` in VSD API.
    """
    return self._associated_destination_id

@associated_destination_id.setter
def associated_destination_id(self, value):
    """ Set associated_destination_id value.

        Notes:
            This is the ID of the domain receiving the routes from the source. This can only be set for links of type OVERLAY_ADDRESS_TRANSLATION.

            This attribute is named `associatedDestinationID` in VSD API.
    """
    self._associated_destination_id = value
# Accessor pair for `associatedDestinationName` (no description in the API spec).
@property
def associated_destination_name(self):
    """ Get associated_destination_name value.

        Notes:
            This attribute is named `associatedDestinationName` in VSD API.
    """
    return self._associated_destination_name

@associated_destination_name.setter
def associated_destination_name(self, value):
    """ Set associated_destination_name value.

        Notes:
            This attribute is named `associatedDestinationName` in VSD API.
    """
    self._associated_destination_name = value
# Accessor pair for `associatedDestinationType`; the (large) set of valid
# entity-type strings is declared via expose_attribute() in __init__.
@property
def associated_destination_type(self):
    """ Get associated_destination_type value.

        Notes:
            Type of the entity type for the source

            This attribute is named `associatedDestinationType` in VSD API.
    """
    return self._associated_destination_type

@associated_destination_type.setter
def associated_destination_type(self, value):
    """ Set associated_destination_type value.

        Notes:
            Type of the entity type for the source

            This attribute is named `associatedDestinationType` in VSD API.
    """
    self._associated_destination_type = value
# Accessor pair for `associatedSourceID`.
@property
def associated_source_id(self):
    """ Get associated_source_id value.

        Notes:
            The ID of the domain receiving the routes from another domain

            This attribute is named `associatedSourceID` in VSD API.
    """
    return self._associated_source_id

@associated_source_id.setter
def associated_source_id(self, value):
    """ Set associated_source_id value.

        Notes:
            The ID of the domain receiving the routes from another domain

            This attribute is named `associatedSourceID` in VSD API.
    """
    self._associated_source_id = value
# Accessor pair for `associatedSourceName` (no description in the API spec).
@property
def associated_source_name(self):
    """ Get associated_source_name value.

        Notes:
            This attribute is named `associatedSourceName` in VSD API.
    """
    return self._associated_source_name

@associated_source_name.setter
def associated_source_name(self, value):
    """ Set associated_source_name value.

        Notes:
            This attribute is named `associatedSourceName` in VSD API.
    """
    self._associated_source_name = value
# Accessor pair for `associatedSourceType`.
@property
def associated_source_type(self):
    """ Get associated_source_type value.

        Notes:
            This is the source object type for the associatedSourceID

            This attribute is named `associatedSourceType` in VSD API.
    """
    return self._associated_source_type

@associated_source_type.setter
def associated_source_type(self, value):
    """ Set associated_source_type value.

        Notes:
            This is the source object type for the associatedSourceID

            This attribute is named `associatedSourceType` in VSD API.
    """
    self._associated_source_type = value
# Accessor pair for `externalID`; registered as unique in __init__.
@property
def external_id(self):
    """ Get external_id value.

        Notes:
            External object ID. Used for integration with third party systems

            This attribute is named `externalID` in VSD API.
    """
    return self._external_id

@external_id.setter
def external_id(self, value):
    """ Set external_id value.

        Notes:
            External object ID. Used for integration with third party systems

            This attribute is named `externalID` in VSD API.
    """
    self._external_id = value
# Accessor pair for `type`; declared choices are BORDER_ROUTER, HUB_AND_SPOKE,
# OVERLAY_ADDRESS_TRANSLATION and SERVICE_CHAINING. The setter itself performs
# no validation (plain assignment).
@property
def type(self):
    """ Get type value.

        Notes:
            This is used to distinguish between different type of links: hub and spoke, ip address, VNS border router links.
    """
    return self._type

@type.setter
def type(self, value):
    """ Set type value.

        Notes:
            This is used to distinguish between different type of links: hub and spoke, ip address, VNS border router links.
    """
    self._type = value
| 42.443452
| 5,952
| 0.749293
| 4,976
| 42,783
| 5.913384
| 0.094855
| 0.212676
| 0.237893
| 0.276296
| 0.555242
| 0.36418
| 0.219303
| 0.15983
| 0.142056
| 0.1252
| 0
| 0.00052
| 0.191291
| 42,783
| 1,008
| 5,953
| 42.443452
| 0.849938
| 0.141598
| 0
| 0.030612
| 0
| 0
| 0.272281
| 0.116954
| 0
| 0
| 0
| 0
| 0
| 1
| 0.063776
| false
| 0
| 0.020408
| 0
| 0.834184
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
90dc6096fa655c4be4d6dcceee0580dafe9e34ee
| 434
|
py
|
Python
|
python/tests/arthoolbox/conftest.py
|
ArthurVal/toolbox
|
857a42043183797582b7f05a78937f224f515ec6
|
[
"MIT"
] | null | null | null |
python/tests/arthoolbox/conftest.py
|
ArthurVal/toolbox
|
857a42043183797582b7f05a78937f224f515ec6
|
[
"MIT"
] | null | null | null |
python/tests/arthoolbox/conftest.py
|
ArthurVal/toolbox
|
857a42043183797582b7f05a78937f224f515ec6
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
"""conftest.py file use to configurate pytest for the arthoolbox lib
"""
import pytest
###############################################################################
# PYTEST - HOOKS #
###############################################################################
# TEST - ARTHOOLBOX FIXTURES #
| 33.384615
| 79
| 0.267281
| 22
| 434
| 5.272727
| 0.863636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003425
| 0.327189
| 434
| 12
| 80
| 36.166667
| 0.393836
| 0.497696
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
29068353862d2c9787b4723f4d77fb9cc79a8d21
| 23,803
|
py
|
Python
|
AutomatedTesting/Gem/PythonTests/assetpipeline/wwise_bank_dependency_tests/bank_info_parser_tests.py
|
aaarsene/o3de
|
37e3b0226958974defd14dd6d808e8557dcd7345
|
[
"Apache-2.0",
"MIT"
] | 1
|
2021-09-13T00:01:12.000Z
|
2021-09-13T00:01:12.000Z
|
AutomatedTesting/Gem/PythonTests/assetpipeline/wwise_bank_dependency_tests/bank_info_parser_tests.py
|
aaarsene/o3de
|
37e3b0226958974defd14dd6d808e8557dcd7345
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
AutomatedTesting/Gem/PythonTests/assetpipeline/wwise_bank_dependency_tests/bank_info_parser_tests.py
|
aaarsene/o3de
|
37e3b0226958974defd14dd6d808e8557dcd7345
|
[
"Apache-2.0",
"MIT"
] | 1
|
2021-07-20T11:07:25.000Z
|
2021-07-20T11:07:25.000Z
|
"""
Copyright (c) Contributors to the Open 3D Engine Project. For complete copyright and license terms please see the LICENSE at the root of this distribution.
SPDX-License-Identifier: Apache-2.0 OR MIT
"""
import os
import pytest
import sys
# Name of the Wwise-generated metadata XML file expected in each test folder.
soundbanks_xml_filename = 'SoundbanksInfo.xml'
@pytest.fixture
def soundbank_metadata_generator_setup_fixture(workspace):
    """Shared test resources; 'tests_dir' is the directory containing this file."""
    return {'tests_dir': os.path.dirname(os.path.realpath(__file__))}
def success_case_test(test_folder, expected_dependencies_dict, bank_info, expected_result_code=0):
    """
    Shared success-path assertion helper for the soundbank metadata generator.

    :param test_folder: folder containing the SoundbanksInfo.xml and bank files.
    :param expected_dependencies_dict: mapping of bank path -> {'dependencies': [...], 'events': [...]}.
    :param bank_info: the bank_info_parser module (see get_bank_info).
    :param expected_result_code: result code generate_metadata is expected to return.

    Test Steps:
    1. Make sure the return code is what was expected, and that the expected number of banks were returned.
    2. Validate bank is in the expected dependencies dictionary.
    3. Validate the path to output the metadata file to was assembled correctly.
    4. Validate metadata object for this bank is set, and that it has an object assigned to its dependencies field
        and its includedEvents field
    5. Validate metadata object has the correct number of dependencies, and validated that every expected dependency
        exists in the dependencies list of the metadata object.
    6. Validate metadata object has the correct number of events, and validate that every expected event exists in the
        events of the metadata object.
    """
    expected_bank_count = len(expected_dependencies_dict)
    banks, result_code = bank_info.generate_metadata(
        os.path.join(test_folder, soundbanks_xml_filename),
        test_folder)

    # Make sure the return code is what was expected, and that the expected number of banks were returned.
    # FIX: these comparisons previously used `is`, which tests object identity
    # and only happened to pass because CPython caches small integers; `==` is
    # the correct value comparison (and `is` with literals is a SyntaxWarning
    # since Python 3.8).
    assert result_code == expected_result_code
    assert len(banks) == expected_bank_count

    for bank_index in range(expected_bank_count):
        bank = banks[bank_index]
        # Find a bank of this name in the expected dependencies dictionary.
        assert bank.path in expected_dependencies_dict

        # Make sure the path to output the metadata file to was assembled correctly.
        expected_metadata_filepath = os.path.splitext(os.path.join(test_folder, bank.path))[0] + \
            bank_info.metadata_file_extension
        assert bank.metadata_path == expected_metadata_filepath

        # Make sure the metadata object for this bank is set, and that it has an object assigned to
        # its dependencies field and its includedEvents field
        assert bank.metadata_object
        assert bank.metadata_object['dependencies'] is not None
        assert bank.metadata_object['includedEvents'] is not None

        # Make sure the generated metadata object has the correct number of dependencies, and validated that every
        # expected dependency exists in the dependencies list of the metadata object.
        assert len(bank.metadata_object['dependencies']) == len(expected_dependencies_dict[bank.path]['dependencies'])
        for dependency in expected_dependencies_dict[bank.path]['dependencies']:
            assert dependency in bank.metadata_object['dependencies']

        # Make sure the generated metadata object has the correct number of events, and validate that every expected
        # event exists in the events list of the metadata object.
        assert len(bank.metadata_object['includedEvents']) == len(expected_dependencies_dict[bank.path]['events'])
        for event in expected_dependencies_dict[bank.path]['events']:
            assert event in bank.metadata_object['includedEvents']
def get_bank_info(workspace):
    """Make the engine's Wwise tools importable and return the bank_info_parser module."""
    tools_dir = os.path.join(
        workspace.paths.engine_root(), 'Gems', 'AudioEngineWwise', 'Tools')
    sys.path.append(tools_dir)
    from WwiseAuthoringScripts import bank_info_parser
    return bank_info_parser
@pytest.mark.usefixtures("workspace")
@pytest.mark.SUITE_periodic
@pytest.mark.parametrize("project", ["AutomatedTesting"])
class TestSoundBankMetadataGenerator:
def test_NoMetadataTooFewBanks_ReturnCodeIsError(self, workspace, soundbank_metadata_generator_setup_fixture):
    """
    Trying to generate metadata for banks in a folder with one or fewer banks and no metadata is not possible
    and should fail.

    Test Steps:
    1. Setup testing environment with only 1 bank file
    2. Get Sound Bank Info
    3. Attempt to generate sound bank metadata
    4. Verify that proper error code is returned
    """
    test_assets_folder = os.path.join(soundbank_metadata_generator_setup_fixture['tests_dir'], 'assets',
                                      'test_NoMetadataTooFewBanks_ReturnCodeIsError')
    # The folder is created empty on purpose: generate_metadata must fail
    # when fewer than two banks are present.
    if not os.path.isdir(test_assets_folder):
        os.makedirs(test_assets_folder)

    bank_info = get_bank_info(workspace)
    banks, error_code = bank_info.generate_metadata(
        os.path.join(test_assets_folder, soundbanks_xml_filename),
        test_assets_folder)
    os.rmdir(test_assets_folder)

    # FIX: was `error_code is 2`, which compares identity and only passed due to
    # CPython's small-int cache (SyntaxWarning since Python 3.8); use ==.
    assert error_code == 2, 'Metadata was generated when there were fewer than two banks in the target directory.'
def test_NoMetadataNoContentBank_NoMetadataGenerated(self, workspace, soundbank_metadata_generator_setup_fixture):
    """
    Test Steps:
    1. Setup testing environment
    2. No expected dependencies
    3. Call success case test
    """
    assets_dir = os.path.join(
        soundbank_metadata_generator_setup_fixture['tests_dir'],
        'assets',
        'test_NoMetadataNoContentBank_NoMetadataGenerated')
    # No banks are expected, so the expected-dependency mapping is empty.
    success_case_test(assets_dir, {}, get_bank_info(workspace))
def test_NoMetadataOneContentBank_NoStreamedFiles_OneDependency(self, workspace, soundbank_metadata_generator_setup_fixture):
    """
    When no Wwise metadata is present, and there is only one content bank in the target directory with no wem
    files, then only the content bank should have metadata associated with it. The generated metadata should
    only describe a dependency on the init bank.

    Test Steps:
    1. Setup testing environment
    2. Get current bank info
    3. Build expected dependencies
    4. Call success case test
    """
    test_assets_folder = os.path.join(soundbank_metadata_generator_setup_fixture['tests_dir'], 'assets',
                                      'test_NoMetadataOneContentBank_NoStreamedFiles_OneDependency')
    bank_info = get_bank_info(workspace)
    expected_dependencies = {'Content.bnk': {'dependencies': [bank_info.init_bank_path], 'events': []}}
    # FIX: reuse the module already returned by get_bank_info() instead of
    # calling it again, which re-appends to sys.path on every call.
    success_case_test(test_assets_folder, expected_dependencies, bank_info)
def test_NoMetadataOneContentBank_StreamedFiles_MultipleDependencies(self, workspace,
                                                                    soundbank_metadata_generator_setup_fixture):
    """
    When no Wwise metadata is present, and there is only one content bank in the target directory with wem files
    present, then only the content bank should have metadata associated with it. The generated metadata should
    describe a dependency on the init bank and all wem files in the folder.

    Test Steps:
    1. Setup testing environment
    2. Get current bank info
    3. Build expected dependencies
    4. Call success case test
    """
    test_assets_folder = os.path.join(soundbank_metadata_generator_setup_fixture['tests_dir'], 'assets',
                                      'test_NoMetadataOneContentBank_StreamedFiles_MultipleDependencies')
    bank_info = get_bank_info(workspace)
    expected_dependencies = {
        'Content.bnk': {
            'dependencies': [
                bank_info.init_bank_path,
                '590205561.wem',
                '791740036.wem'
            ],
            'events': []
        }
    }
    # FIX: reuse bank_info rather than calling get_bank_info() a second time.
    success_case_test(test_assets_folder, expected_dependencies, bank_info)
def test_NoMetadataMultipleBanks_OneDependency_ReturnCodeIsWarning(self, workspace, soundbank_metadata_generator_setup_fixture):
    """
    When no Wwise metadata is present, and there are multiple content banks in the target directory with wem files
    present, there is no way to tell which bank requires which wem files. A warning should be emitted,
    stating that the full dependency graph could not be created, and only dependencies on the init bank are
    described in the generated metadata files.

    Test Steps:
    1. Setup testing environment
    2. Get current bank info
    3. Build expected dependencies
    4. Call success case test
    """
    test_assets_folder = os.path.join(soundbank_metadata_generator_setup_fixture['tests_dir'], 'assets',
                                      'test_NoMetadataMultipleBanks_OneDependency_ReturnCodeIsWarning')
    bank_info = get_bank_info(workspace)
    expected_dependencies = {
        'test_bank1.bnk': {'dependencies': [bank_info.init_bank_path], 'events': []},
        'test_bank2.bnk': {'dependencies': [bank_info.init_bank_path], 'events': []}
    }
    # FIX: reuse bank_info rather than calling get_bank_info() a second time.
    # Result code 1 == warning (ambiguous wem ownership).
    success_case_test(test_assets_folder, expected_dependencies, bank_info, expected_result_code=1)
def test_OneContentBank_NoStreamedFiles_OneDependency(self, workspace, soundbank_metadata_generator_setup_fixture):
    """
    Wwise metadata describes one content bank that contains all media needed by its events. Generated metadata
    describes a dependency only on the init bank.

    Test Steps:
    1. Setup testing environment
    2. Get current bank info
    3. Build expected dependencies
    4. Call success case test
    """
    test_assets_folder = os.path.join(soundbank_metadata_generator_setup_fixture['tests_dir'], 'assets',
                                      'test_OneContentBank_NoStreamedFiles_OneDependency')
    bank_info = get_bank_info(workspace)
    expected_dependencies = {
        'test_bank1.bnk': {
            'dependencies': [bank_info.init_bank_path],
            'events': ['test_event_1_bank1_embedded_target']
        }
    }
    # FIX: reuse bank_info rather than calling get_bank_info() a second time.
    success_case_test(test_assets_folder, expected_dependencies, bank_info)
def test_OneContentBank_StreamedFiles_MultipleDependencies(self, workspace, soundbank_metadata_generator_setup_fixture):
    """
    Wwise metadata describes one content bank that references streamed media files needed by its events. Generated
    metadata describes dependencies on the init bank and wems named by the IDs of referenced streamed media.

    Test Steps:
    1. Setup testing environment
    2. Get current bank info
    3. Build expected dependencies
    4. Call success case test
    """
    test_assets_folder = os.path.join(soundbank_metadata_generator_setup_fixture['tests_dir'], 'assets',
                                      'test_OneContentBank_StreamedFiles_MultipleDependencies')
    bank_info = get_bank_info(workspace)
    expected_dependencies = {
        'test_bank1.bnk': {
            'dependencies': [
                bank_info.init_bank_path,
                '590205561.wem',
                '791740036.wem'
            ],
            'events': [
                'test_event_1_bank1_embedded_target',
                'test_event_2_bank1_streamed_target'
            ]
        }
    }
    # FIX: reuse bank_info rather than calling get_bank_info() a second time.
    success_case_test(test_assets_folder, expected_dependencies, bank_info)
def test_MultipleContentBanks_NoStreamedFiles_OneDependency(self, workspace, soundbank_metadata_generator_setup_fixture):
    """
    Wwise metadata describes multiple content banks. Each bank contains all media needed by its events. Generated
    metadata describes each bank having a dependency only on the init bank.

    Test Steps:
    1. Setup testing environment
    2. Get current bank info
    3. Build expected dependencies
    4. Call success case test
    """
    test_assets_folder = os.path.join(soundbank_metadata_generator_setup_fixture['tests_dir'], 'assets',
                                      'test_MultipleContentBanks_NoStreamedFiles_OneDependency')
    bank_info = get_bank_info(workspace)
    expected_dependencies = {
        'test_bank1.bnk': {
            'dependencies': [bank_info.init_bank_path],
            'events': ['test_event_1_bank1_embedded_target']
        },
        'test_bank2.bnk': {
            'dependencies': [bank_info.init_bank_path],
            'events': ['test_event_3_bank2_embedded_target', 'test_event_4_bank2_streamed_target']
        }
    }
    # FIX: reuse bank_info rather than calling get_bank_info() a second time.
    success_case_test(test_assets_folder, expected_dependencies, bank_info)
def test_MultipleContentBanks_Bank1StreamedFiles(self, workspace, soundbank_metadata_generator_setup_fixture):
    """
    Wwise metadata describes multiple content banks. Bank 1 references streamed media files needed by its events,
    while bank 2 contains all media need by its events.

    Test Steps:
    1. Setup testing environment
    2. Get current bank info
    3. Build expected dependencies
    4. Call success case test
    """
    test_assets_folder = os.path.join(soundbank_metadata_generator_setup_fixture['tests_dir'], 'assets',
                                      'test_MultipleContentBanks_Bank1StreamedFiles')
    bank_info = get_bank_info(workspace)
    expected_dependencies = {
        'test_bank1.bnk': {
            'dependencies': [
                bank_info.init_bank_path,
                '590205561.wem'
            ],
            'events': ['test_event_1_bank1_embedded_target', 'test_event_2_bank1_streamed_target']
        },
        'test_bank2.bnk': {
            'dependencies': [bank_info.init_bank_path],
            'events': ['test_event_3_bank2_embedded_target', 'test_event_4_bank2_streamed_target']
        }
    }
    # FIX: reuse bank_info rather than calling get_bank_info() a second time.
    success_case_test(test_assets_folder, expected_dependencies, bank_info)
def test_MultipleContentBanks_SplitBanks_OnlyBankDependenices(self, workspace, soundbank_metadata_generator_setup_fixture):
    """
    Wwise metadata describes multiple content banks. Bank 3 events require media that is contained in bank 4.
    Generated metadata describes each bank having a dependency on the init bank, while bank 3 has an additional
    dependency on bank 4.

    Test Steps:
    1. Setup testing environment
    2. Get current bank info
    3. Build expected dependencies
    4. Call success case test
    """
    test_assets_folder = os.path.join(soundbank_metadata_generator_setup_fixture['tests_dir'], 'assets',
                                      'test_MultipleContentBanks_SplitBanks_OnlyBankDependenices')
    bank_info = get_bank_info(workspace)
    expected_dependencies = {
        'test_bank3.bnk': {
            'dependencies': [
                bank_info.init_bank_path,
                'test_bank4.bnk'
            ],
            'events': ['test_event_5_bank3_embedded_target_bank4']
        },
        'test_bank4.bnk': {'dependencies': [bank_info.init_bank_path], 'events': []}
    }
    # FIX: reuse bank_info rather than calling get_bank_info() a second time.
    success_case_test(test_assets_folder, expected_dependencies, bank_info)
def test_MultipleContentBanks_ReferencedEvent_MediaEmbeddedInBank(self, workspace, soundbank_metadata_generator_setup_fixture):
    """
    Wwise metadata describes multiple content banks. Bank 1 contains all media required by its events, while bank
    5 contains a reference to an event in bank 1, but no media for that event. Generated metadata describes both
    banks having a dependency on the init bank, while bank 5 has an additional dependency on bank 1.

    Test Steps:
    1. Setup testing environment
    2. Get current bank info
    3. Build expected dependencies
    4. Call success case test
    """
    test_assets_folder = os.path.join(soundbank_metadata_generator_setup_fixture['tests_dir'], 'assets',
                                      'test_MultipleContentBanks_ReferencedEvent_MediaEmbeddedInBank')
    bank_info = get_bank_info(workspace)
    expected_dependencies = {
        'test_bank1.bnk': {
            'dependencies': [bank_info.init_bank_path],
            'events': ['test_event_1_bank1_embedded_target']
        },
        'test_bank5.bnk': {
            'dependencies': [
                bank_info.init_bank_path,
                'test_bank1.bnk'
            ],
            'events': ['test_event_1_bank1_embedded_target', 'test_event_7_bank5_referenced_event_bank1_embedded']
        }
    }
    # FIX: reuse bank_info rather than calling get_bank_info() a second time.
    success_case_test(test_assets_folder, expected_dependencies, bank_info)
def test_MultipleContentBanks_ReferencedEvent_MediaStreamed(self, workspace, soundbank_metadata_generator_setup_fixture):
    """
    Wwise metadata describes multiple content banks. Bank 1 references streamed media files needed by its events,
    while bank 5 contains a reference to an event in bank 1. This causes bank 5 to also describe a reference to
    the streamed media file referenced by the event from bank 1. Generated metadata describes both banks having
    dependencies on the init bank, as well as the wem named by the ID of referenced streamed media.

    Test Steps:
    1. Setup testing environment
    2. Get current bank info
    3. Build expected dependencies
    4. Call success case test
    """
    test_assets_folder = os.path.join(soundbank_metadata_generator_setup_fixture['tests_dir'], 'assets',
                                      'test_MultipleContentBanks_ReferencedEvent_MediaStreamed')
    bank_info = get_bank_info(workspace)
    expected_dependencies = {
        'test_bank1.bnk': {
            'dependencies': [
                bank_info.init_bank_path,
                '590205561.wem'
            ],
            'events': ['test_event_2_bank1_streamed_target']
        },
        'test_bank5.bnk': {
            'dependencies': [
                bank_info.init_bank_path,
                '590205561.wem'
            ],
            'events': ['test_event_2_bank1_streamed_target', 'test_event_8_bank5_referenced_event_bank1_streamed']
        }
    }
    # FIX: reuse bank_info rather than calling get_bank_info() a second time.
    success_case_test(test_assets_folder, expected_dependencies, bank_info)
def test_MultipleContentBanks_ReferencedEvent_MixedSources(self, workspace, soundbank_metadata_generator_setup_fixture):
"""
Wwise metadata describes multiple content banks. Bank 1 references a streamed media files needed by one of its
events, and contains all media needed for its other events, while bank 5 contains a reference to two events
in bank 1: one that requires streamed media, and one that requires media embedded in bank 1. Generated
metadata describes both banks having dependencies on the init bank and the wem named by the ID of referenced
streamed media, while bank 5 has an additional dependency on bank 1.
Test Steps:
1. Setup testing environment
2. Get current bank info
3. Build expected dependencies
4. Call success case test
"""
test_assets_folder = os.path.join(soundbank_metadata_generator_setup_fixture['tests_dir'], 'assets',
'test_MultipleContentBanks_ReferencedEvent_MixedSources')
bank_info = get_bank_info(workspace)
expected_dependencies = {
'test_bank1.bnk': {
'dependencies': [
bank_info.init_bank_path,
'590205561.wem'
],
'events': ['test_event_1_bank1_embedded_target', 'test_event_2_bank1_streamed_target']
},
'test_bank5.bnk': {
'dependencies': [
bank_info.init_bank_path,
'test_bank1.bnk',
'590205561.wem'
],
'events': [
'test_event_1_bank1_embedded_target',
'test_event_2_bank1_streamed_target',
'test_event_7_bank5_referenced_event_bank1_embedded',
'test_event_8_bank5_referenced_event_bank1_streamed'
]
}
}
success_case_test(test_assets_folder, expected_dependencies, get_bank_info(workspace))
def test_MultipleContentBanks_VaryingDependencies_MixedSources(self, workspace, soundbank_metadata_generator_setup_fixture):
"""
Wwise metadata describes multiple content banks that have varying dependencies on each other, and dependencies
on streamed media files.
Test Steps:
1. Setup testing environment
2. Get current bank info
3. Build expected dependencies
4. Call success case test
"""
test_assets_folder = os.path.join(soundbank_metadata_generator_setup_fixture['tests_dir'], 'assets',
'test_MultipleContentBanks_VaryingDependencies_MixedSources')
bank_info = get_bank_info(workspace)
expected_dependencies = {
'test_bank1.bnk': {
'dependencies': [
bank_info.init_bank_path,
'590205561.wem'
],
'events': ['test_event_1_bank1_embedded_target', 'test_event_2_bank1_streamed_target']
},
'test_bank2.bnk': {
'dependencies': [bank_info.init_bank_path],
'events': ['test_event_3_bank2_embedded_target', 'test_event_4_bank2_streamed_target']
},
'test_bank3.bnk': {
'dependencies': [
bank_info.init_bank_path,
'791740036.wem',
'test_bank4.bnk'
],
'events': ['test_event_5_bank3_embedded_target_bank4', 'test_event_6_bank3_streamed_target_bank4']
},
'test_bank4.bnk': {'dependencies': [bank_info.init_bank_path], 'events': []},
'test_bank5.bnk': {
'dependencies': [
bank_info.init_bank_path,
'test_bank1.bnk',
'590205561.wem'
],
'events': [
'test_event_1_bank1_embedded_target',
'test_event_2_bank1_streamed_target',
'test_event_7_bank5_referenced_event_bank1_embedded',
'test_event_8_bank5_referenced_event_bank1_streamed'
]
},
'test_bank6.bnk': {
'dependencies': [bank_info.init_bank_path],
'events': [
'test_event_3_bank2_embedded_target',
'test_event_4_bank2_streamed_target',
'test_event_9_bank6_referenced_event_bank2_embedded',
'test_event_10_bank6_referenced_event_bank2_streamed'
]
},
}
success_case_test(test_assets_folder, expected_dependencies, get_bank_info(workspace))
| 46.129845
| 155
| 0.6508
| 2,629
| 23,803
| 5.599087
| 0.097756
| 0.045652
| 0.034783
| 0.061073
| 0.793139
| 0.767731
| 0.749932
| 0.730435
| 0.716916
| 0.68587
| 0
| 0.018458
| 0.2853
| 23,803
| 515
| 156
| 46.219417
| 0.846814
| 0.283494
| 0
| 0.526882
| 0
| 0
| 0.229093
| 0.136175
| 0
| 0
| 0
| 0
| 0.043011
| 1
| 0.060932
| false
| 0
| 0.014337
| 0
| 0.086022
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
290afce313a2f8fb3f4f228703adf862079f1f0b
| 48
|
py
|
Python
|
Python/Tests/TestData/Grammar/InvalidUnicodeLiteral26Up.py
|
techkey/PTVS
|
8355e67eedd8e915ca49bd38a2f36172696fd903
|
[
"Apache-2.0"
] | 695
|
2019-05-06T23:49:37.000Z
|
2022-03-30T01:56:00.000Z
|
Python/Tests/TestData/Grammar/InvalidUnicodeLiteral26Up.py
|
techkey/PTVS
|
8355e67eedd8e915ca49bd38a2f36172696fd903
|
[
"Apache-2.0"
] | 1,672
|
2019-05-06T21:09:38.000Z
|
2022-03-31T23:16:04.000Z
|
Python/Tests/TestData/Grammar/InvalidUnicodeLiteral26Up.py
|
techkey/PTVS
|
8355e67eedd8e915ca49bd38a2f36172696fd903
|
[
"Apache-2.0"
] | 186
|
2019-05-13T03:17:37.000Z
|
2022-03-31T16:24:05.000Z
|
from __future__ import unicode_literals
'\uTEST'
| 24
| 39
| 0.854167
| 6
| 48
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 48
| 2
| 40
| 24
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0.122449
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.5
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
291ad2a68aa957958e5ac3e38b28cff3d12fb746
| 37
|
py
|
Python
|
gwrappy/errors.py
|
hairizuanbinnoorazman/gwrappy
|
aae569eb87d0aeac6126ccceac8a208b8dfdcf51
|
[
"Apache-2.0"
] | 5
|
2016-09-21T10:27:05.000Z
|
2017-03-13T11:37:16.000Z
|
gwrappy/errors.py
|
hairizuanbinnoorazman/gwrappy
|
aae569eb87d0aeac6126ccceac8a208b8dfdcf51
|
[
"Apache-2.0"
] | 1
|
2021-11-15T17:46:52.000Z
|
2021-11-15T17:46:52.000Z
|
gwrappy/errors.py
|
hairizuanbinnoorazman/gwrappy
|
aae569eb87d0aeac6126ccceac8a208b8dfdcf51
|
[
"Apache-2.0"
] | 2
|
2016-09-21T10:34:59.000Z
|
2017-04-05T10:38:10.000Z
|
from googleapiclient.errors import *
| 18.5
| 36
| 0.837838
| 4
| 37
| 7.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 37
| 1
| 37
| 37
| 0.939394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
2928b414f307fab10cd91f8e49a12103be2dc292
| 104
|
py
|
Python
|
adafruit_rgb_display/__init__.py
|
philippkeller/Adafruit_CircuitPython_RGB_Display
|
0f6ca4d2cb78c0a69a4025b4b59780c4afb967b8
|
[
"MIT"
] | null | null | null |
adafruit_rgb_display/__init__.py
|
philippkeller/Adafruit_CircuitPython_RGB_Display
|
0f6ca4d2cb78c0a69a4025b4b59780c4afb967b8
|
[
"MIT"
] | null | null | null |
adafruit_rgb_display/__init__.py
|
philippkeller/Adafruit_CircuitPython_RGB_Display
|
0f6ca4d2cb78c0a69a4025b4b59780c4afb967b8
|
[
"MIT"
] | 1
|
2020-04-30T15:20:37.000Z
|
2020-04-30T15:20:37.000Z
|
"""Auto imports for Adafruit_CircuitPython_RGB_Display"""
from adafruit_rgb_display.rgb import color565
| 34.666667
| 57
| 0.855769
| 14
| 104
| 6
| 0.714286
| 0.238095
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03125
| 0.076923
| 104
| 2
| 58
| 52
| 0.84375
| 0.490385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
293af42b6c68c1bb12ca954e472e8777a47b35e2
| 69
|
py
|
Python
|
01-Hola-Mundo/holamundo.py
|
wparedesgt/Master-Python
|
b0e8963a5a95d479ef929c2d482be50a1959a18f
|
[
"BSD-3-Clause"
] | null | null | null |
01-Hola-Mundo/holamundo.py
|
wparedesgt/Master-Python
|
b0e8963a5a95d479ef929c2d482be50a1959a18f
|
[
"BSD-3-Clause"
] | null | null | null |
01-Hola-Mundo/holamundo.py
|
wparedesgt/Master-Python
|
b0e8963a5a95d479ef929c2d482be50a1959a18f
|
[
"BSD-3-Clause"
] | null | null | null |
print("#############")
print("Hola Mundo !!!")
print("#############")
| 23
| 23
| 0.347826
| 5
| 69
| 4.8
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057971
| 69
| 3
| 24
| 23
| 0.369231
| 0
| 0
| 0.666667
| 0
| 0
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
2942924e85fb4eea725f38fb6c5e7e5e9d37e073
| 555
|
py
|
Python
|
tools/leetcode.022.Generate Parentheses/leetcode.022.Generate Parentheses.submission3.py
|
tedye/leetcode
|
975d7e3b8cb9b6be9e80e07febf4bcf6414acd46
|
[
"MIT"
] | 4
|
2015-10-10T00:30:55.000Z
|
2020-07-27T19:45:54.000Z
|
tools/leetcode.022.Generate Parentheses/leetcode.022.Generate Parentheses.submission3.py
|
tedye/leetcode
|
975d7e3b8cb9b6be9e80e07febf4bcf6414acd46
|
[
"MIT"
] | null | null | null |
tools/leetcode.022.Generate Parentheses/leetcode.022.Generate Parentheses.submission3.py
|
tedye/leetcode
|
975d7e3b8cb9b6be9e80e07febf4bcf6414acd46
|
[
"MIT"
] | null | null | null |
class Solution:
# @param an integer
# @return a list of string
def generateParenthesis(self, n):
if n == 0: return [""]
l = set()
self.generateHelper(l,n,n,"")
return list(l)
def generateHelper(self,l,left,right,current):
if left == right == 0:
l.add(current)
return
if left > 0:
self.generateHelper(l,left-1,right,current+'(')
if right > 0 and left < right:
self.generateHelper(l,left,right-1,current+')')
| 555
| 555
| 0.513514
| 66
| 555
| 4.318182
| 0.378788
| 0.126316
| 0.2
| 0.161404
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016854
| 0.358559
| 555
| 1
| 555
| 555
| 0.783708
| 0.075676
| 0
| 0
| 1
| 0
| 0.003914
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0
| 0
| 0.357143
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
2961f8be00b18bed3e37dc74aff149e534f4c8a4
| 119
|
py
|
Python
|
python3/problem0010.py
|
Furisuke/ProjectEuler
|
6f91e35fa394300d3f3761e4ab8c20824e4711ac
|
[
"MIT"
] | 1
|
2015-12-19T09:43:02.000Z
|
2015-12-19T09:43:02.000Z
|
python3/problem0010.py
|
Furisuke/ProjectEuler
|
6f91e35fa394300d3f3761e4ab8c20824e4711ac
|
[
"MIT"
] | null | null | null |
python3/problem0010.py
|
Furisuke/ProjectEuler
|
6f91e35fa394300d3f3761e4ab8c20824e4711ac
|
[
"MIT"
] | null | null | null |
from problem0003 import primes
from itertools import takewhile
print(sum(takewhile(lambda x: x < 2000000, primes())))
| 23.8
| 54
| 0.781513
| 16
| 119
| 5.8125
| 0.6875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105769
| 0.12605
| 119
| 4
| 55
| 29.75
| 0.788462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
461c41c0011838f90970c110da5e7f653a1ffdc3
| 234
|
py
|
Python
|
classify_predict/__init__.py
|
dinckaniskan/ML-Inferencing-Durable-Function-Workflow
|
396be4be2ee093bc3fa4a8a6f325e10bc4ce95e4
|
[
"MIT"
] | null | null | null |
classify_predict/__init__.py
|
dinckaniskan/ML-Inferencing-Durable-Function-Workflow
|
396be4be2ee093bc3fa4a8a6f325e10bc4ce95e4
|
[
"MIT"
] | null | null | null |
classify_predict/__init__.py
|
dinckaniskan/ML-Inferencing-Durable-Function-Workflow
|
396be4be2ee093bc3fa4a8a6f325e10bc4ce95e4
|
[
"MIT"
] | null | null | null |
"""
"""
import logging
from .predict import predict_image_from_url
def main(imageUrl: str) -> str:
logging.info('Running prediction on: ' + imageUrl)
results = predict_image_from_url(imageUrl)
return results
| 19.5
| 55
| 0.688034
| 28
| 234
| 5.535714
| 0.571429
| 0.154839
| 0.206452
| 0.245161
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.213675
| 234
| 12
| 56
| 19.5
| 0.842391
| 0
| 0
| 0
| 0
| 0
| 0.106481
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
465bc3e3d2772da7cf9aa762d35583f530de8f19
| 13,090
|
py
|
Python
|
tests/components/wiz/test_config_flow.py
|
wstewart15/core
|
854d7d49367d560406d6099a5ba56a0be6c0b9c7
|
[
"Apache-2.0"
] | null | null | null |
tests/components/wiz/test_config_flow.py
|
wstewart15/core
|
854d7d49367d560406d6099a5ba56a0be6c0b9c7
|
[
"Apache-2.0"
] | null | null | null |
tests/components/wiz/test_config_flow.py
|
wstewart15/core
|
854d7d49367d560406d6099a5ba56a0be6c0b9c7
|
[
"Apache-2.0"
] | null | null | null |
"""Test the WiZ Platform config flow."""
from unittest.mock import patch
import pytest
from pywizlight.exceptions import WizLightConnectionError, WizLightTimeOutError
from homeassistant import config_entries
from homeassistant.components import dhcp
from homeassistant.components.wiz.config_flow import CONF_DEVICE
from homeassistant.components.wiz.const import DOMAIN
from homeassistant.const import CONF_HOST
from homeassistant.data_entry_flow import RESULT_TYPE_ABORT, RESULT_TYPE_FORM
from . import (
FAKE_BULB_CONFIG,
FAKE_DIMMABLE_BULB,
FAKE_EXTENDED_WHITE_RANGE,
FAKE_IP,
FAKE_MAC,
FAKE_RGBW_BULB,
FAKE_RGBWW_BULB,
FAKE_SOCKET,
FAKE_SOCKET_CONFIG,
TEST_CONNECTION,
TEST_SYSTEM_INFO,
_patch_discovery,
_patch_wizlight,
)
from tests.common import MockConfigEntry
DHCP_DISCOVERY = dhcp.DhcpServiceInfo(
hostname="wiz_abcabc",
ip=FAKE_IP,
macaddress=FAKE_MAC,
)
INTEGRATION_DISCOVERY = {
"ip_address": FAKE_IP,
"mac_address": FAKE_MAC,
}
async def test_form(hass):
"""Test we get the form."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
# Patch functions
with _patch_wizlight(), patch(
"homeassistant.components.wiz.async_setup_entry",
return_value=True,
) as mock_setup_entry, patch(
"homeassistant.components.wiz.async_setup", return_value=True
) as mock_setup:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
TEST_CONNECTION,
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "WiZ Dimmable White ABCABC"
assert result2["data"] == {
CONF_HOST: "1.1.1.1",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_user_flow_enters_dns_name(hass):
"""Test we reject dns names and want ips."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_HOST: "ip.only"},
)
await hass.async_block_till_done()
assert result2["type"] == RESULT_TYPE_FORM
assert result2["errors"] == {"base": "no_ip"}
with _patch_wizlight(), patch(
"homeassistant.components.wiz.async_setup_entry",
return_value=True,
) as mock_setup_entry, patch(
"homeassistant.components.wiz.async_setup", return_value=True
) as mock_setup:
result3 = await hass.config_entries.flow.async_configure(
result2["flow_id"],
TEST_CONNECTION,
)
await hass.async_block_till_done()
assert result3["type"] == "create_entry"
assert result3["title"] == "WiZ Dimmable White ABCABC"
assert result3["data"] == {
CONF_HOST: "1.1.1.1",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
@pytest.mark.parametrize(
"side_effect, error_base",
[
(WizLightTimeOutError, "bulb_time_out"),
(WizLightConnectionError, "no_wiz_light"),
(Exception, "unknown"),
(ConnectionRefusedError, "cannot_connect"),
],
)
async def test_user_form_exceptions(hass, side_effect, error_base):
"""Test all user exceptions in the flow."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.wiz.wizlight.getBulbConfig",
side_effect=side_effect,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
TEST_CONNECTION,
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": error_base}
async def test_form_updates_unique_id(hass):
"""Test a duplicate id aborts and updates existing entry."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id=TEST_SYSTEM_INFO["id"],
data={CONF_HOST: "dummy"},
)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with _patch_wizlight():
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
TEST_CONNECTION,
)
await hass.async_block_till_done()
assert result2["type"] == "abort"
assert result2["reason"] == "already_configured"
assert entry.data[CONF_HOST] == FAKE_IP
@pytest.mark.parametrize(
"source, data",
[
(config_entries.SOURCE_DHCP, DHCP_DISCOVERY),
(config_entries.SOURCE_INTEGRATION_DISCOVERY, INTEGRATION_DISCOVERY),
],
)
async def test_discovered_by_dhcp_connection_fails(hass, source, data):
"""Test we abort on connection failure."""
with patch(
"homeassistant.components.wiz.wizlight.getBulbConfig",
side_effect=WizLightTimeOutError,
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": source}, data=data
)
await hass.async_block_till_done()
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "cannot_connect"
@pytest.mark.parametrize(
"source, data, device, bulb_type, extended_white_range, name",
[
(
config_entries.SOURCE_DHCP,
DHCP_DISCOVERY,
FAKE_BULB_CONFIG,
FAKE_DIMMABLE_BULB,
FAKE_EXTENDED_WHITE_RANGE,
"WiZ Dimmable White ABCABC",
),
(
config_entries.SOURCE_INTEGRATION_DISCOVERY,
INTEGRATION_DISCOVERY,
FAKE_BULB_CONFIG,
FAKE_DIMMABLE_BULB,
FAKE_EXTENDED_WHITE_RANGE,
"WiZ Dimmable White ABCABC",
),
(
config_entries.SOURCE_DHCP,
DHCP_DISCOVERY,
FAKE_BULB_CONFIG,
FAKE_RGBW_BULB,
FAKE_EXTENDED_WHITE_RANGE,
"WiZ RGBW Tunable ABCABC",
),
(
config_entries.SOURCE_INTEGRATION_DISCOVERY,
INTEGRATION_DISCOVERY,
FAKE_BULB_CONFIG,
FAKE_RGBW_BULB,
FAKE_EXTENDED_WHITE_RANGE,
"WiZ RGBW Tunable ABCABC",
),
(
config_entries.SOURCE_DHCP,
DHCP_DISCOVERY,
FAKE_BULB_CONFIG,
FAKE_RGBWW_BULB,
FAKE_EXTENDED_WHITE_RANGE,
"WiZ RGBWW Tunable ABCABC",
),
(
config_entries.SOURCE_INTEGRATION_DISCOVERY,
INTEGRATION_DISCOVERY,
FAKE_BULB_CONFIG,
FAKE_RGBWW_BULB,
FAKE_EXTENDED_WHITE_RANGE,
"WiZ RGBWW Tunable ABCABC",
),
(
config_entries.SOURCE_DHCP,
DHCP_DISCOVERY,
FAKE_SOCKET_CONFIG,
FAKE_SOCKET,
None,
"WiZ Socket ABCABC",
),
(
config_entries.SOURCE_INTEGRATION_DISCOVERY,
INTEGRATION_DISCOVERY,
FAKE_SOCKET_CONFIG,
FAKE_SOCKET,
None,
"WiZ Socket ABCABC",
),
],
)
async def test_discovered_by_dhcp_or_integration_discovery(
hass, source, data, device, bulb_type, extended_white_range, name
):
"""Test we can configure when discovered from dhcp or discovery."""
with _patch_wizlight(
device=device, extended_white_range=extended_white_range, bulb_type=bulb_type
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": source}, data=data
)
await hass.async_block_till_done()
assert result["type"] == RESULT_TYPE_FORM
assert result["step_id"] == "discovery_confirm"
with patch(
"homeassistant.components.wiz.async_setup_entry",
return_value=True,
) as mock_setup_entry, patch(
"homeassistant.components.wiz.async_setup", return_value=True
) as mock_setup:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == name
assert result2["data"] == {
CONF_HOST: "1.1.1.1",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
@pytest.mark.parametrize(
"source, data",
[
(config_entries.SOURCE_DHCP, DHCP_DISCOVERY),
(config_entries.SOURCE_INTEGRATION_DISCOVERY, INTEGRATION_DISCOVERY),
],
)
async def test_discovered_by_dhcp_or_integration_discovery_updates_host(
hass, source, data
):
"""Test dhcp or discovery updates existing host."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id=TEST_SYSTEM_INFO["id"],
data={CONF_HOST: "dummy"},
)
entry.add_to_hass(hass)
with _patch_wizlight():
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": source}, data=data
)
await hass.async_block_till_done()
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
assert entry.data[CONF_HOST] == FAKE_IP
async def test_setup_via_discovery(hass):
"""Test setting up via discovery."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
await hass.async_block_till_done()
assert result["type"] == "form"
assert result["step_id"] == "user"
assert not result["errors"]
with _patch_discovery():
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {})
await hass.async_block_till_done()
assert result2["type"] == "form"
assert result2["step_id"] == "pick_device"
assert not result2["errors"]
# test we can try again
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert not result["errors"]
with _patch_discovery():
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {})
await hass.async_block_till_done()
assert result2["type"] == "form"
assert result2["step_id"] == "pick_device"
assert not result2["errors"]
with _patch_wizlight(), patch(
"homeassistant.components.wiz.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.wiz.async_setup_entry", return_value=True
) as mock_setup_entry:
result3 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_DEVICE: FAKE_MAC},
)
await hass.async_block_till_done()
assert result3["type"] == "create_entry"
assert result3["title"] == "WiZ Dimmable White ABCABC"
assert result3["data"] == {
CONF_HOST: "1.1.1.1",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
# ignore configured devices
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert not result["errors"]
with _patch_discovery():
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {})
await hass.async_block_till_done()
assert result2["type"] == "abort"
assert result2["reason"] == "no_devices_found"
async def test_setup_via_discovery_cannot_connect(hass):
"""Test setting up via discovery and we fail to connect to the discovered device."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
await hass.async_block_till_done()
assert result["type"] == "form"
assert result["step_id"] == "user"
assert not result["errors"]
with _patch_discovery():
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {})
await hass.async_block_till_done()
assert result2["type"] == "form"
assert result2["step_id"] == "pick_device"
assert not result2["errors"]
with patch(
"homeassistant.components.wiz.wizlight.getBulbConfig",
side_effect=WizLightTimeOutError,
), _patch_discovery():
result3 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_DEVICE: FAKE_MAC},
)
await hass.async_block_till_done()
assert result3["type"] == "abort"
assert result3["reason"] == "cannot_connect"
| 31.24105
| 88
| 0.649351
| 1,498
| 13,090
| 5.371162
| 0.099466
| 0.071091
| 0.042878
| 0.062888
| 0.796793
| 0.792941
| 0.766468
| 0.763734
| 0.757892
| 0.716754
| 0
| 0.006872
| 0.244079
| 13,090
| 418
| 89
| 31.315789
| 0.806266
| 0.007563
| 0
| 0.695157
| 0
| 0
| 0.132972
| 0.041344
| 0
| 0
| 0
| 0
| 0.179487
| 1
| 0
| false
| 0
| 0.031339
| 0
| 0.031339
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
46a76abc9e955dea1371346aeb0beb632e52b606
| 65
|
py
|
Python
|
Validation/RecoTau/python/dataTypes/ValidateTausOnFastSimZTT_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 852
|
2015-01-11T21:03:51.000Z
|
2022-03-25T21:14:00.000Z
|
Validation/RecoTau/python/dataTypes/ValidateTausOnFastSimZTT_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 30,371
|
2015-01-02T00:14:40.000Z
|
2022-03-31T23:26:05.000Z
|
Validation/RecoTau/python/dataTypes/ValidateTausOnFastSimZTT_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 3,240
|
2015-01-02T05:53:18.000Z
|
2022-03-31T17:24:21.000Z
|
from Validation.RecoTau.dataTypes.ValidateTausOnZTT_cff import *
| 32.5
| 64
| 0.876923
| 7
| 65
| 8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061538
| 65
| 1
| 65
| 65
| 0.918033
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
46b12d2d5b14df7d24373961bd4041fa2a5e20c9
| 1,587
|
py
|
Python
|
PythonLibrary/GlobalUtilities/test_List_Time_Zone.py
|
ashishp100194/timezone_api_automation
|
8a896f2d790d0a0916067bf2c3e3d4d4193921dd
|
[
"Apache-2.0"
] | 1
|
2020-07-13T04:22:55.000Z
|
2020-07-13T04:22:55.000Z
|
PythonLibrary/GlobalUtilities/test_List_Time_Zone.py
|
ashishp100194/timezone_api_automation
|
8a896f2d790d0a0916067bf2c3e3d4d4193921dd
|
[
"Apache-2.0"
] | null | null | null |
PythonLibrary/GlobalUtilities/test_List_Time_Zone.py
|
ashishp100194/timezone_api_automation
|
8a896f2d790d0a0916067bf2c3e3d4d4193921dd
|
[
"Apache-2.0"
] | null | null | null |
from BaseClass import BaseClass
from Variables import Variables
def test_List_Time_Zone_TC1():
Base=BaseClass()
Base.Timezone("List_Time_Zone", "List_Time_Zone_TC1")
def test_List_Time_Zone_TC2():
Base=BaseClass()
Base.Timezone("List_Time_Zone", "List_Time_Zone_TC2")
def test_List_Time_Zone_TC3():
Base=BaseClass()
Base.Timezone("List_Time_Zone", "List_Time_Zone_TC3")
def test_List_Time_Zone_TC4():
Base = BaseClass()
Base.Timezone("List_Time_Zone", "List_Time_Zone_TC4",Variables["api_base_url"], Variables['Invalid_Key'])
def test_List_Time_Zone_TC5():
Base=BaseClass()
Base.Timezone("List_Time_Zone", "List_Time_Zone_TC5")
def test_List_Time_Zone_TC6():
Base=BaseClass()
Base.Timezone("List_Time_Zone", "List_Time_Zone_TC6")
def test_List_Time_Zone_TC7():
Base=BaseClass()
Base.Timezone("List_Time_Zone", "List_Time_Zone_TC7")
def test_List_Time_Zone_TC8():
Base=BaseClass()
Base.Timezone("List_Time_Zone", "List_Time_Zone_TC8")
def test_List_Time_Zone_TC9():
Base=BaseClass()
Base.Timezone("List_Time_Zone", "List_Time_Zone_TC9")
def test_List_Time_Zone_TC10():
Base=BaseClass()
Base.Timezone("List_Time_Zone", "List_Time_Zone_TC10")
def execute_List_Time_Zone():
test_List_Time_Zone_TC1()
test_List_Time_Zone_TC2()
test_List_Time_Zone_TC3()
test_List_Time_Zone_TC4()
test_List_Time_Zone_TC5()
test_List_Time_Zone_TC6()
test_List_Time_Zone_TC7()
test_List_Time_Zone_TC8()
test_List_Time_Zone_TC9()
test_List_Time_Zone_TC10()
execute_List_Time_Zone()
| 28.339286
| 109
| 0.760555
| 244
| 1,587
| 4.377049
| 0.106557
| 0.314607
| 0.47191
| 0.299625
| 0.844569
| 0.458802
| 0.458802
| 0.458802
| 0.458802
| 0.458802
| 0
| 0.023878
| 0.129175
| 1,587
| 56
| 110
| 28.339286
| 0.748915
| 0
| 0
| 0.227273
| 0
| 0
| 0.216625
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.045455
| 0
| 0.295455
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d3bbf96679a2eb2d7fdd5cb3f7928ce05a1a392b
| 171
|
py
|
Python
|
tests/web_platform/css_flexbox_1/test_flexbox_item_vertical_align.py
|
jonboland/colosseum
|
cbf974be54fd7f6fddbe7285704cfaf7a866c5c5
|
[
"BSD-3-Clause"
] | 71
|
2015-04-13T09:44:14.000Z
|
2019-03-24T01:03:02.000Z
|
tests/web_platform/css_flexbox_1/test_flexbox_item_vertical_align.py
|
jonboland/colosseum
|
cbf974be54fd7f6fddbe7285704cfaf7a866c5c5
|
[
"BSD-3-Clause"
] | 35
|
2019-05-06T15:26:09.000Z
|
2022-03-28T06:30:33.000Z
|
tests/web_platform/css_flexbox_1/test_flexbox_item_vertical_align.py
|
jonboland/colosseum
|
cbf974be54fd7f6fddbe7285704cfaf7a866c5c5
|
[
"BSD-3-Clause"
] | 139
|
2015-05-30T18:37:43.000Z
|
2019-03-27T17:14:05.000Z
|
from tests.utils import W3CTestCase
class TestFlexbox_ItemVerticalAlign(W3CTestCase):
vars().update(W3CTestCase.find_tests(__file__, 'flexbox_item-vertical-align'))
| 28.5
| 82
| 0.818713
| 19
| 171
| 7
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019108
| 0.081871
| 171
| 5
| 83
| 34.2
| 0.828025
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 0.157895
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d3d7b4004eaeb5ecb5d00ad47a267af9f3ce2beb
| 191
|
py
|
Python
|
tests/app2.py
|
gilbrookie/cmdr
|
ee31e5b75a01f00e45f8181bf78017f232f0287e
|
[
"ISC"
] | null | null | null |
tests/app2.py
|
gilbrookie/cmdr
|
ee31e5b75a01f00e45f8181bf78017f232f0287e
|
[
"ISC"
] | null | null | null |
tests/app2.py
|
gilbrookie/cmdr
|
ee31e5b75a01f00e45f8181bf78017f232f0287e
|
[
"ISC"
] | null | null | null |
#!/usr/bin/python
from data import CmdrOverrideParams, TestCmd1, TestCmd2
CmdrOverrideParams.register_cmd(TestCmd1())
CmdrOverrideParams.register_cmd(TestCmd2())
CmdrOverrideParams.start()
| 23.875
| 55
| 0.832461
| 19
| 191
| 8.263158
| 0.631579
| 0.33121
| 0.369427
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022346
| 0.062827
| 191
| 7
| 56
| 27.285714
| 0.854749
| 0.08377
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d3ed28c21aae68c440ca66b6e9019548948e52a0
| 72
|
py
|
Python
|
main.py
|
franneck94/Digits-Recognition-Tensorflow
|
eb3ec8fa4cff64d2f135a40e0104369cd4dd190a
|
[
"MIT"
] | 16
|
2018-01-28T12:03:52.000Z
|
2020-12-21T13:31:49.000Z
|
main.py
|
franneck94/Digits-Recognition-Tensorflow
|
eb3ec8fa4cff64d2f135a40e0104369cd4dd190a
|
[
"MIT"
] | null | null | null |
main.py
|
franneck94/Digits-Recognition-Tensorflow
|
eb3ec8fa4cff64d2f135a40e0104369cd4dd190a
|
[
"MIT"
] | 11
|
2017-11-20T20:51:06.000Z
|
2020-03-11T13:48:49.000Z
|
from drawer import main_gui
if __name__ == "__main__":
main_gui()
| 12
| 27
| 0.694444
| 10
| 72
| 4
| 0.7
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.208333
| 72
| 5
| 28
| 14.4
| 0.701754
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
312c0d0a38f5bf686cd815e99383545825f1ce7c
| 65
|
py
|
Python
|
Pipeline/main/Strategy/Close/lib/__init__.py
|
simonydbutt/b2a
|
0bf4a6de8547d73ace22967780442deeaff2d5c6
|
[
"MIT"
] | 2
|
2018-07-01T03:36:24.000Z
|
2020-02-13T17:22:46.000Z
|
Pipeline/main/Strategy/Close/lib/__init__.py
|
simonydbutt/b2a
|
0bf4a6de8547d73ace22967780442deeaff2d5c6
|
[
"MIT"
] | null | null | null |
Pipeline/main/Strategy/Close/lib/__init__.py
|
simonydbutt/b2a
|
0bf4a6de8547d73ace22967780442deeaff2d5c6
|
[
"MIT"
] | null | null | null |
from Pipeline.main.Strategy.Close.lib.ProfitRun import ProfitRun
| 32.5
| 64
| 0.861538
| 9
| 65
| 6.222222
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061538
| 65
| 1
| 65
| 65
| 0.918033
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3186a0791f2f06075bf3619bc6dcdd62813bdcec
| 152
|
py
|
Python
|
pseudo-codes/lcm.py
|
Varshaav16/off-net-learner
|
ab50af11acc0cbbada54a5d9b9239d2c329cc0c2
|
[
"MIT"
] | null | null | null |
pseudo-codes/lcm.py
|
Varshaav16/off-net-learner
|
ab50af11acc0cbbada54a5d9b9239d2c329cc0c2
|
[
"MIT"
] | null | null | null |
pseudo-codes/lcm.py
|
Varshaav16/off-net-learner
|
ab50af11acc0cbbada54a5d9b9239d2c329cc0c2
|
[
"MIT"
] | null | null | null |
def gcd(a: int, b: int) -> int :
if a == 0:
return b
return gcd(b % a, a)
def lcm(a: int, b: int) -> int:
return (a / gcd(a,b))* b
| 21.714286
| 32
| 0.460526
| 30
| 152
| 2.333333
| 0.3
| 0.114286
| 0.142857
| 0.228571
| 0.314286
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01
| 0.342105
| 152
| 7
| 33
| 21.714286
| 0.69
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.166667
| 0.833333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
31b1403c264fe5238638d606fb79d288e1b60822
| 241
|
py
|
Python
|
fractalis/analytics/__init__.py
|
thehyve/Fractalis
|
5591112e5bc994eea5baf3d28caa7e5dfee85a57
|
[
"Apache-2.0"
] | 7
|
2018-06-01T12:17:26.000Z
|
2019-08-23T13:15:34.000Z
|
fractalis/analytics/__init__.py
|
thehyve/Fractalis
|
5591112e5bc994eea5baf3d28caa7e5dfee85a57
|
[
"Apache-2.0"
] | 6
|
2018-11-02T10:00:04.000Z
|
2021-09-13T14:15:36.000Z
|
fractalis/analytics/__init__.py
|
LCSB-BioCore/Fractalis
|
a9f7f8da7675b55c5996d2f32d7baa7313b0350e
|
[
"Apache-2.0"
] | 3
|
2018-08-02T16:42:50.000Z
|
2018-12-14T18:16:22.000Z
|
from fractalis.utils import list_classes_with_base_class
from fractalis.analytics.task import AnalyticTask
TASK_REGISTRY = list_classes_with_base_class('fractalis.analytics.tasks',
AnalyticTask)
| 40.166667
| 73
| 0.713693
| 26
| 241
| 6.269231
| 0.538462
| 0.159509
| 0.184049
| 0.233129
| 0.294479
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.240664
| 241
| 5
| 74
| 48.2
| 0.89071
| 0
| 0
| 0
| 0
| 0
| 0.103734
| 0.103734
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
31bd2b7849e6827df261c952de7c09de81a89d14
| 756
|
py
|
Python
|
SBMLDiagrams/__init__.py
|
SunnyXu/SBMLDiagrams
|
a7a9dccf42f544d6f48bc097d4dc7ebf5f4e2d41
|
[
"MIT"
] | null | null | null |
SBMLDiagrams/__init__.py
|
SunnyXu/SBMLDiagrams
|
a7a9dccf42f544d6f48bc097d4dc7ebf5f4e2d41
|
[
"MIT"
] | 50
|
2021-12-03T22:43:18.000Z
|
2022-03-30T22:15:09.000Z
|
SBMLDiagrams/__init__.py
|
sys-bio/SBMLDiagrams
|
ff951ff987fadf61a25d239966134e7bbfa1ff1a
|
[
"MIT"
] | 2
|
2022-01-30T00:47:44.000Z
|
2022-03-03T01:13:24.000Z
|
# try:
# from . import _version
# except:
# from SBMLDiagrams import _version
# __version__ = _version.__version__
try:
from . import visualizeSBML
from . import drawNetwork
from . import processSBML
from . import editSBML
from . import exportSBML
from . import styleSBML
from. import visualizeInfo
except:
from SBMLDiagrams import visualizeSBML
from SBMLDiagrams import drawNetwork
from SBMLDiagrams import processSBML
from SBMLDiagrams import editSBML
from SBMLDiagrams import exportSBML
from SBMLDiagrams import styleSBML
from SBMLDiagrams import visualizeInfo
from SBMLDiagrams._version import __version__
from SBMLDiagrams.processSBML import *
from SBMLDiagrams.visualizeSBML import *
| 26.068966
| 45
| 0.765873
| 76
| 756
| 7.407895
| 0.171053
| 0.312611
| 0.312611
| 0.099467
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.202381
| 756
| 28
| 46
| 27
| 0.933665
| 0.145503
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.894737
| 0
| 0.894737
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7361d7a18499fca17dcae5e9cf6e4d4b77325d64
| 135
|
py
|
Python
|
generators/app/templates/_project/_data/inputs_test.py
|
jrabary/generator-tf
|
ef376fcd0f2b968b5b7e18a2d68950243f376432
|
[
"Apache-2.0"
] | null | null | null |
generators/app/templates/_project/_data/inputs_test.py
|
jrabary/generator-tf
|
ef376fcd0f2b968b5b7e18a2d68950243f376432
|
[
"Apache-2.0"
] | null | null | null |
generators/app/templates/_project/_data/inputs_test.py
|
jrabary/generator-tf
|
ef376fcd0f2b968b5b7e18a2d68950243f376432
|
[
"Apache-2.0"
] | null | null | null |
import tensorflow as tf
class InputsTest(tf.test.TestCase):
def test_inputs(self):
# Write your inputs unit test here
pass
| 16.875
| 38
| 0.725926
| 20
| 135
| 4.85
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.207407
| 135
| 8
| 39
| 16.875
| 0.906542
| 0.237037
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
b4070315564b3b80bf9cce6119d3c04ee2fabcfa
| 248
|
py
|
Python
|
bcdp/__init__.py
|
kinow/bcdp
|
f4366a307672d84ed7992f3bb68a04303a107c56
|
[
"Apache-2.0"
] | 5
|
2020-02-17T10:24:32.000Z
|
2021-09-16T14:58:00.000Z
|
bcdp/__init__.py
|
kinow/bcdp
|
f4366a307672d84ed7992f3bb68a04303a107c56
|
[
"Apache-2.0"
] | 1
|
2020-04-16T22:17:45.000Z
|
2020-04-16T22:17:45.000Z
|
bcdp/__init__.py
|
kinow/bcdp
|
f4366a307672d84ed7992f3bb68a04303a107c56
|
[
"Apache-2.0"
] | 2
|
2020-02-05T23:28:32.000Z
|
2020-04-04T09:33:00.000Z
|
from .adapters import *
from .bounds import *
from .ensemble import *
from .extractors import *
from .regridder import *
from .sources import *
__all__ = ['adapters', 'bounds', 'ensemble',
'extractors', 'regridder', 'sources']
| 24.8
| 49
| 0.657258
| 25
| 248
| 6.36
| 0.36
| 0.314465
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.21371
| 248
| 9
| 50
| 27.555556
| 0.815385
| 0
| 0
| 0
| 0
| 0
| 0.200837
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b40a2f804928a9a8e0271eb3eac438c02a005b69
| 235
|
py
|
Python
|
atest/testdata/running/pass_execution_library.py
|
phil-davis/robotframework
|
4d4ce686cbe01e293bb86ea6ff34330e8c45fc43
|
[
"ECL-2.0",
"Apache-2.0"
] | 7,073
|
2015-01-01T17:19:16.000Z
|
2022-03-31T22:01:29.000Z
|
atest/testdata/running/pass_execution_library.py
|
phil-davis/robotframework
|
4d4ce686cbe01e293bb86ea6ff34330e8c45fc43
|
[
"ECL-2.0",
"Apache-2.0"
] | 2,412
|
2015-01-02T09:29:05.000Z
|
2022-03-31T13:10:46.000Z
|
atest/testdata/running/pass_execution_library.py
|
phil-davis/robotframework
|
4d4ce686cbe01e293bb86ea6ff34330e8c45fc43
|
[
"ECL-2.0",
"Apache-2.0"
] | 2,298
|
2015-01-03T02:47:15.000Z
|
2022-03-31T02:00:16.000Z
|
from robot.errors import PassExecution
from robot.libraries.BuiltIn import BuiltIn
def raise_pass_execution_exception(msg):
raise PassExecution(msg)
def call_pass_execution_method(msg):
BuiltIn().pass_execution(msg, 'lol')
| 21.363636
| 43
| 0.8
| 31
| 235
| 5.83871
| 0.516129
| 0.21547
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119149
| 235
| 10
| 44
| 23.5
| 0.874396
| 0
| 0
| 0
| 0
| 0
| 0.012766
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.833333
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
b476de8f289343219a3df99392d4ce3e75b0b357
| 173
|
py
|
Python
|
agent/bin/boot_time.py
|
kemoycampbell/atomic-monitor
|
29d2fcf8d12bd6ac76f71d5f509fb515b006a44e
|
[
"MIT"
] | null | null | null |
agent/bin/boot_time.py
|
kemoycampbell/atomic-monitor
|
29d2fcf8d12bd6ac76f71d5f509fb515b006a44e
|
[
"MIT"
] | null | null | null |
agent/bin/boot_time.py
|
kemoycampbell/atomic-monitor
|
29d2fcf8d12bd6ac76f71d5f509fb515b006a44e
|
[
"MIT"
] | 1
|
2020-04-26T19:16:49.000Z
|
2020-04-26T19:16:49.000Z
|
from uptime import boottime
class BootTime:
# get system boot time
def get_boot_time(self):
# datetime of boot
return boottime().strftime('%x %X')
| 19.222222
| 43
| 0.647399
| 23
| 173
| 4.782609
| 0.695652
| 0.145455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.265896
| 173
| 8
| 44
| 21.625
| 0.866142
| 0.213873
| 0
| 0
| 0
| 0
| 0.037594
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
b47a40973471868c3ded6523c3ab1317b47bd99a
| 251
|
py
|
Python
|
openprocurement/tender/twostage/validation.py
|
leits/openprocurement.tender.twostage
|
2cacf77364bf7ebf74fedf6ddabc8ac600b6d73f
|
[
"Apache-2.0"
] | null | null | null |
openprocurement/tender/twostage/validation.py
|
leits/openprocurement.tender.twostage
|
2cacf77364bf7ebf74fedf6ddabc8ac600b6d73f
|
[
"Apache-2.0"
] | 2
|
2021-03-26T00:35:15.000Z
|
2022-03-21T22:21:08.000Z
|
openprocurement/tender/twostage/validation.py
|
leits/openprocurement.tender.twostage
|
2cacf77364bf7ebf74fedf6ddabc8ac600b6d73f
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from openprocurement.api.validation import validate_data
from openprocurement.tender.twostage.models import Qualification
def validate_patch_qualification_data(request):
return validate_data(request, Qualification, True)
| 31.375
| 64
| 0.816733
| 29
| 251
| 6.896552
| 0.655172
| 0.19
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004425
| 0.099602
| 251
| 7
| 65
| 35.857143
| 0.880531
| 0.083665
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 5
|
b47a43e0754a3d5a0e29fbc01cf1f1fbd0c1f635
| 237
|
py
|
Python
|
Bot/1_Find/AI/_Value_Investing.py
|
ReedGraff/High-Low
|
c8ba0339d7818e344cacf9a73a83d24dc539c2ca
|
[
"MIT"
] | 1
|
2022-01-06T05:50:53.000Z
|
2022-01-06T05:50:53.000Z
|
Bot/1_Find/AI/_Value_Investing.py
|
ReedGraff/High-Low
|
c8ba0339d7818e344cacf9a73a83d24dc539c2ca
|
[
"MIT"
] | null | null | null |
Bot/1_Find/AI/_Value_Investing.py
|
ReedGraff/High-Low
|
c8ba0339d7818e344cacf9a73a83d24dc539c2ca
|
[
"MIT"
] | null | null | null |
def Value_Investing(self, training_data, testing_data=""):
if testing_data == "":
percent_taken = 30
index = ((100 - percent_taken) / 100) * len(training_data)
testing_data = training_data[index:]
return 0
| 39.5
| 66
| 0.64557
| 29
| 237
| 4.965517
| 0.551724
| 0.25
| 0.263889
| 0.319444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.049724
| 0.236287
| 237
| 6
| 67
| 39.5
| 0.745856
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
81ec2f644ff2098c451506963f7e53c59a0ba80f
| 1,931
|
py
|
Python
|
api_login/models.py
|
archkwon/python-django-restful-mysql
|
a8097c08057de9656cb40266420fcffebb11bdb6
|
[
"MIT"
] | null | null | null |
api_login/models.py
|
archkwon/python-django-restful-mysql
|
a8097c08057de9656cb40266420fcffebb11bdb6
|
[
"MIT"
] | null | null | null |
api_login/models.py
|
archkwon/python-django-restful-mysql
|
a8097c08057de9656cb40266420fcffebb11bdb6
|
[
"MIT"
] | null | null | null |
from django.db import models
class UserVeriCodeModel(models.Model):
uniq_id = models.CharField(primary_key=True, max_length=50, verbose_name='고유아이디')
user_mobile_no = models.CharField(max_length=20, blank=True, null=True, verbose_name='휴대폰번호')
verification_code = models.CharField(max_length=20, blank=True, null=True, verbose_name='SMS인증코드')
cre_date = models.DateTimeField(auto_now_add=True, verbose_name='등록일자')
upt_date = models.DateTimeField(auto_now=True, verbose_name='수정일자')
class Meta:
managed = False
db_table = 'tb_tacar_veri_code'
verbose_name = "SMS인증번호코드"
verbose_name_plural = "SMS인증번호코드"
class NaverCloudLogModel(models.Model):
uniq_id = models.CharField(primary_key=True, max_length=50, verbose_name='고유아이디')
user_mobile_no = models.CharField(max_length=20, blank=True, null=True, verbose_name='휴대폰번호')
api_type = models.CharField(max_length=20, blank=True, null=True, verbose_name='API타입')
response_json = models.TextField(blank=True, null=True, verbose_name='응답Json파일')
cre_date = models.DateTimeField(auto_now_add=True, verbose_name='등록일자')
upt_date = models.DateTimeField(auto_now=True, verbose_name='수정일자')
class Meta:
managed = False
db_table = 'tb_tacar_naver_log'
verbose_name = "네이버클라우드로그정보"
verbose_name_plural = "네이버클라우드로그정보"
class UserPurposeInfoModel(models.Model):
uniq_id = models.CharField(primary_key=True, max_length=50, verbose_name='고유아이디')
user_mobile_no = models.CharField(max_length=20, blank=True, null=True, verbose_name='휴대폰번호')
purpose_code = models.CharField(max_length=10, blank=True, null=True, verbose_name='가입목적')
cre_date = models.DateTimeField(auto_now_add=True, verbose_name='등록일자')
upt_date = models.DateTimeField(auto_now=True, verbose_name='수정일자')
class Meta:
managed = False
db_table = 'tb_tacar_purpose_info'
| 44.906977
| 102
| 0.737442
| 261
| 1,931
| 5.172414
| 0.245211
| 0.162963
| 0.144444
| 0.088148
| 0.780741
| 0.757037
| 0.715556
| 0.715556
| 0.715556
| 0.715556
| 0
| 0.010976
| 0.150699
| 1,931
| 42
| 103
| 45.97619
| 0.812195
| 0
| 0
| 0.545455
| 0
| 0
| 0.090627
| 0.010875
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.030303
| 0
| 0.69697
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c316d440063b325879c1842fe7990c9669b67191
| 86
|
py
|
Python
|
deletefb/exceptions.py
|
mIcHyAmRaNe/DeleteFB
|
e8896b1a2c834b0b0cc9404a20225a138e37b303
|
[
"MIT"
] | 2,905
|
2019-03-30T02:45:34.000Z
|
2022-02-11T23:08:32.000Z
|
deletefb/exceptions.py
|
mIcHyAmRaNe/DeleteFB
|
e8896b1a2c834b0b0cc9404a20225a138e37b303
|
[
"MIT"
] | 131
|
2019-05-20T21:52:05.000Z
|
2022-01-09T11:58:40.000Z
|
deletefb/exceptions.py
|
mIcHyAmRaNe/DeleteFB
|
e8896b1a2c834b0b0cc9404a20225a138e37b303
|
[
"MIT"
] | 249
|
2019-05-20T19:26:56.000Z
|
2022-01-25T02:59:00.000Z
|
class UnknownOSException(Exception):
pass
class ChromeError(Exception):
pass
| 14.333333
| 36
| 0.755814
| 8
| 86
| 8.125
| 0.625
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.174419
| 86
| 5
| 37
| 17.2
| 0.915493
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
5eddd41fc903f23c5ff489f5db219a6d12f5ee1f
| 64
|
py
|
Python
|
google_screener_data_extract/__init__.py
|
spidezad/google_screener_data_extract
|
8efe14e73918808182d8745ef38c38f1ac686f6e
|
[
"BSD-3-Clause"
] | 28
|
2015-09-27T21:11:23.000Z
|
2021-05-17T06:33:20.000Z
|
google_screener_data_extract/__init__.py
|
spidezad/google_screener_data_extract
|
8efe14e73918808182d8745ef38c38f1ac686f6e
|
[
"BSD-3-Clause"
] | 1
|
2015-10-18T23:11:03.000Z
|
2018-03-27T05:58:10.000Z
|
google_screener_data_extract/__init__.py
|
spidezad/google_screener_data_extract
|
8efe14e73918808182d8745ef38c38f1ac686f6e
|
[
"BSD-3-Clause"
] | 24
|
2016-01-14T09:53:48.000Z
|
2018-05-17T02:00:56.000Z
|
from .google_screener_data_extract import GoogleStockDataExtract
| 64
| 64
| 0.9375
| 7
| 64
| 8.142857
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046875
| 64
| 1
| 64
| 64
| 0.934426
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
5ee658096625597fad01df8c6e2f1a6d8f4daff4
| 60
|
py
|
Python
|
nilearn/input_data/nifti_labels_masker.py
|
ctw/nilearn
|
932eee9c69cd8fbf40ee6af5cee77f8f93b25da3
|
[
"BSD-2-Clause"
] | null | null | null |
nilearn/input_data/nifti_labels_masker.py
|
ctw/nilearn
|
932eee9c69cd8fbf40ee6af5cee77f8f93b25da3
|
[
"BSD-2-Clause"
] | null | null | null |
nilearn/input_data/nifti_labels_masker.py
|
ctw/nilearn
|
932eee9c69cd8fbf40ee6af5cee77f8f93b25da3
|
[
"BSD-2-Clause"
] | null | null | null |
from nilearn.maskers.nifti_labels_masker import * # noqa
| 15
| 57
| 0.783333
| 8
| 60
| 5.625
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 60
| 3
| 58
| 20
| 0.882353
| 0.066667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
5ef0bf6299faa97cf0ddbf361c29415874359823
| 146
|
py
|
Python
|
build_gpcr/management/commands/build_prepare_new_structures.py
|
pszgaspar/protwis
|
4989a67175ef3c95047d795c843cf6b9cf4141fa
|
[
"Apache-2.0"
] | 21
|
2016-01-20T09:33:14.000Z
|
2021-12-20T19:19:45.000Z
|
build_gpcr/management/commands/build_prepare_new_structures.py
|
pszgaspar/protwis
|
4989a67175ef3c95047d795c843cf6b9cf4141fa
|
[
"Apache-2.0"
] | 75
|
2016-02-26T16:29:58.000Z
|
2022-03-21T12:35:13.000Z
|
build_gpcr/management/commands/build_prepare_new_structures.py
|
pszgaspar/protwis
|
4989a67175ef3c95047d795c843cf6b9cf4141fa
|
[
"Apache-2.0"
] | 77
|
2016-01-22T08:44:26.000Z
|
2022-02-01T15:54:56.000Z
|
from build.management.commands.build_prepare_new_structures import Command as PrepareNewStructures
class Command(PrepareNewStructures):
pass
| 29.2
| 98
| 0.863014
| 16
| 146
| 7.6875
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09589
| 146
| 5
| 99
| 29.2
| 0.931818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
5efede0d604cc0869f77281508b97a4744c06bd9
| 164
|
py
|
Python
|
test_libs.py
|
AgenttiX/fys2029-project
|
26dc885064721f40db10fad4405f3366f2cfdf4a
|
[
"Apache-2.0"
] | 1
|
2021-05-21T14:39:07.000Z
|
2021-05-21T14:39:07.000Z
|
test_libs.py
|
AgenttiX/fys2029-project
|
26dc885064721f40db10fad4405f3366f2cfdf4a
|
[
"Apache-2.0"
] | 1
|
2021-05-25T12:52:49.000Z
|
2021-05-25T12:52:49.000Z
|
test_libs.py
|
AgenttiX/fys2029-project
|
26dc885064721f40db10fad4405f3366f2cfdf4a
|
[
"Apache-2.0"
] | null | null | null |
# This file is for testing whether the TensorFlow imports work, without having to start a Jupyter server.
import tensorflow as tf
import tensorflow_quantum as tfq
| 32.8
| 105
| 0.817073
| 26
| 164
| 5.115385
| 0.846154
| 0.240602
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164634
| 164
| 4
| 106
| 41
| 0.970803
| 0.628049
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
6f0251f62e82dc53713159cf139162f5e487ee79
| 152
|
py
|
Python
|
online_gp/mlls/__init__.py
|
wjmaddox/online_gp
|
3bff4c347263a9b8b1f0aa801a986f4aaa019a66
|
[
"Apache-2.0"
] | 31
|
2021-03-05T00:51:34.000Z
|
2022-02-07T09:52:20.000Z
|
online_gp/mlls/__init__.py
|
wjmaddox/online_gp
|
3bff4c347263a9b8b1f0aa801a986f4aaa019a66
|
[
"Apache-2.0"
] | 1
|
2021-11-24T07:18:28.000Z
|
2021-11-24T12:07:20.000Z
|
online_gp/mlls/__init__.py
|
wjmaddox/online_gp
|
3bff4c347263a9b8b1f0aa801a986f4aaa019a66
|
[
"Apache-2.0"
] | 1
|
2021-05-19T19:12:36.000Z
|
2021-05-19T19:12:36.000Z
|
from .streaming_added_loss_term import StreamingAddedLossTerm
from .batched_woodbury_marginal_log_likelihood import BatchedWoodburyMarginalLogLikelihood
| 76
| 90
| 0.940789
| 15
| 152
| 9.066667
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046053
| 152
| 2
| 90
| 76
| 0.937931
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6f0263f302229b3951f557f5a63bb60359554c37
| 119
|
py
|
Python
|
eoflow/tasks/__init__.py
|
JDESLOIRES/eo-flow
|
def495e9292809656b906cfd6b8e7389ff9cea61
|
[
"MIT"
] | 80
|
2019-09-11T08:53:03.000Z
|
2022-03-29T05:32:02.000Z
|
eoflow/tasks/__init__.py
|
JDESLOIRES/eo-flow
|
def495e9292809656b906cfd6b8e7389ff9cea61
|
[
"MIT"
] | 12
|
2019-10-11T11:00:56.000Z
|
2022-01-31T10:43:40.000Z
|
eoflow/tasks/__init__.py
|
JDESLOIRES/eo-flow
|
def495e9292809656b906cfd6b8e7389ff9cea61
|
[
"MIT"
] | 21
|
2019-09-11T08:12:57.000Z
|
2022-03-07T01:05:05.000Z
|
from .train import TrainTask, TrainAndEvaluateTask
from .predict import PredictTask
from .evaluate import EvaluateTask
| 29.75
| 50
| 0.857143
| 13
| 119
| 7.846154
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109244
| 119
| 3
| 51
| 39.666667
| 0.962264
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6f0cfca7dad80bd1a90a6e3ae4594d1afd447046
| 3,071
|
py
|
Python
|
src/test/stock_data_analysis_module/indicators/test_rate_of_change.py
|
Freitacr/ML-StockAnalysisProject
|
37411c1204ecf69040ba2a1658013e4bf71eef9d
|
[
"MIT"
] | null | null | null |
src/test/stock_data_analysis_module/indicators/test_rate_of_change.py
|
Freitacr/ML-StockAnalysisProject
|
37411c1204ecf69040ba2a1658013e4bf71eef9d
|
[
"MIT"
] | 6
|
2018-01-05T16:42:09.000Z
|
2021-03-18T00:20:18.000Z
|
src/test/stock_data_analysis_module/indicators/test_rate_of_change.py
|
Freitacr/ML-StockAnalysisProject
|
37411c1204ecf69040ba2a1658013e4bf71eef9d
|
[
"MIT"
] | 1
|
2021-03-21T04:49:51.000Z
|
2021-03-21T04:49:51.000Z
|
import unittest
import numpy as np
from stock_data_analysis_module.indicators import rate_of_change
class RateOfChangeTestCase(unittest.TestCase):
def __init__(self, *args):
super().__init__(*args)
self._empty_sequence = []
self._empty_ndarray = np.zeros((0,))
self._example_a = [1, 1, 2, 3, 4, 5]
self._example_b = [1, -1, 1, -1, 1, -1]
self._a_2_result = [100, 200, 100, (2/3) * 100]
self._b_2_result = [0, 0, 0, 0]
self._a_3_result = [200, 300, 150]
def test_invalid_length(self):
with self.assertRaises(ValueError):
rate_of_change.rate_of_change(self._empty_ndarray, 10)
with self.assertRaises(ValueError):
rate_of_change.rate_of_change(self._empty_sequence, 10)
def test_standard_example(self):
result = rate_of_change.rate_of_change(self._example_a, period=2)
if len(result) != len(self._a_2_result):
self.fail("Unexpected results from rate of change calculation with "
"the sequence %s.\nExpected: %s\nActual: %s" %
(str(self._example_a), str(self._a_2_result), str(result)))
for i in range(len(result)):
if result[i] != self._a_2_result[i]:
self.fail("Unexpected results from rate of change calculation with "
"the sequence %s.\nExpected: %s\nActual: %s" %
(str(self._example_a), str(self._a_2_result), str(result)))
def test_standard_example_2(self):
result = rate_of_change.rate_of_change(self._example_b, period=2)
if len(result) != len(self._b_2_result):
self.fail("Unexpected results from rate of change calculation with "
"the sequence %s.\nExpected: %s\nActual: %s" %
(str(self._example_b), str(self._b_2_result), str(result)))
for i in range(len(result)):
if result[i] != self._b_2_result[i]:
self.fail("Unexpected results from rate of change calculation with "
"the sequence %s.\nExpected: %s\nActual: %s" %
(str(self._example_b), str(self._b_2_result), str(result)))
def test_standard_example_3(self):
result = rate_of_change.rate_of_change(self._example_a, period=3)
if len(result) != len(self._a_3_result):
self.fail("Unexpected results from rate of change calculation with "
"the sequence %s.\nExpected: %s\nActual: %s" %
(str(self._example_a), str(self._a_3_result), str(result)))
for i in range(len(result)):
if result[i] != self._a_3_result[i]:
self.fail("Unexpected results from rate of change calculation with "
"the sequence %s.\nExpected: %s\nActual: %s" %
(str(self._example_a), str(self._a_3_result), str(result)))
if __name__ == '__main__':
unittest.main()
| 47.246154
| 86
| 0.583198
| 404
| 3,071
| 4.121287
| 0.163366
| 0.061261
| 0.122523
| 0.09009
| 0.770571
| 0.766967
| 0.754955
| 0.728529
| 0.702102
| 0.702102
| 0
| 0.029712
| 0.2986
| 3,071
| 64
| 87
| 47.984375
| 0.743268
| 0
| 0
| 0.433962
| 0
| 0
| 0.198204
| 0
| 0
| 0
| 0
| 0
| 0.037736
| 1
| 0.09434
| false
| 0
| 0.056604
| 0
| 0.169811
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6f245077d2c2559a5a0020b6347c2e2bf1a57062
| 3,860
|
py
|
Python
|
QCPU_Setup/DWave-library/dist-packages/dwave_networkx2/algorithms/max_cut.py
|
cogrpar/qcpuWARE
|
9b8233e830f8cfacbef787781b2279e42f26fec5
|
[
"Apache-2.0"
] | 1
|
2022-02-01T14:40:05.000Z
|
2022-02-01T14:40:05.000Z
|
QCPU_Setup/DWave-library/dist-packages/dwave_networkx2/algorithms/max_cut.py
|
cogrpar/qcpuWARE
|
9b8233e830f8cfacbef787781b2279e42f26fec5
|
[
"Apache-2.0"
] | null | null | null |
QCPU_Setup/DWave-library/dist-packages/dwave_networkx2/algorithms/max_cut.py
|
cogrpar/qcpuWARE
|
9b8233e830f8cfacbef787781b2279e42f26fec5
|
[
"Apache-2.0"
] | 1
|
2022-02-01T14:40:31.000Z
|
2022-02-01T14:40:31.000Z
|
from dwave_networkx.exceptions import DWaveNetworkXException
from dwave_networkx.utils import binary_quadratic_model_sampler
__all__ = ["maximum_cut", "weighted_maximum_cut"]
@binary_quadratic_model_sampler(1)
def maximum_cut(G, sampler=None, **sampler_args):
"""Returns an approximate maximum cut.
Defines an Ising problem with ground states corresponding to
a maximum cut and uses the sampler to sample from it.
A maximum cut is a subset S of the vertices of G such that
the number of edges between S and the complementary subset
is as large as possible.
Parameters
----------
G : NetworkX graph
sampler
A binary quadratic model sampler. A sampler is a process that
samples from low energy states in models defined by an Ising
equation or a Quadratic Unconstrained Binary Optimization
Problem (QUBO). A sampler is expected to have a 'sample_qubo'
and 'sample_ising' method. A sampler is expected to return an
iterable of samples, in order of increasing energy. If no
sampler is provided, one must be provided using the
`set_default_sampler` function.
sampler_args
Additional keyword parameters are passed to the sampler.
Returns
-------
S : set
A maximum cut of G.
Notes
-----
Samplers by their nature may not return the optimal solution. This
function does not attempt to confirm the quality of the returned
sample.
"""
# In order to form the Ising problem, we want to increase the
# energy by 1 for each edge between two nodes of the same color.
# The linear biases can all be 0.
h = {v: 0. for v in G}
J = {(u, v): 1 for u, v in G.edges}
# draw the lowest energy sample from the sampler
response = sampler.sample_ising(h, J, **sampler_args)
sample = next(iter(response))
return set(v for v in G if sample[v] >= 0)
def weighted_maximum_cut(G, sampler=None, **sampler_args):
"""Returns an approximate weighted maximum cut.
Defines an Ising problem with ground states corresponding to
a weighted maximum cut and uses the sampler to sample from it.
A weighted maximum cut is a subset S of the vertices of G that
maximizes the sum of the edge weights between S and its
complementary subset.
Parameters
----------
G : NetworkX graph
Each edge in G should have a numeric 'weight' attribute.
sampler
A binary quadratic model sampler. A sampler is a process that
samples from low energy states in models defined by an Ising
equation or a Quadratic Unconstrained Binary Optimization
Problem (QUBO). A sampler is expected to have a 'sample_qubo'
and 'sample_ising' method. A sampler is expected to return an
iterable of samples, in order of increasing energy. If no
sampler is provided, one must be provided using the
`set_default_sampler` function.
sampler_args
Additional keyword parameters are passed to the sampler.
Returns
-------
S : set
A maximum cut of G.
Notes
-----
Samplers by their nature may not return the optimal solution. This
function does not attempt to confirm the quality of the returned
sample.
"""
# In order to form the Ising problem, we want to increase the
# energy by 1 for each edge between two nodes of the same color.
# The linear biases can all be 0.
h = {v: 0. for v in G}
try:
J = {(u, v): G[u][v]['weight'] for u, v in G.edges}
except KeyError:
raise DWaveNetworkXException("edges must have 'weight' attribute")
# draw the lowest energy sample from the sampler
response = sampler.sample_ising(h, J, **sampler_args)
sample = next(iter(response))
return set(v for v in G if sample[v] >= 0)
| 33.859649
| 74
| 0.681606
| 576
| 3,860
| 4.508681
| 0.237847
| 0.046207
| 0.023104
| 0.041586
| 0.795533
| 0.795533
| 0.785522
| 0.785522
| 0.785522
| 0.785522
| 0
| 0.003493
| 0.25829
| 3,860
| 113
| 75
| 34.159292
| 0.903598
| 0.696114
| 0
| 0.421053
| 0
| 0
| 0.077596
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.105263
| false
| 0
| 0.105263
| 0
| 0.315789
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6f63117da26d35a7dfe144611363181514a5ece3
| 222
|
py
|
Python
|
leetcode/google/tagged/medium/validate_binary_search_tree_test.py
|
alvinctk/google-tech-dev-guide
|
9d7759bea1f44673c2de4f25a94b27368928a59f
|
[
"Apache-2.0"
] | 26
|
2019-06-07T05:29:47.000Z
|
2022-03-19T15:32:27.000Z
|
leetcode/google/tagged/medium/validate_binary_search_tree_test.py
|
alvinctk/google-tech-dev-guide
|
9d7759bea1f44673c2de4f25a94b27368928a59f
|
[
"Apache-2.0"
] | null | null | null |
leetcode/google/tagged/medium/validate_binary_search_tree_test.py
|
alvinctk/google-tech-dev-guide
|
9d7759bea1f44673c2de4f25a94b27368928a59f
|
[
"Apache-2.0"
] | 6
|
2019-10-10T06:39:28.000Z
|
2020-05-12T19:50:55.000Z
|
[2,1,3]
[5,1,4,null,null,3,6]
[2,1,4,null,null,3,6]
[2,1,4,null,null,8,6]
[]
[1, 1, 1]
[0, 1]
[0, 1, 3]
[10,5,15,null,null,6,20]
[10,5,15,3,11,12,20]
[3,null,30,10,null,null,15,null,45]
[3,null,30,10,null,null,15,null,19]
| 17.076923
| 35
| 0.576577
| 64
| 222
| 2
| 0.265625
| 0.375
| 0.140625
| 0.234375
| 0.640625
| 0.640625
| 0.640625
| 0.640625
| 0.28125
| 0.28125
| 0
| 0.312195
| 0.076577
| 222
| 12
| 36
| 18.5
| 0.312195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
489c8b7fc77b584ea69c38066aa27e09cc4e0b15
| 68
|
py
|
Python
|
lib/python/abcutils/__init__.py
|
gcodebackups/helgemathee-alembic-softimage
|
fb8c5f09f35ea899a272f9cab0dd2f887317c043
|
[
"RSA-MD"
] | null | null | null |
lib/python/abcutils/__init__.py
|
gcodebackups/helgemathee-alembic-softimage
|
fb8c5f09f35ea899a272f9cab0dd2f887317c043
|
[
"RSA-MD"
] | null | null | null |
lib/python/abcutils/__init__.py
|
gcodebackups/helgemathee-alembic-softimage
|
fb8c5f09f35ea899a272f9cab0dd2f887317c043
|
[
"RSA-MD"
] | 1
|
2015-11-24T18:58:38.000Z
|
2015-11-24T18:58:38.000Z
|
from Path import Path
from CMakeCache import CMakeCache, CacheEntry
| 22.666667
| 45
| 0.852941
| 9
| 68
| 6.444444
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132353
| 68
| 2
| 46
| 34
| 0.983051
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
48a1f9a8e738e97b2bdd80fe016f7aab4096c1ca
| 209
|
py
|
Python
|
200Python/demo/01basic-hello/02basic/dict_set.py
|
lyliyongblue/JavaCoder
|
a04a350ec675a3a8b15c99da5cc89397dbbc97ef
|
[
"Apache-2.0"
] | null | null | null |
200Python/demo/01basic-hello/02basic/dict_set.py
|
lyliyongblue/JavaCoder
|
a04a350ec675a3a8b15c99da5cc89397dbbc97ef
|
[
"Apache-2.0"
] | null | null | null |
200Python/demo/01basic-hello/02basic/dict_set.py
|
lyliyongblue/JavaCoder
|
a04a350ec675a3a8b15c99da5cc89397dbbc97ef
|
[
"Apache-2.0"
] | null | null | null |
scores = {'AA': 10, 'BB': 20, "CC": 30}
print("AA score:", scores['AA'])
print("Before BB score:", scores['BB'])
scores['BB'] = 100
print("After BB score:", scores["BB"])
age = {1, 2, 3}
print(age)
| 20.9
| 40
| 0.545455
| 33
| 209
| 3.454545
| 0.484848
| 0.289474
| 0.22807
| 0.263158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070588
| 0.186603
| 209
| 9
| 41
| 23.222222
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0.27
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.571429
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
48ad3be834fd184aced1bedc40472fe60a92d916
| 435
|
py
|
Python
|
server-src/directMessages.py
|
Artingl/Fluffy
|
e51ca77651a67ea6206dcbfa0a3436c032f3a3ed
|
[
"Apache-2.0"
] | null | null | null |
server-src/directMessages.py
|
Artingl/Fluffy
|
e51ca77651a67ea6206dcbfa0a3436c032f3a3ed
|
[
"Apache-2.0"
] | null | null | null |
server-src/directMessages.py
|
Artingl/Fluffy
|
e51ca77651a67ea6206dcbfa0a3436c032f3a3ed
|
[
"Apache-2.0"
] | null | null | null |
import datetime
import sqlalchemy
import db
class directMessages(db.SqlAlchemyBase):
__tablename__ = 'directMessages'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True, autoincrement=True)
users = sqlalchemy.Column(sqlalchemy.String, index=True, nullable=True)
content = sqlalchemy.Column(sqlalchemy.String, default='{}')
info = sqlalchemy.Column(sqlalchemy.String, default='{"state":{}, "title":""}')
| 33.461538
| 84
| 0.744828
| 45
| 435
| 7.088889
| 0.533333
| 0.200627
| 0.326019
| 0.30094
| 0.244514
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121839
| 435
| 12
| 85
| 36.25
| 0.835079
| 0
| 0
| 0
| 0
| 0
| 0.091954
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d2942132aaddb326056ef36f3ab9a2deb335c799
| 118
|
py
|
Python
|
apps/app_a.py
|
nelsoncardenas/cerate_many_docker_images
|
14b57f5fd12c993d0c8776545e88973cf297eef2
|
[
"MIT"
] | null | null | null |
apps/app_a.py
|
nelsoncardenas/cerate_many_docker_images
|
14b57f5fd12c993d0c8776545e88973cf297eef2
|
[
"MIT"
] | null | null | null |
apps/app_a.py
|
nelsoncardenas/cerate_many_docker_images
|
14b57f5fd12c993d0c8776545e88973cf297eef2
|
[
"MIT"
] | null | null | null |
import numpy
import pandas
import pyarrow
import click
if __name__ == "__main__":
print("This is app_a running!")
| 16.857143
| 35
| 0.745763
| 17
| 118
| 4.647059
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169492
| 118
| 7
| 35
| 16.857143
| 0.806122
| 0
| 0
| 0
| 0
| 0
| 0.252101
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0.166667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d2ce1af5632f270ac28413fd4bc05870c9ad09a9
| 150
|
py
|
Python
|
tests/test_app.py
|
PXMYH/doctor
|
fac85ea02a96d986c69b3ac27a7444c10287cff4
|
[
"MIT"
] | null | null | null |
tests/test_app.py
|
PXMYH/doctor
|
fac85ea02a96d986c69b3ac27a7444c10287cff4
|
[
"MIT"
] | null | null | null |
tests/test_app.py
|
PXMYH/doctor
|
fac85ea02a96d986c69b3ac27a7444c10287cff4
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pytest
@pytest.mark.skip(reason="function not ready yet, todo")
def test_version():
pass
| 15
| 56
| 0.666667
| 22
| 150
| 4.5
| 0.954545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015873
| 0.16
| 150
| 9
| 57
| 16.666667
| 0.769841
| 0.286667
| 0
| 0
| 0
| 0
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.25
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
d2e54c138c93710f1556d10b67ab7919a25b7c00
| 156
|
py
|
Python
|
__main__.py
|
aldenso/pulumi_docker_demo
|
4bfd5d374c62b927e0fff0ea39695c7efa9469c5
|
[
"MIT"
] | null | null | null |
__main__.py
|
aldenso/pulumi_docker_demo
|
4bfd5d374c62b927e0fff0ea39695c7efa9469c5
|
[
"MIT"
] | null | null | null |
__main__.py
|
aldenso/pulumi_docker_demo
|
4bfd5d374c62b927e0fff0ea39695c7efa9469c5
|
[
"MIT"
] | null | null | null |
import pulumi
import infra
pulumi.export('image', infra.my_image)
pulumi.export('container', infra.container)
pulumi.export('dockerfile', infra.dockerfile)
| 26
| 45
| 0.801282
| 20
| 156
| 6.2
| 0.4
| 0.290323
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064103
| 156
| 6
| 45
| 26
| 0.849315
| 0
| 0
| 0
| 0
| 0
| 0.152866
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
96218ff962d029dbf39837f84299562348e8bbcf
| 87
|
py
|
Python
|
app/hello/__init__.py
|
washiz99/python-flask-hello
|
9d602d53c85c38fd1f5fb191630df096f9cef88d
|
[
"MIT"
] | null | null | null |
app/hello/__init__.py
|
washiz99/python-flask-hello
|
9d602d53c85c38fd1f5fb191630df096f9cef88d
|
[
"MIT"
] | null | null | null |
app/hello/__init__.py
|
washiz99/python-flask-hello
|
9d602d53c85c38fd1f5fb191630df096f9cef88d
|
[
"MIT"
] | null | null | null |
from flask import Blueprint
hello = Blueprint('hello', __name__)
from . import views
| 14.5
| 36
| 0.758621
| 11
| 87
| 5.636364
| 0.636364
| 0.451613
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16092
| 87
| 5
| 37
| 17.4
| 0.849315
| 0
| 0
| 0
| 0
| 0
| 0.057471
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
96247f0ee1665b2fa401a2b05798d31a0f4c6819
| 38
|
py
|
Python
|
tests/components/stream/__init__.py
|
domwillcode/home-assistant
|
f170c80bea70c939c098b5c88320a1c789858958
|
[
"Apache-2.0"
] | 30,023
|
2016-04-13T10:17:53.000Z
|
2020-03-02T12:56:31.000Z
|
tests/components/stream/__init__.py
|
jagadeeshvenkatesh/core
|
1bd982668449815fee2105478569f8e4b5670add
|
[
"Apache-2.0"
] | 31,101
|
2020-03-02T13:00:16.000Z
|
2022-03-31T23:57:36.000Z
|
tests/components/stream/__init__.py
|
jagadeeshvenkatesh/core
|
1bd982668449815fee2105478569f8e4b5670add
|
[
"Apache-2.0"
] | 11,956
|
2016-04-13T18:42:31.000Z
|
2020-03-02T09:32:12.000Z
|
"""The tests for stream platforms."""
| 19
| 37
| 0.684211
| 5
| 38
| 5.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131579
| 38
| 1
| 38
| 38
| 0.787879
| 0.815789
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
824a86a0ac80d0b8829c8f880dc614ee1f1a703a
| 536
|
py
|
Python
|
src/genie/libs/parser/iosxe/tests/ShowEnvFan/cli/equal/golden_output_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/iosxe/tests/ShowEnvFan/cli/equal/golden_output_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/iosxe/tests/ShowEnvFan/cli/equal/golden_output_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
expected_output = {
'switch': {
"1": {
'fan': {
"1": {
'state': 'ok'
},
"2": {
'state': 'ok'
},
"3": {
'state': 'ok'
}
},
'power_supply': {
"1": {
'state': 'not present'
},
"2": {
'state': 'ok'
}
}
}
}
}
| 20.615385
| 42
| 0.158582
| 23
| 536
| 3.608696
| 0.565217
| 0.337349
| 0.192771
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037037
| 0.697761
| 536
| 25
| 43
| 21.44
| 0.475309
| 0
| 0
| 0.36
| 0
| 0
| 0.132463
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
826374d576e75c8a49672752bd228090e03e0a7c
| 88
|
py
|
Python
|
dev_server/context_api.py
|
PlatformOfTrust/code-samples-validator
|
75fa24d93ccafaa51f7e1c0ebae447ac2bf933e0
|
[
"MIT"
] | null | null | null |
dev_server/context_api.py
|
PlatformOfTrust/code-samples-validator
|
75fa24d93ccafaa51f7e1c0ebae447ac2bf933e0
|
[
"MIT"
] | null | null | null |
dev_server/context_api.py
|
PlatformOfTrust/code-samples-validator
|
75fa24d93ccafaa51f7e1c0ebae447ac2bf933e0
|
[
"MIT"
] | 1
|
2020-04-28T09:54:33.000Z
|
2020-04-28T09:54:33.000Z
|
import bottle
app = bottle.Bottle()
@app.get('/')
def list_contexts():
return {}
| 9.777778
| 21
| 0.625
| 11
| 88
| 4.909091
| 0.727273
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.193182
| 88
| 8
| 22
| 11
| 0.760563
| 0
| 0
| 0
| 0
| 0
| 0.011364
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0.2
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
8272d16d8ed4370478973c544cdfd623819a5ec8
| 93
|
py
|
Python
|
homebrain/core/__init__.py
|
ErikBjare/Homebrain
|
7e4dcc9d0e5f5ef6bde3d2cf31639527166ab124
|
[
"MIT"
] | 1
|
2015-12-03T18:42:54.000Z
|
2015-12-03T18:42:54.000Z
|
homebrain/core/__init__.py
|
ErikBjare/Homebrain
|
7e4dcc9d0e5f5ef6bde3d2cf31639527166ab124
|
[
"MIT"
] | 14
|
2015-12-02T22:21:12.000Z
|
2019-11-06T10:26:08.000Z
|
homebrain/core/__init__.py
|
ErikBjare/Homebrain
|
7e4dcc9d0e5f5ef6bde3d2cf31639527166ab124
|
[
"MIT"
] | null | null | null |
from .events import Event
from .agents import Agent, PausableAgent
from . import decorators
| 18.6
| 40
| 0.806452
| 12
| 93
| 6.25
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150538
| 93
| 4
| 41
| 23.25
| 0.949367
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8279c612be50050b8d8dea972b566d6f41a28535
| 334
|
py
|
Python
|
lib/__init__.py
|
JohnEskimSmith/export-elasticmq
|
dadb6e9ac01d9e7593702d6b1b780d6c140cd3d3
|
[
"MIT"
] | null | null | null |
lib/__init__.py
|
JohnEskimSmith/export-elasticmq
|
dadb6e9ac01d9e7593702d6b1b780d6c140cd3d3
|
[
"MIT"
] | null | null | null |
lib/__init__.py
|
JohnEskimSmith/export-elasticmq
|
dadb6e9ac01d9e7593702d6b1b780d6c140cd3d3
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = "SAI"
__license__ = "GPLv3"
__email__ = "andrew.foma@gmail.com"
__status__ = "Dev"
from .upload_settings import *
from .upload_records import *
from .upload_utils import *
from .upload_sqs import *
from .upload_parse_multi_records import *
from .upload_files_utils import *
| 23.857143
| 41
| 0.745509
| 45
| 334
| 4.977778
| 0.622222
| 0.267857
| 0.357143
| 0.205357
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006897
| 0.131737
| 334
| 13
| 42
| 25.692308
| 0.765517
| 0.125749
| 0
| 0
| 0
| 0
| 0.110345
| 0.072414
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.6
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
82e7a11b4e3671736342f1de0fd9b639f7b78e3b
| 15,043
|
py
|
Python
|
applications/ParticleMechanicsApplication/tests/test_generate_mpm_particle.py
|
lkusch/Kratos
|
e8072d8e24ab6f312765185b19d439f01ab7b27b
|
[
"BSD-4-Clause"
] | null | null | null |
applications/ParticleMechanicsApplication/tests/test_generate_mpm_particle.py
|
lkusch/Kratos
|
e8072d8e24ab6f312765185b19d439f01ab7b27b
|
[
"BSD-4-Clause"
] | null | null | null |
applications/ParticleMechanicsApplication/tests/test_generate_mpm_particle.py
|
lkusch/Kratos
|
e8072d8e24ab6f312765185b19d439f01ab7b27b
|
[
"BSD-4-Clause"
] | null | null | null |
from __future__ import print_function, absolute_import, division
import KratosMultiphysics
import KratosMultiphysics.ParticleMechanicsApplication as KratosParticle
import KratosMultiphysics.KratosUnittest as KratosUnittest
class TestGenerateMPMParticle(KratosUnittest.TestCase):
def _generate_particle_element_and_check(self, current_model, dimension, geometry_element, num_particle, expected_num_particle):
KratosMultiphysics.Logger.GetDefaultOutput().SetSeverity(KratosMultiphysics.Logger.Severity.WARNING)
# Initialize model part
## Material model part definition
material_point_model_part = current_model.CreateModelPart("dummy_name")
material_point_model_part.ProcessInfo.SetValue(KratosMultiphysics.DOMAIN_SIZE, dimension)
## Initial material model part definition
initial_mesh_model_part = current_model.CreateModelPart("Initial_dummy_name")
initial_mesh_model_part.ProcessInfo.SetValue(KratosMultiphysics.DOMAIN_SIZE, dimension)
## Grid model part definition
grid_model_part = current_model.CreateModelPart("Background_Grid")
grid_model_part.ProcessInfo.SetValue(KratosMultiphysics.DOMAIN_SIZE, dimension)
# Create element and nodes for background grids
sub_background = grid_model_part.CreateSubModelPart("test_background")
self._create_nodes(sub_background, dimension, geometry_element)
self._create_elements(sub_background,dimension, geometry_element)
# Create element and nodes for initial meshes
sub_mp = initial_mesh_model_part.CreateSubModelPart("test")
sub_mp.GetProperties()[1].SetValue(KratosParticle.PARTICLES_PER_ELEMENT, num_particle)
self._create_nodes(sub_mp, dimension, geometry_element)
self._create_elements(sub_mp,dimension, geometry_element)
# Generate MP Elements
KratosParticle.GenerateMaterialPointElement(grid_model_part, initial_mesh_model_part, material_point_model_part, False)
# Check total number of element
particle_counter = material_point_model_part.NumberOfElements()
self.assertEqual(expected_num_particle,particle_counter)
def _generate_particle_element_and_check_mp_volume(self, current_model, dimension, geometry_element, num_particle, expected_mp_volume):
KratosMultiphysics.Logger.GetDefaultOutput().SetSeverity(KratosMultiphysics.Logger.Severity.WARNING)
# Initialize model part
## Material model part definition
material_point_model_part = current_model.CreateModelPart("dummy_name")
material_point_model_part.ProcessInfo.SetValue(KratosMultiphysics.DOMAIN_SIZE, dimension)
## Initial material model part definition
initial_mesh_model_part = current_model.CreateModelPart("Initial_dummy_name")
initial_mesh_model_part.ProcessInfo.SetValue(KratosMultiphysics.DOMAIN_SIZE, dimension)
## Grid model part definition
grid_model_part = current_model.CreateModelPart("Background_Grid")
grid_model_part.ProcessInfo.SetValue(KratosMultiphysics.DOMAIN_SIZE, dimension)
# Create element and nodes for background grids
sub_background = grid_model_part.CreateSubModelPart("test_background")
self._create_nodes(sub_background, dimension, geometry_element)
self._create_elements(sub_background,dimension, geometry_element)
# Create element and nodes for initial meshes
sub_mp = initial_mesh_model_part.CreateSubModelPart("test")
sub_mp.GetProperties()[1].SetValue(KratosParticle.PARTICLES_PER_ELEMENT, num_particle)
self._create_nodes(sub_mp, dimension, geometry_element)
self._create_elements(sub_mp,dimension, geometry_element)
# Generate MP Elements
KratosParticle.GenerateMaterialPointElement(grid_model_part, initial_mesh_model_part, material_point_model_part, False)
# Check volume of first material point
for mp in material_point_model_part.Elements:
mp_volume = mp.CalculateOnIntegrationPoints(KratosParticle.MP_VOLUME, grid_model_part.ProcessInfo)[0]
self.assertEqual(expected_mp_volume,mp_volume)
break
def _create_nodes(self, initial_mp, dimension, geometry_element):
if geometry_element == "Triangle":
initial_mp.CreateNewNode(1, 0.0, 0.0, 0.0)
initial_mp.CreateNewNode(2, 1.0, 0.0, 0.0)
initial_mp.CreateNewNode(3, 0.0, 1.0, 0.0)
if (dimension == 3):
initial_mp.CreateNewNode(4, 0.0, 0.0, 1.0)
elif geometry_element == "TriangleSkew":
initial_mp.CreateNewNode(1, 0.0, 0.0, 0.0)
initial_mp.CreateNewNode(2, 2.0, 0.0, 0.0)
initial_mp.CreateNewNode(3, 0.0, 1.0, 0.0)
if (dimension == 3):
initial_mp.CreateNewNode(4, 0.0, 0.0, 1.0)
elif geometry_element == "Quadrilateral":
initial_mp.CreateNewNode(1, -0.5, -0.5, 0.0)
initial_mp.CreateNewNode(2, 0.5, -0.5, 0.0)
initial_mp.CreateNewNode(3, 0.5, 0.5, 0.0)
initial_mp.CreateNewNode(4, -0.5, 0.5, 0.0)
if (dimension == 3):
initial_mp.CreateNewNode(5, -0.5, -0.5, 1.0)
initial_mp.CreateNewNode(6, 0.5, -0.5, 1.0)
initial_mp.CreateNewNode(7, 0.5, 0.5, 1.0)
initial_mp.CreateNewNode(8, -0.5, 0.5, 1.0)
elif geometry_element == "QuadrilateralSkew":
initial_mp.CreateNewNode(1, -0.5, -0.5, 0.0)
initial_mp.CreateNewNode(2, 1.5, -0.5, 0.0)
initial_mp.CreateNewNode(3, 0.5, 0.5, 0.0)
initial_mp.CreateNewNode(4, -0.5, 0.5, 0.0)
if (dimension == 3):
initial_mp.CreateNewNode(5, -0.5, -0.5, 1.0)
initial_mp.CreateNewNode(6, 0.5, -0.5, 1.0)
initial_mp.CreateNewNode(7, 0.5, 0.5, 1.0)
initial_mp.CreateNewNode(8, -0.5, 0.5, 1.0)
def _create_elements(self, initial_mp, dimension, geometry_element):
if geometry_element == "Triangle" or geometry_element == "TriangleSkew":
if (dimension == 2):
initial_mp.CreateNewElement("Element2D3N", 1, [1,2,3], initial_mp.GetProperties()[1])
if (dimension == 3):
initial_mp.CreateNewElement("Element3D4N", 1, [1,2,3,4], initial_mp.GetProperties()[1])
elif geometry_element == "Quadrilateral" or geometry_element == "QuadrilateralSkew":
if (dimension == 2):
initial_mp.CreateNewElement("Element2D4N", 1, [1,2,3,4], initial_mp.GetProperties()[1])
if (dimension == 3):
initial_mp.CreateNewElement("Element3D8N", 1, [1,2,3,4,5,6,7,8], initial_mp.GetProperties()[1])
KratosMultiphysics.VariableUtils().SetFlag(KratosMultiphysics.ACTIVE, True, initial_mp.Elements)
def test_GenerateMPMParticleTriangle2D1P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=2, geometry_element="Triangle", num_particle=1, expected_num_particle=1)
def test_GenerateMPMParticleTriangle2D3P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=2, geometry_element="Triangle", num_particle=3, expected_num_particle=3)
def test_GenerateMPMParticleTriangle2D6P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=2, geometry_element="Triangle", num_particle=6, expected_num_particle=6)
def test_GenerateMPMParticleTriangle2D12P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=2, geometry_element="Triangle", num_particle=12, expected_num_particle=12)
def test_GenerateMPMParticleTriangle2D16P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=2, geometry_element="Triangle", num_particle=16, expected_num_particle=16)
def test_GenerateMPMParticleTriangle2D33P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=2, geometry_element="Triangle", num_particle=33, expected_num_particle=33)
def test_GenerateMPMParticleTriangle2DDefault(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=2, geometry_element="Triangle", num_particle=50, expected_num_particle=3)
def test_GenerateMPMParticleTriangle3D1P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=3, geometry_element="Triangle", num_particle=1, expected_num_particle=1)
def test_GenerateMPMParticleTriangle3D4P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=3, geometry_element="Triangle", num_particle=3, expected_num_particle=4)
def test_GenerateMPMParticleTriangle3D14P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=3, geometry_element="Triangle", num_particle=6, expected_num_particle=14)
def test_GenerateMPMParticleTriangle3D24P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=3, geometry_element="Triangle", num_particle=12, expected_num_particle=24)
def test_GenerateMPMParticleTriangle3DDefault(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=3, geometry_element="Triangle", num_particle=50, expected_num_particle=4)
def test_GenerateMPMParticleQuadrilateral2D1P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=2, geometry_element="Quadrilateral", num_particle=1, expected_num_particle=1)
def test_GenerateMPMParticleQuadrilateral2D4P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=2, geometry_element="Quadrilateral", num_particle=4, expected_num_particle=4)
def test_GenerateMPMParticleQuadrilateral2D9P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=2, geometry_element="Quadrilateral", num_particle=9, expected_num_particle=9)
def test_GenerateMPMParticleQuadrilateral2D16P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=2, geometry_element="Quadrilateral", num_particle=16, expected_num_particle=16)
def test_GenerateMPMParticleQuadrilateral2DDefault(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=2, geometry_element="Quadrilateral", num_particle=50, expected_num_particle=4)
def test_GenerateMPMParticleQuadrilateral3D1P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=3, geometry_element="Quadrilateral", num_particle=1, expected_num_particle=1)
def test_GenerateMPMParticleQuadrilateral3D8P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=3, geometry_element="Quadrilateral", num_particle=4, expected_num_particle=8)
def test_GenerateMPMParticleQuadrilateral3D27P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=3, geometry_element="Quadrilateral", num_particle=9, expected_num_particle=27)
def test_GenerateMPMParticleQuadrilateral3D64P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=3, geometry_element="Quadrilateral", num_particle=16, expected_num_particle=64)
def test_GenerateMPMParticleQuadrilateral3DDefault(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=3, geometry_element="Quadrilateral", num_particle=50, expected_num_particle=8)
# Tests for the correct computation of material point volume in the material point generator
def test_GenerateMPMParticleQuadrilateral2DSkew(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check_mp_volume(current_model, dimension=2, geometry_element="QuadrilateralSkew", num_particle=4, expected_mp_volume=0.44716878364870316)
def test_GenerateMPMParticleQuadrilateral3DSkew(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check_mp_volume(current_model, dimension=3, geometry_element="QuadrilateralSkew", num_particle=4, expected_mp_volume=0.20275105849101815)
def test_GenerateMPMParticleTriangle2DSkew(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check_mp_volume(current_model, dimension=2, geometry_element="TriangleSkew", num_particle=3, expected_mp_volume=0.3333333333333333)
def test_GenerateMPMParticleTriangle3DSkew(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check_mp_volume(current_model, dimension=3, geometry_element="TriangleSkew", num_particle=3, expected_mp_volume=0.08333333333333333)
def test_GenerateMPMParticleQuadrilateral2DNotSkew(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check_mp_volume(current_model, dimension=2, geometry_element="Quadrilateral", num_particle=4, expected_mp_volume=0.25)
def test_GenerateMPMParticleQuadrilateral3DNotSkew(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check_mp_volume(current_model, dimension=3, geometry_element="Quadrilateral", num_particle=4, expected_mp_volume=0.12499999999999993)
def test_GenerateMPMParticleTriangle2DNotSkew(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check_mp_volume(current_model, dimension=2, geometry_element="Triangle", num_particle=3, expected_mp_volume=0.16666666666666666)
def test_GenerateMPMParticleTriangle3DNotSkew(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check_mp_volume(current_model, dimension=3, geometry_element="Triangle", num_particle=3, expected_mp_volume=0.041666666666666664)
if __name__ == '__main__':
KratosUnittest.main()
| 59.932271
| 181
| 0.754969
| 1,714
| 15,043
| 6.274796
| 0.089848
| 0.075872
| 0.068433
| 0.077359
| 0.781311
| 0.77908
| 0.765783
| 0.761971
| 0.761971
| 0.715574
| 0
| 0.039484
| 0.159875
| 15,043
| 250
| 182
| 60.172
| 0.811521
| 0.041016
| 0
| 0.488889
| 0
| 0
| 0.0418
| 0
| 0
| 0
| 0
| 0
| 0.011111
| 1
| 0.188889
| false
| 0
| 0.022222
| 0
| 0.216667
| 0.005556
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7d84ba6e8daa7f46d81022d38abe49ae28dad7b3
| 73
|
py
|
Python
|
app/jwt_helpers/__init__.py
|
docusign/eg-03-python-auth-code-grant
|
e92913e25f753fb6b52fc3da6bc4b76c49c75b37
|
[
"MIT"
] | 7
|
2019-05-09T05:17:35.000Z
|
2020-05-06T14:27:51.000Z
|
app/jwt_helpers/__init__.py
|
docusign/eg-03-python-auth-code-grant
|
e92913e25f753fb6b52fc3da6bc4b76c49c75b37
|
[
"MIT"
] | 1
|
2019-06-25T23:06:34.000Z
|
2019-06-25T23:06:34.000Z
|
app/jwt_helpers/__init__.py
|
docusign/eg-03-python-auth-code-grant
|
e92913e25f753fb6b52fc3da6bc4b76c49c75b37
|
[
"MIT"
] | 8
|
2019-06-21T23:57:48.000Z
|
2020-02-11T18:58:34.000Z
|
from .jwt_helper import create_api_client, get_jwt_token, get_private_key
| 73
| 73
| 0.890411
| 13
| 73
| 4.461538
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068493
| 73
| 1
| 73
| 73
| 0.852941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7d8a18703ea07662fcb7b8d49335b94c1ff36ed7
| 570
|
py
|
Python
|
tests/test_sexp.py
|
gitoleg/bap-ida-python
|
f1cdd95578c331f1f3fba2150c2e2d134b8897f0
|
[
"MIT"
] | 81
|
2016-06-10T19:07:12.000Z
|
2022-03-23T08:15:41.000Z
|
tests/test_sexp.py
|
gitoleg/bap-ida-python
|
f1cdd95578c331f1f3fba2150c2e2d134b8897f0
|
[
"MIT"
] | 22
|
2016-06-16T19:35:59.000Z
|
2020-12-10T14:53:38.000Z
|
tests/test_sexp.py
|
gitoleg/bap-ida-python
|
f1cdd95578c331f1f3fba2150c2e2d134b8897f0
|
[
"MIT"
] | 29
|
2016-06-10T18:26:04.000Z
|
2022-02-14T06:15:30.000Z
|
from bap.utils.sexp import parse
def test_parse():
assert parse('()') == []
assert parse('hello') == 'hello'
assert parse('"hello world"') == '"hello world"'
assert parse('(hello world)') == ['hello', 'world']
assert parse('(() () ())') == [[], [], []]
assert parse("hi'") == "hi'"
assert parse('hello"') == 'hello"'
assert parse('(hello\" cruel world\")') == ['hello"', 'cruel', 'world"']
assert parse('(a (b c) c (d (e f) g) h') == [
'a',
['b', 'c'],
'c',
['d', ['e', 'f'], 'g'],
'h'
]
| 28.5
| 76
| 0.440351
| 65
| 570
| 3.846154
| 0.323077
| 0.396
| 0.32
| 0.176
| 0.596
| 0.596
| 0.596
| 0.364
| 0.072
| 0
| 0
| 0
| 0.270175
| 570
| 19
| 77
| 30
| 0.600962
| 0
| 0
| 0
| 0
| 0
| 0.284211
| 0
| 0
| 0
| 0
| 0
| 0.529412
| 1
| 0.058824
| true
| 0
| 0.058824
| 0
| 0.117647
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7da69b07b172a6afb3ffe8fcb055417415d0f60a
| 121
|
py
|
Python
|
tests/test_nothing.py
|
thundergolfer/PyGrobid
|
04e55f5f0e11537f367e281718a519d225ce2f70
|
[
"Apache-2.0"
] | 1
|
2021-09-11T21:29:57.000Z
|
2021-09-11T21:29:57.000Z
|
tests/test_nothing.py
|
thundergolfer-old/PyGrobid
|
04e55f5f0e11537f367e281718a519d225ce2f70
|
[
"Apache-2.0"
] | null | null | null |
tests/test_nothing.py
|
thundergolfer-old/PyGrobid
|
04e55f5f0e11537f367e281718a519d225ce2f70
|
[
"Apache-2.0"
] | null | null | null |
# This test is here so we don't get a non-zero code from Pytest in Travis CI build.
def test_dummy():
assert 5 == 5
| 24.2
| 83
| 0.68595
| 25
| 121
| 3.28
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021739
| 0.239669
| 121
| 4
| 84
| 30.25
| 0.869565
| 0.669421
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7db30e571f8c55b8142793cce2416c315c6d4319
| 6,977
|
py
|
Python
|
predict_live.py
|
hulsmeier/best_voxelnet_ever
|
aeefd32711a5c986c6099d53c5a2efdf9e01ea48
|
[
"MIT"
] | null | null | null |
predict_live.py
|
hulsmeier/best_voxelnet_ever
|
aeefd32711a5c986c6099d53c5a2efdf9e01ea48
|
[
"MIT"
] | null | null | null |
predict_live.py
|
hulsmeier/best_voxelnet_ever
|
aeefd32711a5c986c6099d53c5a2efdf9e01ea48
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding:UTF-8 -*-
import glob
import argparse
import os
import time
import tensorflow.compat.v1 as tf
import socket
import struct
import win32pipe, win32file, pywintypes
from config import cfg
from model import RPN3D
from utils import *
from utils.kitti_loader import iterate_data, sample_test_data
parser = argparse.ArgumentParser(description='testing')
parser.add_argument('-d', '--decrease', type=bool, nargs='?', default=False,
help='set the flag to True if decrease model')
parser.add_argument('-m', '--minimize', type=bool, nargs='?', default=False,
help='set the flag to True if minimize model')
args = parser.parse_args()
res_dir = os.path.join('.', './predictions')
save_model_dir = os.path.join('.', 'save_model', 'default')
os.makedirs(res_dir, exist_ok=True)
os.makedirs(os.path.join(res_dir, 'data'), exist_ok=True)
serverIPAddress = '127.0.0.1'
serverPortNumber = 44444
def main(_):
    """Run live 3D-object prediction: restore the RPN3D model, then loop
    forever reading length-prefixed LiDAR frames from a Windows named pipe
    and printing how many objects each prediction step produced.

    The unused argument receives argv from ``tf.app.run``.
    """
    with tf.Graph().as_default():
        # Cap per-process GPU memory and restrict visible devices per config.
        gpu_options = tf.GPUOptions(
            per_process_gpu_memory_fraction=cfg.GPU_MEMORY_FRACTION,
            visible_device_list=cfg.GPU_AVAILABLE,
            allow_growth=True
        )
        config = tf.ConfigProto(
            gpu_options=gpu_options,
            device_count={"GPU": cfg.GPU_USE_COUNT,},
            allow_soft_placement=True,
        )
        with tf.Session(config=config) as sess:
            # Single-sample inference, spread over the configured GPUs.
            model = RPN3D(
                cls=cfg.DETECT_OBJ,
                decrease=args.decrease,
                minimize=args.minimize,
                single_batch_size=1,
                avail_gpus=cfg.GPU_AVAILABLE.split(',')
            )
            # param init/restore
            if tf.train.get_checkpoint_state(save_model_dir):
                print("Reading model parameters from %s" % save_model_dir)
                model.saver.restore(sess, tf.train.latest_checkpoint(save_model_dir))
            # Open the Windows named pipe unbuffered (third arg 0) in
            # read/write binary mode; the producer writes LiDAR frames to it.
            f = open(r'\\.\pipe\LidarData', 'r+b', 0)
            while True:
                # Wire format: 4-byte unsigned little-endian length prefix,
                # then exactly that many payload bytes.
                n = struct.unpack('I', f.read(4))[0]
                data = f.read(n)
                # NOTE(review): seek(0) on a pipe handle appears intended to
                # reset position between messages — confirm pipe semantics.
                f.seek(0)
                f_lidar = data
                # NOTE(review): sample_data_live is presumably provided by the
                # ``from utils import *`` star import — confirm.
                batch = sample_data_live(f_lidar)
                results = model.predict_step_live(sess, batch)
                # # #for result in zip(results):
                # # #   labels = box3d_to_label([result[:, 1:8]], [result[:, 0]], [result[:, -1]], coordinate='lidar')[0]
                # # #   print('write out {} objects'.format(len(labels)))
                print('write out {} objects'.format(len(results)))
if __name__ == '__main__':
    # tf.app.run parses flags and then invokes main(argv).
    tf.app.run(main)
##!/usr/bin/env python
## -*- coding:UTF-8 -*-
#import glob
#import argparse
#import os
#import time
#import tensorflow as tf
#import socket
#import struct
#from config import cfg
#from model import RPN3D
#from utils import *
#from utils.kitti_loader import iterate_data, sample_test_data
#parser = argparse.ArgumentParser(description='testing')
#parser.add_argument('-d', '--decrease', type=bool, nargs='?', default=False,
# help='set the flag to True if decrease model')
#parser.add_argument('-m', '--minimize', type=bool, nargs='?', default=False,
# help='set the flag to True if minimize model')
#args = parser.parse_args()
#res_dir = os.path.join('.', './predictions')
#save_model_dir = os.path.join('.', 'save_model', 'default')
#os.makedirs(res_dir, exist_ok=True)
#os.makedirs(os.path.join(res_dir, 'data'), exist_ok=True)
#serverIPAddress = '127.0.0.1'
#serverPortNumber = 44444
#def main(_):
# with tf.Graph().as_default():
# gpu_options = tf.GPUOptions(
# per_process_gpu_memory_fraction=cfg.GPU_MEMORY_FRACTION,
# visible_device_list=cfg.GPU_AVAILABLE,
# allow_growth=True
# )
# config = tf.ConfigProto(
# gpu_options=gpu_options,
# device_count={"GPU": cfg.GPU_USE_COUNT,},
# allow_soft_placement=True,
# )
# with tf.Session(config=config) as sess:
# model = RPN3D(
# cls=cfg.DETECT_OBJ,
# decrease=args.decrease,
# minimize=args.minimize,
# single_batch_size=1,
# avail_gpus=cfg.GPU_AVAILABLE.split(',')
# )
# # param init/restore
# if tf.train.get_checkpoint_state(save_model_dir):
# print("Reading model parameters from %s" % save_model_dir)
# model.saver.restore(sess, tf.train.latest_checkpoint(save_model_dir))
# with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as clientSocket:
# connectedToServer = False
# while connectedToServer == False:
# try:
# clientSocket.connect((serverIPAddress, serverPortNumber))
# connectedToServer = True
# except Exception as e:
# print("Waiting for server...")
# while True:
# print("Waiting for data...")
# numberOfBytesReceived = 0
# messageSizeReceived = False
# currentMessageSize = 0
# bytesFromClient = b''
# newBytesFromClient = clientSocket.recv(1000024)
# print(len(newBytesFromClient))
# while len(newBytesFromClient) > 0:
# numberOfBytesReceived += len(newBytesFromClient)
# bytesFromClient += newBytesFromClient
# if(bytesFromClient == b''):
# break
# if messageSizeReceived == False:
# if numberOfBytesReceived >= 4:
# currentMessageSize = struct.unpack('i', bytesFromClient[:4])[0]
# messageSizeReceived = True
# else:
# continue
# if numberOfBytesReceived >= currentMessageSize + 4:
# f_lidar = bytesFromClient[:currentMessageSize]
# batch = sample_data_live(f_lidar)
# results = model.predict_step_live(sess, batch)
# #for result in zip(results):
# # labels = box3d_to_label([result[:, 1:8]], [result[:, 0]], [result[:, -1]], coordinate='lidar')[0]
# # print('write out {} objects'.format(len(labels)))
# print('write out {} objects'.format(len(results)))
# break
# newBytesFromClient = clientSocket.recv(1000024)
#if __name__ == '__main__':
# tf.app.run(main)
| 31.286996
| 132
| 0.54565
| 713
| 6,977
| 5.154278
| 0.246844
| 0.02449
| 0.026122
| 0.021769
| 0.75483
| 0.75483
| 0.739592
| 0.727619
| 0.727619
| 0.727619
| 0
| 0.015588
| 0.337968
| 6,977
| 223
| 133
| 31.286996
| 0.780039
| 0.612584
| 0
| 0
| 0
| 0
| 0.092379
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017241
| false
| 0
| 0.206897
| 0
| 0.224138
| 0.034483
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7dcbce74b688ffe7ed28feae607be9ea474e56c7
| 4,441
|
py
|
Python
|
tests/test_management.py
|
fsecada01/django-unused-media
|
ee177edb359e641e010671977fc336880a0a3862
|
[
"MIT"
] | 2
|
2021-12-02T11:41:02.000Z
|
2021-12-27T12:01:53.000Z
|
venv/Lib/site-packages/tests/test_management.py
|
serenasensini/TheRedCode_Docker-per-Django-e-Postgres
|
78a2ca1f09ab956a6936d14a5fd99336ff39f472
|
[
"BSD-3-Clause"
] | null | null | null |
venv/Lib/site-packages/tests/test_management.py
|
serenasensini/TheRedCode_Docker-per-Django-e-Postgres
|
78a2ca1f09ab956a6936d14a5fd99336ff39f472
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import mock
import six
from preggy import expect
from django.core.management import call_command
from .base import BaseTestCase
class TestManagementCommand(BaseTestCase):
    """End-to-end tests for the ``cleanup_unused_media`` management command."""

    def test_command_call(self):
        """The command completes without error in non-interactive mode."""
        expect(call_command('cleanup_unused_media', interactive=False)).Not.to_be_an_error()

    def test_command_nothing_to_delete(self):
        """With no unused media the command reports there is nothing to do."""
        out = six.StringIO()
        call_command('cleanup_unused_media', interactive=False, stdout=out)
        expect(out.getvalue().split('\n')).to_include(u'Nothing to delete. Exit')

    def test_command_not_interactive(self):
        """An unused file is deleted without prompting when interactive=False."""
        self._media_create('file.txt')
        out = six.StringIO()
        call_command('cleanup_unused_media', interactive=False, stdout=out)
        (expect(out.getvalue().split('\n'))
            .to_include(u'Remove {}'.format(self._media_abs_path(u'file.txt')))
            .to_include(u'Done. Total files removed: 1'))
        expect(self._media_exists('file.txt')).to_be_false()

    @mock.patch('six.moves.input', return_value='n')
    def test_command_interactive_n(self, mock_input):
        """Answering 'n' at the prompt aborts and keeps the file."""
        self._media_create(u'file.txt')
        out = six.StringIO()
        call_command('cleanup_unused_media', interactive=True, stdout=out)
        expect(out.getvalue().split('\n')).to_include(u'Interrupted by user. Exit.')
        expect(self._media_exists(u'file.txt')).to_be_true()

    @mock.patch('six.moves.input', return_value='Y')
    def test_command_interactive_y(self, mock_input):
        """Answering 'Y' at the prompt removes the file and reports it."""
        self._media_create(u'file.txt')
        out = six.StringIO()
        call_command('cleanup_unused_media', interactive=True, stdout=out)
        (expect(out.getvalue().split('\n'))
            .to_include(u'Remove {}'.format(self._media_abs_path(u'file.txt')))
            .to_include(u'Done. Total files removed: 1'))
        expect(self._media_exists(u'file.txt')).to_be_false()

    @mock.patch('six.moves.input', return_value='Y')
    def test_command_interactive_y_with_ascii(self, mock_input):
        """Non-ASCII file names are reported correctly on both Python 2 and 3."""
        self._media_create(u'Тест.txt')
        expected_string = u'Remove {}'.format(self._media_abs_path(u'Тест.txt'))
        if six.PY2:
            # Python 2 writes byte strings to stdout, so encode the expectation.
            expected_string = expected_string.encode('utf-8')
        out = six.StringIO()
        call_command('cleanup_unused_media', interactive=True, stdout=out)
        (expect(out.getvalue().split('\n'))
            .to_include(expected_string)
            .to_include(u'Done. Total files removed: 1'))
        expect(self._media_exists(u'Тест.txt')).to_be_false()

    @mock.patch('django_unused_media.management.commands.cleanup_unused_media.remove_empty_dirs')
    def test_command_do_not_remove_dirs(self, mock_remove_empty_dirs):
        """Empty directories are left alone unless explicitly requested."""
        self._media_create(u'sub1/sub2/sub3/notused.txt')
        call_command('cleanup_unused_media', interactive=False)
        mock_remove_empty_dirs.assert_not_called()

    @mock.patch('django_unused_media.management.commands.cleanup_unused_media.remove_empty_dirs')
    def test_command_remove_dirs(self, mock_remove_empty_dirs):
        """remove_empty_dirs=True triggers directory cleanup exactly once."""
        self._media_create(u'sub1/sub2/sub3/notused.txt')
        call_command('cleanup_unused_media', interactive=False, remove_empty_dirs=True)
        mock_remove_empty_dirs.assert_called_once()

    def test_command_dry_run(self):
        """dry_run=True lists candidates but deletes nothing."""
        self._media_create('file.txt')
        out = six.StringIO()
        call_command('cleanup_unused_media', interactive=False, dry_run=True, stdout=out)
        (expect(out.getvalue().split('\n'))
            .to_include(self._media_abs_path(u'file.txt'))
            .to_include(u'Total files will be removed: 1')
            .to_include(u'Dry run. Exit.'))
        expect(self._media_exists('file.txt')).to_be_true()

    @mock.patch('six.moves.input', return_value='Y')
    def test_command_interactive_y_verbosity_0(self, mock_input):
        """verbosity=0 suppresses the per-file listing but still deletes."""
        self._media_create(u'file.txt')
        out = six.StringIO()
        call_command('cleanup_unused_media', interactive=True, stdout=out, verbosity=0)
        (expect(out.getvalue().split('\n'))
            .Not.to_include(u'Files to remove:')
            .Not.to_include(self._media_abs_path(u'file.txt'))
            .Not.to_include(u'Remove {}'.format(self._media_abs_path(u'file.txt')))
            .to_include(u'Done. Total files removed: 1'))
        expect(self._media_exists(u'file.txt')).to_be_false()
| 39.300885
| 97
| 0.677775
| 594
| 4,441
| 4.754209
| 0.148148
| 0.063739
| 0.076487
| 0.084986
| 0.793201
| 0.763456
| 0.759207
| 0.733003
| 0.714943
| 0.695467
| 0
| 0.004425
| 0.185769
| 4,441
| 112
| 98
| 39.651786
| 0.776549
| 0.004729
| 0
| 0.4875
| 0
| 0
| 0.201901
| 0.04708
| 0
| 0
| 0
| 0
| 0.025
| 1
| 0.125
| false
| 0
| 0.0625
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
814525ab201eae8cf04fea2013e429ce025ca6dc
| 55
|
py
|
Python
|
prefetch/__init__.py
|
ValHayot/rollingprefetch
|
dacb8b5e741461c268d960ace46e22a926a4339f
|
[
"MIT"
] | 1
|
2021-11-15T22:06:27.000Z
|
2021-11-15T22:06:27.000Z
|
prefetch/__init__.py
|
ValHayot/rollingprefetch
|
dacb8b5e741461c268d960ace46e22a926a4339f
|
[
"MIT"
] | 1
|
2021-07-28T21:47:24.000Z
|
2021-08-24T02:52:36.000Z
|
prefetch/__init__.py
|
ValHayot/rollingprefetch
|
dacb8b5e741461c268d960ace46e22a926a4339f
|
[
"MIT"
] | null | null | null |
from .core import S3PrefetchFileSystem, S3PrefetchFile
| 27.5
| 54
| 0.872727
| 5
| 55
| 9.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04
| 0.090909
| 55
| 1
| 55
| 55
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.