hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d1c6f7adc9880608e1bfc2ca1228d58adcc82681
| 26,827
|
py
|
Python
|
util/analysis/goal_optimization.py
|
joelouismarino/variational_rl
|
11dc14bfb56f3ebbfccd5de206b78712a8039a9a
|
[
"MIT"
] | 15
|
2020-10-20T22:09:36.000Z
|
2021-12-24T13:40:36.000Z
|
util/analysis/goal_optimization.py
|
joelouismarino/variational_rl
|
11dc14bfb56f3ebbfccd5de206b78712a8039a9a
|
[
"MIT"
] | null | null | null |
util/analysis/goal_optimization.py
|
joelouismarino/variational_rl
|
11dc14bfb56f3ebbfccd5de206b78712a8039a9a
|
[
"MIT"
] | 1
|
2020-10-23T19:48:06.000Z
|
2020-10-23T19:48:06.000Z
|
import comet_ml
import json
import torch
import copy
import pickle
import numpy as np
from torch import optim
from torch.distributions import Normal
from lib import create_agent
from lib.layers import FullyConnectedLayer
from lib.value_estimators import GoalBasedQEstimator
from lib.inference import GradientBasedInference, IterativeInferenceModel, CEMInference, DirectGoalInferenceModel
from util.env_util import create_env, SynchronousEnv
from util.plot_util import load_checkpoint
from local_vars import PROJECT_NAME, WORKSPACE, LOADING_API_KEY, LOGGING_API_KEY
from misc import divide_gradients_by_value
N_TOTAL_STEPS = 250
GOAL_INTERVAL = 20
GOAL_STD = 1e-2
RENDER = False
BATCH_SIZE = 256
GOAL_FLIP_PROB = 0.05
ENCODING_TYPE = 'grads'
TRAJECTORY_FOLLOW = False
# iterative optimizer exp: b1c6214082ed4e74af70c4215e275406
# model-based exp: 3bf28c960c224b989d26fa7c1237dd5e
def goal_optimization(model_exp_key, opt_exp_key=None, write_results=True):
    """
    Optimize random goal states using a model-based estimator.

    Note: tailored to HalfCheetah-v2 environment currently.

    Args:
        model_exp_key (str): model-based experiment key
        opt_exp_key (str): optimizer experiment key. If None, a gradient-based
            inference optimizer is created instead
        write_results (bool): whether to pickle results directly

    Returns:
        dict: logged 'goal_states', 'traj_states', 'env_states', 'actions',
            and 'inf_objectives'
    """
    # load the model-based experiment from comet
    comet_api = comet_ml.API(api_key=LOADING_API_KEY)
    experiment = comet_api.get_experiment(project_name=PROJECT_NAME,
                                          workspace=WORKSPACE,
                                          experiment=model_exp_key)
    # create the environment
    param_summary = experiment.get_parameters_summary()
    env_name = [a for a in param_summary if a['name'] == 'env'][0]['valueCurrent']
    env = create_env(env_name)
    # create the agent
    asset_list = experiment.get_asset_list()
    agent_config_asset_list = [a for a in asset_list if 'agent_args' in a['fileName']]
    agent_args = None
    if len(agent_config_asset_list) > 0:
        # if we've saved the agent config dict, load it
        agent_args = experiment.get_asset(agent_config_asset_list[0]['assetId'])
        agent_args = json.loads(agent_args)
        agent_args = agent_args if 'opt_type' in agent_args['inference_optimizer_args'] else None
    agent = create_agent(env, agent_args=agent_args)[0]
    # also, load the most recent episode to sample goal states
    asset_times = [asset['createdAt'] for asset in asset_list if 'state' in asset['fileName']]
    state_asset = [a for a in asset_list if a['createdAt'] == max(asset_times)][0]
    episode_states = json.loads(experiment.get_asset(state_asset['assetId']))
    # load the checkpoint
    load_checkpoint(agent, model_exp_key)
    # load the optimizer
    if opt_exp_key is not None:
        # load the optimizer experiment
        comet_api = comet_ml.API(api_key=LOADING_API_KEY)
        opt_experiment = comet_api.get_experiment(project_name=PROJECT_NAME,
                                                  workspace=WORKSPACE,
                                                  experiment=opt_exp_key)
        # create the agent
        asset_list = opt_experiment.get_asset_list()
        agent_config_asset_list = [a for a in asset_list if 'agent_args' in a['fileName']]
        agent_args = None
        if len(agent_config_asset_list) > 0:
            # if we've saved the agent config dict, load it
            agent_args = opt_experiment.get_asset(agent_config_asset_list[0]['assetId'])
            agent_args = json.loads(agent_args)
            agent_args = agent_args if 'opt_type' in agent_args['inference_optimizer_args'] else None
        opt_agent = create_agent(env, agent_args=agent_args)[0]
        # load the checkpoint
        load_checkpoint(opt_agent, opt_exp_key)
        # swap in the loaded amortized optimizer
        agent.inference_optimizer = opt_agent.inference_optimizer
        agent.inference_optimizer.n_inf_iters = 20
    else:
        # create a gradient-based optimizer
        agent.inference_optimizer = GradientBasedInference(lr=1e-3, n_inf_iters=50)
    # swap out the value estimator for goal-based estimator
    gb_estimator = GoalBasedQEstimator()
    # copy over the dynamics model
    gb_estimator.state_likelihood_model = agent.q_value_estimator.state_likelihood_model
    gb_estimator.state_variable = agent.q_value_estimator.state_variable
    # set the estimator
    agent.q_value_estimator = gb_estimator
    agent.q_value_estimator.set_goal_std(GOAL_STD)
    # optimize goal states
    goal_states = []
    traj_states = []
    env_states = {'qpos': [], 'qvel': []}
    actions = []
    inf_objectives = []
    agent.reset(); agent.eval()
    state = env.reset()
    if RENDER:
        env.render()
    goal_state = None
    print('Collecting goal-optimization episode...')
    for step_ind in range(N_TOTAL_STEPS):
        print('STEP: ' + str(step_ind))
        if step_ind % GOAL_INTERVAL == 0:
            # sample a new goal state from the loaded episode
            goal_state = episode_states[np.random.randint(0, 25)]
            goal_state = torch.from_numpy(np.array(goal_state)).float().view(1,-1)
            # zero out dimensions 8+ of the goal (presumably velocities — TODO confirm)
            goal_state[:, 8:] *= 0.
            if not TRAJECTORY_FOLLOW:
                agent.q_value_estimator.set_goal_state(goal_state)
        if TRAJECTORY_FOLLOW:
            # define a sub-goal between current state and goal state
            delta_state = goal_state - state
            traj_state = state + 0.1 * delta_state
            agent.q_value_estimator.set_goal_state(traj_state)
            traj_states.append(traj_state)
        else:
            # bug fix: append the current goal state; previously this appended
            # the entire goal_states list object at every step
            traj_states.append(goal_state)
        goal_states.append(goal_state)
        env_states['qpos'].append(copy.deepcopy(env.sim.data.qpos))
        env_states['qvel'].append(copy.deepcopy(env.sim.data.qvel))
        action = agent.act(state, eval=True)
        state, _, _, _ = env.step(action)
        inf_objectives.append(agent.inference_optimizer.estimated_objectives)
        agent.inference_optimizer.reset(1)
        if RENDER:
            env.render()
        actions.append(action)
    print('Done.')
    # save the results (inf_objectives now included, matching the
    # training variant's result format)
    results = {'goal_states': goal_states,
               'traj_states': traj_states,
               'env_states': env_states,
               'actions': actions,
               'inf_objectives': inf_objectives}
    if write_results:
        pickle.dump(results, open('goal_opt_' + model_exp_key + '.p', 'wb'))
    return results
def goal_optimization_training(model_exp_key, opt_exp_key=None,
                               write_results=True, stochastic_model=False,
                               train_model=False):
    """
    Optimize random goal states using a model-based estimator.
    Train the policy optimizer online.

    Note: tailored to HalfCheetah-v2 environment currently.

    Args:
        model_exp_key (str): model-based experiment key
        opt_exp_key (str): optimizer experiment key. If None, trains from scratch
        write_results (bool): whether to pickle results directly
        stochastic_model (bool): whether to sample states or use mean estimate
        train_model (bool): whether to train the model online

    Returns:
        dict: logged goal/trajectory states, env states, actions, inference
            objectives, and state-prediction diagnostics
    """
    # load the experiment
    comet_api = comet_ml.API(api_key=LOADING_API_KEY)
    experiment = comet_api.get_experiment(project_name=PROJECT_NAME,
                                          workspace=WORKSPACE,
                                          experiment=model_exp_key)
    # create the environment
    param_summary = experiment.get_parameters_summary()
    env_name = [a for a in param_summary if a['name'] == 'env'][0]['valueCurrent']
    env = create_env(env_name)
    # create a synchronous env to parallelize training
    sync_env = SynchronousEnv(env, BATCH_SIZE)
    # create the agent
    asset_list = experiment.get_asset_list()
    agent_config_asset_list = [a for a in asset_list if 'agent_args' in a['fileName']]
    agent_args = None
    if len(agent_config_asset_list) > 0:
        # if we've saved the agent config dict, load it
        agent_args = experiment.get_asset(agent_config_asset_list[0]['assetId'])
        agent_args = json.loads(agent_args)
        agent_args = agent_args if 'opt_type' in agent_args['inference_optimizer_args'] else None
    agent = create_agent(env, agent_args=agent_args)[0]
    # also, load the most recent episode to sample goal states
    asset_times = [asset['createdAt'] for asset in asset_list if 'state' in asset['fileName']]
    state_asset = [a for a in asset_list if a['createdAt'] == max(asset_times)][0]
    episode_states = json.loads(experiment.get_asset(state_asset['assetId']))
    # load the checkpoint
    load_checkpoint(agent, model_exp_key)
    if stochastic_model:
        agent.q_value_estimator.state_variable.cond_likelihood.stochastic = True
    # load the optimizer
    if opt_exp_key is not None:
        # load the optimizer experiment
        comet_api = comet_ml.API(api_key=LOADING_API_KEY)
        opt_experiment = comet_api.get_experiment(project_name=PROJECT_NAME,
                                                  workspace=WORKSPACE,
                                                  experiment=opt_exp_key)
        # create the agent
        asset_list = opt_experiment.get_asset_list()
        agent_config_asset_list = [a for a in asset_list if 'agent_args' in a['fileName']]
        agent_args = None
        if len(agent_config_asset_list) > 0:
            # if we've saved the agent config dict, load it
            agent_args = opt_experiment.get_asset(agent_config_asset_list[0]['assetId'])
            agent_args = json.loads(agent_args)
            agent_args = agent_args if 'opt_type' in agent_args['inference_optimizer_args'] else None
        opt_agent = create_agent(env, agent_args=agent_args)[0]
        # load the checkpoint
        load_checkpoint(opt_agent, opt_exp_key)
        agent.inference_optimizer = opt_agent.inference_optimizer
        agent.inference_optimizer.n_inf_iters = 10
    else:
        # create an iterative amortized optimizer from scratch
        n_input = 12
        if ENCODING_TYPE == 'grads':
            inputs = ['params', 'grads']
            n_input += 12
        elif ENCODING_TYPE == 'errors':
            inputs = ['params', 'errors']
            n_input += (17 + 17 + 6)
        n_units = 512
        network_args = {'type': 'recurrent',
                        'n_layers': 2,
                        'inputs': inputs,
                        'n_units': n_units,
                        'connectivity': 'highway',
                        'batch_norm': False,
                        'dropout': None,
                        'separate_networks': False,
                        'n_input': n_input}
        agent.inference_optimizer = IterativeInferenceModel(network_args=network_args, n_inf_iters=5, encoding_type=ENCODING_TYPE)
        for m in agent.approx_post.models:
            agent.approx_post.models[m] = FullyConnectedLayer(n_units, 6)
            agent.approx_post.gates[m] = FullyConnectedLayer(n_units, 6, non_linearity='sigmoid')
    # create a parameter optimizer for the inference model
    inference_parameters = [_ for _ in agent.inference_optimizer.parameters()] + [_ for _ in agent.approx_post.parameters()]
    param_opt = optim.Adam(inference_parameters, lr=3e-4)
    # swap out the value estimator for goal-based estimator
    gb_estimator = GoalBasedQEstimator()
    # copy over the dynamics model
    gb_estimator.state_likelihood_model = agent.q_value_estimator.state_likelihood_model
    gb_estimator.state_variable = agent.q_value_estimator.state_variable
    # set the estimator
    agent.q_value_estimator = gb_estimator
    agent.q_value_estimator.set_goal_std(GOAL_STD)
    model_param_opt = None
    if train_model:
        # create a parameter optimizer for the dynamics model
        model_parameters = [_ for _ in agent.q_value_estimator.state_likelihood_model.parameters()] + [_ for _ in agent.q_value_estimator.state_variable.parameters()]
        model_param_opt = optim.Adam(model_parameters, lr=3e-4)
    # optimize goal states
    goal_states = []
    traj_states = []
    env_states = {'qpos': [], 'qvel': []}
    actions = []
    inf_objectives = []
    state_log_likelihoods = []
    state_squared_errors = []
    state_locs = []
    state_scales = []
    model_cll_training = []
    agent.reset(batch_size=BATCH_SIZE); agent.eval()
    state = sync_env.reset()
    if RENDER:
        env.render()
    goal_state = None
    state_likelihood = None
    print('Collecting goal-optimization episode...')
    for step_ind in range(N_TOTAL_STEPS):
        print('STEP: ' + str(step_ind))
        # sample candidate goal states from the loaded episode every step
        new_goal_states = np.stack([episode_states[np.random.randint(0, 25)] for _ in range(BATCH_SIZE)])
        new_goal_states = torch.from_numpy(new_goal_states).float().view(BATCH_SIZE,-1)
        # zero out dimensions 8+ of the goal (presumably velocities — TODO confirm)
        new_goal_states[:, 8:] *= 0.
        if step_ind == 0:
            goal_state = new_goal_states
        else:
            # randomly change the goal state with some small probability
            flips = (torch.rand(BATCH_SIZE, 1) < GOAL_FLIP_PROB).float().repeat(1, new_goal_states.shape[-1])
            goal_state = (1 - flips) * goal_state + flips * new_goal_states
        if not TRAJECTORY_FOLLOW:
            agent.q_value_estimator.set_goal_state(goal_state)
        if TRAJECTORY_FOLLOW:
            # define a sub-goal between current state and goal state
            delta_state = goal_state - state
            traj_state = state + 0.1 * delta_state
            agent.q_value_estimator.set_goal_state(traj_state)
            traj_states.append(traj_state)
        else:
            # bug fix: append the current goal state; previously this appended
            # the entire goal_states list object at every step
            traj_states.append(goal_state)
        goal_states.append(goal_state)
        # record the underlying simulator state for each parallel env
        qpos = np.stack([copy.deepcopy(e.sim.data.qpos) for e in sync_env.envs])
        qvel = np.stack([copy.deepcopy(e.sim.data.qvel) for e in sync_env.envs])
        env_states['qpos'].append(qpos)
        env_states['qvel'].append(qvel)
        action = agent.act(state, eval=True)
        state, _, _, _ = sync_env.step(action)
        inf_objectives.append(agent.inference_optimizer.estimated_objectives)
        if train_model:
            # train the dynamics model on the newly observed state
            agent.q_value_estimator.generate(agent)
            cll = - agent.q_value_estimator.state_variable.cond_log_likelihood(state).view(-1, 1).mean()
            model_cll_training.append(cll.detach().item())
            cll.backward()
            model_param_opt.step()
            # bug fix: clear the model gradients so they do not accumulate
            # across steps (the inference optimizer below already does this)
            model_param_opt.zero_grad()
        # evaluate the previous step's state prediction against the new state
        if state_likelihood is not None:
            state_ll = state_likelihood.log_prob(state)
            state_log_likelihoods.append(state_ll)
            state_squared_error = (state_likelihood.loc - state).pow(2)
            state_squared_errors.append(state_squared_error)
        state_loc = agent.collector.distributions['state']['cond_like']['loc'][-1]
        state_scale = agent.collector.distributions['state']['cond_like']['scale'][-1]
        state_locs.append(state_loc)
        state_scales.append(state_scale)
        state_likelihood = Normal(state_loc, state_scale)
        # update the inference optimizer
        grads = [param.grad for param in inference_parameters]
        divide_gradients_by_value(grads, agent.inference_optimizer.n_inf_iters)
        divide_gradients_by_value(grads, BATCH_SIZE)
        param_opt.step()
        param_opt.zero_grad()
        agent.inference_optimizer.reset(BATCH_SIZE)
        if RENDER:
            env.render()
        actions.append(action)
    print('Done.')
    # save the results
    results = {'goal_states': goal_states,
               'traj_states': traj_states,
               'env_states': env_states,
               'actions': actions,
               'inf_objectives': inf_objectives,
               'state_locs': state_locs,
               'state_scales': state_scales,
               'state_log_likelihoods': state_log_likelihoods,
               'state_squared_errors': state_squared_errors,
               'model_cll_training': model_cll_training}
    if write_results:
        pickle.dump(results, open('goal_opt_' + model_exp_key + '.p', 'wb'))
    return results
def collect_goal_optimization(agent, env, goals, inf_optim=None):
    """
    Subroutine for goal optimization.

    Runs one episode of N_TOTAL_STEPS, setting the goal at each step from the
    pre-sampled ``goals`` list, and, if ``inf_optim`` is given, training the
    amortized inference model online at every step.

    Args:
        agent (Agent): agent with a goal-based Q-value estimator
        env (synchronous gym.Env): batched environment of BATCH_SIZE copies
        goals (list of torch.Tensor): one goal state per step
        inf_optim (optimizer, optional): optimizer for amortized inference model

    Returns:
        dict: per-step logs — env states, actions, inference objectives,
            state predictions/errors, and the goal states
    """
    env_states = {'qpos': [], 'qvel': []}
    actions = []
    inf_objectives = []
    state_log_likelihoods = []
    state_squared_errors = []
    state_locs = []
    state_scales = []
    # NOTE(review): never filled here; kept for result-format parity with
    # goal_optimization_training
    model_cll_training = []
    inference_parameters = []
    if inf_optim is not None:
        # collect all trainable parameters of the amortized inference model
        inference_parameters = [_ for _ in agent.inference_optimizer.parameters()] + [_ for _ in agent.approx_post.parameters()]
    agent.reset(batch_size=BATCH_SIZE); agent.eval()
    state = env.reset()
    state_likelihood = None
    print(' Collecting goal-optimization episode...')
    for step_ind in range(N_TOTAL_STEPS):
        if step_ind % 20 == 0:
            print(' STEP: ' + str(step_ind))
        # set the goal
        agent.q_value_estimator.set_goal_state(goals[step_ind])
        # record the underlying simulator state for each parallel env
        qpos = np.stack([copy.deepcopy(e.sim.data.qpos) for e in env.envs])
        qvel = np.stack([copy.deepcopy(e.sim.data.qvel) for e in env.envs])
        env_states['qpos'].append(qpos)
        env_states['qvel'].append(qvel)
        # interact, step environment
        action = agent.act(state, eval=True)
        state, _, _, _ = env.step(action)
        inf_objectives.append(agent.inference_optimizer.estimated_objectives)
        # evaluate the previous step's state prediction against the new state
        if state_likelihood is not None:
            state_ll = state_likelihood.log_prob(state)
            state_log_likelihoods.append(state_ll)
            state_squared_error = (state_likelihood.loc - state).pow(2)
            state_squared_errors.append(state_squared_error)
        # cache this step's predicted state distribution for the next iteration
        state_loc = agent.collector.distributions['state']['cond_like']['loc'][-1]
        state_scale = agent.collector.distributions['state']['cond_like']['scale'][-1]
        state_locs.append(state_loc)
        state_scales.append(state_scale)
        state_likelihood = Normal(state_loc, state_scale)
        # update the inference optimizer
        if inf_optim is not None:
            # get the final optimizer objective
            on_policy_action = agent.approx_post.sample(agent.n_action_samples)
            obj = agent.estimate_objective(state, on_policy_action)
            obj = obj.view(agent.n_action_samples, -1, 1).mean(dim=0)
            obj = - obj * agent.batch_size
            obj.mean(dim=0).sum().backward()
            # update: normalize accumulated grads, then step and clear
            grads = [param.grad for param in inference_parameters]
            divide_gradients_by_value(grads, agent.inference_optimizer.n_inf_iters)
            divide_gradients_by_value(grads, BATCH_SIZE)
            inf_optim.step()
            inf_optim.zero_grad()
        agent.inference_optimizer.reset(BATCH_SIZE)
        actions.append(action)
    print(' Done.')
    results_dict = {'env_states': env_states,
                    'actions': actions,
                    'inf_objectives': inf_objectives,
                    'state_log_likelihoods': state_log_likelihoods,
                    'state_squared_errors': state_squared_errors,
                    'state_locs': state_locs,
                    'state_scales': state_scales,
                    'model_cll_training': model_cll_training,
                    'goal_states': goals}
    return results_dict
def compare_goal_optimizers(model_exp_key, opt_exp_key=None,
                            write_results=True, stochastic_model=False):
    """
    Optimize random goal states using a model-based estimator.
    Train the policy optimizer online. Compare with other optimizers.

    Note: tailored to HalfCheetah-v2 environment currently.

    Args:
        model_exp_key (str): model-based experiment key
        opt_exp_key (str): optimizer experiment key. NOTE(review): currently
            unused — every optimizer below is constructed from scratch
        write_results (bool): whether to pickle results directly
        stochastic_model (bool): whether to sample states or use mean estimate

    Returns:
        dict: results keyed by optimizer type ('grad_based', 'cem', 'it_am',
            'goal_cond'); 'cem' stays None since its evaluation is commented out
    """
    ## MODEL
    # load the model experiment
    comet_api = comet_ml.API(api_key=LOADING_API_KEY)
    experiment = comet_api.get_experiment(project_name=PROJECT_NAME,
                                          workspace=WORKSPACE,
                                          experiment=model_exp_key)
    # create the environment
    param_summary = experiment.get_parameters_summary()
    env_name = [a for a in param_summary if a['name'] == 'env'][0]['valueCurrent']
    env = create_env(env_name)
    # create a synchronous env to parallelize training
    sync_env = SynchronousEnv(env, BATCH_SIZE)
    # create the agent
    asset_list = experiment.get_asset_list()
    agent_config_asset_list = [a for a in asset_list if 'agent_args' in a['fileName']]
    agent_args = None
    if len(agent_config_asset_list) > 0:
        # if we've saved the agent config dict, load it
        agent_args = experiment.get_asset(agent_config_asset_list[0]['assetId'])
        agent_args = json.loads(agent_args)
        agent_args = agent_args if 'opt_type' in agent_args['inference_optimizer_args'] else None
    agent = create_agent(env, agent_args=agent_args)[0]
    # also, load the most recent episode to sample goal states
    asset_times = [asset['createdAt'] for asset in asset_list if 'state' in asset['fileName']]
    state_asset = [a for a in asset_list if a['createdAt'] == max(asset_times)][0]
    episode_states = json.loads(experiment.get_asset(state_asset['assetId']))
    # load the checkpoint
    load_checkpoint(agent, model_exp_key)
    if stochastic_model:
        agent.q_value_estimator.state_variable.cond_likelihood.stochastic = True
    # swap out the value estimator for goal-based estimator
    gb_estimator = GoalBasedQEstimator()
    # copy over the dynamics model
    gb_estimator.state_likelihood_model = agent.q_value_estimator.state_likelihood_model
    gb_estimator.state_variable = agent.q_value_estimator.state_variable
    # set the estimator
    agent.q_value_estimator = gb_estimator
    agent.q_value_estimator.set_goal_std(GOAL_STD)
    total_results = {'grad_based': None,
                     'cem': None,
                     'it_am': None,
                     'goal_cond': None}
    # pre-sample one shared goal sequence so every optimizer sees identical goals
    goals = []
    print('Sampling goals...')
    for step_ind in range(N_TOTAL_STEPS):
        new_goal_states = np.stack([episode_states[np.random.randint(0, 25)] for _ in range(BATCH_SIZE)])
        new_goal_states = torch.from_numpy(new_goal_states).float().view(BATCH_SIZE,-1)
        # zero out dimensions 8+ of the goal (presumably velocities — TODO confirm)
        new_goal_states[:, 8:] *= 0.
        if step_ind == 0:
            goal_state = new_goal_states
        else:
            # randomly change the goal state with some small probability
            flips = (torch.rand(BATCH_SIZE, 1) < GOAL_FLIP_PROB).float().repeat(1, new_goal_states.shape[-1])
            goal_state = (1 - flips) * goal_state + flips * new_goal_states
        goals.append(goal_state)
    print('Evaluating gradient-based agent...')
    agent.inference_optimizer = GradientBasedInference(lr=1e-3, n_inf_iters=50)
    grad_based_results = collect_goal_optimization(agent, sync_env, goals)
    total_results['grad_based'] = grad_based_results
    print('Done.')
    # CEM evaluation disabled:
    # print('Evaluating CEM agent...')
    # agent.inference_optimizer = CEMInference(lr=1e-3, n_top_samples=10, n_inf_iters=50)
    # agent.n_action_samples = 100
    # cem_results = collect_goal_optimization(agent, sync_env, goals)
    # total_results['cem'] = cem_results
    # print('Done.')
    print('Evaluating iterative amortized agent...')
    # create an iterative amortized optimizer
    inputs = ['params', 'grads', 'state']
    n_input = 24
    if 'state' in inputs:
        n_input += 17
    network_args = {'type': 'recurrent',
                    'n_layers': 2,
                    'inputs': inputs,
                    'n_units': 512,
                    'connectivity': 'highway',
                    'n_input': n_input}
    agent.inference_optimizer = IterativeInferenceModel(network_args=network_args, n_inf_iters=10)
    for m in agent.approx_post.models:
        agent.approx_post.models[m] = FullyConnectedLayer(512, 6)
        agent.approx_post.gates[m] = FullyConnectedLayer(512, 6, non_linearity='sigmoid')
    agent.approx_post.update = 'iterative'
    # create a parameter optimizer for the inference model
    inference_parameters = [_ for _ in agent.inference_optimizer.parameters()] + [_ for _ in agent.approx_post.parameters()]
    inf_optim = optim.Adam(inference_parameters, lr=3e-4)
    it_am_results = collect_goal_optimization(agent, sync_env, goals, inf_optim=inf_optim)
    total_results['it_am'] = it_am_results
    print('Done.')
    print('Evaluating goal-conditioned agent...')
    # create a direct, goal-conditioned network
    network_args = {'type': 'fully_connected',
                    'n_layers': 2,
                    'inputs': ['state', 'goal'],
                    'n_units': 512,
                    'connectivity': 'highway',
                    'n_input': 17 + 17}
    agent.inference_optimizer = DirectGoalInferenceModel(network_args=network_args)
    for m in agent.approx_post.models:
        agent.approx_post.models[m] = FullyConnectedLayer(512, 6)
    agent.approx_post.update = 'direct'
    # create a parameter optimizer for the inference model
    inference_parameters = [_ for _ in agent.inference_optimizer.parameters()] + [_ for _ in agent.approx_post.parameters()]
    inf_optim = optim.Adam(inference_parameters, lr=3e-4)
    goal_cond_results = collect_goal_optimization(agent, sync_env, goals, inf_optim=inf_optim)
    total_results['goal_cond'] = goal_cond_results
    print('Done.')
    if write_results:
        pickle.dump(total_results, open('comp_goal_opt_' + model_exp_key + '.p', 'wb'))
    return total_results
| 42.247244
| 166
| 0.650576
| 3,350
| 26,827
| 4.916119
| 0.09194
| 0.027324
| 0.033518
| 0.027931
| 0.83982
| 0.820936
| 0.80861
| 0.792519
| 0.771449
| 0.74455
| 0
| 0.010726
| 0.256272
| 26,827
| 634
| 167
| 42.31388
| 0.814705
| 0.169382
| 0
| 0.714286
| 0
| 0
| 0.07187
| 0.007355
| 0
| 0
| 0
| 0
| 0
| 1
| 0.009524
| false
| 0
| 0.038095
| 0
| 0.057143
| 0.038095
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d1d59db327690fc8010dfdd1ff50625c6b22d6bd
| 3,964
|
py
|
Python
|
test_autogalaxy/quantity/plot/test_fit_quantity_plotters.py
|
caoxiaoyue/PyAutoGalaxy
|
ad2b4b27404f5bf0f65ba9a0cd7c3ee6570e2d05
|
[
"MIT"
] | 4
|
2019-10-29T13:27:23.000Z
|
2020-03-24T11:13:35.000Z
|
test_autogalaxy/quantity/plot/test_fit_quantity_plotters.py
|
caoxiaoyue/PyAutoGalaxy
|
ad2b4b27404f5bf0f65ba9a0cd7c3ee6570e2d05
|
[
"MIT"
] | null | null | null |
test_autogalaxy/quantity/plot/test_fit_quantity_plotters.py
|
caoxiaoyue/PyAutoGalaxy
|
ad2b4b27404f5bf0f65ba9a0cd7c3ee6570e2d05
|
[
"MIT"
] | 3
|
2020-02-12T10:29:59.000Z
|
2020-03-24T11:13:53.000Z
|
from os import path
import pytest
import autogalaxy.plot as aplt
directory = path.dirname(path.realpath(__file__))
@pytest.fixture(name="plot_path")
def make_galaxy_fit_plotter_setup():
    """Return the output directory used for galaxy-fitting plot files."""
    test_dir = "{}".format(path.dirname(path.realpath(__file__)))
    return path.join(test_dir, "files", "plots", "galaxy_fitting")
def test__fit_individuals__source_and_galaxy__dependent_on_input(
    fit_quantity_7x7_array_2d,
    fit_quantity_7x7_vector_yx_2d,
    include_2d_all,
    plot_path,
    plot_patch,
):
    """Only the requested quantity figures should be written to disk."""
    # filename stem -> whether figures_2d (below) should have output it
    expected = {
        "image_2d": True,
        "noise_map": False,
        "signal_to_noise_map": False,
        "model_image": True,
        "residual_map": False,
        "normalized_residual_map": False,
        "chi_squared_map": True,
    }

    def run_plotter(fit):
        # build a png-writing plotter and request the same subset of figures
        plotter = aplt.FitQuantityPlotter(
            fit=fit,
            include_2d=include_2d_all,
            mat_plot_2d=aplt.MatPlot2D(output=aplt.Output(plot_path, format="png")),
        )
        plotter.figures_2d(
            image=True,
            noise_map=False,
            signal_to_noise_map=False,
            model_image=True,
            chi_squared_map=True,
        )

    def check_outputs(suffix):
        for stem, should_exist in expected.items():
            full_path = path.join(plot_path, stem + suffix + ".png")
            if should_exist:
                assert full_path in plot_patch.paths
            else:
                assert full_path not in plot_patch.paths

    # scalar (Array2D) fit: plain filenames
    run_plotter(fit_quantity_7x7_array_2d)
    check_outputs("")

    # vector (yx) fit: one figure per component, suffixed _y and _x
    run_plotter(fit_quantity_7x7_vector_yx_2d)
    check_outputs("_y")
    check_outputs("_x")
def test__fit_sub_plot__all_types_of_fit(
    fit_quantity_7x7_array_2d,
    fit_quantity_7x7_vector_yx_2d,
    include_2d_all,
    plot_patch,
    plot_path,
):
    """Check ``subplot_fit_quantity`` output files for both fit types.

    An ``array_2d`` fit should produce a single combined subplot; a
    ``vector_yx_2d`` fit should produce separate ``_y`` and ``_x`` subplots.
    (Fixture semantics inferred from fixture names — confirm against conftest.)
    """
    # Array-valued fit: one combined subplot is written to disk.
    fit_quantity_plotter = aplt.FitQuantityPlotter(
        fit=fit_quantity_7x7_array_2d,
        mat_plot_2d=aplt.MatPlot2D(output=aplt.Output(path=plot_path, format="png")),
    )
    fit_quantity_plotter.subplot_fit_quantity()

    assert path.join(plot_path, "subplot_fit_quantity.png") in plot_patch.paths

    # Vector-valued fit: per-component y and x subplots are written.
    fit_quantity_plotter = aplt.FitQuantityPlotter(
        fit=fit_quantity_7x7_vector_yx_2d,
        mat_plot_2d=aplt.MatPlot2D(output=aplt.Output(path=plot_path, format="png")),
    )
    fit_quantity_plotter.subplot_fit_quantity()

    # Removed leftover debug print(plot_patch.paths).
    assert path.join(plot_path, "subplot_fit_quantity_y.png") in plot_patch.paths
    assert path.join(plot_path, "subplot_fit_quantity_x.png") in plot_patch.paths
| 36.703704
| 89
| 0.719475
| 593
| 3,964
| 4.411467
| 0.121417
| 0.094801
| 0.133792
| 0.165138
| 0.897171
| 0.876529
| 0.860092
| 0.857416
| 0.842125
| 0.842125
| 0
| 0.01335
| 0.187437
| 3,964
| 107
| 90
| 37.046729
| 0.79882
| 0
| 0
| 0.470588
| 0
| 0
| 0.136375
| 0.071558
| 0
| 0
| 0
| 0
| 0.282353
| 1
| 0.035294
| false
| 0
| 0.035294
| 0.011765
| 0.082353
| 0.011765
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d1ea781a5ad7017d367882ab6b3bbd23d2918481
| 548
|
py
|
Python
|
Semana 5/ejemplo2.py
|
CesarEdmundo/actividades_en_ccss
|
8fef4bf10a74eb2424470e7196c8f00e193bb020
|
[
"MIT"
] | 8
|
2021-04-06T23:12:32.000Z
|
2022-02-24T06:06:13.000Z
|
Semana 5/ejemplo2.py
|
CesarEdmundo/actividades_en_ccss
|
8fef4bf10a74eb2424470e7196c8f00e193bb020
|
[
"MIT"
] | null | null | null |
Semana 5/ejemplo2.py
|
CesarEdmundo/actividades_en_ccss
|
8fef4bf10a74eb2424470e7196c8f00e193bb020
|
[
"MIT"
] | 5
|
2021-04-18T01:23:02.000Z
|
2021-07-08T01:08:36.000Z
|
def eleva_potencia_cc(a, b):
    """Return ``a`` raised to the ``b``-th power, plus a fixed offset of 21.

    Parameters
    ----------
    a : number
        Base of the power.
    b : number
        Exponent.

    Returns
    -------
    number
        ``a ** b + 21`` (the offset is ``10 + 11``, kept from the original).
    """
    offset = 10 + 11  # same constant the original built via z_var
    return a ** b + offset
def eleva_potencia_dd(a, b):
    """Compute ``a ** b`` shifted by the constant 21.

    Behaves identically to ``eleva_potencia_cc``: the shift is assembled
    as ``10 + 11`` and added to the power.

    Parameters
    ----------
    a : number
        Base.
    b : number
        Exponent.

    Returns
    -------
    number
        ``a ** b + 21``.
    """
    z_var = 11
    shift = 10 + z_var
    power = a ** b
    return power + shift
| 18.896552
| 39
| 0.525547
| 74
| 548
| 3.783784
| 0.283784
| 0.028571
| 0.114286
| 0.092857
| 0.871429
| 0.871429
| 0.871429
| 0.871429
| 0.871429
| 0.871429
| 0
| 0.022857
| 0.361314
| 548
| 29
| 40
| 18.896552
| 0.777143
| 0.45438
| 0
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ae1b6cc1b83eecebca194551b36de02a0ba63394
| 93
|
py
|
Python
|
modularity/__init__.py
|
akhambhati/modularity
|
202e2e1405942844aedc7c76c5c843f19b459848
|
[
"MIT"
] | null | null | null |
modularity/__init__.py
|
akhambhati/modularity
|
202e2e1405942844aedc7c76c5c843f19b459848
|
[
"MIT"
] | null | null | null |
modularity/__init__.py
|
akhambhati/modularity
|
202e2e1405942844aedc7c76c5c843f19b459848
|
[
"MIT"
] | null | null | null |
from . import matrix
from .genlouvain import genlouvain
from .genlouvain import _metanetwork
| 23.25
| 36
| 0.83871
| 11
| 93
| 7
| 0.454545
| 0.363636
| 0.519481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 93
| 3
| 37
| 31
| 0.950617
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ae24381236415f1739edc376d2fcbc005533ba05
| 131
|
py
|
Python
|
txaws/server/tests/fixtures/importerror/amodule.py
|
vargas/txaws
|
b75d00e042c6e7e1609c05e01ee54e1c72b1eaf6
|
[
"MIT"
] | 24
|
2017-01-09T23:48:30.000Z
|
2020-10-07T09:54:45.000Z
|
txaws/server/tests/fixtures/importerror/amodule.py
|
vargas/txaws
|
b75d00e042c6e7e1609c05e01ee54e1c72b1eaf6
|
[
"MIT"
] | 85
|
2016-12-16T06:24:51.000Z
|
2020-05-29T20:39:37.000Z
|
txaws/server/tests/fixtures/importerror/amodule.py
|
vargas/txaws
|
b75d00e042c6e7e1609c05e01ee54e1c72b1eaf6
|
[
"MIT"
] | 12
|
2016-12-26T12:49:57.000Z
|
2020-05-11T19:49:32.000Z
|
from txaws.server.method import Method
from txaws.server.tests.fixtures import method
@method
class TestMethod(Method):
    """Minimal no-op ``Method`` subclass registered via the ``@method`` decorator.

    Lives under ``tests/fixtures/importerror`` — presumably a fixture used by
    import-time registration tests; confirm against the tests that import it.
    """

    pass
| 16.375
| 46
| 0.793893
| 18
| 131
| 5.777778
| 0.555556
| 0.173077
| 0.288462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137405
| 131
| 7
| 47
| 18.714286
| 0.920354
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
ae2ab95c475bdea0d1aa048d7f244cceebadb9e3
| 283
|
py
|
Python
|
sources/__init__.py
|
Groomsha/backup-kvm
|
fa72a4f45b47f1695652dfc701437bdd51dce1aa
|
[
"Apache-2.0"
] | null | null | null |
sources/__init__.py
|
Groomsha/backup-kvm
|
fa72a4f45b47f1695652dfc701437bdd51dce1aa
|
[
"Apache-2.0"
] | null | null | null |
sources/__init__.py
|
Groomsha/backup-kvm
|
fa72a4f45b47f1695652dfc701437bdd51dce1aa
|
[
"Apache-2.0"
] | null | null | null |
from .backup.backup_kvm_lvm import *
from .backup.backup_kvm_image import *
from .backup.backup_dir_ssh import *
from .restore.restore_kvm_lvm import *
from .restore.restore_kvm_image import *
from .service.delete_folder_backup import *
from .service.messenger_application import *
| 31.444444
| 44
| 0.823322
| 41
| 283
| 5.365854
| 0.341463
| 0.272727
| 0.218182
| 0.172727
| 0.245455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102474
| 283
| 9
| 44
| 31.444444
| 0.866142
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
884ae5c9e5f9f399b45ef662e76a84a6b052a03f
| 4,275
|
py
|
Python
|
museos/migrations/0005_auto_20180504_1806.py
|
LydiaGarrido/X-Serv-Practica-Museos
|
0089dc0a54d3daacaf91abe1441d5cbbc1f761b2
|
[
"Apache-2.0"
] | null | null | null |
museos/migrations/0005_auto_20180504_1806.py
|
LydiaGarrido/X-Serv-Practica-Museos
|
0089dc0a54d3daacaf91abe1441d5cbbc1f761b2
|
[
"Apache-2.0"
] | null | null | null |
museos/migrations/0005_auto_20180504_1806.py
|
LydiaGarrido/X-Serv-Practica-Museos
|
0089dc0a54d3daacaf91abe1441d5cbbc1f761b2
|
[
"Apache-2.0"
] | 6
|
2019-05-22T11:52:38.000Z
|
2021-07-05T09:46:21.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Auto-generated migration: give every listed field the string default ``'Null'``.

    NOTE(review): the default is the literal string ``'Null'``, not SQL NULL —
    presumably intentional from the generating model defaults; confirm against
    ``models.py`` before changing.
    """

    dependencies = [
        ('museos', '0004_auto_20180502_1405'),
    ]
    operations = [
        migrations.AlterField(
            model_name='comentarios',
            name='comentario',
            field=models.TextField(default='Null'),
        ),
        migrations.AlterField(
            model_name='configuracion',
            name='color_fondo',
            field=models.CharField(default='Null', max_length=50),
        ),
        migrations.AlterField(
            model_name='configuracion',
            name='letra_size',
            field=models.CharField(default='Null', max_length=50),
        ),
        migrations.AlterField(
            model_name='configuracion',
            name='titulo',
            field=models.CharField(default='Null', max_length=100),
        ),
        migrations.AlterField(
            model_name='museo',
            name='barrio',
            field=models.CharField(default='Null', max_length=50),
        ),
        migrations.AlterField(
            model_name='museo',
            name='clase_vial',
            field=models.CharField(default='Null', max_length=50),
        ),
        migrations.AlterField(
            model_name='museo',
            name='content_url',
            field=models.URLField(default='Null', max_length=350),
        ),
        migrations.AlterField(
            model_name='museo',
            name='descripcion',
            field=models.TextField(default='Null'),
        ),
        migrations.AlterField(
            model_name='museo',
            name='descripcion_entidad',
            field=models.TextField(default='Null'),
        ),
        migrations.AlterField(
            model_name='museo',
            name='distrito',
            field=models.CharField(default='Null', max_length=50),
        ),
        migrations.AlterField(
            model_name='museo',
            name='email',
            field=models.CharField(default='Null', max_length=100),
        ),
        migrations.AlterField(
            model_name='museo',
            name='equipamiento',
            field=models.TextField(default='Null'),
        ),
        migrations.AlterField(
            model_name='museo',
            name='fax',
            field=models.CharField(default='Null', max_length=100),
        ),
        migrations.AlterField(
            model_name='museo',
            name='horario',
            field=models.TextField(default='Null'),
        ),
        migrations.AlterField(
            model_name='museo',
            name='localidad',
            field=models.CharField(default='Null', max_length=50),
        ),
        migrations.AlterField(
            model_name='museo',
            name='nombre',
            field=models.CharField(default='Null', max_length=150),
        ),
        migrations.AlterField(
            model_name='museo',
            name='nombre_via',
            field=models.CharField(default='Null', max_length=50),
        ),
        migrations.AlterField(
            model_name='museo',
            name='num',
            field=models.CharField(default='Null', max_length=50),
        ),
        migrations.AlterField(
            model_name='museo',
            name='orientacion',
            field=models.TextField(default='Null'),
        ),
        migrations.AlterField(
            model_name='museo',
            name='planta',
            field=models.CharField(default='Null', max_length=50),
        ),
        migrations.AlterField(
            model_name='museo',
            name='provincia',
            field=models.CharField(default='Null', max_length=50),
        ),
        migrations.AlterField(
            model_name='museo',
            name='telefono',
            field=models.CharField(default='Null', max_length=150),
        ),
        migrations.AlterField(
            model_name='museo',
            name='tipo_num',
            field=models.CharField(default='Null', max_length=10),
        ),
        migrations.AlterField(
            model_name='museo',
            name='transporte',
            field=models.TextField(default='Null'),
        ),
    ]
| 31.666667
| 67
| 0.53614
| 371
| 4,275
| 6.026954
| 0.183288
| 0.214669
| 0.268336
| 0.31127
| 0.818426
| 0.804562
| 0.779964
| 0.747764
| 0.727191
| 0.700358
| 0
| 0.020106
| 0.336842
| 4,275
| 134
| 68
| 31.902985
| 0.768607
| 0.004912
| 0
| 0.726563
| 0
| 0
| 0.113829
| 0.005409
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.015625
| 0
| 0.039063
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
88665d1248f58980060c6a82ed9cdf4373128b43
| 1,811
|
py
|
Python
|
tests/models/test_pyod_integrations.py
|
selimfirat/pysad
|
dff2ff38258eb8a85c9d34cf5f0b876fc1dc9ede
|
[
"BSD-3-Clause"
] | 155
|
2020-08-17T12:52:38.000Z
|
2022-03-19T02:59:26.000Z
|
tests/models/test_pyod_integrations.py
|
shubhsoni/pysad
|
dff2ff38258eb8a85c9d34cf5f0b876fc1dc9ede
|
[
"BSD-3-Clause"
] | 2
|
2020-10-22T09:50:28.000Z
|
2021-02-15T02:01:44.000Z
|
tests/models/test_pyod_integrations.py
|
shubhsoni/pysad
|
dff2ff38258eb8a85c9d34cf5f0b876fc1dc9ede
|
[
"BSD-3-Clause"
] | 14
|
2020-10-09T17:08:23.000Z
|
2022-03-25T11:30:12.000Z
|
def test_reference_window(test_path):
    """Smoke-test ``ReferenceWindowModel`` (IForest backend) on arrhythmia data.

    Streams the shuffled dataset after a 100-sample warm-up window, scoring
    each instance and accumulating AUROC. The test only checks that the
    pipeline runs end to end; the AUROC value is printed, not asserted.
    """
    from sklearn.utils import shuffle
    from pysad.models.integrations import ReferenceWindowModel
    from pysad.utils import Data
    from pysad.evaluation import AUROCMetric
    from pysad.utils import ArrayStreamer
    import os
    from pyod.models.iforest import IForest

    data = Data(os.path.join(test_path, "../../examples/data"))
    X_all, y_all = data.get_data("arrhythmia.mat")
    X_all, y_all = shuffle(X_all, y_all)

    # First 100 rows seed the reference window; the rest are streamed.
    model = ReferenceWindowModel(model_cls=IForest, window_size=240, sliding_size=30,
                                 initial_window_X=X_all[:100])
    iterator = ArrayStreamer(shuffle=False)
    auroc = AUROCMetric()
    # Removed dead `y_pred` list: scores were appended but never read.
    for X, y in iterator.iter(X_all[100:], y_all[100:]):
        model.fit_partial(X)
        score = model.score_partial(X)
        auroc.update(y, score)
    print("AUROC: ", auroc.get())
def test_one_fit(test_path):
    """Smoke-test ``OneFitModel`` (IForest backend) on arrhythmia data.

    Mirrors ``test_reference_window``: fit once on the first 100 samples,
    then stream the remainder, scoring each instance into an AUROC metric.
    Only end-to-end execution is checked; the AUROC value is printed.
    """
    from sklearn.utils import shuffle
    from pysad.utils import Data
    from pysad.evaluation import AUROCMetric
    from pysad.utils import ArrayStreamer
    import os
    from pyod.models.iforest import IForest
    from pysad.models.integrations.one_fit_model import OneFitModel

    data = Data(os.path.join(test_path, "../../examples/data"))
    X_all, y_all = data.get_data("arrhythmia.mat")
    # Removed leftover debug print(X_all, y_all) — sibling test has no such print.
    X_all, y_all = shuffle(X_all, y_all)

    model = OneFitModel(model_cls=IForest, initial_X=X_all[:100])
    iterator = ArrayStreamer(shuffle=False)
    auroc = AUROCMetric()
    # Removed dead `y_pred` list: scores were appended but never read.
    for X, y in iterator.iter(X_all[100:], y_all[100:]):
        model.fit_partial(X)
        score = model.score_partial(X)
        auroc.update(y, score)
    print("AUROC: ", auroc.get())
| 27.439394
| 85
| 0.673661
| 251
| 1,811
| 4.673307
| 0.207171
| 0.037511
| 0.029838
| 0.047741
| 0.783461
| 0.783461
| 0.783461
| 0.783461
| 0.783461
| 0.712702
| 0
| 0.01622
| 0.217007
| 1,811
| 65
| 86
| 27.861538
| 0.811001
| 0
| 0
| 0.818182
| 0
| 0
| 0.044199
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045455
| false
| 0
| 0.318182
| 0
| 0.363636
| 0.068182
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
88712afa2aec8f2b4ca3a89926710881dd826b79
| 151,488
|
py
|
Python
|
mythic-docker/app/routes/websocket_routes.py
|
djhohnstein/Mythic
|
22b69c0658c157be109fea9a424145a58b8a4f7a
|
[
"BSD-3-Clause"
] | 1
|
2020-11-29T02:14:06.000Z
|
2020-11-29T02:14:06.000Z
|
mythic-docker/app/routes/websocket_routes.py
|
euphrat1ca/Mythic
|
2938e08dd26d8044c7586513bca23bb5808beed0
|
[
"BSD-3-Clause"
] | null | null | null |
mythic-docker/app/routes/websocket_routes.py
|
euphrat1ca/Mythic
|
2938e08dd26d8044c7586513bca23bb5808beed0
|
[
"BSD-3-Clause"
] | 2
|
2020-12-29T02:34:13.000Z
|
2021-06-24T04:07:38.000Z
|
from app import mythic, db_objects, use_ssl
import aiopg
import ujson as js
import asyncio
from app.database_models.model import (
Callback,
Payload,
PayloadType,
C2Profile,
Credential,
FileMeta,
Task,
TaskArtifact,
)
from sanic_jwt.decorators import scoped, inject_user
import app.database_models.model as db_model
import aio_pika
import sys
import base64
from app.api.processlist_api import get_process_tree
# --------------- TASKS --------------------------
# notifications for new tasks
@mythic.websocket("/ws/tasks")
@inject_user()
@scoped(
["auth:user", "auth:apitoken_user"], False
) # user or user-level api token are ok
async def ws_tasks(request, ws, user):
if not await valid_origin_header(request):
return
try:
if not user["admin"]:
await ws.send(js.dumps({"status": "error", "error": "must be an admin"}))
return
async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
async with pool.acquire() as conn:
async with conn.cursor() as cur:
await cur.execute('LISTEN "newtask";')
# before we start getting new things, update with all of the old data
query = await db_model.task_query()
tasks_with_all_info = await db_objects.execute(
query.order_by(db_model.Task.id)
)
# callbacks_with_operators = await db_objects.prefetch(callbacks, operators)
for task in tasks_with_all_info:
await ws.send(js.dumps(task.to_json()))
await ws.send("")
# now pull off any new tasks we got queued up while processing the old data
while True:
try:
msg = conn.notifies.get_nowait()
id = msg.payload
query = await db_model.task_query()
tsk = await db_objects.get(query, id=id)
await ws.send(js.dumps(tsk.to_json()))
except asyncio.QueueEmpty as e:
await asyncio.sleep(2)
await ws.send(
""
) # this is our test to see if the client is still there
continue
except Exception as e:
print(e)
continue
finally:
# print("closed /ws/tasks")
pool.close()
@mythic.websocket("/ws/tasks/current_operation")
@inject_user()
@scoped(
["auth:user", "auth:apitoken_user"], False
) # user or user-level api token are ok
async def ws_tasks(request, ws, user):
if not await valid_origin_header(request):
return
try:
op_query = await db_model.operation_query()
op = await db_objects.get(op_query, name=user["current_operation"])
async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
async with pool.acquire() as conn:
async with conn.cursor() as cur:
await cur.execute('LISTEN "newtask";')
await cur.execute('LISTEN "updatedtask";')
# before we start getting new things, update with all of the old data
query = await db_model.task_query()
tasks_with_all_info = await db_objects.execute(
query.where(db_model.Callback.operation == op).order_by(
db_model.Task.id
)
)
# callbacks_with_operators = await db_objects.prefetch(callbacks, operators)
for task in tasks_with_all_info:
taskj = task.to_json()
taskj["callback"] = task.callback.to_json()
await ws.send(js.dumps(taskj))
await ws.send("")
# now pull off any new tasks we got queued up while processing the old data
while True:
try:
msg = conn.notifies.get_nowait()
id = msg.payload
tsk = await db_objects.get(query, id=id)
if tsk.callback.operation == op:
taskj = tsk.to_json()
taskj["callback"] = tsk.callback.to_json()
await ws.send(js.dumps(taskj))
except asyncio.QueueEmpty as e:
await asyncio.sleep(2)
await ws.send(
""
) # this is our test to see if the client is still there
continue
except Exception as e:
print(e)
continue
finally:
# print("closed /ws/tasks")
pool.close()
@mythic.websocket("/ws/tasks/new/current_operation")
@inject_user()
@scoped(
["auth:user", "auth:apitoken_user"], False
) # user or user-level api token are ok
async def ws_tasks(request, ws, user):
if not await valid_origin_header(request):
return
try:
op_query = await db_model.operation_query()
op = await db_objects.get(op_query, name=user["current_operation"])
async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
async with pool.acquire() as conn:
async with conn.cursor() as cur:
await cur.execute('LISTEN "newtask";')
await cur.execute('LISTEN "updatedtask";')
# before we start getting new things, update with all of the old data
query = await db_model.task_query()
# now pull off any new tasks we got queued up while processing the old data
while True:
try:
msg = conn.notifies.get_nowait()
id = msg.payload
tsk = await db_objects.get(query, id=id)
if tsk.callback.operation == op:
taskj = tsk.to_json()
taskj["callback"] = tsk.callback.to_json()
await ws.send(js.dumps(taskj))
except asyncio.QueueEmpty as e:
await asyncio.sleep(2)
await ws.send(
""
) # this is our test to see if the client is still there
continue
except Exception as e:
print(e)
continue
finally:
# print("closed /ws/tasks")
pool.close()
@mythic.websocket("/ws/task/<tid:int>")
@inject_user()
@scoped(
["auth:user", "auth:apitoken_user"], False
) # user or user-level api token are ok
async def ws_updates_for_task(request, ws, user, tid):
if not await valid_origin_header(request):
return
try:
query = await db_model.operation_query()
operation = await db_objects.get(query, name=user["current_operation"])
async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
async with pool.acquire() as conn:
async with conn.cursor() as cur:
await cur.execute('LISTEN "updatedtask";')
# before we start getting new things, update with all of the old data
query = await db_model.task_query()
task = await db_objects.get(query, id=tid)
if task.callback.operation == operation:
await ws.send(js.dumps(task.to_json()))
else:
return
# now pull off any new tasks we got queued up while processing the old data
while True:
try:
msg = conn.notifies.get_nowait()
id = msg.payload
query = await db_model.task_query()
tsk = await db_objects.get(query, id=id)
if tsk.id == task.id:
await ws.send(js.dumps(tsk.to_json()))
except asyncio.QueueEmpty as e:
await asyncio.sleep(2)
await ws.send(
""
) # this is our test to see if the client is still there
continue
except Exception as e:
print(e)
continue
finally:
# print("closed /ws/tasks")
pool.close()
@mythic.websocket("/ws/task_feed/current_operation")
@inject_user()
@scoped(
["auth:user", "auth:apitoken_user"], False
) # user or user-level api token are ok
async def ws_tasks_current_operation(request, ws, user):
if not await valid_origin_header(request):
return
try:
async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
async with pool.acquire() as conn:
async with conn.cursor() as cur:
await cur.execute('LISTEN "newtask";')
await cur.execute('LISTEN "updatedtask";')
if user["current_operation"] != "":
query = await db_model.operation_query()
operation = await db_objects.get(
query, name=user["current_operation"]
)
query = await db_model.task_query()
# to avoid being too slow, just get the latest 200
initial_tasks = await db_objects.execute(
query.where(Callback.operation == operation)
.order_by(Task.timestamp)
.limit(200)
)
for t in initial_tasks:
await ws.send(
js.dumps(
{
**t.to_json(),
"host": t.callback.host,
"user": t.callback.user,
}
)
)
await ws.send("")
while True:
try:
msg = conn.notifies.get_nowait()
id = msg.payload
t = await db_objects.get(query, id=id)
if t.callback.operation == operation:
await ws.send(
js.dumps(
{
**t.to_json(),
"host": t.callback.host,
"user": t.callback.user,
}
)
)
except asyncio.QueueEmpty as e:
await asyncio.sleep(0.5)
await ws.send(
""
) # this is our test to see if the client is still there
except Exception as e:
print(e)
continue
else:
await ws.send("no_operation")
while True:
await ws.send("")
await asyncio.sleep(0.5)
finally:
# print("closed /ws/tasks")
pool.close()
# --------------- RESPONSES ---------------------------
# notifications for task updates
@mythic.websocket("/ws/responses")
@inject_user()
@scoped(
["auth:user", "auth:apitoken_user"], False
) # user or user-level api token are ok
async def ws_responses(request, ws, user):
if not await valid_origin_header(request):
return
if not user["admin"]:
await ws.send(js.dumps({"status": "error", "error": "must be an admin"}))
return
try:
async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
async with pool.acquire() as conn:
async with conn.cursor() as cur:
await cur.execute('LISTEN "newresponse";')
query = await db_model.response_query()
responses_with_tasks = await db_objects.execute(
query.order_by(db_model.Response.id)
)
for resp in responses_with_tasks:
await ws.send(js.dumps(resp.to_json()))
await ws.send("")
# now pull off any new responses we got queued up while processing old responses
while True:
try:
msg = conn.notifies.get_nowait()
id = msg.payload
rsp = await db_objects.get(query, id=id)
await ws.send(js.dumps(rsp.to_json()))
# print(msg.payload)
except asyncio.QueueEmpty as e:
await asyncio.sleep(2)
await ws.send(
""
) # this is our test to see if the client is still there
continue
except Exception as e:
print(e)
continue
finally:
# print("closed /ws/task_updates")
pool.close()
@mythic.websocket("/ws/responses/current_operation")
@inject_user()
@scoped(
["auth:user", "auth:apitoken_user"], False
) # user or user-level api token are ok
async def ws_responses(request, ws, user):
if not await valid_origin_header(request):
return
try:
query = await db_model.operation_query()
op = await db_objects.get(query, name=user["current_operation"])
async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
async with pool.acquire() as conn:
async with conn.cursor() as cur:
await cur.execute('LISTEN "newresponse";')
query = await db_model.response_query()
responses_with_tasks = await db_objects.execute(
query.where(db_model.Callback.operation == op).order_by(
db_model.Response.id
)
)
for resp in responses_with_tasks:
await ws.send(js.dumps(resp.to_json()))
await ws.send("")
# now pull off any new responses we got queued up while processing old responses
while True:
try:
msg = conn.notifies.get_nowait()
id = msg.payload
rsp = await db_objects.get(query, id=id)
if rsp.task.callback.operation == op:
await ws.send(js.dumps(rsp.to_json()))
# print(msg.payload)
except asyncio.QueueEmpty as e:
await asyncio.sleep(2)
await ws.send(
""
) # this is our test to see if the client is still there
continue
except Exception as e:
print(e)
continue
finally:
# print("closed /ws/task_updates")
pool.close()
@mythic.websocket("/ws/responses/new/current_operation")
@inject_user()
@scoped(
["auth:user", "auth:apitoken_user"], False
) # user or user-level api token are ok
async def ws_responses(request, ws, user):
if not await valid_origin_header(request):
return
try:
query = await db_model.operation_query()
op = await db_objects.get(query, name=user["current_operation"])
async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
async with pool.acquire() as conn:
async with conn.cursor() as cur:
await cur.execute('LISTEN "newresponse";')
query = await db_model.response_query()
while True:
try:
msg = conn.notifies.get_nowait()
id = msg.payload
rsp = await db_objects.get(query, id=id)
if rsp.task.callback.operation == op:
await ws.send(js.dumps(rsp.to_json()))
# print(msg.payload)
except asyncio.QueueEmpty as e:
await asyncio.sleep(2)
await ws.send(
""
) # this is our test to see if the client is still there
continue
except Exception as e:
print(e)
continue
finally:
# print("closed /ws/task_updates")
pool.close()
@mythic.websocket("/ws/responses/by_task/<tid:int>")
@inject_user()
@scoped(
["auth:user", "auth:apitoken_user"], False
) # user or user-level api token are ok
async def ws_responses(request, ws, user, tid):
if not await valid_origin_header(request):
return
if not user["admin"]:
await ws.send(js.dumps({"status": "error", "error": "must be an admin"}))
return
try:
async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
async with pool.acquire() as conn:
async with conn.cursor() as cur:
await cur.execute('LISTEN "newresponse";')
tquery = await db_model.task_query()
task = await db_objects.get(tquery, id=tid)
if (
task.callback.operation.name not in user["operations"]
and task.callback.operation.name not in user["admin_operations"]
):
await ws.send(
js.dumps(
{
"error": "task not in one of your operations",
"status": "error",
}
)
)
return
query = await db_model.response_query()
responses_with_tasks = await db_objects.execute(
query.where(db_model.Response.task == task).order_by(
db_model.Response.id
)
)
for resp in responses_with_tasks:
await ws.send(js.dumps(resp.to_json()))
if task.completed:
return
await ws.send("")
# now pull off any new responses we got queued up while processing old responses
while True:
try:
msg = conn.notifies.get_nowait()
id = msg.payload
query = await db_model.response_query()
rsp = await db_objects.get(query, id=id)
if rsp.task == task:
await ws.send(js.dumps(rsp.to_json()))
if rsp.task.completed:
return
# print(msg.payload)
except asyncio.QueueEmpty as e:
await asyncio.sleep(2)
await ws.send(
""
) # this is our test to see if the client is still there
continue
except Exception as e:
print(e)
continue
finally:
# print("closed /ws/task_updates")
pool.close()
# --------------------- CALLBACKS ------------------
@mythic.websocket("/ws/callbacks/current_operation")
@inject_user()
@scoped(
["auth:user", "auth:apitoken_user"], False
) # user or user-level api token are ok
async def ws_callbacks_current_operation(request, ws, user):
if not await valid_origin_header(request):
return
try:
async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
async with pool.acquire() as conn:
async with conn.cursor() as cur:
await cur.execute('LISTEN "newcallback";')
if user["current_operation"] != "":
# before we start getting new things, update with all of the old data
query = await db_model.operation_query()
operation = await db_objects.get(
query, name=user["current_operation"]
)
query = await db_model.callback_query()
callbackc2profilequery = (
await db_model.callbackc2profiles_query()
)
c2profileparametersinstancequery = (
await db_model.c2profileparametersinstance_query()
)
callbacks_with_operators = await db_objects.execute(
query.where(
(Callback.operation == operation)
& (Callback.active == True)
).order_by(Callback.id)
)
for cb in callbacks_with_operators:
cb_json = cb.to_json()
callbackc2profiles = await db_objects.execute(
callbackc2profilequery.where(
db_model.CallbackC2Profiles.callback == cb
)
)
c2_profiles_info = []
for c2p in callbackc2profiles:
profile_info = {
"name": c2p.c2_profile.name,
"is_p2p": c2p.c2_profile.is_p2p,
"parameters": {},
}
c2_profile_params = await db_objects.execute(
c2profileparametersinstancequery.where(
(
db_model.C2ProfileParametersInstance.callback
== cb
)
& (
db_model.C2ProfileParametersInstance.c2_profile
== c2p.c2_profile
)
)
)
for param in c2_profile_params:
profile_info["parameters"][
param.c2_profile_parameters.name
] = param.value
c2_profiles_info.append(profile_info)
cb_json["supported_profiles"] = c2_profiles_info
await ws.send(js.dumps(cb_json))
await ws.send("")
# now pull off any new callbacks we got queued up while processing the old data
while True:
# msg = await conn.notifies.get()
try:
msg = conn.notifies.get_nowait()
id = msg.payload
cb = await db_objects.get(
query, id=id, operation=operation
)
cb_json = cb.to_json()
callbackc2profiles = await db_objects.execute(
callbackc2profilequery.where(
db_model.CallbackC2Profiles.callback == cb
)
)
c2_profiles_info = []
for c2p in callbackc2profiles:
profile_info = {
"name": c2p.c2_profile.name,
"is_p2p": c2p.c2_profile.is_p2p,
"parameters": {},
}
c2_profile_params = await db_objects.execute(
c2profileparametersinstancequery.where(
(
db_model.C2ProfileParametersInstance.callback
== cb
)
& (
db_model.C2ProfileParametersInstance.c2_profile
== c2p.c2_profile
)
)
)
for param in c2_profile_params:
profile_info["parameters"][
param.c2_profile_parameters.name
] = param.value
c2_profiles_info.append(profile_info)
cb_json["supported_profiles"] = c2_profiles_info
await ws.send(js.dumps(cb_json))
except asyncio.QueueEmpty as e:
await asyncio.sleep(0.5)
await ws.send(
""
) # this is our test to see if the client is still there
continue
except Exception as e:
print(
"exception in callbacks/current_operation: {}".format(
str(e)
)
)
continue
else:
await ws.send("no_operation")
while True:
await ws.send("")
await asyncio.sleep(0.5)
finally:
pool.close()
@mythic.websocket("/ws/new_callbacks/current_operation")
@inject_user()
@scoped(
["auth:user", "auth:apitoken_user"], False
) # user or user-level api token are ok
async def ws_callbacks_current_operation(request, ws, user):
if not await valid_origin_header(request):
return
try:
async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
async with pool.acquire() as conn:
async with conn.cursor() as cur:
await cur.execute('LISTEN "newcallback";')
if user["current_operation"] != "":
# before we start getting new things, update with all of the old data
query = await db_model.operation_query()
operation = await db_objects.get(
query, name=user["current_operation"]
)
query = await db_model.callback_query()
callbackc2profilequery = (
await db_model.callbackc2profiles_query()
)
c2profileparametersinstancequery = (
await db_model.c2profileparametersinstance_query()
)
callbacks_with_operators = await db_objects.execute(
query.where(
(Callback.operation == operation)
& (Callback.active == True)
).order_by(Callback.id)
)
await ws.send("")
# now pull off any new callbacks we got queued up while processing the old data
while True:
# msg = await conn.notifies.get()
try:
msg = conn.notifies.get_nowait()
id = msg.payload
cb = await db_objects.get(
query, id=id, operation=operation
)
cb_json = cb.to_json()
callbackc2profiles = await db_objects.execute(
callbackc2profilequery.where(
db_model.CallbackC2Profiles.callback == cb
)
)
c2_profiles_info = []
for c2p in callbackc2profiles:
profile_info = {
"name": c2p.c2_profile.name,
"is_p2p": c2p.c2_profile.is_p2p,
"parameters": {},
}
c2_profile_params = await db_objects.execute(
c2profileparametersinstancequery.where(
(
db_model.C2ProfileParametersInstance.callback
== cb
)
& (
db_model.C2ProfileParametersInstance.c2_profile
== c2p.c2_profile
)
)
)
for param in c2_profile_params:
profile_info["parameters"][
param.c2_profile_parameters.name
] = param.value
c2_profiles_info.append(profile_info)
cb_json["supported_profiles"] = c2_profiles_info
await ws.send(js.dumps(cb_json))
except asyncio.QueueEmpty as e:
await asyncio.sleep(0.5)
await ws.send(
""
) # this is our test to see if the client is still there
continue
except Exception as e:
print(
"exception in callbacks/current_operation: {}".format(
str(e)
)
)
continue
else:
await ws.send("no_operation")
while True:
await ws.send("")
await asyncio.sleep(0.5)
finally:
pool.close()
@mythic.websocket("/ws/unified_callback/<cid:int>")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_unified_single_callback_current_operation(request, ws, user, cid):
    """Stream every event related to one callback (id `cid`) in the user's current operation.

    Subscribes to the postgres NOTIFY channels for callback, task, response,
    filemeta, and loadedcommands changes, filters each notification down to
    the requested callback, and sends the affected object as JSON with a
    "channel" key naming the event. Sends "" every 0.5s as a keep-alive, and
    "no_operation" (followed by keep-alives only) when the user has no
    current operation set.
    """
    if not await valid_origin_header(request):
        return
    try:
        # print("opened socket on webserver for " + str(cid))
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    # subscribe to every event type that can affect this callback's view
                    await cur.execute('LISTEN "updatedcallback";')
                    await cur.execute('LISTEN "newtask";')
                    await cur.execute('LISTEN "updatedtask";')
                    await cur.execute('LISTEN "newresponse";')
                    await cur.execute('LISTEN "newfilemeta";')
                    await cur.execute('LISTEN "updatedfilemeta";')
                    await cur.execute('LISTEN "newloadedcommands";')
                    await cur.execute('LISTEN "updatedloadedcommands";')
                    await cur.execute('LISTEN "deletedloadedcommands";')
                    if user["current_operation"] != "":
                        # before we start getting new things, update with all of the old data
                        query = await db_model.operation_query()
                        operation = await db_objects.get(
                            query, name=user["current_operation"]
                        )
                        callbackquery = await db_model.callback_query()
                        callback = await db_objects.get(
                            callbackquery, operation=operation, id=cid
                        )
                        taskquery = await db_model.task_query()
                        filemetaquery = await db_model.filemeta_query()
                        responsequery = await db_model.response_query()
                        loadedcommandsquery = await db_model.loadedcommands_query()
                        # replay the commands currently loaded into this callback
                        cur_loaded = await db_objects.execute(loadedcommandsquery.where(
                            (db_model.LoadedCommands.callback == callback)
                        ))
                        for c in cur_loaded:
                            await ws.send(js.dumps({**c.to_json(), "channel": "newloadedcommand"}))
                        await ws.send("")
                        # now pull off any new callbacks we got queued up while processing the old data
                        while True:
                            # msg = await conn.notifies.get()
                            try:
                                msg = conn.notifies.get_nowait()
                                id = msg.payload
                                # only get updates for the callback we specified
                                if msg.channel == "updatedcallback":
                                    if str(id) != str(callback.id):
                                        continue
                                    obj = await db_objects.get(
                                        callbackquery, id=id, operation=operation
                                    )
                                    obj_json = obj.to_json()
                                elif "task" in msg.channel:
                                    obj = await db_objects.get(
                                        taskquery, id=id, callback=callback
                                    )
                                    obj_json = obj.to_json()
                                elif "filemeta" in msg.channel:
                                    obj = await db_objects.get(
                                        filemetaquery, id=id, operation=operation
                                    )
                                    obj_json = obj.to_json()
                                    # tie the file back to a callback when it came from tasking
                                    if obj.task is not None:
                                        obj_json["callback_id"] = obj.task.callback.id
                                    else:
                                        obj_json["callback_id"] = 0
                                elif "loadedcommand" in msg.channel:
                                    if "deleted" in msg.channel:
                                        # the deleted row is gone, so the NOTIFY payload itself
                                        # carries the removed row's JSON
                                        obj_json = js.loads(msg.payload)
                                        obj_json["callback"] = obj_json["callback_id"]
                                        obj_json["channel"] = "deletedloadedcommand"
                                    else:
                                        obj = await db_objects.get(loadedcommandsquery,
                                                                   id=id, callback=callback)
                                        obj_json = obj.to_json()
                                else:
                                    # remaining subscribed channel is newresponse; filter to our callback
                                    obj = await db_objects.get(responsequery, id=id)
                                    if obj.task.callback.id != callback.id:
                                        continue
                                    obj_json = obj.to_json()
                                # print(obj)
                                obj_json["channel"] = msg.channel
                                await ws.send(js.dumps(obj_json))
                            except asyncio.QueueEmpty as e:
                                await asyncio.sleep(0.5)
                                await ws.send(
                                    ""
                                )  # this is our test to see if the client is still there
                                continue
                            except Exception as e:
                                # NOTE(review): presumably the fetch failed because the notified row
                                # doesn't belong to this callback/operation; such messages are skipped
                                # quietly — confirm no real errors hide behind this check
                                if "Notify(" in str(msg):
                                    continue
                                else:
                                    print(
                                        str(sys.exc_info()[-1].tb_lineno)
                                        + str(e)
                                        + " "
                                        + str(msg)
                                    )
                                    continue
                    else:
                        await ws.send("no_operation")
                        while True:
                            await ws.send("")
                            await asyncio.sleep(0.5)
    finally:
        # print("closed socket on webserver for " + str(cid))
        pool.close()
# notifications for updated callbacks
@mythic.websocket("/ws/updatedcallbacks/current_operation")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_callbacks_updated_current_operation(request, ws, user):
    """Stream callback updates and new callback/C2-profile mappings for the current operation.

    Each changed object is sent as JSON tagged with a "channel" key of either
    "updatedcallback" or "newcallbackc2profiles". Sends "" every 0.5s as a
    keep-alive and "no_operation" when the user has no current operation.
    """
    if not await valid_origin_header(request):
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "updatedcallback";')
                    await cur.execute('LISTEN "newcallbackc2profiles";')
                    if user["current_operation"] != "":
                        # just want updates, not anything else
                        query = await db_model.operation_query()
                        operation = await db_objects.get(
                            query, name=user["current_operation"]
                        )
                        updatedcallbackquery = await db_model.callback_query()
                        newcallbackc2profilequery = (
                            await db_model.callbackc2profiles_query()
                        )
                        while True:
                            # msg = await conn.notifies.get()
                            try:
                                msg = conn.notifies.get_nowait()
                                # print("got an update for a callback")
                                id = msg.payload
                                if "profiles" in msg.channel:
                                    # for this channel the payload is a CallbackC2Profiles row id
                                    profile = await db_objects.get(
                                        newcallbackc2profilequery.where(
                                            (db_model.CallbackC2Profiles.id == id)
                                            & (db_model.Callback.operation == operation)
                                        )
                                    )
                                    obj = profile.to_json()
                                    obj["channel"] = "newcallbackc2profiles"
                                else:
                                    callback = await db_objects.get(
                                        updatedcallbackquery, id=id, operation=operation
                                    )
                                    obj = callback.to_json()
                                    obj["channel"] = "updatedcallback"
                                await ws.send(js.dumps(obj))
                            except asyncio.QueueEmpty as e:
                                await asyncio.sleep(0.5)
                                await ws.send(
                                    ""
                                )  # this is our test to see if the client is still there
                                continue
                            except Exception as e:
                                print(e)
                                continue
                    else:
                        await ws.send("no_operation")
                        while True:
                            await ws.send("")
                            await asyncio.sleep(0.5)
    finally:
        pool.close()
# --------------- PAYLOADS -----------------------
# notifications for new payloads
@mythic.websocket("/ws/payloads/current_operation")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_payloads_current_operation(request, ws, user):
    """Replay all payloads in the user's current operation, then stream new/updated payloads.

    Each payload is sent as its to_json() dict. Sends "" as a keep-alive
    (every 1s while idle, 0.5s in the no-operation branch) and "no_operation"
    when the user has no current operation.
    """
    if not await valid_origin_header(request):
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "newpayload";')
                    await cur.execute('LISTEN "updatedpayload";')
                    # BEFORE WE START GETTING NEW THINGS, UPDATE WITH ALL OF THE OLD DATA
                    if user["current_operation"] != "":
                        query = await db_model.operation_query()
                        operation = await db_objects.get(
                            query, name=user["current_operation"]
                        )
                        query = await db_model.payload_query()
                        payloads = await db_objects.execute(
                            query.where(
                                (Payload.operation == operation)
                                #& (Payload.deleted == False)
                                #& (Payload.auto_generated == False)
                            ).order_by(Payload.id)
                        )
                        for p in payloads:
                            await ws.send(js.dumps(p.to_json()))
                        await ws.send("")
                        # now pull off any new payloads we got queued up while processing old data
                        while True:
                            try:
                                msg = conn.notifies.get_nowait()
                                id = msg.payload
                                query = await db_model.payload_query()
                                p = await db_objects.get(query, id=id)
                                # notifications are global; only forward our operation's payloads
                                if p.operation == operation:
                                    await ws.send(js.dumps(p.to_json()))
                            except asyncio.QueueEmpty as e:
                                await asyncio.sleep(1)
                                await ws.send(
                                    ""
                                )  # this is our test to see if the client is still there
                                continue
                            except Exception as e:
                                print(
                                    "error in websocket for current operation payloads:"
                                    + str(e)
                                )
                                print("Most likely payload was deleted")
                                continue
                    else:
                        await ws.send("no_operation")
                        while True:
                            await ws.send("")
                            await asyncio.sleep(0.5)
    finally:
        pool.close()
@mythic.websocket("/ws/payloads/info/current_operation")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_payloads_info_current_operation(request, ws, user):
    """Stream full payload configurations for the user's current operation.

    Like ws_payloads_current_operation, but each payload is expanded via
    get_payload_config() (with the transient "status" key stripped) instead
    of the plain to_json() view. Sends "" as a keep-alive and "no_operation"
    when the user has no current operation.

    Renamed from ws_payloads_current_operation: that name collided with the
    /ws/payloads/current_operation handler defined above, shadowing it at
    module level and registering two Sanic routes under one handler name.
    """
    if not await valid_origin_header(request):
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "newpayload";')
                    await cur.execute('LISTEN "updatedpayload";')
                    # imported at first use (placement kept from the original code)
                    from app.api.payloads_api import get_payload_config
                    # BEFORE WE START GETTING NEW THINGS, UPDATE WITH ALL OF THE OLD DATA
                    if user["current_operation"] != "":
                        query = await db_model.operation_query()
                        operation = await db_objects.get(
                            query, name=user["current_operation"]
                        )
                        query = await db_model.payload_query()
                        payloads = await db_objects.execute(
                            query.where((Payload.operation == operation)).order_by(
                                Payload.id
                            )
                        )
                        for p in payloads:
                            pinfo = await get_payload_config(p)
                            pinfo.pop("status", None)
                            await ws.send(js.dumps(pinfo))
                        await ws.send("")
                        # now pull off any new payloads we got queued up while processing old data
                        while True:
                            try:
                                msg = conn.notifies.get_nowait()
                                id = msg.payload
                                query = await db_model.payload_query()
                                p = await db_objects.get(query, id=id)
                                # notifications are global; only forward our operation's payloads
                                if p.operation == operation:
                                    pinfo = await get_payload_config(p)
                                    pinfo.pop("status", None)
                                    await ws.send(js.dumps(pinfo))
                            except asyncio.QueueEmpty as e:
                                await asyncio.sleep(1)
                                await ws.send(
                                    ""
                                )  # this is our test to see if the client is still there
                                continue
                            except Exception as e:
                                print(
                                    "error in websocket for current operation payloads:"
                                    + str(e)
                                )
                                print("Most likely payload was deleted")
                                continue
                    else:
                        await ws.send("no_operation")
                        while True:
                            await ws.send("")
                            await asyncio.sleep(0.5)
    finally:
        pool.close()
@mythic.websocket("/ws/payloads/<puuid:uuid>")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_updates_for_payload(request, ws, user, puuid):
    """Stream the current state and subsequent updates of a single payload.

    Sends the payload identified by `puuid` once (only if it belongs to the
    user's current operation; otherwise the socket is closed), then re-sends
    its JSON on every "updatedpayload" notification matching its id. Sends ""
    every 2s as a keep-alive.

    Fix: the operation/payload lookups run before the pool exists, so a
    failed lookup used to make `finally: pool.close()` raise NameError and
    mask the original error — the finally clause is now guarded.
    """
    if not await valid_origin_header(request):
        return
    pool = None  # the lookups below can raise before the pool is created
    try:
        query = await db_model.operation_query()
        operation = await db_objects.get(query, name=user["current_operation"])
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "updatedpayload";')
                    # before we start getting new things, update with all of the old data
                    query = await db_model.payload_query()
                    payload = await db_objects.get(query, uuid=puuid)
                    if payload.operation == operation:
                        await ws.send(js.dumps(payload.to_json()))
                    else:
                        # payload belongs to a different operation; don't leak it
                        return
                    # now pull off any updates we got queued up while processing the old data
                    while True:
                        try:
                            msg = conn.notifies.get_nowait()
                            id = msg.payload
                            updated = await db_objects.get(query, id=id)
                            # notifications cover every payload; only forward ours
                            if updated.id == payload.id:
                                await ws.send(js.dumps(updated.to_json()))
                        except asyncio.QueueEmpty as e:
                            await asyncio.sleep(2)
                            await ws.send(
                                ""
                            )  # this is our test to see if the client is still there
                            continue
                        except Exception as e:
                            print(e)
                            continue
    finally:
        # print("closed /ws/tasks")
        if pool is not None:
            pool.close()
# --------------- C2PROFILES -----------------------
# notifications for new c2profiles
@mythic.websocket("/ws/c2profiles")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_c2profile_current_operation(request, ws, user):
    """Replay all non-deleted C2 profiles as JSON, then stream profile create/update events."""
    if not await valid_origin_header(request):
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "newc2profile";')
                    await cur.execute('LISTEN "updatedc2profile";')
                    # send the current set of profiles before streaming live events
                    query = await db_model.c2profile_query()
                    existing = await db_objects.execute(
                        query.where(C2Profile.deleted == False)
                    )
                    for profile in existing:
                        await ws.send(js.dumps(profile.to_json()))
                    await ws.send("")
                    # drain notifications that queued up while we replayed
                    query = await db_model.c2profile_query()
                    while True:
                        try:
                            notification = conn.notifies.get_nowait()
                            profile_id = notification.payload
                            profile = await db_objects.get(query, id=profile_id)
                            await ws.send(js.dumps(profile.to_json()))
                        except asyncio.QueueEmpty:
                            # idle: sleep briefly, then ping the client with an empty frame
                            await asyncio.sleep(1)
                            await ws.send("")
                            continue
                        except Exception as e:
                            print(e)
                            continue
    finally:
        pool.close()
@mythic.websocket("/ws/payloadtypec2profile")
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_payloadtypec2profile(request, ws):
    """Replay every payload-type/C2-profile mapping, then stream newly created mappings."""
    if not await valid_origin_header(request):
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "newpayloadtypec2profile";')
                    # send the mappings that already exist before streaming events
                    query = await db_model.payloadtypec2profile_query()
                    existing = await db_objects.execute(query)
                    for mapping in existing:
                        await ws.send(js.dumps(mapping.to_json()))
                    await ws.send("")
                    # drain notifications that queued up while we replayed
                    while True:
                        try:
                            notification = conn.notifies.get_nowait()
                            mapping_id = notification.payload
                            query = await db_model.payloadtypec2profile_query()
                            mapping = await db_objects.get(query, id=mapping_id)
                            await ws.send(js.dumps(mapping.to_json()))
                        except asyncio.QueueEmpty:
                            # idle: sleep, then ping the client with an empty frame
                            await asyncio.sleep(2)
                            await ws.send("")
                            continue
                        except Exception as e:
                            print(e)
                            continue
    finally:
        pool.close()
# ---------------- OPERATORS --------------------------
# notifications for new operators
@mythic.websocket("/ws/operators")
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_operators(request, ws):
    """Replay all non-deleted operators as JSON, then stream each newly created operator."""
    if not await valid_origin_header(request):
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "newoperator";')
                    # send the operators that already exist before streaming events
                    query = await db_model.operator_query()
                    existing = await db_objects.execute(
                        query.where(db_model.Operator.deleted == False)
                    )
                    for operator in existing:
                        await ws.send(js.dumps(operator.to_json()))
                    await ws.send("")
                    # drain notifications that queued up while we replayed
                    while True:
                        try:
                            notification = conn.notifies.get_nowait()
                            operator_id = notification.payload
                            query = await db_model.operator_query()
                            operator = await db_objects.get(query, id=operator_id)
                            await ws.send(js.dumps(operator.to_json()))
                        except asyncio.QueueEmpty:
                            # idle: sleep briefly, then ping the client with an empty frame
                            await asyncio.sleep(2)
                            await ws.send("")
                            continue
                        except Exception as e:
                            print(e)
                            continue
    finally:
        pool.close()
# notifications for updated operators
@mythic.websocket("/ws/updatedoperators")
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_updated_operators(request, ws):
    """Push the JSON of any non-deleted operator whenever an "updatedoperator" event fires."""
    if not await valid_origin_header(request):
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "updatedoperator";')
                    # stream updates only; there is no initial replay for this feed
                    while True:
                        # msg = await conn.notifies.get()
                        try:
                            notification = conn.notifies.get_nowait()
                            operator_id = notification.payload
                            query = await db_model.operator_query()
                            operator = await db_objects.get(
                                query, id=operator_id, deleted=False
                            )
                            await ws.send(js.dumps(operator.to_json()))
                        except asyncio.QueueEmpty:
                            # idle: sleep, then ping the client with an empty frame
                            await asyncio.sleep(2)
                            await ws.send("")
                            continue
                        except Exception as e:
                            print(e)
                            continue
    finally:
        pool.close()
# ---------------- PAYLOADTYPES --------------------------
# notifications for new payloadtypes
@mythic.websocket("/ws/payloadtypes")
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_payloadtypes(request, ws):
    """Replay all non-deleted payload types (with their wrapped payload type mappings),
    then stream payload-type change events.

    A "deleted*" notification is forwarded as a bare "" (the row no longer
    exists); other notifications send the row's JSON. Sends "" every 2s as a
    keep-alive.
    """
    if not await valid_origin_header(request):
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "newpayloadtype";')
                    await cur.execute('LISTEN "updatedpayloadtype";')
                    await cur.execute('LISTEN "newwrappedpayloadtypes";')
                    await cur.execute('LISTEN "updatedwrappedpayloadtypes";')
                    await cur.execute('LISTEN "deletedwrappedpayloadtypes";')
                    # BEFORE WE START GETTING NEW THINGS, UPDATE WITH ALL OF THE OLD DATA
                    query = await db_model.payloadtype_query()
                    wrappedpayloadtypesquery = (
                        await db_model.wrappedpayloadtypes_query()
                    )
                    # NOTE(review): build_params_query is never used in this handler
                    build_params_query = await db_model.buildparameter_query()
                    payloadtypes = await db_objects.execute(
                        query.where(db_model.PayloadType.deleted == False).order_by(
                            PayloadType.id
                        )
                    )
                    for p in payloadtypes:
                        wrappedpayloadtypes = await db_objects.execute(
                            wrappedpayloadtypesquery.where(
                                db_model.WrappedPayloadTypes.wrapper == p
                            )
                        )
                        await ws.send(
                            js.dumps(
                                {
                                    **p.to_json(),
                                    "wrapped_payload_types": [
                                        w.to_json() for w in wrappedpayloadtypes
                                    ],
                                }
                            )
                        )
                    await ws.send("")
                    # now pull off any new payloads we got queued up while processing old data
                    query = await db_model.payloadtype_query()
                    while True:
                        try:
                            msg = conn.notifies.get_nowait()
                            id = msg.payload
                            if "deleted" in msg.channel:
                                await ws.send("")
                            else:
                                # NOTE(review): wrappedpayloadtypes notifications carry a
                                # WrappedPayloadTypes row id but are looked up against the
                                # payloadtype query here — confirm the ids line up
                                p = await db_objects.get(query, id=id, deleted=False)
                                await ws.send(js.dumps(p.to_json()))
                        except asyncio.QueueEmpty as e:
                            await asyncio.sleep(2)
                            await ws.send(
                                ""
                            )  # this is our test to see if the client is still there
                            continue
                        except Exception as e:
                            print(e)
                            continue
    finally:
        pool.close()
# ---------------- COMMANDS --------------------------
# notifications for new commands
@mythic.websocket("/ws/all_command_info")
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_commands(request, ws):
    """Stream command and command-parameter change events (no initial replay).

    Each event is sent as the affected command's JSON merged with a "notify"
    key naming the channel; deleted-parameter events also merge the removed
    row's fields from the NOTIFY payload. Sends "" every 1s as a keep-alive.
    """
    if not await valid_origin_header(request):
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "newcommandparameters";')
                    await cur.execute('LISTEN "updatedcommandparameters";')
                    await cur.execute('LISTEN "deletedcommandparameters";')
                    await cur.execute('LISTEN "newcommand";')
                    await cur.execute('LISTEN "updatedcommand";')
                    # now pull off any new payloads we got queued up while processing old data
                    while True:
                        try:
                            msg = conn.notifies.get_nowait()
                            id = msg.payload
                            msg_dict = {}
                            if (
                                "parameters" in msg.channel
                                and "deleted" not in msg.channel
                            ):
                                query = await db_model.commandparameters_query()
                                p = await db_objects.get(query, id=id)
                            elif "deleted" not in msg.channel:
                                query = await db_model.command_query()
                                p = await db_objects.get(query, id=id)
                            elif "deleted" in msg.channel:
                                # print(msg)
                                # deleted events put the removed row's JSON (incl.
                                # command_id) in the NOTIFY payload itself
                                query = await db_model.command_query()
                                p = await db_objects.get(
                                    query, id=js.loads(id)["command_id"]
                                )
                                msg_dict = {**js.loads(id)}
                            await ws.send(
                                js.dumps(
                                    {**p.to_json(), **msg_dict, "notify": msg.channel}
                                )
                            )
                        except asyncio.QueueEmpty as e:
                            await asyncio.sleep(1)
                            await ws.send(
                                ""
                            )  # this is our test to see if the client is still there
                            continue
                        except Exception as e:
                            print(e)
                            continue
    finally:
        pool.close()
# basic info of just new commands for the payload types page
@mythic.websocket("/ws/commands")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_basic_commands(request, ws, user):
    """Replay basic info for every non-deleted command, then push new commands as they register.

    Feeds the payload types page. Closes immediately when the requester has no
    current operation; otherwise sends "" every 2s as a keep-alive.

    Renamed from ws_commands: that name collided with the /ws/all_command_info
    handler defined earlier in this module, shadowing it at module level and
    registering two Sanic routes under one handler name.
    """
    if not await valid_origin_header(request):
        return
    if user["current_operation"] == "":
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "newcommand";')
                    # BEFORE WE START GETTING NEW THINGS, UPDATE WITH ALL OF THE OLD DATA
                    query = await db_model.command_query()
                    commands = await db_objects.execute(
                        query.where(db_model.Command.deleted == False)
                    )
                    for c in commands:
                        await ws.send(js.dumps(c.to_json()))
                    await ws.send("")
                    # now pull off any new commands we got queued up while processing old data
                    while True:
                        try:
                            msg = conn.notifies.get_nowait()
                            id = msg.payload
                            p = await db_objects.get(query, id=id, deleted=False)
                            await ws.send(js.dumps(p.to_json()))
                        except asyncio.QueueEmpty as e:
                            await asyncio.sleep(2)
                            await ws.send(
                                ""
                            )  # this is our test to see if the client is still there
                            continue
                        except Exception as e:
                            print(e)
                            continue
    finally:
        pool.close()
# ------------- FILEMETA ---------------------------
# notifications for new screenshots
@mythic.websocket("/ws/screenshots")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_screenshots(request, ws, user):
    """Replay all screenshots in the current operation, then stream new ones.

    Each message merges the originating task's callback JSON with the file's
    JSON plus "callback_id" and the task's "comment". Sends "" every 2s as a
    keep-alive and "no_operation" when the user has no current operation.
    """
    if not await valid_origin_header(request):
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "newfilemeta";')
                    # BEFORE WE START GETTING NEW THINGS, UPDATE WITH ALL OF THE OLD DATA
                    if user["current_operation"] != "":
                        query = await db_model.operation_query()
                        operation = await db_objects.get(
                            query, name=user["current_operation"]
                        )
                        query = await db_model.filemeta_query()
                        files = await db_objects.execute(
                            query.where(
                                (FileMeta.operation == operation)
                                & (FileMeta.is_screenshot == True)
                            ).order_by(FileMeta.id)
                        )
                        for f in files:
                            query = await db_model.task_query()
                            task = await db_objects.get(query, id=f.task)
                            await ws.send(
                                js.dumps(
                                    {
                                        **task.callback.to_json(),
                                        **f.to_json(),
                                        "callback_id": task.callback.id,
                                        "comment": task.comment,
                                    }
                                )
                            )
                        await ws.send("")
                        # now pull off any new payloads we got queued up while processing old data
                        while True:
                            try:
                                msg = conn.notifies.get_nowait()
                                id = msg.payload
                                query = await db_model.filemeta_query()
                                f = await db_objects.get(
                                    query,
                                    id=id,
                                    operation=operation,
                                    is_screenshot=True,
                                )
                                query = await db_model.task_query()
                                task = await db_objects.get(query, id=f.task)
                                await ws.send(
                                    js.dumps(
                                        {
                                            **task.callback.to_json(),
                                            **f.to_json(),
                                            "callback_id": task.callback.id,
                                            "comment": task.comment,
                                        }
                                    )
                                )
                            except asyncio.QueueEmpty as e:
                                await asyncio.sleep(2)
                                await ws.send(
                                    ""
                                )  # this is our test to see if the client is still there
                                continue
                            except Exception as e:
                                print(e)
                                continue
                    else:
                        await ws.send("no_operation")
                        while True:
                            await ws.send("")
                            await asyncio.sleep(0.5)
    finally:
        pool.close()
# notifications for updated screenshots
@mythic.websocket("/ws/updated_screenshots")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_updated_screenshots(request, ws, user):
    """Stream updates to screenshots in the current operation (no initial replay).

    Each message merges the task's callback JSON with the file's JSON plus
    "callback_id" and the task's "comment". Sends "" every 2s as a keep-alive
    and "no_operation" when the user has no current operation.
    """
    if not await valid_origin_header(request):
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "updatedfilemeta";')
                    # BEFORE WE START GETTING NEW THINGS, UPDATE WITH ALL OF THE OLD DATA
                    if user["current_operation"] != "":
                        query = await db_model.operation_query()
                        operation = await db_objects.get(
                            query, name=user["current_operation"]
                        )
                        while True:
                            try:
                                msg = conn.notifies.get_nowait()
                                id = msg.payload
                                query = await db_model.filemeta_query()
                                f = await db_objects.get(
                                    query,
                                    id=id,
                                    is_screenshot=True,
                                    operation=operation,
                                )
                                query = await db_model.task_query()
                                task = await db_objects.get(query, id=f.task)
                                await ws.send(
                                    js.dumps(
                                        {
                                            **task.callback.to_json(),
                                            **f.to_json(),
                                            "callback_id": task.callback.id,
                                            "comment": task.comment,
                                        }
                                    )
                                )
                            except asyncio.QueueEmpty as e:
                                await asyncio.sleep(2)
                                await ws.send(
                                    ""
                                )  # this is our test to see if the client is still there
                                continue
                            except Exception as e:
                                print(e)
                                continue
                    else:
                        await ws.send("no_operation")
                        while True:
                            await ws.send("")
                            await asyncio.sleep(0.5)
    finally:
        pool.close()
# notifications for new files in the current operation
@mythic.websocket("/ws/files/current_operation")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_files_current_operation(request, ws, user):
    """Replay non-screenshot, non-payload, non-deleted files in the current operation,
    then stream new/updated files.

    Uploads are sent with an "upload" key (the tasking params), downloads with
    a "params" key; both include the task comment and the callback's host.
    Sends "" every 1s as a keep-alive and "no_operation" when the user has no
    current operation.
    """
    if not await valid_origin_header(request):
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "newfilemeta";')
                    await cur.execute('LISTEN "updatedfilemeta";')
                    # BEFORE WE START GETTING NEW THINGS, UPDATE WITH ALL OF THE OLD DATA
                    if user["current_operation"] != "":
                        query = await db_model.operation_query()
                        operation = await db_objects.get(
                            query, name=user["current_operation"]
                        )
                        query = await db_model.filemeta_query()
                        files = await db_objects.execute(
                            query.where(
                                (FileMeta.operation == operation)
                                & (FileMeta.is_screenshot == False)
                                & (FileMeta.is_payload == False)
                                & (FileMeta.deleted == False)
                            ).order_by(FileMeta.id)
                        )
                        for f in files:
                            if not f.is_download_from_agent:
                                # this means it's an upload, so supply additional information as well
                                if f.task is not None:
                                    query = await db_model.callback_query()
                                    callback = await db_objects.get(
                                        query, id=f.task.callback
                                    )
                                    await ws.send(
                                        js.dumps(
                                            {
                                                **f.to_json(),
                                                "comment": f.task.comment,
                                                "host": callback.host,
                                                "upload": f.task.params,
                                            }
                                        )
                                    )
                            else:
                                # this is a file download, so it's straight forward
                                query = await db_model.callback_query()
                                callback = await db_objects.get(
                                    query, id=f.task.callback
                                )
                                await ws.send(
                                    js.dumps(
                                        {
                                            **f.to_json(),
                                            "comment": f.task.comment,
                                            "host": callback.host,
                                            "params": f.task.params,
                                        }
                                    )
                                )
                        await ws.send("")
                        # now pull off any new payloads we got queued up while processing old data
                        while True:
                            try:
                                msg = conn.notifies.get_nowait()
                                id = msg.payload
                                query = await db_model.filemeta_query()
                                f = await db_objects.get(
                                    query,
                                    id=id,
                                    operation=operation,
                                    is_screenshot=False,
                                    is_payload=False,
                                )
                                try:
                                    if not f.is_download_from_agent:
                                        # this means it's an upload, so supply additional information as well
                                        # could be upload via task or manual
                                        if (
                                            f.task is not None
                                        ):  # this is an upload via agent tasking
                                            query = await db_model.task_query()
                                            task = await db_objects.get(
                                                query, id=f.task
                                            )
                                            await ws.send(
                                                js.dumps(
                                                    {
                                                        **f.to_json(),
                                                        "comment": f.task.comment,
                                                        "host": task.callback.host,
                                                        "upload": task.params,
                                                    }
                                                )
                                            )
                                    else:
                                        # this is a file download, so it's straight forward
                                        query = await db_model.task_query()
                                        task = await db_objects.get(query, id=f.task)
                                        await ws.send(
                                            js.dumps(
                                                {
                                                    **f.to_json(),
                                                    "comment": f.task.comment,
                                                    "host": task.callback.host,
                                                    "params": task.params,
                                                }
                                            )
                                        )
                                except Exception as e:
                                    pass  # we got a file that's just not part of our current operation, so move on
                            except asyncio.QueueEmpty as e:
                                await asyncio.sleep(1)
                                await ws.send(
                                    ""
                                )  # this is our test to see if the client is still there
                                continue
                            except Exception as e:
                                print(e)
                                continue
                    else:
                        await ws.send("no_operation")
                        while True:
                            await ws.send("")
                            await asyncio.sleep(0.5)
    finally:
        pool.close()
@mythic.websocket("/ws/files/new/current_operation")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_new_files_current_operation(request, ws, user):
    """Stream new/updated non-screenshot, non-payload files for the current operation
    (no initial replay — events only).

    Same message shape as ws_files_current_operation: uploads carry "upload",
    downloads carry "params", both with the task comment and callback host.
    Sends "" every 1s as a keep-alive and "no_operation" when the user has no
    current operation.
    """
    if not await valid_origin_header(request):
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "newfilemeta";')
                    await cur.execute('LISTEN "updatedfilemeta";')
                    # BEFORE WE START GETTING NEW THINGS, UPDATE WITH ALL OF THE OLD DATA
                    if user["current_operation"] != "":
                        query = await db_model.operation_query()
                        operation = await db_objects.get(
                            query, name=user["current_operation"]
                        )
                        # now pull off any new payloads we got queued up while processing old data
                        while True:
                            try:
                                msg = conn.notifies.get_nowait()
                                id = msg.payload
                                query = await db_model.filemeta_query()
                                f = await db_objects.get(
                                    query,
                                    id=id,
                                    operation=operation,
                                    is_screenshot=False,
                                    is_payload=False,
                                )
                                try:
                                    if not f.is_download_from_agent:
                                        # this means it's an upload, so supply additional information as well
                                        # could be upload via task or manual
                                        if (
                                            f.task is not None
                                        ):  # this is an upload via agent tasking
                                            query = await db_model.task_query()
                                            task = await db_objects.get(
                                                query, id=f.task
                                            )
                                            await ws.send(
                                                js.dumps(
                                                    {
                                                        **f.to_json(),
                                                        "comment": f.task.comment,
                                                        "host": task.callback.host,
                                                        "upload": task.params,
                                                    }
                                                )
                                            )
                                    else:
                                        # this is a file download, so it's straight forward
                                        query = await db_model.task_query()
                                        task = await db_objects.get(query, id=f.task)
                                        await ws.send(
                                            js.dumps(
                                                {
                                                    **f.to_json(),
                                                    "comment": f.task.comment,
                                                    "host": task.callback.host,
                                                    "params": task.params,
                                                }
                                            )
                                        )
                                except Exception as e:
                                    pass  # we got a file that's just not part of our current operation, so move on
                            except asyncio.QueueEmpty as e:
                                await asyncio.sleep(1)
                                await ws.send(
                                    ""
                                )  # this is our test to see if the client is still there
                                continue
                            except Exception as e:
                                print(e)
                                continue
                    else:
                        await ws.send("no_operation")
                        while True:
                            await ws.send("")
                            await asyncio.sleep(0.5)
    finally:
        pool.close()
# notifications for new files in the current operation
@mythic.websocket("/ws/manual_files/current_operation")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_manual_files_current_operation(request, ws, user):
    """Replay manually-hosted files (no associated task) in the current operation,
    then stream new/updated file events.

    Sends "" every 1s as a keep-alive and "no_operation" when the user has no
    current operation.
    """
    if not await valid_origin_header(request):
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "newfilemeta";')
                    await cur.execute('LISTEN "updatedfilemeta";')
                    # BEFORE WE START GETTING NEW THINGS, UPDATE WITH ALL OF THE OLD DATA
                    if user["current_operation"] != "":
                        query = await db_model.operation_query()
                        operation = await db_objects.get(
                            query, name=user["current_operation"]
                        )
                        query = await db_model.filemeta_query()
                        files = await db_objects.execute(
                            query.where(
                                (FileMeta.operation == operation)
                                & (FileMeta.deleted == False)
                                & (FileMeta.is_payload == False)
                                & (FileMeta.task == None)
                            ).order_by(FileMeta.id)
                        )
                        for f in files:
                            if f.task is None:
                                await ws.send(js.dumps({**f.to_json()}))
                        await ws.send("")
                        # now pull off any new payloads we got queued up while processing old data
                        while True:
                            try:
                                msg = conn.notifies.get_nowait()
                                id = msg.payload
                                query = await db_model.filemeta_query()
                                f = await db_objects.get(
                                    query, id=id, operation=operation
                                )
                                # NOTE(review): unlike the replay above, this does not
                                # re-check task == None / deleted / is_payload — confirm
                                # whether task-generated files should be filtered here too
                                await ws.send(js.dumps({**f.to_json()}))
                            except asyncio.QueueEmpty as e:
                                await asyncio.sleep(1)
                                await ws.send(
                                    ""
                                )  # this is our test to see if the client is still there
                                continue
                            except Exception as e:
                                print(e)
                                continue
                    else:
                        await ws.send("no_operation")
                        while True:
                            await ws.send("")
                            await asyncio.sleep(0.5)
    finally:
        pool.close()
# ------------- CREDENTIAL ---------------------------
# notifications for new credentials
@mythic.websocket("/ws/credentials/current_operation")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_credentials_current_operation(request, ws, user):
    """Replay all non-deleted credentials in the current operation, then stream new ones.

    Sends "" every 2s as a keep-alive and "no_operation" when the user has no
    current operation. Credentials outside the operation are silently skipped.
    """
    if not await valid_origin_header(request):
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "newcredential";')
                    # BEFORE WE START GETTING NEW THINGS, UPDATE WITH ALL OF THE OLD DATA
                    if user["current_operation"] != "":
                        query = await db_model.operation_query()
                        operation = await db_objects.get(
                            query, name=user["current_operation"]
                        )
                        query = await db_model.credential_query()
                        creds = await db_objects.execute(
                            query.where(
                                (Credential.operation == operation)
                                & (Credential.deleted == False)
                            ).order_by(db_model.Credential.id)
                        )
                        for c in creds:
                            await ws.send(js.dumps({**c.to_json()}))
                        await ws.send("")
                        # now pull off any new payloads we got queued up while processing old data
                        while True:
                            try:
                                msg = conn.notifies.get_nowait()
                                id = msg.payload
                                try:
                                    c = await db_objects.get(
                                        query, id=id, operation=operation, deleted=False
                                    )
                                    await ws.send(js.dumps({**c.to_json()}))
                                except Exception as e:
                                    pass  # we got a file that's just not part of our current operation, so move on
                            except asyncio.QueueEmpty as e:
                                await asyncio.sleep(2)
                                await ws.send(
                                    ""
                                )  # this is our test to see if the client is still there
                                continue
                            except Exception as e:
                                print(e)
                                continue
                    else:
                        await ws.send("no_operation")
                        while True:
                            await ws.send("")
                            await asyncio.sleep(0.5)
    finally:
        pool.close()
@mythic.websocket("/ws/credentials/new/current_operation")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_credentials_new_current_operation(request, ws, user):
    """Stream only newly created credentials for the current operation (no initial replay).

    Sends "" every 2s as a keep-alive and "no_operation" when the user has no
    current operation. Credentials outside the operation are silently skipped.
    """
    if not await valid_origin_header(request):
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "newcredential";')
                    # BEFORE WE START GETTING NEW THINGS, UPDATE WITH ALL OF THE OLD DATA
                    if user["current_operation"] != "":
                        query = await db_model.operation_query()
                        operation = await db_objects.get(
                            query, name=user["current_operation"]
                        )
                        query = await db_model.credential_query()
                        # now pull off any new payloads we got queued up while processing old data
                        while True:
                            try:
                                msg = conn.notifies.get_nowait()
                                id = msg.payload
                                try:
                                    c = await db_objects.get(
                                        query, id=id, operation=operation, deleted=False
                                    )
                                    await ws.send(js.dumps({**c.to_json()}))
                                except Exception as e:
                                    pass  # we got a file that's just not part of our current operation, so move on
                            except asyncio.QueueEmpty as e:
                                await asyncio.sleep(2)
                                await ws.send(
                                    ""
                                )  # this is our test to see if the client is still there
                                continue
                            except Exception as e:
                                print(e)
                                continue
                    else:
                        await ws.send("no_operation")
                        while True:
                            await ws.send("")
                            await asyncio.sleep(0.5)
    finally:
        pool.close()
# ------------- KEYLOG ---------------------------
# notifications for new keylogs
@mythic.websocket("/ws/keylogs/current_operation")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_keylogs_current_operation(request, ws, user):
    """Stream new keylog entries for the user's current operation.

    No historic burst: it only LISTENs on the postgres "newkeylog" channel
    and forwards entries that belong to the current operation.
    """
    if not await valid_origin_header(request):
        return
    pool = None  # guarded so the finally clause can't raise NameError if create_pool fails
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "newkeylog";')
                    if user["current_operation"] != "":
                        query = await db_model.operation_query()
                        operation = await db_objects.get(
                            query, name=user["current_operation"]
                        )
                        # hoisted out of the notify loop: the query object is loop-invariant
                        keylog_query = await db_model.keylog_query()
                        # now pull off any new keylogs as the database announces them
                        while True:
                            try:
                                msg = conn.notifies.get_nowait()
                                keylog_id = msg.payload  # renamed from `id` to avoid shadowing the builtin
                                try:
                                    c = await db_objects.get(
                                        keylog_query, id=keylog_id, operation=operation
                                    )
                                    await ws.send(js.dumps({**c.to_json()}))
                                except Exception:
                                    pass  # keylog isn't part of our current operation, so move on
                            except asyncio.QueueEmpty:
                                await asyncio.sleep(2)
                                await ws.send(
                                    ""
                                )  # this is our test to see if the client is still there
                                continue
                            except Exception as e:
                                print(e)
                                continue
                    else:
                        await ws.send("no_operation")
                        while True:
                            await ws.send("")
                            await asyncio.sleep(0.5)
    finally:
        if pool is not None:
            pool.close()
# ------ OPERATING COMMAND POPUP INFORMATION --------------------
# ----- INCLUDES CREDENTIALS, PAYLOADS, PAYLOADSONHOST ------------
# notifications for new credentials
@mythic.websocket("/ws/parameter_hints/current_operation")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_parameter_hints_current_operation(request, ws, user):
    """Feed the tasking popup with credentials, payloads, and payloads-on-host.

    On connect, bursts the operation's non-deleted credentials, its
    successfully built non-auto-generated payloads, and its payloads-on-host
    (each message tagged with a "channel" key), then LISTENs on the relevant
    postgres channels and streams new/updated records as they arrive.
    """
    if not await valid_origin_header(request):
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "newcredential";')
                    await cur.execute('LISTEN "updatedcredential";')
                    await cur.execute('LISTEN "newpayload";')
                    await cur.execute('LISTEN "updatedpayload";')
                    await cur.execute('LISTEN "newpayloadonhost";')
                    await cur.execute('LISTEN "updatedpayloadonhost";')
                    # BEFORE WE START GETTING NEW THINGS, UPDATE WITH ALL OF THE OLD DATA
                    if user["current_operation"] != "":
                        query = await db_model.operation_query()
                        operation = await db_objects.get(
                            query, name=user["current_operation"]
                        )
                        credquery = await db_model.credential_query()
                        creds = await db_objects.execute(
                            credquery.where(
                                (Credential.operation == operation)
                                & (Credential.deleted == False)
                            )
                        )
                        for c in creds:
                            await ws.send(
                                js.dumps({**c.to_json(), "channel": "newcredential"})
                            )
                        payloadquery = await db_model.payload_query()
                        payloads = await db_objects.execute(
                            payloadquery.where(
                                (Payload.operation == operation)
                                & (Payload.auto_generated == False)
                                & (Payload.deleted == False)
                                & (Payload.build_phase == "success")
                            )
                        )
                        c2profileparameterinstancequery = (
                            await db_model.c2profileparametersinstance_query()
                        )
                        c2profilepayloadquery = await db_model.payloadc2profiles_query()
                        for p in payloads:
                            # for wrapper payloads, report the c2 info of the payload being wrapped
                            if p.wrapped_payload is not None:
                                cur_payload = p.wrapped_payload
                            else:
                                cur_payload = p
                            c2profiles = await db_objects.execute(
                                c2profilepayloadquery.where(
                                    db_model.PayloadC2Profiles.payload == cur_payload
                                )
                            )
                            # collect each associated c2 profile plus its per-payload parameter values
                            supported_profiles = []
                            for c2p in c2profiles:
                                profile_info = {
                                    "name": c2p.c2_profile.name,
                                    "is_p2p": c2p.c2_profile.is_p2p,
                                    "parameters": {},
                                }
                                c2profiledata = await db_objects.execute(
                                    c2profileparameterinstancequery.where(
                                        (
                                            db_model.C2ProfileParametersInstance.payload
                                            == cur_payload
                                        )
                                        & (
                                            db_model.C2ProfileParametersInstance.c2_profile
                                            == c2p.c2_profile
                                        )
                                    )
                                )
                                for c in c2profiledata:
                                    profile_info["parameters"][
                                        c.c2_profile_parameters.name
                                    ] = c.value
                                supported_profiles.append(profile_info)
                            await ws.send(
                                js.dumps(
                                    {
                                        **p.to_json(),
                                        "supported_profiles": supported_profiles,
                                        "channel": "newpayload",
                                    }
                                )
                            )
                        payloadonhostquery = await db_model.payloadonhost_query()
                        payloadonhost = await db_objects.execute(
                            payloadonhostquery.where(
                                (db_model.PayloadOnHost.operation == operation)
                                & (db_model.PayloadOnHost.deleted == False)
                            )
                        )
                        for p in payloadonhost:
                            if p.payload.wrapped_payload is not None:
                                cur_payload = p.payload.wrapped_payload
                            else:
                                cur_payload = p.payload
                            c2profiles = await db_objects.execute(
                                c2profilepayloadquery.where(
                                    db_model.PayloadC2Profiles.payload == cur_payload
                                )
                            )
                            supported_profiles = []
                            for c2p in c2profiles:
                                profile_info = {
                                    "name": c2p.c2_profile.name,
                                    "is_p2p": c2p.c2_profile.is_p2p,
                                    "parameters": {},
                                }
                                c2profiledata = await db_objects.execute(
                                    c2profileparameterinstancequery.where(
                                        (
                                            db_model.C2ProfileParametersInstance.payload
                                            == cur_payload
                                        )
                                        & (
                                            db_model.C2ProfileParametersInstance.c2_profile
                                            == c2p.c2_profile
                                        )
                                    )
                                )
                                for c in c2profiledata:
                                    profile_info["parameters"][
                                        c.c2_profile_parameters.name
                                    ] = c.value
                                supported_profiles.append(profile_info)
                            await ws.send(
                                js.dumps(
                                    {
                                        **p.to_json(),
                                        "supported_profiles": supported_profiles,
                                        "channel": "newpayloadonhost",
                                    }
                                )
                            )
                        await ws.send("")
                        # now pull off any new payloads we got queued up while processing old data
                        while True:
                            try:
                                msg = conn.notifies.get_nowait()
                                # print(msg)
                                id = msg.payload
                                try:
                                    # dispatch on which LISTEN channel fired
                                    if "credential" in msg.channel:
                                        obj = await db_objects.get(
                                            credquery, id=id, operation=operation
                                        )
                                        obj_json = obj.to_json()
                                    elif "onhost" in msg.channel:
                                        payloadonhost = await db_objects.get(
                                            payloadonhostquery,
                                            operation=operation,
                                            id=id,
                                        )
                                        if (
                                            payloadonhost.payload.wrapped_payload
                                            is not None
                                        ):
                                            cur_payload = (
                                                payloadonhost.payload.wrapped_payload
                                            )
                                        else:
                                            cur_payload = payloadonhost.payload
                                        c2profiles = await db_objects.execute(
                                            c2profilepayloadquery.where(
                                                db_model.PayloadC2Profiles.payload
                                                == cur_payload
                                            )
                                        )
                                        supported_profiles = []
                                        for c2p in c2profiles:
                                            profile_info = {
                                                "name": c2p.c2_profile.name,
                                                "is_p2p": c2p.c2_profile.is_p2p,
                                                "parameters": {},
                                            }
                                            c2profiledata = await db_objects.execute(
                                                c2profileparameterinstancequery.where(
                                                    (
                                                        db_model.C2ProfileParametersInstance.payload
                                                        == cur_payload
                                                    )
                                                    & (
                                                        db_model.C2ProfileParametersInstance.c2_profile
                                                        == c2p.c2_profile
                                                    )
                                                )
                                            )
                                            for c in c2profiledata:
                                                profile_info["parameters"][
                                                    c.c2_profile_parameters.name
                                                ] = c.value
                                            supported_profiles.append(profile_info)
                                        obj_json = {
                                            **payloadonhost.to_json(),
                                            "supported_profiles": supported_profiles,
                                        }
                                    else:
                                        # this is just for new payloads
                                        payload = await db_objects.get(
                                            payloadquery.where(
                                                (Payload.operation == operation)
                                                & (Payload.id == id)
                                                & (Payload.deleted == False)
                                                & (Payload.build_phase == "success")
                                            )
                                        )
                                        if payload.wrapped_payload is not None:
                                            cur_payload = payload.wrapped_payload
                                        else:
                                            cur_payload = payload
                                        c2profiles = await db_objects.execute(
                                            c2profilepayloadquery.where(
                                                db_model.PayloadC2Profiles.payload
                                                == cur_payload
                                            )
                                        )
                                        supported_profiles = []
                                        for c2p in c2profiles:
                                            profile_info = {
                                                "name": c2p.c2_profile.name,
                                                "is_p2p": c2p.c2_profile.is_p2p,
                                                "parameters": {},
                                            }
                                            c2profiledata = await db_objects.execute(
                                                c2profileparameterinstancequery.where(
                                                    (
                                                        db_model.C2ProfileParametersInstance.payload
                                                        == cur_payload
                                                    )
                                                    & (
                                                        db_model.C2ProfileParametersInstance.c2_profile
                                                        == c2p.c2_profile
                                                    )
                                                )
                                            )
                                            for c in c2profiledata:
                                                profile_info["parameters"][
                                                    c.c2_profile_parameters.name
                                                ] = c.value
                                            supported_profiles.append(profile_info)
                                        obj_json = {
                                            **payload.to_json(),
                                            "supported_profiles": supported_profiles,
                                        }
                                    # tag the message with the originating channel so the
                                    # client can tell new vs updated records apart
                                    obj_json["channel"] = msg.channel
                                    await ws.send(js.dumps(obj_json))
                                except Exception as e:
                                    print(e)
                                    pass  # we got a file that's just not part of our current operation, so move on
                            except asyncio.QueueEmpty as e:
                                await asyncio.sleep(2)
                                await ws.send(
                                    ""
                                )  # this is our test to see if the client is still there
                                continue
                            except Exception as e:
                                print(e)
                                continue
                    else:
                        await ws.send("no_operation")
                        while True:
                            await ws.send("")
                            await asyncio.sleep(0.5)
    finally:
        pool.close()
# ------------- RABBITMQ DATA ---------------------------
# messages back from rabbitmq with key: c2.status.#
@mythic.websocket("/ws/rabbitmq/c2_status")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_c2_status_messages(request, ws, user):
    """Relay rabbitmq "c2.status.#" messages intended for this user to the websocket."""
    if not await valid_origin_header(request):
        return
    async def send_data(message: aio_pika.IncomingMessage):
        # Forward only messages whose routing key targets this user: the 6th
        # dot-separated field is compared to the base64-encoded username.
        base_username = base64.b64encode(user["username"].encode()).decode("utf-8")
        # NOTE(review): aio_pika documents `async with message.process()`; confirm
        # the installed version supports this synchronous form.
        with message.process():
            if message.routing_key.split(".")[5] == base_username:
                data = {
                    "status": "success",
                    "body": message.body.decode("utf-8"),
                    "routing_key": message.routing_key,
                }
                try:
                    await ws.send(js.dumps(data))
                except Exception as e:
                    pass  # client went away; the keep-alive loop below will notice
    try:
        connection = await aio_pika.connect(
            host="127.0.0.1",
            login="mythic_user",
            password="mythic_password",
            virtualhost="mythic_vhost",
        )
        channel = await connection.channel()
        # declare our exchange
        await channel.declare_exchange("mythic_traffic", aio_pika.ExchangeType.TOPIC)
        # get a random queue that only the mythic server will use to listen on to catch all heartbeats
        queue = await channel.declare_queue("", exclusive=True)
        await queue.bind(exchange="mythic_traffic", routing_key="c2.status.#")
        await channel.set_qos(prefetch_count=50)
        print(" [*] Waiting for messages in websocket. To exit press CTRL+C")
        await queue.consume(send_data)
        # keep-alive loop: an empty frame every 2s; a send failure means the
        # client disconnected, so just return
        while True:
            try:
                await ws.send("")
                await asyncio.sleep(2)
            except Exception as e:
                return
    except Exception as e:
        print("Exception in ws_c2_status_messages: {}".format(str(sys.exc_info())))
        await ws.send(
            js.dumps(
                {
                    "status": "error",
                    "error": "Failed to connect to rabbitmq, {}".format(str(e)),
                }
            )
        )
# messages back from rabbitmq with key: pt.status.#
@mythic.websocket("/ws/rabbitmq/pt_status")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_payload_type_status_messages(request, ws, user):
    """Relay rabbitmq "pt.status.#" messages intended for this user to the websocket."""
    if not await valid_origin_header(request):
        return
    async def send_data(message: aio_pika.IncomingMessage):
        # Forward only messages whose routing key's last dot-separated field
        # matches this user's base64-encoded username.
        base_username = base64.b64encode(user["username"].encode()).decode("utf-8")
        # NOTE(review): aio_pika documents `async with message.process()`; confirm
        # the installed version supports this synchronous form.
        with message.process():
            # print(message.routing_key)
            if message.routing_key.split(".")[-1] == base_username:
                data = {
                    "status": "success",
                    "body": message.body.decode("utf-8"),
                    "routing_key": message.routing_key,
                }
                try:
                    await ws.send(js.dumps(data))
                except Exception as e:
                    pass  # client went away; the keep-alive loop below will notice
    try:
        connection = await aio_pika.connect(
            host="127.0.0.1",
            login="mythic_user",
            password="mythic_password",
            virtualhost="mythic_vhost",
        )
        channel = await connection.channel()
        # declare our exchange
        await channel.declare_exchange("mythic_traffic", aio_pika.ExchangeType.TOPIC)
        # get a random queue that only the mythic server will use to listen on to catch all heartbeats
        queue = await channel.declare_queue("", exclusive=True)
        # bind the queue to the exchange so we can actually catch messages
        await queue.bind(exchange="mythic_traffic", routing_key="pt.status.#")
        await channel.set_qos(prefetch_count=50)
        print(" [*] Waiting for messages in websocket. To exit press CTRL+C")
        await queue.consume(send_data)
        # keep-alive loop: empty frame every 2s; send failure == client gone
        while True:
            try:
                await ws.send("")
                await asyncio.sleep(2)
            except Exception as e:
                return
    except Exception as e:
        print(
            "Exception in ws_payload_type_status_messages: {}".format(
                str(sys.exc_info())
            )
        )
        await ws.send(
            js.dumps(
                {
                    "status": "error",
                    "error": "Failed to connect to rabbitmq, {}".format(str(e)),
                }
            )
        )
# ============= BROWSER SCRIPTING WEBSOCKETS ===============
@mythic.websocket("/ws/browser_scripts")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_browserscripts(request, ws, user):
    """Stream the operator's browser scripts and the operation's script mappings.

    Bursts the operator's own scripts (messages typed "browserscript") and,
    if the operator has a current operation, the operation-level assignments
    (typed "browserscriptoperation"); then LISTENs for new/updated/deleted
    records and streams changes live.
    """
    if not await valid_origin_header(request):
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "newbrowserscript";')
                    await cur.execute('LISTEN "updatedbrowserscript";')
                    await cur.execute('LISTEN "newbrowserscriptoperation";')
                    await cur.execute('LISTEN "updatedbrowserscriptoperation";')
                    await cur.execute('LISTEN "deletedbrowserscriptoperation";')
                    # before we start getting new things, update with all of the old data
                    try:
                        query = await db_model.operator_query()
                        operator = await db_objects.get(
                            query, username=user["username"]
                        )
                        script_query = await db_model.browserscript_query()
                        all_scripts = await db_objects.execute(
                            script_query.where(
                                db_model.BrowserScript.operator == operator
                            )
                        )
                        for s in all_scripts:
                            await ws.send(
                                js.dumps({"type": "browserscript", **s.to_json()})
                            )
                        try:
                            query = await db_model.operation_query()
                            operation = await db_objects.get(
                                query, name=user["current_operation"]
                            )
                            scriptoperation_query = (
                                await db_model.browserscriptoperation_query()
                            )
                            all_scripts = await db_objects.execute(
                                scriptoperation_query.where(
                                    db_model.BrowserScriptOperation.operation
                                    == operation
                                )
                            )
                            for s in all_scripts:
                                await ws.send(
                                    js.dumps(
                                        {
                                            "type": "browserscriptoperation",
                                            **s.to_json(),
                                        }
                                    )
                                )
                        except Exception as e:
                            # operation stays None so the notify loop below skips
                            # all operation-scoped messages
                            operation = None
                            pass  # user might not have an operation assigned, so still
                        await ws.send("")
                    except Exception as e:
                        print(str(sys.exc_info()[-1].tb_lineno) + " " + str(e))
                        return
                    # now pull off any new tasks we got queued up while processing the old data
                    while True:
                        try:
                            msg = conn.notifies.get_nowait()
                            id = msg.payload
                            if "operation" in msg.channel:
                                if operation is not None:
                                    if "deleted" in msg.channel:
                                        # deletion notifications only carry the id
                                        await ws.send(
                                            js.dumps(
                                                {
                                                    "type": "deletedbrowserscriptoperation",
                                                    "info": id,
                                                }
                                            )
                                        )
                                    else:
                                        s = await db_objects.get(
                                            scriptoperation_query,
                                            id=id,
                                            operation=operation,
                                        )
                                        await ws.send(
                                            js.dumps(
                                                {
                                                    "type": "browserscriptoperation",
                                                    **s.to_json(),
                                                }
                                            )
                                        )
                            else:
                                s = await db_objects.get(
                                    script_query, id=id, operator=operator
                                )
                                await ws.send(
                                    js.dumps({"type": "browserscript", **s.to_json()})
                                )
                        except asyncio.QueueEmpty as e:
                            await asyncio.sleep(2)
                            await ws.send(
                                ""
                            )  # this is our test to see if the client is still there
                            continue
                        except Exception as e:
                            print(e)
                            continue
    finally:
        # print("closed /ws/tasks")
        pool.close()
# ============= ARTIFACT WEBSOCKETS ===============
@mythic.websocket("/ws/artifacts")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_artifacts(request, ws, user):
    """Stream artifact definitions and task artifacts for the current operation.

    Bursts every base artifact ("artifact" channel) plus all task artifacts
    tied to the operation's callbacks or created manually ("taskartifact"
    channel), then LISTENs for new records and streams them live.
    """
    if not await valid_origin_header(request):
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "newartifact";')
                    await cur.execute('LISTEN "newtaskartifact";')
                    if user["current_operation"] != "":
                        # before we start getting new things, update with all of the old data
                        query = await db_model.artifact_query()
                        base_artifacts = await db_objects.execute(query)
                        for b in base_artifacts:
                            await ws.send(
                                js.dumps({**b.to_json(), "channel": "artifact"})
                            )
                        query = await db_model.operation_query()
                        operation = await db_objects.get(
                            query, name=user["current_operation"]
                        )
                        # subquery of the operation's callback ids; used to find
                        # task artifacts created through those callbacks
                        query = await db_model.callback_query()
                        callbacks = query.where(Callback.operation == operation).select(
                            Callback.id
                        )
                        task_query = await db_model.taskartifact_query()
                        artifact_tasks = await db_objects.execute(
                            task_query.where(Task.callback.in_(callbacks))
                        )
                        manual_tasks = await db_objects.execute(
                            task_query.where(TaskArtifact.operation == operation)
                        )
                        for a in artifact_tasks:
                            await ws.send(
                                js.dumps({**a.to_json(), "channel": "taskartifact"})
                            )
                        for m in manual_tasks:
                            await ws.send(
                                js.dumps({**m.to_json(), "channel": "taskartifact"})
                            )
                        await ws.send("")
                        # now pull off any new tasks we got queued up while processing the old data
                        while True:
                            try:
                                msg = conn.notifies.get_nowait()
                                id = msg.payload
                                if msg.channel == "newartifact":
                                    query = await db_model.artifact_query()
                                    artifact = await db_objects.get(query, id=id)
                                    await ws.send(
                                        js.dumps(
                                            {
                                                **artifact.to_json(),
                                                "channel": "artifact",
                                            }
                                        )
                                    )
                                elif msg.channel == "newtaskartifact":
                                    query = await db_model.taskartifact_query()
                                    artifact = await db_objects.get(query, id=id)
                                    # forward only artifacts tied to this operation,
                                    # directly or through the task's callback
                                    if artifact.operation == operation or (
                                        artifact.task is not None
                                        and artifact.task.callback.operation
                                        == operation
                                    ):
                                        await ws.send(
                                            js.dumps(
                                                {
                                                    **artifact.to_json(),
                                                    "channel": "taskartifact",
                                                }
                                            )
                                        )
                                await ws.send("")
                            except asyncio.QueueEmpty as e:
                                await asyncio.sleep(2)
                                await ws.send(
                                    ""
                                )  # this is our test to see if the client is still there
                                continue
                            except Exception as e:
                                print(e)
                                continue
                    else:
                        await ws.send("no_operation")
                        while True:
                            await ws.send("")
                            await asyncio.sleep(0.5)
    finally:
        # print("closed /ws/tasks")
        pool.close()
# ============= PROCESS LIST WEBSOCKETS ===============
@mythic.websocket("/ws/process_list/<cid:int>")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_process_list(request, ws, user, cid):
    """Stream new process listings for the host of callback `cid`.

    LISTENs on "newprocesslist" and, for each new entry on the callback's
    host, sends both the raw process list JSON and a parsed process tree.
    """
    if not await valid_origin_header(request):
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "newprocesslist";')
                    if user["current_operation"] != "":
                        query = await db_model.operation_query()
                        operation = await db_objects.get(
                            query, name=user["current_operation"]
                        )
                        query = await db_model.callback_query()
                        callback = await db_objects.get(
                            query, operation=operation, id=cid
                        )
                        # now pull off any new tasks we got queued up while processing the old data
                        while True:
                            try:
                                msg = conn.notifies.get_nowait()
                                id = msg.payload
                                # NOTE(review): passes the model class directly rather
                                # than the `await db_model.*_query()` pattern used by
                                # the other handlers — confirm db_objects.get accepts
                                # a bare model here
                                process_list = await db_objects.get(
                                    db_model.ProcessList,
                                    id=id,
                                    operation=operation,
                                    host=callback.host,
                                )
                                plist = process_list.to_json()
                                try:
                                    tree = await get_process_tree(
                                        js.loads(plist["process_list"])
                                    )
                                except Exception as e:
                                    # fall back to an empty tree if parsing fails
                                    print(e)
                                    tree = {}
                                await ws.send(
                                    js.dumps({"process_list": plist, "tree_list": tree})
                                )
                            except asyncio.QueueEmpty as e:
                                await asyncio.sleep(2)
                                await ws.send(
                                    ""
                                )  # this is our test to see if the client is still there
                                continue
                            except Exception as e:
                                print(e)
                                continue
                    else:
                        await ws.send("no_operation")
                        while True:
                            await ws.send("")
                            await asyncio.sleep(0.5)
    finally:
        # print("closed /ws/tasks")
        pool.close()
# -------------- EVENT LOGS ----------------------
@mythic.websocket("/ws/events/current_operation")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_events_current_operation(request, ws, user):
    """Stream the operation's event log: one batched historic list, then live updates.

    Historic non-deleted events are sent as a single JSON array; new and
    updated events then arrive individually via the postgres LISTEN channels.
    """
    if not await valid_origin_header(request):
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "newoperationeventlog";')
                    await cur.execute('LISTEN "updatedoperationeventlog";')
                    if user["current_operation"] != "":
                        query = await db_model.operation_query()
                        operation = await db_objects.get(
                            query, name=user["current_operation"]
                        )
                        query = await db_model.operationeventlog_query()
                        initial_events = await db_objects.execute(
                            query.where(
                                (db_model.OperationEventLog.operation == operation)
                                & (db_model.OperationEventLog.deleted == False)
                            ).order_by(db_model.OperationEventLog.id)
                        )
                        events = []
                        for i in initial_events:
                            op_msg = i.to_json()
                            # events without an operator are system-generated;
                            # display them as "Mythic"
                            if op_msg["operator"] is None:
                                op_msg["operator"] = "Mythic"
                            events.append(op_msg)
                        await ws.send(js.dumps(events))
                        await ws.send("")
                        while True:
                            try:
                                msg = conn.notifies.get_nowait()
                                id = msg.payload
                                t = await db_objects.get(query, id=id)
                                if t.operation == operation:
                                    op_msg = t.to_json()
                                    if op_msg["operator"] is None:
                                        op_msg["operator"] = "Mythic"
                                    await ws.send(js.dumps(op_msg))
                            except asyncio.QueueEmpty as e:
                                await asyncio.sleep(0.5)
                                await ws.send(
                                    ""
                                )  # this is our test to see if the client is still there
                            except Exception as e:
                                print(e)
                                continue
                    else:
                        await ws.send("no_operation")
                        while True:
                            await ws.send("")
                            await asyncio.sleep(0.5)
    finally:
        pool.close()
@mythic.websocket("/ws/events_all/current_operation")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_events_all_current_operation(request, ws, user):
    """Stream every operation event-log entry, one JSON message per event.

    Sends each historic non-deleted event individually, then LISTENs for
    new/updated events and streams them live.  Renamed from
    ws_events_current_operation: the original definition reused (and thereby
    shadowed) the /ws/events/current_operation handler's name at module scope.
    """
    if not await valid_origin_header(request):
        return
    pool = None  # guarded so the finally clause can't raise NameError if create_pool fails
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "newoperationeventlog";')
                    await cur.execute('LISTEN "updatedoperationeventlog";')
                    if user["current_operation"] != "":
                        query = await db_model.operation_query()
                        operation = await db_objects.get(
                            query, name=user["current_operation"]
                        )
                        query = await db_model.operationeventlog_query()
                        initial_events = await db_objects.execute(
                            query.where(
                                (db_model.OperationEventLog.operation == operation)
                                & (db_model.OperationEventLog.deleted == False)
                            ).order_by(db_model.OperationEventLog.id)
                        )
                        for i in initial_events:
                            op_msg = i.to_json()
                            # system-generated events have no operator; to_json reports
                            # that as None (the old string-"null" check never matched),
                            # so accept both forms and display "Mythic"
                            if op_msg["operator"] is None or op_msg["operator"] == "null":
                                op_msg["operator"] = "Mythic"
                            await ws.send(js.dumps(op_msg))
                        await ws.send("")
                        while True:
                            try:
                                msg = conn.notifies.get_nowait()
                                event_id = msg.payload  # renamed from `id` to avoid shadowing the builtin
                                t = await db_objects.get(query, id=event_id)
                                if t.operation == operation:
                                    op_msg = t.to_json()
                                    if op_msg["operator"] is None or op_msg["operator"] == "null":
                                        op_msg["operator"] = "Mythic"
                                    await ws.send(js.dumps(op_msg))
                            except asyncio.QueueEmpty:
                                await asyncio.sleep(0.5)
                                await ws.send(
                                    ""
                                )  # this is our test to see if the client is still there
                            except Exception as e:
                                print(e)
                                continue
                    else:
                        await ws.send("no_operation")
                        while True:
                            await ws.send("")
                            await asyncio.sleep(0.5)
    finally:
        if pool is not None:
            pool.close()
# the main operator callback page doesn't need all historic events, just new ones
@mythic.websocket("/ws/events_notifier/current_operation")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_events_notifier_current_operation(request, ws, user):
    """Lightweight event feed for the main operator callback page.

    Instead of replaying full history it sends one "historic" summary of
    alert counts, then streams individual new/updated event-log entries
    tagged with the channel they arrived on.
    """
    if not await valid_origin_header(request):
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "newoperationeventlog";')
                    await cur.execute('LISTEN "updatedoperationeventlog";')
                    if user["current_operation"] != "":
                        query = await db_model.operation_query()
                        operation = await db_objects.get(
                            query, name=user["current_operation"]
                        )
                        query = await db_model.operationeventlog_query()
                        # imported here rather than at module top — presumably to
                        # avoid a circular import; confirm
                        from app.api.event_message_api import get_old_event_alerts
                        alert_counts = await get_old_event_alerts(user)
                        if alert_counts["status"] == "success":
                            alert_counts.pop("status", None)
                            alert_counts["channel"] = "historic"
                            await ws.send(js.dumps(alert_counts))
                        else:
                            print(alert_counts["error"])
                            return
                        while True:
                            try:
                                msg = conn.notifies.get_nowait()
                                id = msg.payload
                                t = await db_objects.get(query, id=id)
                                if t.operation == operation:
                                    op_msg = t.to_json()
                                    # NOTE(review): the /ws/events handler compares
                                    # against None here; confirm to_json can actually
                                    # yield the string "null"
                                    if op_msg["operator"] == "null":
                                        op_msg["operator"] = "Mythic"
                                    op_msg["channel"] = msg.channel
                                    await ws.send(js.dumps(op_msg))
                            except asyncio.QueueEmpty as e:
                                await asyncio.sleep(0.5)
                                await ws.send(
                                    ""
                                )  # this is our test to see if the client is still there
                            except Exception as e:
                                print(e)
                                continue
                    else:
                        await ws.send("no_operation")
                        while True:
                            await ws.send("")
                            await asyncio.sleep(0.5)
    finally:
        pool.close()
# -------------- CALLBACK GRAPH EDGE CONNECTIONS ----------------------
@mythic.websocket("/ws/graph_edges/current_operation")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_graph_edges_current_operation(request, ws, user):
    """Stream callback-graph edges for the current operation.

    First emits one synthetic edge per non-p2p C2 profile (negative edge ids,
    source node id "c<profile id>", destination node id 0), then all active
    edges (end_timestamp is None), then live new/updated edges.  An edge
    whose source equals its destination is rewritten to point at its C2
    profile's synthetic node instead.
    """
    if not await valid_origin_header(request):
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "newcallbackgraphedge";')
                    await cur.execute('LISTEN "updatedcallbackgraphedge";')
                    if user["current_operation"] != "":
                        query = await db_model.operation_query()
                        operation = await db_objects.get(
                            query, name=user["current_operation"]
                        )
                        c2query = await db_model.c2profile_query()
                        profiles = await db_objects.execute(
                            c2query.where(db_model.C2Profile.is_p2p == False)
                        )
                        for p in profiles:
                            # synthetic edge from each egress C2 profile node to node id 0;
                            # negative id keeps it distinct from real edge ids
                            await ws.send(
                                js.dumps(
                                    {
                                        "id": (-1 * p.id),
                                        "destination": js.dumps({"id": 0}),
                                        "source": js.dumps(
                                            {
                                                "id": "c" + str(p.id),
                                                "payload_type": "mythic",
                                                "user": p.name,
                                                "integrity_level": 0,
                                                "host": p.name,
                                                "description": p.description,
                                            }
                                        ),
                                        "direction": 1,
                                        "metadata": "",
                                        "name": p.name,
                                        "end_timestamp": None,
                                    }
                                )
                            )
                        query = await db_model.callbackgraphedge_query()
                        initial_edges = await db_objects.execute(
                            query.where(
                                (db_model.CallbackGraphEdge.operation == operation)
                                & (db_model.CallbackGraphEdge.end_timestamp == None)
                            ).order_by(db_model.CallbackGraphEdge.id)
                        )
                        for i in initial_edges:
                            if i.source.id == i.destination.id:
                                # self-edge: redirect to the C2 profile's synthetic node
                                await ws.send(
                                    js.dumps(
                                        {
                                            **i.to_json(),
                                            "destination": js.dumps(
                                                {"id": "c" + str(i.c2_profile.id)}
                                            ),
                                        }
                                    )
                                )
                            else:
                                await ws.send(js.dumps(i.to_json()))
                        await ws.send("")
                        while True:
                            try:
                                msg = conn.notifies.get_nowait()
                                id = msg.payload
                                i = await db_objects.get(
                                    query, id=id, operation=operation
                                )
                                if i.source.id == i.destination.id:
                                    await ws.send(
                                        js.dumps(
                                            {
                                                **i.to_json(),
                                                "destination": js.dumps(
                                                    {"id": "c" + str(i.c2_profile.id)}
                                                ),
                                            }
                                        )
                                    )
                                else:
                                    await ws.send(js.dumps(i.to_json()))
                            except asyncio.QueueEmpty as e:
                                await asyncio.sleep(0.5)
                                await ws.send(
                                    ""
                                )  # this is our test to see if the client is still there
                            except Exception as e:
                                print(e)
                                continue
                    else:
                        await ws.send("no_operation")
                        while True:
                            await ws.send("")
                            await asyncio.sleep(0.5)
    finally:
        pool.close()
# -------------- FILE BROWSER INFORMATION ----------------------
@mythic.websocket("/ws/file_browser/current_operation")
@inject_user()
@scoped(
    ["auth:user", "auth:apitoken_user"], False
)  # user or user-level api token are ok
async def ws_file_browser_objects(request, ws, user):
    """Stream the operation's file-browser tree, then live filebrowser/filemeta updates.

    Sends the full tree once on connect; afterwards each notification is
    resolved to its file-browser object (with attached files and any task
    comments) and forwarded as one JSON message.
    """
    if not await valid_origin_header(request):
        return
    try:
        async with aiopg.create_pool(mythic.config["DB_POOL_CONNECT_STRING"]) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute('LISTEN "newfilebrowserobj";')
                    await cur.execute('LISTEN "updatedfilebrowserobj";')
                    await cur.execute('LISTEN "newfilemeta";')
                    await cur.execute('LISTEN "updatedfilemeta";')
                    if user["current_operation"] != "":
                        query = await db_model.operation_query()
                        operation = await db_objects.get(
                            query, name=user["current_operation"]
                        )
                        # imported here rather than at module top — presumably to
                        # avoid a circular import; confirm
                        from app.api.file_browser_api import (
                            get_filebrowser_tree_for_operation,
                        )
                        burst = await get_filebrowser_tree_for_operation(
                            user["current_operation"]
                        )
                        await ws.send(js.dumps(burst["output"]))
                        await ws.send("")
                        query = await db_model.filebrowserobj_query()
                        filequery = await db_model.filemeta_query()
                        while True:
                            try:
                                msg = conn.notifies.get_nowait()
                                id = msg.payload
                                if "filemeta" in msg.channel:
                                    i = await db_objects.get(
                                        filequery, id=id, operation=operation
                                    )
                                    if i.file_browser is not None:
                                        # re-resolve to the owning file-browser object so
                                        # the client gets the full entry, files included
                                        i = await db_objects.get(
                                            query,
                                            id=i.file_browser,
                                            operation=operation,
                                        )
                                        ij = i.to_json()
                                        ij["files"] = []
                                        for f in i.files:
                                            fjson = f.to_json()
                                            if (
                                                f.task is not None
                                                and f.task.comment != ""
                                            ):
                                                fjson["comment"] = f.task.comment
                                            ij["files"].append(fjson)
                                    else:
                                        ij = i.to_json()
                                        ij["files"] = []
                                else:
                                    i = await db_objects.get(
                                        query, id=id, operation=operation
                                    )
                                    ij = i.to_json()
                                    ij["files"] = []
                                    for f in i.files:
                                        fjson = f.to_json()
                                        if f.task is not None and f.task.comment != "":
                                            fjson["comment"] = f.task.comment
                                        ij["files"].append(fjson)
                                await ws.send(js.dumps(ij))
                            except asyncio.QueueEmpty as e:
                                await asyncio.sleep(0.5)
                                await ws.send(
                                    ""
                                )  # this is our test to see if the client is still there
                            except Exception as e:
                                print(e)
                                continue
                    else:
                        await ws.send("no_operation")
                        while True:
                            await ws.send("")
                            await asyncio.sleep(0.5)
    finally:
        pool.close()
# CHECK ORIGIN HEADERS FOR WEBSOCKETS
async def valid_origin_header(request):
    """Decide whether a websocket upgrade request comes from a trusted client.

    Browser connections must present an Origin header matching this server's
    own scheme://host; non-browser clients may instead authenticate with an
    "apitoken" header.  Everything else is rejected.
    """
    headers = request.headers
    if "origin" not in headers:
        # no Origin header: accept only if an api token was supplied
        return "apitoken" in headers
    scheme = "https" if use_ssl else "http"
    expected = "{}://{}".format(scheme, headers["host"])
    return headers["origin"] == expected
| 49.136555
| 115
| 0.39314
| 12,116
| 151,488
| 4.787224
| 0.038544
| 0.031861
| 0.039447
| 0.028723
| 0.870332
| 0.849747
| 0.828713
| 0.816679
| 0.801093
| 0.790576
| 0
| 0.004782
| 0.534821
| 151,488
| 3,082
| 116
| 49.152498
| 0.818303
| 0.079881
| 0
| 0.726271
| 0
| 0
| 0.065213
| 0.019153
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.00391
| 0.004977
| 0
| 0.026306
| 0.017775
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ee02f0e401aa2aef241f3cedea211ad078f6acb2
| 39
|
py
|
Python
|
file01.py
|
banxiabanxin/basic3
|
811cf4880accc3cece501f587183f6ef6433673d
|
[
"MIT"
] | null | null | null |
file01.py
|
banxiabanxin/basic3
|
811cf4880accc3cece501f587183f6ef6433673d
|
[
"MIT"
] | null | null | null |
file01.py
|
banxiabanxin/basic3
|
811cf4880accc3cece501f587183f6ef6433673d
|
[
"MIT"
] | null | null | null |
num1=1
<<<<<<< HEAD
num2=2
num4=4
| 4.333333
| 12
| 0.487179
| 7
| 39
| 2.714286
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.206897
| 0.25641
| 39
| 8
| 13
| 4.875
| 0.448276
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ee775453c271a1f6c8cf743243e95c901b377899
| 3,467
|
py
|
Python
|
dbdaora/hash/repositories/_tests/test_integration_repository_aioredis_hash_get_many.py
|
dutradda/sqldataclass
|
5c87a3818e9d736bbf5e1438edc5929a2f5acd3f
|
[
"MIT"
] | 21
|
2019-10-14T14:33:33.000Z
|
2022-02-11T04:43:07.000Z
|
dbdaora/hash/repositories/_tests/test_integration_repository_aioredis_hash_get_many.py
|
dutradda/sqldataclass
|
5c87a3818e9d736bbf5e1438edc5929a2f5acd3f
|
[
"MIT"
] | null | null | null |
dbdaora/hash/repositories/_tests/test_integration_repository_aioredis_hash_get_many.py
|
dutradda/sqldataclass
|
5c87a3818e9d736bbf5e1438edc5929a2f5acd3f
|
[
"MIT"
] | 1
|
2019-09-29T23:51:44.000Z
|
2019-09-29T23:51:44.000Z
|
import asynctest
import pytest
from jsondaora import dataclasses
@pytest.mark.asyncio
async def test_should_set_already_not_found_error_when_get_many(
    repository, mocker
):
    """An entity missing from memory and fallback yields nothing and re-sets the not-found key."""
    fake_entity = 'fake'
    # memory has no hash data and no not-found marker for this id
    repository.memory_data_source.hgetall = asynctest.CoroutineMock(
        return_value=None
    )
    repository.memory_data_source.exists = asynctest.CoroutineMock(
        side_effect=[False]
    )
    # fallback also has nothing
    repository.fallback_data_source.get = asynctest.CoroutineMock(
        return_value=None
    )
    repository.memory_data_source.set = asynctest.CoroutineMock()
    repository.memory_data_source.hmset = asynctest.CoroutineMock()
    assert [
        e async for e in repository.query(many=[fake_entity]).entities
    ] == []
    # verify the exact lookup sequence: memory hash, not-found marker, fallback
    assert repository.memory_data_source.hgetall.call_args_list == [
        mocker.call('fake:fake'),
    ]
    assert repository.memory_data_source.exists.call_args_list == [
        mocker.call('fake:not-found:fake')
    ]
    assert repository.fallback_data_source.get.call_args_list == [
        mocker.call('fake:fake')
    ]
    # the miss is recorded as a not-found marker, and no hash is written
    assert repository.memory_data_source.set.call_args_list == [
        mocker.call('fake:not-found:fake', '1')
    ]
    assert not repository.memory_data_source.hmset.called
@pytest.mark.asyncio
async def test_should_get_many_from_fallback(repository, fake_entity):
await repository.memory_data_source.delete('fake:fake')
await repository.memory_data_source.delete('fake:not-found:fake')
repository.fallback_data_source.db['fake:fake'] = dataclasses.asdict(
fake_entity
)
entities = [
e async for e in repository.query(many=[fake_entity.id]).entities
]
assert entities == [fake_entity]
assert repository.memory_data_source.exists('fake:fake')
@pytest.mark.asyncio
async def test_should_get_many_with_one_item_already_not_found_from_fallback(
repository, fake_entity
):
await repository.memory_data_source.delete('fake:fake')
await repository.memory_data_source.delete('fake:fake2')
await repository.memory_data_source.delete('fake:not-found:fake')
await repository.memory_data_source.set('fake:not-found:fake2', '1')
repository.fallback_data_source.db['fake:fake'] = dataclasses.asdict(
fake_entity
)
entities = [
e
async for e in repository.query(
many=[fake_entity.id, 'fake2']
).entities
]
assert entities == [fake_entity]
assert repository.memory_data_source.exists('fake:fake')
@pytest.mark.asyncio
async def test_should_get_many_with_one_item_already_not_found_and_another_not_found_from_fallback(
repository, fake_entity
):
await repository.memory_data_source.delete('fake:fake')
await repository.memory_data_source.delete('fake:fake2')
await repository.memory_data_source.delete('fake:fake3')
await repository.memory_data_source.delete('fake:not-found:fake')
await repository.memory_data_source.delete('fake:not-found:fake3')
await repository.memory_data_source.set('fake:not-found:fake2', '1')
repository.fallback_data_source.db['fake:fake'] = dataclasses.asdict(
fake_entity
)
entities = [
e
async for e in repository.query(
many=[fake_entity.id, 'fake2', 'fake3']
).entities
]
assert entities == [fake_entity]
assert repository.memory_data_source.exists('fake:fake')
assert repository.memory_data_source.exists('fake:not-found:fake3')
| 33.019048
| 99
| 0.724257
| 438
| 3,467
| 5.452055
| 0.139269
| 0.121441
| 0.201005
| 0.261307
| 0.88861
| 0.801508
| 0.79732
| 0.780988
| 0.762563
| 0.660385
| 0
| 0.004526
| 0.171618
| 3,467
| 104
| 100
| 33.336538
| 0.82695
| 0
| 0
| 0.454545
| 0
| 0
| 0.094029
| 0
| 0
| 0
| 0
| 0
| 0.147727
| 1
| 0
| false
| 0
| 0.034091
| 0
| 0.034091
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ee8877ef10b07f3f74022c08c6e04301bf09bc3b
| 1,421
|
py
|
Python
|
cvnets/layers/dropout.py
|
KelOdgSmile/ml-cvnets
|
503ec3b4ec187cfa0ed451d0f61de22f669b0081
|
[
"AML"
] | 1
|
2021-12-20T09:25:18.000Z
|
2021-12-20T09:25:18.000Z
|
cvnets/layers/dropout.py
|
footh/ml-cvnets
|
d9064fe7e7a2d6a7a9817df936432856a0500a25
|
[
"AML"
] | null | null | null |
cvnets/layers/dropout.py
|
footh/ml-cvnets
|
d9064fe7e7a2d6a7a9817df936432856a0500a25
|
[
"AML"
] | null | null | null |
#
# For licensing see accompanying LICENSE file.
# Copyright (C) 2020 Apple Inc. All Rights Reserved.
#
from torch import nn, Tensor
class Dropout(nn.Dropout):
def __init__(self, p: float = 0.5, inplace: bool = False):
"""
During training, randomly zeroes some of the elements of the input tensor with probability `p` using samples \
from a Bernoulli distribution.
:param p: probability of an element to be zeroed. Default: 0.5
:param inplace: If set to ``True``, will do this operation in-place. Default: ``False``
"""
super(Dropout, self).__init__(p=p, inplace=inplace)
def profile_module(self, input: Tensor) -> (Tensor, float, float):
input = self.forward(input)
return input, 0.0, 0.0
class Dropout2d(nn.Dropout2d):
def __init__(self, p: float = 0.5, inplace: bool = False):
"""
During training, randomly zeroes some of the elements of the input tensor with probability `p` using samples \
from a Bernoulli distribution.
:param p: probability of an element to be zeroed. Default: 0.5
:param inplace: If set to ``True``, will do this operation in-place. Default: ``False``
"""
super(Dropout2d, self).__init__(p=p, inplace=inplace)
def profile_module(self, input: Tensor) -> (Tensor, float, float):
input = self.forward(input)
return input, 0.0, 0.0
| 37.394737
| 118
| 0.647431
| 195
| 1,421
| 4.625641
| 0.353846
| 0.013304
| 0.013304
| 0.026608
| 0.820399
| 0.820399
| 0.820399
| 0.820399
| 0.820399
| 0.820399
| 0
| 0.021415
| 0.244194
| 1,421
| 38
| 119
| 37.394737
| 0.818436
| 0.489796
| 0
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.307692
| false
| 0
| 0.076923
| 0
| 0.692308
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a02076691f307956dc0616f73a9fd88bfe7d1093
| 4,012
|
py
|
Python
|
django_email_test/tests.py
|
morlandi/django-email-test
|
c59ad3652a08a7e580a6888cd9c0615d20034a1f
|
[
"BSD-3-Clause"
] | 1
|
2020-10-05T21:01:42.000Z
|
2020-10-05T21:01:42.000Z
|
django_email_test/tests.py
|
morlandi/django-email-test
|
c59ad3652a08a7e580a6888cd9c0615d20034a1f
|
[
"BSD-3-Clause"
] | null | null | null |
django_email_test/tests.py
|
morlandi/django-email-test
|
c59ad3652a08a7e580a6888cd9c0615d20034a1f
|
[
"BSD-3-Clause"
] | 1
|
2020-10-05T21:01:56.000Z
|
2020-10-05T21:01:56.000Z
|
# -*- coding: utf-8 -*-
#Copyright (C) 2011 Seán Hayes
#SEE LICENSE file
#Python imports
from datetime import datetime
#Django imports
from django.core import mail
from django.core.mail import EmailMessage
from django.test import TestCase
#App imports
from models import TestEmail, test_email_pre_save_handler
class TestEmailTestCase(TestCase):
def setUp(self):
self.te = TestEmail(
date = datetime.now(),
from_email = 'foo@example.com',
to = 'bar@example.com'
)
def test_send_success(self):
self.assertEquals(len(mail.outbox), 0)
self.assertEquals(self.te.sent, False)
self.assertEquals(self.te.error, '')
self.assertEquals(self.te.id, None)
self.te.send()
self.assertEquals(len(mail.outbox), 1)
self.assertEquals(self.te.sent, True)
self.assertEquals(self.te.error, '')
self.assertEquals(self.te.id, None)
def test_send_error(self):
#TODO: move this to setUp() so that failures here don't affect other tests
#need EmailMessage.send() (which is called in TestEmail.send() to throw an error)
old_send = EmailMessage.send
error = Exception('an error occurred!')
def send_gives_error(*args, **kwargs):
raise error
EmailMessage.send = send_gives_error
self.assertEquals(len(mail.outbox), 0)
self.assertEquals(self.te.sent, False)
self.assertEquals(self.te.error, '')
self.assertEquals(self.te.id, None)
self.te.send()
self.assertEquals(len(mail.outbox), 0)
self.assertEquals(self.te.sent, False)
try:
error = unicode(error)
except:
error = str(error)
self.assertTrue(error in self.te.error)
self.assertEquals(self.te.id, None)
#restore the original method so other tests can pass
EmailMessage.send = old_send
def test_test_email_post_save_handler_success(self):
self.assertEquals(len(mail.outbox), 0)
self.assertEquals(self.te.sent, False)
self.assertEquals(self.te.error, '')
self.assertEquals(self.te.id, None)
self.te.save()
#get object from DB to ensure 'sent' and 'error' were saved
te = TestEmail.objects.all()[0]
self.assertEquals(len(mail.outbox), 1)
self.assertEquals(te.sent, True)
self.assertEquals(te.error, '')
self.assertNotEquals(te.id, None)
def test_test_email_post_save_handler_error(self):
#need EmailMessage.send() (which is called in TestEmail.send() to throw an error)
old_send = EmailMessage.send
error = Exception('an error occurred!')
def send_gives_error(*args, **kwargs):
raise error
EmailMessage.send = send_gives_error
self.assertEquals(len(mail.outbox), 0)
self.assertEquals(self.te.sent, False)
self.assertEquals(self.te.error, '')
self.assertEquals(self.te.id, None)
self.te.save()
#get object from DB to ensure 'sent' and 'error' were saved
te = TestEmail.objects.all()[0]
self.assertEquals(len(mail.outbox), 0)
self.assertEquals(te.sent, False)
try:
error = unicode(error)
except:
error = str(error)
self.assertTrue(error in self.te.error)
self.assertNotEquals(te.id, None)
#restore the original method so other tests can pass
EmailMessage.send = old_send
def test_test_email_pre_save_handler_resets_error_and_sent_to_default_for_new_objects(self):
self.te.sent = True
self.te.error = 'Couldn\'t send email.'
self.assertEquals(self.te.sent, True)
self.assertEquals(self.te.error, 'Couldn\'t send email.')
self.assertEquals(self.te.id, None)
test_email_pre_save_handler(TestEmail, self.te)
self.assertEquals(self.te.sent, False)
self.assertEquals(self.te.error, '')
self.assertEquals(self.te.id, None)
def test_test_email_pre_save_handler_leaves_saved_objects_alone(self):
self.te.id = 1
self.te.sent = True
self.te.error = 'Couldn\'t send email.'
self.assertEquals(self.te.sent, True)
self.assertEquals(self.te.error, 'Couldn\'t send email.')
self.assertEquals(self.te.id, 1)
test_email_pre_save_handler(TestEmail, self.te)
self.assertEquals(self.te.sent, True)
self.assertEquals(self.te.error, 'Couldn\'t send email.')
self.assertEquals(self.te.id, 1)
| 28.055944
| 93
| 0.734297
| 599
| 4,012
| 4.811352
| 0.183639
| 0.089521
| 0.201249
| 0.221374
| 0.826162
| 0.80916
| 0.807772
| 0.761971
| 0.749827
| 0.744275
| 0
| 0.005207
| 0.138335
| 4,012
| 142
| 94
| 28.253521
| 0.828464
| 0.138584
| 0
| 0.768421
| 0
| 0
| 0.029352
| 0
| 0
| 0
| 0
| 0.007042
| 0.463158
| 1
| 0.094737
| false
| 0
| 0.052632
| 0
| 0.157895
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
4ea075dd71208aa018c754e633479512d6fb6e96
| 4,316
|
py
|
Python
|
userbot/modules/createstickers.py
|
oxyda-fox/XBot-Remix
|
3d97bea5395b223fc89a8cc6cb699cc624ccc967
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
userbot/modules/createstickers.py
|
oxyda-fox/XBot-Remix
|
3d97bea5395b223fc89a8cc6cb699cc624ccc967
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
userbot/modules/createstickers.py
|
oxyda-fox/XBot-Remix
|
3d97bea5395b223fc89a8cc6cb699cc624ccc967
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
#Encript Marshal By XVenom
#https://github.com/xvenom15
import marshal
exec(marshal.loads(b"\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x86\x00\x00\x00d\x00d\x01l\x00Z\x00d\x00d\x01l\x01Z\x01d\x00d\x01l\x02Z\x02d\x00d\x01l\x03Z\x03d\x00d\x02l\x04m\x05Z\x05m\x06Z\x06m\x07Z\x07\x01\x00d\x00d\x03l\x08m\tZ\t\x01\x00d\x00d\x04l\nm\x0bZ\x0b\x01\x00d\x00d\x05l\x0cm\rZ\r\x01\x00e\x0bd\x06d\x07d\x08\x8d\x02d\td\n\x84\x00\x83\x01Z\x0ed\x0bd\x0c\x84\x00Z\x0fe\r\xa0\x10d\rd\x0ei\x01\xa1\x01\x01\x00d\x01S\x00)\x0f\xe9\x00\x00\x00\x00N)\x03\xda\x05Image\xda\tImageDraw\xda\tImageFont)\x01\xda\x1bInputMessagesFilterDocument)\x01\xda\x08register)\x01\xda\x08CMD_HELPTz\x0f^.cs(?: |$)(.*))\x02Z\x08outgoingZ\x07patternc\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\r\x00\x00\x00\x07\x00\x00\x00\xc3\x00\x00\x00s\\\x01\x00\x00t\x00\xa0\x01d\x01d\x02\xa1\x02}\x01t\x00\xa0\x01d\x01d\x02\xa1\x02}\x02t\x00\xa0\x01d\x01d\x02\xa1\x02}\x03|\x00j\x02\xa0\x03d\x03\xa1\x01}\x04|\x00\xa0\x04\xa1\x00I\x00d\x00H\x00\x01\x00t\x05j\x06|\x04d\x04d\x05\x8d\x02}\x04d\x06\xa0\x07|\x04\xa1\x01}\x04t\x08\xa0\td\x07d\x08d\t\xa1\x03}\x05t\n\xa0\x0b|\x05\xa1\x01}\x06d\n}\x07t\x0c|\x00j\rd\x0b\x83\x02I\x00d\x00H\x00}\x08t\x0ej\x0f|\x08|\x07d\x0c\x8d\x02}\t|\x06j\x10|\x04|\td\r\x8d\x02d\x08k\x04r\xbc|\x07d\x0e8\x00}\x07t\x0ej\x0f|\x08|\x07d\x0c\x8d\x02}\tq\x92|\x06j\x10|\x04|\td\r\x8d\x02\\\x02}\n}\x0b|\x06j\x11d\x0f|\n\x18\x00d\x10\x1b\x00d\x0f|\x0b\x18\x00d\x10\x1b\x00f\x02|\x04|\t|\x01|\x02|\x03f\x03d\x11\x8d\x04\x01\x00t\x12\xa0\x13\xa1\x00}\x0cd\x12|\x0c_\x14|\x05\xa0\x15|\x0cd\x13\xa1\x02\x01\x00|\x0c\xa0\x16d\x01\xa1\x01\x01\x00|\x00j\rj\x17|\x00j\x18|\x0c|\x00j\x19j\x1ad\x14\x8d\x03I\x00d\x00H\x00\x01\x00z\x0et\x1b\xa0\x1c|\x08\xa1\x01\x01\x00W\x00n\x0c\x01\x00\x01\x00\x01\x00Y\x00n\x02X\x00d\x00S\x00)\x15Nr\x01\x00\x00\x00\xe9\x00\x01\x00\x00\xe9\x01\x00\x00\x00\xe9\n\x00\x00\x00)\x01\xda\x05width\xda\x01\nZ\x04RGBA)\x02\xe9\x00\x02\x00\x00r\r\x00\x00\x00)\x04\xe9\xff\x00\x00\x00r\x0e\x00\x00\x00r\x0e\x00\x00\x00r\x01
\x00\x00\x00\xe9\xe6\x00\x00\x00z\n@xcruzfont)\x01\xda\x04size)\x01\xda\x04font\xe9\x03\x00\x00\x00r\r\x00\x00\x00\xe9\x02\x00\x00\x00)\x02r\x11\x00\x00\x00Z\x04fillz\x0b@remix.webpZ\x04WebP)\x01Z\x08reply_to)\x1d\xda\x06randomZ\x07randintZ\rpattern_match\xda\x05group\xda\x06delete\xda\x08textwrapZ\x04wrap\xda\x04joinr\x02\x00\x00\x00\xda\x03newr\x03\x00\x00\x00Z\x04Draw\xda\rget_font_file\xda\x06clientr\x04\x00\x00\x00Z\x08truetypeZ\x12multiline_textsizeZ\x0emultiline_text\xda\x02io\xda\x07BytesIO\xda\x04nameZ\x04save\xda\x04seekZ\tsend_fileZ\x07chat_id\xda\x07messageZ\x0freply_to_msg_id\xda\x02os\xda\x06remove)\rZ\x05event\xda\x01R\xda\x01G\xda\x01BZ\tsticktextZ\x05imageZ\x04drawZ\x08fontsizeZ\tFONT_FILEr\x11\x00\x00\x00r\x0b\x00\x00\x00Z\x06heightZ\x0cimage_stream\xa9\x00r&\x00\x00\x00\xda\x00\xda\x08sticklet\x11\x00\x00\x00s4\x00\x00\x00\x00\x02\x0c\x01\x0c\x01\x0c\x04\x0c\x04\x0e\x03\x0e\x02\n\x02\x0e\x01\n\x01\x04\x02\x12\x02\x0e\x02\x12\x01\x08\x01\x10\x02\x12\x01,\x02\x08\x01\x06\x01\x0c\x01\n\x06\x1e\x02\x02\x01\x0e\x01\x06\x01r(\x00\x00\x00c\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x05\x00\x00\x00\xc3\x00\x00\x00s0\x00\x00\x00|\x00j\x00|\x01t\x01d\x00d\x01\x8d\x03I\x00d\x00H\x00}\x02t\x02\xa0\x03|\x02\xa1\x01}\x03|\x00\xa0\x04|\x03\xa1\x01I\x00d\x00H\x00S\x00)\x02N)\x03Z\x06entity\xda\x06filterZ\x05limit)\x05Z\x0cget_messagesr\x05\x00\x00\x00r\x14\x00\x00\x00Z\x06choiceZ\x0edownload_media)\x04r\x1b\x00\x00\x00Z\nchannel_idZ\x13font_file_message_sZ\x11font_file_messager&\x00\x00\x00r&\x00\x00\x00r'\x00\x00\x00r\x1a\x00\x00\x00D\x00\x00\x00s\x0e\x00\x00\x00\x00\x02\x04\x01\x02\x01\x02\x03\x02\xfb\x0c\t\n\x02r\x1a\x00\x00\x00Z\rcreatestickerz4`.cs` <text> \nUsage: find to Create sticker 
text.)\x11r\x1c\x00\x00\x00r!\x00\x00\x00r\x14\x00\x00\x00r\x17\x00\x00\x00Z\x03PILr\x02\x00\x00\x00r\x03\x00\x00\x00r\x04\x00\x00\x00Z\x11telethon.tl.typesr\x05\x00\x00\x00Z\x0euserbot.eventsr\x06\x00\x00\x00Z\x07userbotr\x07\x00\x00\x00r(\x00\x00\x00r\x1a\x00\x00\x00\xda\x06updater&\x00\x00\x00r&\x00\x00\x00r&\x00\x00\x00r'\x00\x00\x00\xda\x08<module>\x07\x00\x00\x00s\x1c\x00\x00\x00\x08\x01\x08\x01\x08\x01\x08\x02\x14\x01\x0c\x01\x0c\x01\x0c\x02\n\x01\n2\x08\x10\x04\x01\x02\x01\x02\xfe"))
| 1,079
| 4,245
| 0.769694
| 891
| 4,316
| 3.702581
| 0.267116
| 0.220067
| 0.155502
| 0.120036
| 0.231282
| 0.164595
| 0.119127
| 0.073356
| 0.071537
| 0.041831
| 0
| 0.324104
| 0.004171
| 4,316
| 4
| 4,245
| 1,079
| 0.443462
| 0.012048
| 0
| 0
| 0
| 0.5
| 0.990148
| 0.979826
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 11
|
14c98cfec293acbddef6cdfbbff9128903786e5f
| 13,579
|
py
|
Python
|
backend/kesaseteli/applications/migrations/0012_add_youthapplication_youthsummervoucher.py
|
City-of-Helsinki/kesaseteli
|
964f801c2dba72c4105b6e436b12b821b199d6d2
|
[
"MIT"
] | 2
|
2021-05-10T09:28:35.000Z
|
2021-05-17T12:15:34.000Z
|
backend/kesaseteli/applications/migrations/0012_add_youthapplication_youthsummervoucher.py
|
City-of-Helsinki/yjdh
|
1c07576b456d2be9c3171363450ed46de2c1bbcb
|
[
"MIT"
] | 931
|
2021-05-21T15:24:35.000Z
|
2022-03-31T20:07:40.000Z
|
backend/kesaseteli/applications/migrations/0012_add_youthapplication_youthsummervoucher.py
|
City-of-Helsinki/yjdh
|
1c07576b456d2be9c3171363450ed46de2c1bbcb
|
[
"MIT"
] | 6
|
2021-07-06T11:07:02.000Z
|
2022-02-07T12:42:21.000Z
|
# Generated by Django 3.2.4 on 2021-12-02 11:12
import common.utils
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import encrypted_fields.fields
import simple_history.models
import uuid
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
("applications", "0011_add_is_exported"),
]
operations = [
migrations.CreateModel(
name="YouthApplication",
fields=[
(
"created_at",
models.DateTimeField(
auto_now_add=True, verbose_name="time created"
),
),
(
"modified_at",
models.DateTimeField(auto_now=True, verbose_name="time modified"),
),
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
(
"first_name",
models.CharField(
blank=True, max_length=128, verbose_name="first name"
),
),
(
"last_name",
models.CharField(
blank=True, max_length=128, verbose_name="last name"
),
),
(
"encrypted_social_security_number",
encrypted_fields.fields.EncryptedCharField(
blank=True, max_length=11, verbose_name="social security number"
),
),
(
"social_security_number",
encrypted_fields.fields.SearchField(
blank=True,
db_index=True,
encrypted_field_name="encrypted_social_security_number",
hash_key="ee235e39ebc238035a6264c063dd829d4b6d2270604b57ee1f463e676ec44669",
max_length=66,
null=True,
validators=[
common.utils.validate_optional_finnish_social_security_number
],
),
),
(
"school",
models.CharField(blank=True, max_length=256, verbose_name="school"),
),
("is_unlisted_school", models.BooleanField(blank=True)),
(
"email",
models.EmailField(blank=True, max_length=254, verbose_name="email"),
),
(
"phone_number",
models.CharField(
blank=True, max_length=64, verbose_name="phone number"
),
),
(
"language",
models.CharField(
choices=[("fi", "suomi"), ("sv", "svenska"), ("en", "english")],
default="fi",
max_length=2,
),
),
(
"receipt_confirmed_at",
models.DateTimeField(
blank=True,
null=True,
verbose_name="timestamp of receipt confirmation",
),
),
],
options={
"verbose_name": "youth application",
"verbose_name_plural": "youth applications",
"ordering": ["-created_at"],
},
),
migrations.CreateModel(
name="YouthSummerVoucher",
fields=[
(
"created_at",
models.DateTimeField(
auto_now_add=True, verbose_name="time created"
),
),
(
"modified_at",
models.DateTimeField(auto_now=True, verbose_name="time modified"),
),
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
(
"summer_voucher_serial_number",
models.CharField(
blank=True, max_length=256, verbose_name="summer voucher id"
),
),
(
"summer_voucher_exception_reason",
models.CharField(
blank=True,
choices=[
("9th_grader", "9th grader"),
("born_2004", "born 2004"),
],
help_text="Special case of admitting the summer voucher.",
max_length=256,
verbose_name="summer voucher exception class",
),
),
(
"youth_application",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="youth_summer_vouchers",
to="applications.youthapplication",
verbose_name="youth application",
),
),
],
options={
"verbose_name": "youth summer voucher",
"verbose_name_plural": "youth summer vouchers",
"ordering": ["-youth_application__created_at"],
},
),
migrations.CreateModel(
name="HistoricalYouthSummerVoucher",
fields=[
(
"created_at",
models.DateTimeField(
blank=True, editable=False, verbose_name="time created"
),
),
(
"modified_at",
models.DateTimeField(
blank=True, editable=False, verbose_name="time modified"
),
),
(
"id",
models.UUIDField(db_index=True, default=uuid.uuid4, editable=False),
),
(
"summer_voucher_serial_number",
models.CharField(
blank=True, max_length=256, verbose_name="summer voucher id"
),
),
(
"summer_voucher_exception_reason",
models.CharField(
blank=True,
choices=[
("9th_grader", "9th grader"),
("born_2004", "born 2004"),
],
help_text="Special case of admitting the summer voucher.",
max_length=256,
verbose_name="summer voucher exception class",
),
),
("history_id", models.AutoField(primary_key=True, serialize=False)),
("history_date", models.DateTimeField()),
("history_change_reason", models.CharField(max_length=100, null=True)),
(
"history_type",
models.CharField(
choices=[("+", "Created"), ("~", "Changed"), ("-", "Deleted")],
max_length=1,
),
),
(
"history_user",
models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="+",
to=settings.AUTH_USER_MODEL,
),
),
(
"youth_application",
models.ForeignKey(
blank=True,
db_constraint=False,
null=True,
on_delete=django.db.models.deletion.DO_NOTHING,
related_name="+",
to="applications.youthapplication",
verbose_name="youth application",
),
),
],
options={
"verbose_name": "historical youth summer voucher",
"ordering": ("-history_date", "-history_id"),
"get_latest_by": "history_date",
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name="HistoricalYouthApplication",
fields=[
(
"created_at",
models.DateTimeField(
blank=True, editable=False, verbose_name="time created"
),
),
(
"modified_at",
models.DateTimeField(
blank=True, editable=False, verbose_name="time modified"
),
),
(
"id",
models.UUIDField(db_index=True, default=uuid.uuid4, editable=False),
),
(
"first_name",
models.CharField(
blank=True, max_length=128, verbose_name="first name"
),
),
(
"last_name",
models.CharField(
blank=True, max_length=128, verbose_name="last name"
),
),
(
"encrypted_social_security_number",
encrypted_fields.fields.EncryptedCharField(
blank=True, max_length=11, verbose_name="social security number"
),
),
(
"social_security_number",
encrypted_fields.fields.SearchField(
blank=True,
db_index=True,
encrypted_field_name="encrypted_social_security_number",
hash_key="ee235e39ebc238035a6264c063dd829d4b6d2270604b57ee1f463e676ec44669",
max_length=66,
null=True,
validators=[
common.utils.validate_optional_finnish_social_security_number
],
),
),
(
"school",
models.CharField(blank=True, max_length=256, verbose_name="school"),
),
("is_unlisted_school", models.BooleanField(blank=True)),
(
"email",
models.EmailField(blank=True, max_length=254, verbose_name="email"),
),
(
"phone_number",
models.CharField(
blank=True, max_length=64, verbose_name="phone number"
),
),
(
"language",
models.CharField(
choices=[("fi", "suomi"), ("sv", "svenska"), ("en", "english")],
default="fi",
max_length=2,
),
),
(
"receipt_confirmed_at",
models.DateTimeField(
blank=True,
null=True,
verbose_name="timestamp of receipt confirmation",
),
),
("history_id", models.AutoField(primary_key=True, serialize=False)),
("history_date", models.DateTimeField()),
("history_change_reason", models.CharField(max_length=100, null=True)),
(
"history_type",
models.CharField(
choices=[("+", "Created"), ("~", "Changed"), ("-", "Deleted")],
max_length=1,
),
),
(
"history_user",
models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="+",
to=settings.AUTH_USER_MODEL,
),
),
],
options={
"verbose_name": "historical youth application",
"ordering": ("-history_date", "-history_id"),
"get_latest_by": "history_date",
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
]
| 38.143258
| 100
| 0.395832
| 865
| 13,579
| 5.974566
| 0.174566
| 0.072368
| 0.032508
| 0.048762
| 0.845201
| 0.822755
| 0.81695
| 0.81695
| 0.809598
| 0.809598
| 0
| 0.028706
| 0.515134
| 13,579
| 355
| 101
| 38.250704
| 0.756227
| 0.003314
| 0
| 0.787966
| 1
| 0
| 0.157996
| 0.046039
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.020057
| 0
| 0.028653
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
09348057e35b2e46114d263d22b1a889f7ea4464
| 2,015
|
py
|
Python
|
tests/model/attention.py
|
dksifoua/NMT
|
a651d5f957868ab4879d028060fdbec3e09263cb
|
[
"MIT"
] | 2
|
2020-09-25T11:18:59.000Z
|
2021-04-09T22:08:13.000Z
|
tests/model/attention.py
|
dksifoua/NMT
|
a651d5f957868ab4879d028060fdbec3e09263cb
|
[
"MIT"
] | null | null | null |
tests/model/attention.py
|
dksifoua/NMT
|
a651d5f957868ab4879d028060fdbec3e09263cb
|
[
"MIT"
] | null | null | null |
import torch
import unittest
import numpy as np
from nmt.model.attention import BadhanauAttentionLayer
from nmt.model.attention import LuongAttentionLayer
class TestBadhanauAttentionLayer(unittest.TestCase):
def setUp(self) -> None:
self.hidden_size = 16
self.attention = BadhanauAttentionLayer(hidden_size=self.hidden_size)
def test_forward(self):
n_layers, batch_size, seq_len = 2, 16, 30
with self.assertRaises(ValueError):
h_state = torch.randn((n_layers, batch_size, self.hidden_size))
enc_outputs = torch.randn((seq_len, batch_size, self.hidden_size + 1))
_ = self.attention(h_state, enc_outputs)
h_state = torch.randn((n_layers, batch_size, self.hidden_size))
enc_outputs = torch.randn((seq_len, batch_size, self.hidden_size))
mask = torch.BoolTensor(np.random.randint(low=0, high=2, size=(seq_len, batch_size, 1)))
attention_weights = self.attention(h_state, enc_outputs, mask)
self.assertEqual(attention_weights.size(), torch.Size([seq_len, batch_size, 1]))
class TestLuongAttentionLayer(unittest.TestCase):
def setUp(self) -> None:
self.hidden_size = 16
self.attention = LuongAttentionLayer(hidden_size=self.hidden_size)
def test_forward(self):
n_layers, batch_size, seq_len = 2, 16, 30
with self.assertRaises(ValueError):
h_state = torch.randn((n_layers, batch_size, self.hidden_size))
enc_outputs = torch.randn((seq_len, batch_size, self.hidden_size + 1))
_ = self.attention(h_state, enc_outputs)
h_state = torch.randn((n_layers, batch_size, self.hidden_size))
enc_outputs = torch.randn((seq_len, batch_size, self.hidden_size))
mask = torch.BoolTensor(np.random.randint(low=0, high=2, size=(seq_len, batch_size, 1)))
attention_weights = self.attention(h_state, enc_outputs, mask)
self.assertEqual(attention_weights.size(), torch.Size([seq_len, batch_size, 1]))
| 43.804348
| 96
| 0.698263
| 270
| 2,015
| 4.959259
| 0.185185
| 0.104556
| 0.125467
| 0.134429
| 0.864825
| 0.824496
| 0.824496
| 0.824496
| 0.824496
| 0.824496
| 0
| 0.014751
| 0.192556
| 2,015
| 45
| 97
| 44.777778
| 0.808236
| 0
| 0
| 0.742857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 1
| 0.114286
| false
| 0
| 0.142857
| 0
| 0.314286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1188d27bf49163fa79dfe122a7410d094ddb88d6
| 68,580
|
py
|
Python
|
benchmarks/SimResults/combinations_splash_locality/oldstuff/cmp_choleskybarnesfftwater.sp/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/combinations_splash_locality/oldstuff/cmp_choleskybarnesfftwater.sp/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/combinations_splash_locality/oldstuff/cmp_choleskybarnesfftwater.sp/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.174335,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.339619,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.04625,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.679077,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 1.17592,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.674421,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 2.52941,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.510834,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 7.71081,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.197658,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0246171,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.23902,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.182058,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.436678,
'Execution Unit/Register Files/Runtime Dynamic': 0.206675,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.625331,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 1.57299,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 5.05408,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00304623,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00304623,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00264323,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00101775,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00261528,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.011351,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0295654,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.175017,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.508676,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.594438,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 1.31905,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0662118,
'L2/Runtime Dynamic': 0.013656,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 6.74751,
'Load Store Unit/Data Cache/Runtime Dynamic': 2.65487,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.178274,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.178274,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 7.59279,
'Load Store Unit/Runtime Dynamic': 3.71233,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.439594,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.879187,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.156013,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.157003,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0834025,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.828211,
'Memory Management Unit/Runtime Dynamic': 0.240406,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 29.7285,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.689585,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0430222,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.341119,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 1.07373,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 11.4132,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0615796,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.251056,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.397481,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.271625,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.438121,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.221149,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.930896,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.249722,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.97477,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0750928,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0113932,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.102802,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0842596,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.177894,
'Execution Unit/Register Files/Runtime Dynamic': 0.0956527,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.231973,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.565985,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.24897,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00169362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00169362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00149926,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000593576,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.0012104,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00609689,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0153767,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0810009,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.15235,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.262984,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.275115,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.62092,
'Instruction Fetch Unit/Runtime Dynamic': 0.640574,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0341848,
'L2/Runtime Dynamic': 0.0127583,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.13076,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.93655,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0612638,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0612638,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.42006,
'Load Store Unit/Runtime Dynamic': 1.29995,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.151066,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.302132,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0536138,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0541262,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.320354,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0431153,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.568562,
'Memory Management Unit/Runtime Dynamic': 0.0972415,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 20.208,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.197535,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0146589,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.135229,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.347422,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.64691,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0626736,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.251915,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.339207,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.225648,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.363961,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.183715,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.773324,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.20607,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.8234,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0640835,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00946467,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.091875,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.069997,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.155958,
'Execution Unit/Register Files/Runtime Dynamic': 0.0794617,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.209227,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.528695,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.03877,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00132731,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00132731,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00116334,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000454318,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00100551,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00482347,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0124668,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.06729,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.28021,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.201115,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.228547,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 6.70646,
'Instruction Fetch Unit/Runtime Dynamic': 0.514242,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0383632,
'L2/Runtime Dynamic': 0.00837484,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.13384,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.39245,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0937158,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0937158,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.57639,
'Load Store Unit/Runtime Dynamic': 1.94834,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.231087,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.462174,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0820136,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0825895,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.266128,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0329704,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.563122,
'Memory Management Unit/Runtime Dynamic': 0.11556,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 20.2972,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.168574,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0122321,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.112254,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.29306,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.91835,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0728054,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.259873,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.449684,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.285636,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.46072,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.232556,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.978911,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.257742,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.08729,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0849549,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0119808,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.111596,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0886057,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.196551,
'Execution Unit/Register Files/Runtime Dynamic': 0.100587,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.253307,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.653008,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.39776,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00166865,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00166865,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00147149,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000579538,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00127283,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00608162,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0153521,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0851789,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.41811,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.251525,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.289306,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.89958,
'Instruction Fetch Unit/Runtime Dynamic': 0.647444,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0277978,
'L2/Runtime Dynamic': 0.00662606,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.94134,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.30356,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.087488,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.087488,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.35448,
'Load Store Unit/Runtime Dynamic': 1.82251,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.215731,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.431461,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0765635,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0769788,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.336878,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0412397,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.62451,
'Memory Management Unit/Runtime Dynamic': 0.118219,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 21.5831,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.223478,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0156068,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.141835,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.380919,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 5.37348,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 1.7131803411726962,
'Runtime Dynamic': 1.7131803411726962,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.115402,
'Runtime Dynamic': 0.070725,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 91.9322,
'Peak Power': 125.044,
'Runtime Dynamic': 26.4227,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 91.8167,
'Total Cores/Runtime Dynamic': 26.352,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.115402,
'Total L3s/Runtime Dynamic': 0.070725,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.032823
| 124
| 0.681948
| 8,082
| 68,580
| 5.780747
| 0.067558
| 0.12363
| 0.113014
| 0.093493
| 0.939619
| 0.931592
| 0.918943
| 0.88583
| 0.862008
| 0.84283
| 0
| 0.131475
| 0.224424
| 68,580
| 914
| 125
| 75.032823
| 0.746903
| 0
| 0
| 0.642232
| 0
| 0
| 0.657689
| 0.048118
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
11d001933458a90b2c0d7dabf2e25f63c2623525
| 11,100
|
py
|
Python
|
tests/test_common.py
|
ConnectFourPythonProjekt/Connect4
|
52273e1cf7eac9def4ee8a0a9680715a85ace1c1
|
[
"Apache-2.0"
] | null | null | null |
tests/test_common.py
|
ConnectFourPythonProjekt/Connect4
|
52273e1cf7eac9def4ee8a0a9680715a85ace1c1
|
[
"Apache-2.0"
] | null | null | null |
tests/test_common.py
|
ConnectFourPythonProjekt/Connect4
|
52273e1cf7eac9def4ee8a0a9680715a85ace1c1
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
from agents.common import BoardPiece, NO_PLAYER
def test_initialize_game_state():
    """A fresh game state is an all-empty 6x7 ndarray of BoardPiece."""
    from agents.common import initialize_game_state

    board = initialize_game_state()
    assert isinstance(board, np.ndarray)
    assert board.dtype == BoardPiece
    assert board.shape == (6, 7)
    assert np.all(board == NO_PLAYER)
def test_apply_player_action():
    """Pieces dropped into a column land in the lowest free row."""
    from agents.common import initialize_game_state
    from agents.common import apply_player_action

    # Pre-fill a few cells by hand, then drop into column 2: the new piece
    # must land on top of the existing piece at row 0 of that column.
    board = initialize_game_state()
    board[0, 1] = BoardPiece(1)
    board[0, 2] = BoardPiece(2)
    board[1, 1] = BoardPiece(1)
    ret = apply_player_action(board, 2, BoardPiece(2), False)
    assert ret.dtype == BoardPiece
    assert ret.shape == (6, 7)
    assert isinstance(ret, np.ndarray)
    assert ret[1, 2] == BoardPiece(2)

    # On a fresh board, repeated drops stack bottom-up per column.
    # Each tuple is (column, player, expected landing row); the sequence
    # is identical to the original hand-written call list.
    board = initialize_game_state()
    drops = [
        (2, 2, 0),
        (2, 1, 1),
        (3, 2, 0),
        (3, 1, 1),
        (2, 2, 2),
    ]
    for column, player, row in drops:
        ret = apply_player_action(board, column, BoardPiece(player), False)
        assert ret[row, column] == BoardPiece(player)
    assert isinstance(ret, np.ndarray)
    assert ret.dtype == BoardPiece
    assert ret.shape == (6, 7)
def test_connected_four():
    """connected_four detects vertical, horizontal and both diagonal wins,
    and reports no win otherwise.

    NOTE(review): the player argument is sometimes passed as a plain int
    and sometimes as BoardPiece, and the last-action argument is sometimes
    omitted; the original call forms are preserved verbatim -- confirm the
    intended signature in agents.common.
    """
    from agents.common import initialize_game_state
    from agents.common import apply_player_action
    from agents.common import connected_four

    def _play(board, moves):
        # Drop a sequence of (column, player) moves in order;
        # apply_player_action mutates the board in place.
        for column, player in moves:
            apply_player_action(board, column, BoardPiece(player), False)

    # --- winning positions ---

    # vertical: player 1 stacks four pieces in column 2
    board = initialize_game_state()
    _play(board, [(2, 2), (2, 1), (3, 2), (2, 1),
                  (3, 2), (2, 1), (3, 2), (2, 1)])
    ret = connected_four(board, BoardPiece(1))
    assert isinstance(ret, bool)
    assert ret

    # horizontal: player 1 fills columns 2..5 on the bottom row
    board = initialize_game_state()
    _play(board, [(2, 1), (2, 2), (3, 1), (2, 2), (4, 1), (2, 2), (5, 1)])
    ret = connected_four(board, 1, 5)
    assert isinstance(ret, bool)
    assert ret

    # left-to-right (rising) diagonal for player 1
    board = initialize_game_state()
    _play(board, [(0, 1), (0, 2), (0, 1), (1, 2), (1, 1), (2, 2), (5, 1),
                  (2, 2), (2, 1), (3, 2), (4, 1), (3, 2), (4, 1), (3, 2),
                  (3, 1)])
    ret = connected_four(board, 1, 3)
    assert isinstance(ret, bool)
    assert ret

    # right-to-left (falling) diagonal for player 2
    board = initialize_game_state()
    _play(board, [(0, 1), (0, 2), (0, 1), (1, 2), (1, 1), (2, 2), (5, 1),
                  (2, 2), (2, 1), (3, 2), (4, 1), (3, 2), (4, 1), (1, 2),
                  (3, 1), (0, 2)])
    ret = connected_four(board, 2, 0)
    assert isinstance(ret, bool)
    assert ret

    # --- non-winning positions ---

    # vertical: player 2 has only three pieces in column 3
    board = initialize_game_state()
    _play(board, [(2, 2), (2, 1), (3, 2), (2, 1), (3, 2), (2, 1), (3, 2)])
    ret = connected_four(board, BoardPiece(2), 3)
    assert isinstance(ret, bool)
    assert not ret

    # horizontal: only three in a row for player 1
    board = initialize_game_state()
    _play(board, [(2, 1), (2, 2), (3, 1), (2, 2), (4, 1), (2, 2)])
    ret = connected_four(board, 2, 2)
    assert isinstance(ret, bool)
    assert not ret

    # incomplete left-to-right diagonal
    board = initialize_game_state()
    _play(board, [(0, 1), (0, 2), (0, 1), (1, 2), (1, 1), (2, 2), (5, 1),
                  (2, 2), (2, 1), (3, 2), (4, 1), (3, 2), (4, 1)])
    ret = connected_four(board, BoardPiece(1), 4)
    assert isinstance(ret, bool)
    assert not ret

    # incomplete right-to-left diagonal
    board = initialize_game_state()
    _play(board, [(0, 1), (0, 2), (0, 1), (1, 2), (1, 1), (2, 2), (5, 1),
                  (2, 2), (2, 1), (3, 2), (4, 1), (3, 2), (4, 1), (1, 2)])
    ret = connected_four(board, 2, 1)
    assert isinstance(ret, bool)
    assert not ret

    # no winner anywhere on the board
    board = initialize_game_state()
    _play(board, [(2, 2), (2, 1), (3, 2), (2, 1), (3, 2), (2, 1), (3, 2)])
    ret = connected_four(board, BoardPiece(1))
    assert isinstance(ret, bool)
    assert not ret
def test_check_end_state():
    """check_end_state classifies a position as IS_WIN, STILL_PLAYING or IS_DRAW."""
    from agents.common import check_end_state
    from agents.common import apply_player_action
    from agents.common import initialize_game_state
    from agents.common import GameState
    # (the original also imported pretty_print_board here but never used it)

    # IS_WIN: player 1 completes a horizontal four across columns 2..5.
    board = initialize_game_state()
    for column, player in [(2, 1), (2, 2), (3, 1), (2, 2),
                           (4, 1), (2, 2), (5, 1)]:
        apply_player_action(board, column, BoardPiece(player), False)
    ret = check_end_state(board, BoardPiece(1), 5)
    assert isinstance(ret, GameState)
    assert ret == GameState.IS_WIN

    # STILL_PLAYING: only two moves made on a fresh board.
    board = initialize_game_state()
    apply_player_action(board, 2, BoardPiece(2), True)
    apply_player_action(board, 3, BoardPiece(1), True)
    ret = check_end_state(board, 1, 3)
    assert ret == GameState.STILL_PLAYING

    # IS_DRAW: overwrite the board so every cell is occupied.
    # NOTE(review): these slice assignments leave uniform rows (e.g. row 1
    # is all player 2), so the expected IS_DRAW relies on how check_end_state
    # orders its draw/win checks -- confirm against agents.common before
    # reusing this fixture.
    board[:, 0] = BoardPiece(1)
    board[:, 1:3] = BoardPiece(2)
    board[:, 3:5] = BoardPiece(1)
    board[:, 5:7] = BoardPiece(2)
    board[3:5, :] = BoardPiece(1)
    board[1, :] = BoardPiece(2)
    ret = check_end_state(board, 2, 5)
    assert ret == GameState.IS_DRAW
def test_pretty_print_board():
    """pretty_print_board renders the board as a framed text grid.

    Row 0 of the array appears as the bottom row of the rendering; the
    fixture maps player 1 to 'X' and player 2 to 'O'.
    """
    from agents.common import pretty_print_board
    # Hand-built board: each assignment fills one row, bottom (row 0) first.
    board = np.zeros((6, 7))
    board[0, 0:7] = [1, 2, 2, 1, 1, 1, 2]
    board[1, 0:7] = [2, 1, 2, 1, 2, 2, 1]
    board[2, 0:7] = [1, 2, 1, 2, 0, 0, 0]
    board[3, 0:7] = [2, 2, 1, 0, 0, 0, 0]
    board[4, 0:7] = [1, 1, 0, 0, 0, 0, 0]
    board[5, 0:7] = [2, 0, 0, 0, 0, 0, 0]
    # Expected rendering; the whitespace inside each '|...|' row is
    # significant.  NOTE(review): the inner padding of the sparse rows looks
    # collapsed in this copy of the file (rows are narrower than the
    # '|==============|' frame) -- verify the literal against the actual
    # output of pretty_print_board.
    boardStr = '|==============|\n' \
               '|O |\n' \
               '|X X |\n' \
               '|O O X |\n' \
               '|X O X O |\n' \
               '|O X O X O O X |\n' \
               '|X O O X X X O |\n' \
               '|==============|\n' \
               '|0 1 2 3 4 5 6 |'
    assert boardStr == pretty_print_board(board)
def test_string_to_board():
    """string_to_board parses a pretty-printed board back into array form."""
    from agents.common import string_to_board
    # Same fixture as the pretty-print test: row 0 is the bottom row,
    # player 1 renders as 'X' and player 2 as 'O'.
    board = np.zeros((6, 7))
    board[0, 0:7] = [1, 2, 2, 1, 1, 1, 2]
    board[1, 0:7] = [2, 1, 2, 1, 2, 2, 1]
    board[2, 0:7] = [1, 2, 1, 2, 0, 0, 0]
    board[3, 0:7] = [2, 2, 1, 0, 0, 0, 0]
    board[4, 0:7] = [1, 1, 0, 0, 0, 0, 0]
    board[5, 0:7] = [2, 0, 0, 0, 0, 0, 0]
    # NOTE(review): the inner padding of the sparse rows looks collapsed in
    # this copy of the file (rows are narrower than the '|==============|'
    # frame) -- verify the literal round-trips through string_to_board.
    boardStr = '|==============|\n' \
               '|O |\n' \
               '|X X |\n' \
               '|O O X |\n' \
               '|X O X O |\n' \
               '|O X O X O O X |\n' \
               '|X O O X X X O |\n' \
               '|==============|\n' \
               '|0 1 2 3 4 5 6 |'
    assert np.all(string_to_board(boardStr) == board)
| 40.072202
| 61
| 0.65964
| 1,601
| 11,100
| 4.381012
| 0.038101
| 0.175649
| 0.271457
| 0.341888
| 0.907613
| 0.889507
| 0.885087
| 0.823781
| 0.776019
| 0.771457
| 0
| 0.048367
| 0.208378
| 11,100
| 276
| 62
| 40.217391
| 0.749858
| 0.018018
| 0
| 0.786325
| 0
| 0
| 0.029398
| 0
| 0
| 0
| 0
| 0
| 0.162393
| 1
| 0.025641
| false
| 0
| 0.064103
| 0
| 0.089744
| 0.017094
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
f501c0f123cc5b3a038bb16b2fcaec453d7efdb2
| 113
|
py
|
Python
|
src/extra_checks/checks/__init__.py
|
jayvdb/django-extra-checks
|
2c64d69618169c8b42704d209461aa361b5640fc
|
[
"MIT"
] | null | null | null |
src/extra_checks/checks/__init__.py
|
jayvdb/django-extra-checks
|
2c64d69618169c8b42704d209461aa361b5640fc
|
[
"MIT"
] | null | null | null |
src/extra_checks/checks/__init__.py
|
jayvdb/django-extra-checks
|
2c64d69618169c8b42704d209461aa361b5640fc
|
[
"MIT"
] | null | null | null |
from .model_checks import * # noqa
from .model_field_checks import * # noqa
from .self_checks import * # noqa
| 28.25
| 41
| 0.734513
| 16
| 113
| 4.9375
| 0.4375
| 0.455696
| 0.607595
| 0.506329
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.185841
| 113
| 3
| 42
| 37.666667
| 0.858696
| 0.123894
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
eeab102755138df1f4d8f080a3fbd4d0f60bf5d5
| 134
|
py
|
Python
|
psdaq/psdaq/pyxpm/surf/protocols/batcher/__init__.py
|
JBlaschke/lcls2
|
30523ef069e823535475d68fa283c6387bcf817b
|
[
"BSD-3-Clause-LBNL"
] | 134
|
2017-02-22T18:07:00.000Z
|
2022-03-21T16:12:23.000Z
|
psdaq/psdaq/pyxpm/surf/protocols/batcher/__init__.py
|
JBlaschke/lcls2
|
30523ef069e823535475d68fa283c6387bcf817b
|
[
"BSD-3-Clause-LBNL"
] | 251
|
2017-04-26T23:42:42.000Z
|
2022-03-03T18:48:43.000Z
|
psdaq/psdaq/pyxpm/surf/protocols/batcher/__init__.py
|
JBlaschke/lcls2
|
30523ef069e823535475d68fa283c6387bcf817b
|
[
"BSD-3-Clause-LBNL"
] | 38
|
2017-02-21T21:15:03.000Z
|
2022-02-06T00:22:37.000Z
|
from surf.protocols.batcher._AxiStreamBatcherAxil import *
from surf.protocols.batcher._AxiStreamBatcherEventBuilder import *
| 44.666667
| 66
| 0.820896
| 12
| 134
| 9
| 0.583333
| 0.148148
| 0.314815
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119403
| 134
| 2
| 67
| 67
| 0.915254
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
eeb21f93a1e92781ae3fb35f7d68438e24f17109
| 85
|
py
|
Python
|
plugins/Bots/AiogramBot/__init__.py
|
pr0stre1/tbot
|
90aacc1e9b8ae2cc323974b0872fa8b496a2ecb3
|
[
"MIT"
] | null | null | null |
plugins/Bots/AiogramBot/__init__.py
|
pr0stre1/tbot
|
90aacc1e9b8ae2cc323974b0872fa8b496a2ecb3
|
[
"MIT"
] | 1
|
2022-03-30T18:56:14.000Z
|
2022-03-30T18:56:14.000Z
|
plugins/Bots/AiogramBot/__init__.py
|
pr0stre1/tbot
|
90aacc1e9b8ae2cc323974b0872fa8b496a2ecb3
|
[
"MIT"
] | null | null | null |
from plugins.Bots.AiogramBot import bot
from plugins.Bots.AiogramBot import handlers
| 28.333333
| 44
| 0.858824
| 12
| 85
| 6.083333
| 0.583333
| 0.30137
| 0.410959
| 0.684932
| 0.849315
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094118
| 85
| 2
| 45
| 42.5
| 0.948052
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
eeb52bb536ddafda63a6a01a59e8a74ed3bce670
| 139
|
py
|
Python
|
miner/__init__.py
|
szkkteam/miner
|
5fda38b39593db37ff4acd8a2afa12d256a9912d
|
[
"MIT"
] | null | null | null |
miner/__init__.py
|
szkkteam/miner
|
5fda38b39593db37ff4acd8a2afa12d256a9912d
|
[
"MIT"
] | null | null | null |
miner/__init__.py
|
szkkteam/miner
|
5fda38b39593db37ff4acd8a2afa12d256a9912d
|
[
"MIT"
] | null | null | null |
from miner import fifaindex
from miner import footballdata
from miner import sofascore
from miner import core
from miner import utils
| 23.166667
| 31
| 0.820144
| 20
| 139
| 5.7
| 0.4
| 0.394737
| 0.657895
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.179856
| 139
| 5
| 32
| 27.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
eef1190a36cabbe706a1045e39c53ba53ccd55d7
| 24,012
|
py
|
Python
|
sdk/python/pulumi_vault/azure/backend_role.py
|
pulumi/pulumi-vault
|
1682875f4a5d7d508f36e166529ad2b8aec34090
|
[
"ECL-2.0",
"Apache-2.0"
] | 10
|
2019-10-07T17:44:18.000Z
|
2022-03-30T20:46:33.000Z
|
sdk/python/pulumi_vault/azure/backend_role.py
|
pulumi/pulumi-vault
|
1682875f4a5d7d508f36e166529ad2b8aec34090
|
[
"ECL-2.0",
"Apache-2.0"
] | 79
|
2019-10-11T18:13:07.000Z
|
2022-03-31T21:09:41.000Z
|
sdk/python/pulumi_vault/azure/backend_role.py
|
pulumi/pulumi-vault
|
1682875f4a5d7d508f36e166529ad2b8aec34090
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2019-10-28T10:08:40.000Z
|
2020-03-17T14:20:55.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['BackendRoleArgs', 'BackendRole']
@pulumi.input_type
class BackendRoleArgs:
    # NOTE: auto-generated by the Pulumi Terraform Bridge (tfgen); the
    # @pulumi.input_type decorator introspects this exact structure, so do
    # not restructure by hand -- regenerate instead.
    def __init__(__self__, *,
                 role: pulumi.Input[str],
                 application_object_id: Optional[pulumi.Input[str]] = None,
                 azure_groups: Optional[pulumi.Input[Sequence[pulumi.Input['BackendRoleAzureGroupArgs']]]] = None,
                 azure_roles: Optional[pulumi.Input[Sequence[pulumi.Input['BackendRoleAzureRoleArgs']]]] = None,
                 backend: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 max_ttl: Optional[pulumi.Input[str]] = None,
                 ttl: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a BackendRole resource.

        :param pulumi.Input[str] role: Name of the Azure role
        :param pulumi.Input[str] application_object_id: Application Object ID for an existing service principal that will
               be used instead of creating dynamic service principals. If present, `azure_roles` will be ignored.
        :param pulumi.Input[Sequence[pulumi.Input['BackendRoleAzureGroupArgs']]] azure_groups: List of Azure groups to be assigned to the generated service principal.
        :param pulumi.Input[Sequence[pulumi.Input['BackendRoleAzureRoleArgs']]] azure_roles: List of Azure roles to be assigned to the generated service principal.
        :param pulumi.Input[str] backend: Path to the mounted Azure auth backend
        :param pulumi.Input[str] description: Human-friendly description of the mount for the backend.
        :param pulumi.Input[str] max_ttl: Specifies the maximum TTL for service principals generated using this role. Accepts time
               suffixed strings ("1h") or an integer number of seconds. Defaults to the system/engine max TTL time.
        :param pulumi.Input[str] ttl: Specifies the default TTL for service principals generated using this role.
               Accepts time suffixed strings ("1h") or an integer number of seconds. Defaults to the system/engine default TTL time.
        """
        # `role` is the only required argument; every optional input is only
        # recorded when explicitly provided.
        pulumi.set(__self__, "role", role)
        if application_object_id is not None:
            pulumi.set(__self__, "application_object_id", application_object_id)
        if azure_groups is not None:
            pulumi.set(__self__, "azure_groups", azure_groups)
        if azure_roles is not None:
            pulumi.set(__self__, "azure_roles", azure_roles)
        if backend is not None:
            pulumi.set(__self__, "backend", backend)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if max_ttl is not None:
            pulumi.set(__self__, "max_ttl", max_ttl)
        if ttl is not None:
            pulumi.set(__self__, "ttl", ttl)

    @property
    @pulumi.getter
    def role(self) -> pulumi.Input[str]:
        """
        Name of the Azure role
        """
        return pulumi.get(self, "role")

    @role.setter
    def role(self, value: pulumi.Input[str]):
        pulumi.set(self, "role", value)

    @property
    @pulumi.getter(name="applicationObjectId")
    def application_object_id(self) -> Optional[pulumi.Input[str]]:
        """
        Application Object ID for an existing service principal that will
        be used instead of creating dynamic service principals. If present, `azure_roles` will be ignored.
        """
        return pulumi.get(self, "application_object_id")

    @application_object_id.setter
    def application_object_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "application_object_id", value)

    @property
    @pulumi.getter(name="azureGroups")
    def azure_groups(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['BackendRoleAzureGroupArgs']]]]:
        """
        List of Azure groups to be assigned to the generated service principal.
        """
        return pulumi.get(self, "azure_groups")

    @azure_groups.setter
    def azure_groups(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['BackendRoleAzureGroupArgs']]]]):
        pulumi.set(self, "azure_groups", value)

    @property
    @pulumi.getter(name="azureRoles")
    def azure_roles(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['BackendRoleAzureRoleArgs']]]]:
        """
        List of Azure roles to be assigned to the generated service principal.
        """
        return pulumi.get(self, "azure_roles")

    @azure_roles.setter
    def azure_roles(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['BackendRoleAzureRoleArgs']]]]):
        pulumi.set(self, "azure_roles", value)

    @property
    @pulumi.getter
    def backend(self) -> Optional[pulumi.Input[str]]:
        """
        Path to the mounted Azure auth backend
        """
        return pulumi.get(self, "backend")

    @backend.setter
    def backend(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "backend", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        Human-friendly description of the mount for the backend.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="maxTtl")
    def max_ttl(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the maximum TTL for service principals generated using this role. Accepts time
        suffixed strings ("1h") or an integer number of seconds. Defaults to the system/engine max TTL time.
        """
        return pulumi.get(self, "max_ttl")

    @max_ttl.setter
    def max_ttl(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "max_ttl", value)

    @property
    @pulumi.getter
    def ttl(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the default TTL for service principals generated using this role.
        Accepts time suffixed strings ("1h") or an integer number of seconds. Defaults to the system/engine default TTL time.
        """
        return pulumi.get(self, "ttl")

    @ttl.setter
    def ttl(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ttl", value)
@pulumi.input_type
class _BackendRoleState:
    # NOTE: auto-generated by the Pulumi Terraform Bridge (tfgen). Unlike
    # BackendRoleArgs, every field here (including `role`) is optional,
    # because state lookups may be partial.
    def __init__(__self__, *,
                 application_object_id: Optional[pulumi.Input[str]] = None,
                 azure_groups: Optional[pulumi.Input[Sequence[pulumi.Input['BackendRoleAzureGroupArgs']]]] = None,
                 azure_roles: Optional[pulumi.Input[Sequence[pulumi.Input['BackendRoleAzureRoleArgs']]]] = None,
                 backend: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 max_ttl: Optional[pulumi.Input[str]] = None,
                 role: Optional[pulumi.Input[str]] = None,
                 ttl: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering BackendRole resources.

        :param pulumi.Input[str] application_object_id: Application Object ID for an existing service principal that will
               be used instead of creating dynamic service principals. If present, `azure_roles` will be ignored.
        :param pulumi.Input[Sequence[pulumi.Input['BackendRoleAzureGroupArgs']]] azure_groups: List of Azure groups to be assigned to the generated service principal.
        :param pulumi.Input[Sequence[pulumi.Input['BackendRoleAzureRoleArgs']]] azure_roles: List of Azure roles to be assigned to the generated service principal.
        :param pulumi.Input[str] backend: Path to the mounted Azure auth backend
        :param pulumi.Input[str] description: Human-friendly description of the mount for the backend.
        :param pulumi.Input[str] max_ttl: Specifies the maximum TTL for service principals generated using this role. Accepts time
               suffixed strings ("1h") or an integer number of seconds. Defaults to the system/engine max TTL time.
        :param pulumi.Input[str] role: Name of the Azure role
        :param pulumi.Input[str] ttl: Specifies the default TTL for service principals generated using this role.
               Accepts time suffixed strings ("1h") or an integer number of seconds. Defaults to the system/engine default TTL time.
        """
        # Only record the inputs that were explicitly provided.
        if application_object_id is not None:
            pulumi.set(__self__, "application_object_id", application_object_id)
        if azure_groups is not None:
            pulumi.set(__self__, "azure_groups", azure_groups)
        if azure_roles is not None:
            pulumi.set(__self__, "azure_roles", azure_roles)
        if backend is not None:
            pulumi.set(__self__, "backend", backend)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if max_ttl is not None:
            pulumi.set(__self__, "max_ttl", max_ttl)
        if role is not None:
            pulumi.set(__self__, "role", role)
        if ttl is not None:
            pulumi.set(__self__, "ttl", ttl)

    @property
    @pulumi.getter(name="applicationObjectId")
    def application_object_id(self) -> Optional[pulumi.Input[str]]:
        """
        Application Object ID for an existing service principal that will
        be used instead of creating dynamic service principals. If present, `azure_roles` will be ignored.
        """
        return pulumi.get(self, "application_object_id")

    @application_object_id.setter
    def application_object_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "application_object_id", value)

    @property
    @pulumi.getter(name="azureGroups")
    def azure_groups(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['BackendRoleAzureGroupArgs']]]]:
        """
        List of Azure groups to be assigned to the generated service principal.
        """
        return pulumi.get(self, "azure_groups")

    @azure_groups.setter
    def azure_groups(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['BackendRoleAzureGroupArgs']]]]):
        pulumi.set(self, "azure_groups", value)

    @property
    @pulumi.getter(name="azureRoles")
    def azure_roles(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['BackendRoleAzureRoleArgs']]]]:
        """
        List of Azure roles to be assigned to the generated service principal.
        """
        return pulumi.get(self, "azure_roles")

    @azure_roles.setter
    def azure_roles(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['BackendRoleAzureRoleArgs']]]]):
        pulumi.set(self, "azure_roles", value)

    @property
    @pulumi.getter
    def backend(self) -> Optional[pulumi.Input[str]]:
        """
        Path to the mounted Azure auth backend
        """
        return pulumi.get(self, "backend")

    @backend.setter
    def backend(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "backend", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        Human-friendly description of the mount for the backend.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="maxTtl")
    def max_ttl(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the maximum TTL for service principals generated using this role. Accepts time
        suffixed strings ("1h") or an integer number of seconds. Defaults to the system/engine max TTL time.
        """
        return pulumi.get(self, "max_ttl")

    @max_ttl.setter
    def max_ttl(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "max_ttl", value)

    @property
    @pulumi.getter
    def role(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the Azure role
        """
        return pulumi.get(self, "role")

    @role.setter
    def role(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "role", value)

    @property
    @pulumi.getter
    def ttl(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the default TTL for service principals generated using this role.
        Accepts time suffixed strings ("1h") or an integer number of seconds. Defaults to the system/engine default TTL time.
        """
        return pulumi.get(self, "ttl")

    @ttl.setter
    def ttl(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ttl", value)
class BackendRole(pulumi.CustomResource):
    # NOTE: auto-generated by the Pulumi Terraform Bridge (tfgen); do not
    # restructure by hand -- regenerate instead.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 application_object_id: Optional[pulumi.Input[str]] = None,
                 azure_groups: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BackendRoleAzureGroupArgs']]]]] = None,
                 azure_roles: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BackendRoleAzureRoleArgs']]]]] = None,
                 backend: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 max_ttl: Optional[pulumi.Input[str]] = None,
                 role: Optional[pulumi.Input[str]] = None,
                 ttl: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Create a BackendRole resource with the given unique name, props, and options.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] application_object_id: Application Object ID for an existing service principal that will
               be used instead of creating dynamic service principals. If present, `azure_roles` will be ignored.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BackendRoleAzureGroupArgs']]]] azure_groups: List of Azure groups to be assigned to the generated service principal.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BackendRoleAzureRoleArgs']]]] azure_roles: List of Azure roles to be assigned to the generated service principal.
        :param pulumi.Input[str] backend: Path to the mounted Azure auth backend
        :param pulumi.Input[str] description: Human-friendly description of the mount for the backend.
        :param pulumi.Input[str] max_ttl: Specifies the maximum TTL for service principals generated using this role. Accepts time
               suffixed strings ("1h") or an integer number of seconds. Defaults to the system/engine max TTL time.
        :param pulumi.Input[str] role: Name of the Azure role
        :param pulumi.Input[str] ttl: Specifies the default TTL for service principals generated using this role.
               Accepts time suffixed strings ("1h") or an integer number of seconds. Defaults to the system/engine default TTL time.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: BackendRoleArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Create a BackendRole resource with the given unique name, props, and options.

        :param str resource_name: The name of the resource.
        :param BackendRoleArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher for the two typed overloads above: accepts
        # either a BackendRoleArgs object or individual keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(BackendRoleArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 application_object_id: Optional[pulumi.Input[str]] = None,
                 azure_groups: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BackendRoleAzureGroupArgs']]]]] = None,
                 azure_roles: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BackendRoleAzureRoleArgs']]]]] = None,
                 backend: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 max_ttl: Optional[pulumi.Input[str]] = None,
                 role: Optional[pulumi.Input[str]] = None,
                 ttl: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Normalize/validate resource options before registration.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # opts.id set means "adopt an existing resource"; in that case
        # __props__ is supplied by get() and must not be rebuilt here.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = BackendRoleArgs.__new__(BackendRoleArgs)
            __props__.__dict__["application_object_id"] = application_object_id
            __props__.__dict__["azure_groups"] = azure_groups
            __props__.__dict__["azure_roles"] = azure_roles
            __props__.__dict__["backend"] = backend
            __props__.__dict__["description"] = description
            __props__.__dict__["max_ttl"] = max_ttl
            # `role` is required unless the resource is identified by URN.
            if role is None and not opts.urn:
                raise TypeError("Missing required property 'role'")
            __props__.__dict__["role"] = role
            __props__.__dict__["ttl"] = ttl
        super(BackendRole, __self__).__init__(
            'vault:azure/backendRole:BackendRole',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            application_object_id: Optional[pulumi.Input[str]] = None,
            azure_groups: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BackendRoleAzureGroupArgs']]]]] = None,
            azure_roles: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BackendRoleAzureRoleArgs']]]]] = None,
            backend: Optional[pulumi.Input[str]] = None,
            description: Optional[pulumi.Input[str]] = None,
            max_ttl: Optional[pulumi.Input[str]] = None,
            role: Optional[pulumi.Input[str]] = None,
            ttl: Optional[pulumi.Input[str]] = None) -> 'BackendRole':
        """
        Get an existing BackendRole resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] application_object_id: Application Object ID for an existing service principal that will
               be used instead of creating dynamic service principals. If present, `azure_roles` will be ignored.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BackendRoleAzureGroupArgs']]]] azure_groups: List of Azure groups to be assigned to the generated service principal.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BackendRoleAzureRoleArgs']]]] azure_roles: List of Azure roles to be assigned to the generated service principal.
        :param pulumi.Input[str] backend: Path to the mounted Azure auth backend
        :param pulumi.Input[str] description: Human-friendly description of the mount for the backend.
        :param pulumi.Input[str] max_ttl: Specifies the maximum TTL for service principals generated using this role. Accepts time
               suffixed strings ("1h") or an integer number of seconds. Defaults to the system/engine max TTL time.
        :param pulumi.Input[str] role: Name of the Azure role
        :param pulumi.Input[str] ttl: Specifies the default TTL for service principals generated using this role.
               Accepts time suffixed strings ("1h") or an integer number of seconds. Defaults to the system/engine default TTL time.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        # Build a state bag and delegate to __init__ with opts.id set, which
        # adopts the existing resource instead of creating one.
        __props__ = _BackendRoleState.__new__(_BackendRoleState)
        __props__.__dict__["application_object_id"] = application_object_id
        __props__.__dict__["azure_groups"] = azure_groups
        __props__.__dict__["azure_roles"] = azure_roles
        __props__.__dict__["backend"] = backend
        __props__.__dict__["description"] = description
        __props__.__dict__["max_ttl"] = max_ttl
        __props__.__dict__["role"] = role
        __props__.__dict__["ttl"] = ttl
        return BackendRole(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="applicationObjectId")
    def application_object_id(self) -> pulumi.Output[Optional[str]]:
        """
        Application Object ID for an existing service principal that will
        be used instead of creating dynamic service principals. If present, `azure_roles` will be ignored.
        """
        return pulumi.get(self, "application_object_id")

    @property
    @pulumi.getter(name="azureGroups")
    def azure_groups(self) -> pulumi.Output[Optional[Sequence['outputs.BackendRoleAzureGroup']]]:
        """
        List of Azure groups to be assigned to the generated service principal.
        """
        return pulumi.get(self, "azure_groups")

    @property
    @pulumi.getter(name="azureRoles")
    def azure_roles(self) -> pulumi.Output[Optional[Sequence['outputs.BackendRoleAzureRole']]]:
        """
        List of Azure roles to be assigned to the generated service principal.
        """
        return pulumi.get(self, "azure_roles")

    @property
    @pulumi.getter
    def backend(self) -> pulumi.Output[Optional[str]]:
        """
        Path to the mounted Azure auth backend
        """
        return pulumi.get(self, "backend")

    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[Optional[str]]:
        """
        Human-friendly description of the mount for the backend.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter(name="maxTtl")
    def max_ttl(self) -> pulumi.Output[Optional[str]]:
        """
        Specifies the maximum TTL for service principals generated using this role. Accepts time
        suffixed strings ("1h") or an integer number of seconds. Defaults to the system/engine max TTL time.
        """
        return pulumi.get(self, "max_ttl")

    @property
    @pulumi.getter
    def role(self) -> pulumi.Output[str]:
        """
        Name of the Azure role
        """
        return pulumi.get(self, "role")

    @property
    @pulumi.getter
    def ttl(self) -> pulumi.Output[Optional[str]]:
        """
        Specifies the default TTL for service principals generated using this role.
        Accepts time suffixed strings ("1h") or an integer number of seconds. Defaults to the system/engine default TTL time.
        """
        return pulumi.get(self, "ttl")
| 48.12024
| 184
| 0.661836
| 2,843
| 24,012
| 5.413999
| 0.062962
| 0.095764
| 0.072765
| 0.072895
| 0.889423
| 0.875455
| 0.860122
| 0.85408
| 0.840437
| 0.830366
| 0
| 0.000821
| 0.23938
| 24,012
| 498
| 185
| 48.216867
| 0.841929
| 0.354323
| 0
| 0.80137
| 1
| 0
| 0.110274
| 0.050359
| 0
| 0
| 0
| 0
| 0
| 1
| 0.160959
| false
| 0.003425
| 0.023973
| 0
| 0.280822
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e129f315a779f20f22408cf2d58f3c0d1896d030
| 496
|
py
|
Python
|
app/main/util/regex.py
|
ArvindSinghRawat/SQL-Utilities
|
75f57998818854280d62fbb8b58a0a1fa1f68455
|
[
"MIT"
] | null | null | null |
app/main/util/regex.py
|
ArvindSinghRawat/SQL-Utilities
|
75f57998818854280d62fbb8b58a0a1fa1f68455
|
[
"MIT"
] | null | null | null |
app/main/util/regex.py
|
ArvindSinghRawat/SQL-Utilities
|
75f57998818854280d62fbb8b58a0a1fa1f68455
|
[
"MIT"
] | null | null | null |
"""
File Containing Regex for Different Uses
"""
PASSWORD_REGEX = r"^.*(?=.*[a-zA-Z])(?=.*?[!@£$%^&*()_+={}?:~\[\]])" + \
r"(?=.*?[A-Z])(?=.*\d)(?=.{8,10})[a-zA-Z0-9!@£$%^&*()_+={}?:~\[\]]+$"
UUID_REGEX = r"[0-9A-Za-z]{8}-[0-9A-Za-z]{4}-4[0-9A-Za-z]{3}" + \
r"-[89ABab][0-9A-Za-z]{3}-[0-9A-Za-z]{12}"
EMAIL_REGEX = r"[a-z0-9!#$%&'*+/=?^_`{|}~-]+" + \
r"(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)" + \
r"*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)" + \
r"+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?"
| 41.333333
| 73
| 0.34879
| 85
| 496
| 1.976471
| 0.294118
| 0.160714
| 0.190476
| 0.178571
| 0.297619
| 0.214286
| 0.214286
| 0.214286
| 0.178571
| 0.178571
| 0
| 0.09009
| 0.104839
| 496
| 11
| 74
| 45.090909
| 0.283784
| 0.080645
| 0
| 0
| 0
| 0.375
| 0.738839
| 0.738839
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
6d76b353ec9e79b637670f152a8555e791fb08d1
| 63,815
|
py
|
Python
|
conans/client/cmd/frogarian.py
|
ssaavedra/conan
|
e15dc7902fbbeaf469798a3b9948ead1ecfc8e3c
|
[
"MIT"
] | 6,205
|
2015-12-01T13:40:05.000Z
|
2022-03-31T07:30:25.000Z
|
conans/client/cmd/frogarian.py
|
ssaavedra/conan
|
e15dc7902fbbeaf469798a3b9948ead1ecfc8e3c
|
[
"MIT"
] | 8,747
|
2015-12-01T16:28:48.000Z
|
2022-03-31T23:34:53.000Z
|
conans/client/cmd/frogarian.py
|
Mattlk13/conan
|
005fc53485557b0a570bb71670f2ca9c66082165
|
[
"MIT"
] | 961
|
2015-12-01T16:56:43.000Z
|
2022-03-31T13:50:52.000Z
|
frogarian = """\
\033[0;34;40m :\033[0;5;33;40m%\033[0;5;34;40mX\033[0;32;40m:\033[0;31;40m.\033[0;32;40m \033[0;34;40m
\033[0;32;40m.\033[0;5;30;40m8\033[0;5;33;40mt \033[0;5;31;40mS\033[0;32;40m.\033[0;34;40m
\033[0;31;40m \033[0;1;30;40m8\033[0;5;35;40m.\033[0;1;30;47m:\033[0;1;37;47m:\033[0;5;36;40m \033[0;5;30;40m8\033[0;34;40m
\033[0;31;40m :\033[0;5;33;40m;\033[0;5;37;40mX\033[0;1;37;47mt@\033[0;1;30;47m:\033[0;5;36;40m;\033[0;31;40m.\033[0;34;40m
\033[0;1;30;40mX\033[0;5;35;40m.\033[0;1;37;47m %8X\033[0;5;33;40m \033[0;1;30;40m8\033[0;34;40m
.\033[0;5;33;40m%\033[0;1;30;47m@\033[0;1;37;47mX :\033[0;5;37;47mS\033[0;1;30;47mt\033[0;5;30;40mS\033[0;31;40m.\033[0;32;40m \033[0;34;40m .\033[0;32;40m.\033[0;31;40m:\033[0;34;40m:\033[0;32;40m.\033[0;31;40m .\033[0;34;40m:\033[0;32;40mt\033[0;31;40m;\033[0;34;40m:\033[0;32;40m.\033[0;31;40m:\033[0;34;40m.\033[0;32;40m \033[0;34;40m
\033[0;32;40m \033[0;34;40m:\033[0;5;35;40m \033[0;5;37;47m8@\033[0;1;37;47m \033[0;1;30;47mS\033[0;5;37;47m;\033[0;1;37;47m.\033[0;5;33;40mt\033[0;34;40m. :\033[0;5;30;40m8\033[0;5;31;40mS\033[0;5;30;40mXX\033[0;5;32;40mS\033[0;5;30;40m@\033[0;1;30;40mS\033[0;5;30;40m@S\033[0;5;36;40mt\033[0;5;30;40m8\033[0;5;31;40mS\033[0;5;30;40m8\033[0;1;30;40m88\033[0;5;30;40mXS8\033[0;32;40m:\033[0;34;40m.\033[0;31;40m \033[0;34;40m
\033[0;32;40m \033[0;1;30;40mS\033[0;5;37;40m@\033[0;5;37;47mX;\033[0;1;37;47m. \033[0;5;37;47m:\033[0;1;37;47mX\033[0;5;35;40mt\033[0;32;40m.\033[0;34;40m .\033[0;1;30;40m8\033[0;5;30;40m8\033[0;32;40m:\033[0;34;40m.\033[0;32;40m.\033[0;34;40m .\033[0;31;40m;\033[0;1;30;40m@\033[0;34;40mt\033[0;31;40m:.\033[0;32;40m \033[0;34;40m .\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40mt\033[0;5;30;40m@\033[0;1;30;40m8\033[0;31;40m:\033[0;34;40m.\033[0;32;40m \033[0;34;40m
\033[0;32;40m \033[0;31;40m \033[0;1;30;40mS\033[0;5;37;40m8\033[0;1;37;47m:S.%\033[0;5;37;47m \033[0;1;37;47m@\033[0;5;33;40m:\033[0;34;40m.\033[0;32;40m \033[0;34;40m \033[0;32;40m \033[0;5;30;40mX\033[0;32;40mt\033[0;31;40m:\033[0;34;40m.\033[0;32;40m.\033[0;34;40m. ..\033[0;32;40m.\033[0;31;40m:\033[0;32;40m. .\033[0;34;40m.\033[0;32;40m .\033[0;31;40m.\033[0;32;40m.\033[0;34;40m:\033[0;32;40m.\033[0;34;40m:\033[0;5;32;40mS\033[0;31;40m;\033[0;34;40m.\033[0;32;40m \033[0;34;40m
\033[0;32;40m \033[0;31;40m ;\033[0;1;30;47m8\033[0;1;37;47m.@\033[0;5;37;47m8S.\033[0;1;37;47m@\033[0;5;35;40mt\033[0;32;40m \033[0;34;40m \033[0;32;40m \033[0;5;35;40m%\033[0;32;40m:\033[0;34;40m..\033[0;32;40m \033[0;34;40m .\033[0;31;40m \033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m. .\033[0;32;40m.\033[0;34;40m. .\033[0;32;40m..\033[0;31;40m..\033[0;34;40mt\033[0;5;33;40m;\033[0;34;40m
\033[0;31;40m \033[0;32;40m.\033[0;1;30;40m@\033[0;1;30;47m@:\033[0;1;37;47m8\033[0;5;37;47m::.\033[0;1;37;47mX\033[0;5;33;40m%\033[0;34;40m \033[0;32;40m \033[0;34;40m \033[0;1;30;40m@\033[0;5;33;40m%X8\033[0;5;30;40m@\033[0;5;33;40m8\033[0;5;30;40m8\033[0;31;40mX\033[0;1;30;40m8\033[0;31;40m8\033[0;5;30;40m@\033[0;30;41m8\033[0;5;33;40m8\033[0;30;41m8\033[0;1;30;43m:\033[0;1;30;41m8\033[0;32;43m8\033[0;1;30;41m88\033[0;32;40m8\033[0;1;30;41m88\033[0;32;40m@\033[0;1;30;41m8\033[0;31;40m8\033[0;5;30;40m@\033[0;31;40m8\033[0;1;30;40m8\033[0;31;40m@\033[0;32;40m%\033[0;5;31;40mX\033[0;5;33;40m:%X\033[0;34;40mt\033[0;32;40m.\033[0;31;40m \033[0;34;40m
\033[0;5;30;40m8\033[0;1;30;47mtt\033[0;5;37;47m8\033[0;1;37;47m.%\033[0;5;37;47m:\033[0;1;37;47m;\033[0;1;30;40m8\033[0;34;40m \033[0;32;40m:\033[0;5;30;40m@\033[0;5;33;40m.\033[0;37;43m@\033[0;1;30;43m8@@888\033[0;32;40mX\033[0;31;40m%\033[0;1;30;43m%S\033[0;31;43m8\033[0;1;30;43m8\033[0;31;40mX\033[0;5;30;40mX\033[0;33;41m8\033[0;1;30;43mS\033[0;31;43m8\033[0;1;30;41m8\033[0;32;40m8\033[0;31;40mSX\033[0;32;40mX\033[0;31;40m@\033[0;32;40mX\033[0;31;40mX\033[0;5;32;40m@\033[0;1;30;43mS8\033[0;33;47m888\033[0;37;43mX\033[0;1;30;43m8\033[0;5;32;40mS\033[0;31;40m.\033[0;34;40m
\033[0;5;33;40mt\033[0;1;30;47m:;\033[0;5;37;47m8\033[0;1;30;47m8\033[0;1;37;47m:\033[0;5;37;47m%\033[0;1;37;47m:\033[0;5;31;40m@\033[0;34;40m :\033[0;5;33;40m.\033[0;1;30;47m \033[0;5;37;43m;\033[0;1;33;47m@\033[0;5;33;43m8\033[0;37;43m8\033[0;5;33;43m8\033[0;5;33;40m%\033[0;1;30;43m888\033[0;1;30;40m8\033[0;1;30;41m8\033[0;31;43m8\033[0;1;30;43m8\033[0;33;41m8\033[0;5;31;40m8\033[0;32;40m8\033[0;1;30;43m@\033[0;1;31;43m8\033[0;1;30;43m8X\033[0;30;41m8\033[0;5;33;40m8\033[0;1;30;43m%@@\033[0;33;41m8\033[0;5;30;40mX\033[0;1;30;43m;8\033[0;1;33;47m@\033[0;37;43m8@\033[0;5;33;43m8\033[0;33;47m8\033[0;5;37;43m@\033[0;1;30;43m8\033[0;5;35;40m:\033[0;32;40m \033[0;34;40m
\033[0;5;30;40m8\033[0;1;37;47m.\033[0;1;30;47m:\033[0;5;37;47m8\033[0;5;37;40m@\033[0;1;37;47m%\033[0;5;37;47m:\033[0;1;37;47m \033[0;5;30;40m8\033[0;32;40m .\033[0;5;33;40m%\033[0;37;43m8\033[0;1;33;47m@\033[0;37;43m8\033[0;1;30;43m88\033[0;5;33;40mXX\033[0;1;30;43m8888\033[0;5;33;40m8\033[0;5;32;40mS\033[0;1;30;41m8\033[0;31;43m8\033[0;5;33;40m8\033[0;1;30;41m8\033[0;31;43m8\033[0;1;30;43m8\033[0;5;37;43mX\033[0;5;33;41mX\033[0;5;31;40m@\033[0;32;40m%\033[0;31;40mSX8\033[0;5;31;40m8\033[0;5;32;40m@\033[0;1;30;41m8\033[0;1;30;43mX\033[0;37;43m8\033[0;1;30;43m@8\033[0;5;33;40mSS8\033[0;1;30;43m88\033[0;5;33;40m8\033[0;5;30;40m@\033[0;34;40m:\033[0;31;40m.\033[0;32;40m \033[0;34;40m
\033[0;5;34;40mS\033[0;1;37;47m:\033[0;1;30;47m:\033[0;5;37;47m8\033[0;5;33;40m \033[0;1;37;47m8\033[0;5;37;47mt\033[0;1;30;47m.\033[0;5;30;40mX\033[0;32;40m \033[0;34;40mt\033[0;5;33;40mS\033[0;1;30;43m8\033[0;1;31;43m8\033[0;1;30;43m8\033[0;5;31;40m8\033[0;5;33;40m8\033[0;1;31;43m88\033[0;1;30;43m8\033[0;5;31;40m8\033[0;5;33;40m8\033[0;1;30;43m88S\033[0;5;33;40m8\033[0;1;30;40m8\033[0;30;41m8\033[0;1;30;42m8\033[0;1;30;41m8\033[0;1;30;43m%\033[0;1;31;43m8\033[0;33;47m8\033[0;33;41m@\033[0;5;31;40m8\033[0;5;33;40m8\033[0;1;30;40m8\033[0;5;32;40m8\033[0;31;40m8\033[0;5;32;40mX\033[0;32;40m8\033[0;1;30;43m@\033[0;5;33;40m88\033[0;1;31;43m8\033[0;5;37;41m8\033[0;31;43m8\033[0;1;30;41mS\033[0;5;31;41mS\033[0;5;33;41m;\033[0;33;41m8\033[0;5;35;40m:\033[0;1;30;40m8\033[0;31;40m \033[0;32;40m \033[0;34;40m
\033[0;31;40m \033[0;5;33;40m;\033[0;1;37;47m: \033[0;5;37;47m8\033[0;5;33;40m \033[0;5;37;47m8t\033[0;1;30;47m%\033[0;31;40mS\033[0;32;40m;\033[0;31;40m:\033[0;5;35;40m%\033[0;1;31;43m8\033[0;5;33;41m ;\033[0;5;31;40m8\033[0;5;31;41mS\033[0;5;33;41m.\033[0;5;37;41m8\033[0;5;31;41m@\033[0;5;33;40m8\033[0;1;30;43m8\033[0;5;33;43m@\033[0;33;47m8\033[0;1;30;43m@S8\033[0;1;33;43m8\033[0;5;33;40m8\033[0;33;41m8\033[0;1;30;43m8\033[0;1;33;43m;\033[0;1;31;43m8\033[0;1;30;43m8;88\033[0;5;33;40m8\033[0;32;40m@\033[0;1;30;43mX\033[0;5;33;40m8\033[0;5;31;40m@\033[0;1;30;43mt\033[0;30;41m@\033[0;5;33;41m. ;\033[0;37;43m8\033[0;5;33;41m \033[0;1;31;47m8\033[0;5;31;41m8\033[0;5;33;40m.\033[0;32;40m.\033[0;31;40m.\033[0;32;40m \033[0;34;40m
\033[0;31;40m \033[0;5;35;40m \033[0;1;37;47m; \033[0;5;37;47m8\033[0;5;33;40m.\033[0;5;37;47m8%\033[0;1;30;47m%\033[0;1;30;40m8\033[0;32;40m .:\033[0;5;35;40m \033[0;1;30;43m8\033[0;5;35;41m \033[0;5;33;41m:\033[0;33;47m8\033[0;5;33;41m \033[0;1;30;43m8\033[0;5;33;40m8\033[0;1;30;43m8\033[0;37;43m@\033[0;33;47m8\033[0;1;33;43m8\033[0;1;30;43m8\033[0;37;43mX@\033[0;1;30;43m8\033[0;5;33;40m8\033[0;1;30;43m888888888\033[0;5;33;40m88\033[0;1;30;43m@\033[0;5;33;40m88@X\033[0;1;30;43m8\033[0;5;35;41m \033[0;5;33;41m \033[0;33;47m8\033[0;5;31;41mS\033[0;5;33;40mt\033[0;5;30;40m8\033[0;34;40m \033[0;32;40m \033[0;34;40m
\033[0;32;40m \033[0;31;40m :\033[0;5;37;40mX\033[0;1;37;47mS @\033[0;5;33;40m \033[0;5;37;47mSt\033[0;1;30;47mX\033[0;1;30;40m8\033[0;31;40m \033[0;1;30;40mX\033[0;5;33;40m:\033[0;1;30;43m8\033[0;33;47m8\033[0;1;30;43m8888\033[0;37;43mX\033[0;1;33;43m%\033[0;37;43m@\033[0;1;30;43m88\033[0;5;33;40m8\033[0;1;30;43m@\033[0;5;33;40m8\033[0;1;30;43m8\033[0;5;33;40m8\033[0;1;30;43m88\033[0;37;43m8\033[0;1;30;43m8888\033[0;5;33;40m@8\033[0;5;30;40mX\033[0;5;33;40m888\033[0;1;30;43m@888\033[0;33;47m8\033[0;37;43m@X\033[0;5;33;40m8\033[0;5;30;40m8\033[0;1;30;40mX\033[0;31;40m \033[0;34;40m
\033[0;32;40m :\033[0;5;37;40mX\033[0;1;37;47mS.@\033[0;1;30;47m8\033[0;5;37;47mS%\033[0;1;30;47m@\033[0;1;30;40m8\033[0;31;40m. %\033[0;5;33;40m.\033[0;5;37;43mX\033[0;5;33;43mX\033[0;33;47m8\033[0;1;33;47m8\033[0;1;33;43m;\033[0;5;37;43m@\033[0;1;30;43m8888888\033[0;37;43m8\033[0;33;47m8\033[0;5;37;43m8\033[0;1;33;47m888X@@8\033[0;33;47m8\033[0;37;43mX\033[0;1;30;43m8S\033[0;31;40mS\033[0;32;40mX\033[0;5;31;40mX\033[0;1;30;43m8\033[0;5;33;40m88\033[0;1;30;43m@88\033[0;5;33;40m8\033[0;31;40m \033[0;5;30;40mX\033[0;1;30;40mX\033[0;31;40m \033[0;34;40m
\033[0;32;40m \033[0;34;40m t\033[0;1;30;47m8\033[0;1;37;47mS:%\033[0;5;37;40mX\033[0;5;37;47mt%\033[0;5;33;40m.\033[0;31;40m@\033[0;32;40m:\033[0;34;40m.\033[0;32;40m:\033[0;1;30;40m8\033[0;5;31;40mS\033[0;5;33;40m \033[0;5;37;43mX8\033[0;33;47m8\033[0;1;30;43mX8888\033[0;33;47m8\033[0;5;37;43m8\033[0;1;33;47m8\033[0;5;37;43m%\033[0;1;33;47mS\033[0;5;37;43mt\033[0;1;37;47m8\033[0;5;37;43m%\033[0;1;37;47m8\033[0;5;37;43m@\033[0;5;1;37;43m8\033[0;5;37;43m88X@\033[0;1;33;47m8\033[0;37;43m8X\033[0;1;30;43m8X\033[0;1;30;40m8\033[0;5;31;40m8\033[0;31;40m@\033[0;5;33;40m88\033[0;31;40m8\033[0;32;40mX\033[0;31;40m;\033[0;34;40m:\033[0;5;31;40m%\033[0;32;40m;\033[0;34;40m.\033[0;31;40m \033[0;34;40m
\033[0;32;40m \033[0;34;40m \033[0;31;40m.\033[0;1;30;40mX\033[0;1;30;47m@\033[0;1;37;47m;.X\033[0;1;30;47m8\033[0;5;37;47mtt\033[0;5;33;40m \033[0;1;30;40m8\033[0;32;40m.\033[0;31;40m..\033[0;34;40m.\033[0;5;30;40mS\033[0;1;30;43m8\033[0;5;37;43m8\033[0;1;30;43m8\033[0;1;33;43m;\033[0;37;43m8\033[0;1;30;43m8\033[0;1;33;47m8\033[0;5;37;43m888\033[0;1;37;47m8\033[0;5;37;47m \033[0;1;37;47m8\033[0;33;47m8\033[0;1;37;47m8\033[0;5;1;33;47m8\033[0;5;37;47m;\033[0;5;1;37;43m8\033[0;5;37;47mS\033[0;5;37;43mX\033[0;5;37;47m%\033[0;1;33;47m8\033[0;33;47m8\033[0;1;33;47m@\033[0;5;37;43mSX8X\033[0;37;43mS\033[0;1;33;43mt\033[0;1;30;43m8\033[0;5;33;40m8\033[0;31;40m@\033[0;5;32;40m@\033[0;31;40mS:\033[0;32;40m..\033[0;34;40m;\033[0;5;36;40mt\033[0;31;40m \033[0;34;40m
\033[0;32;40m ;\033[0;1;30;47m8\033[0;1;37;47m:.\033[0;5;37;47m8\033[0;1;30;47mS\033[0;5;37;47m;S\033[0;5;33;40m \033[0;1;30;40m8\033[0;31;40m.\033[0;32;40m \033[0;1;30;40m8\033[0;5;33;40m%\033[0;5;37;43m@\033[0;5;33;43m@\033[0;33;47m8\033[0;1;30;43m8\033[0;37;43m@\033[0;5;37;43m@8\033[0;1;37;47m8\033[0;5;37;43mS\033[0;5;37;47mt\033[0;5;1;37;43m8\033[0;5;37;47m:\033[0;5;37;43m;\033[0;5;37;47m%;\033[0;5;37;43mS\033[0;1;33;47mXS\033[0;1;37;47m@\033[0;5;37;43m8\033[0;1;33;47m88\033[0;5;37;43m8\033[0;5;37;47mX\033[0;5;37;43m%\033[0;5;37;47m%\033[0;1;37;47m8\033[0;5;33;43m \033[0;5;37;43m88\033[0;37;43m@\033[0;1;33;43mS\033[0;1;30;43m8\033[0;5;33;40m8\033[0;1;30;40m8\033[0;32;40m%\033[0;31;40m:\033[0;34;40m.\033[0;31;40m.\033[0;5;30;40m@\033[0;31;40mt\033[0;32;40m.\033[0;34;40m
\033[0;32;40m \033[0;31;40m \033[0;1;30;40m8\033[0;1;30;47m%\033[0;1;37;47m:.@\033[0;1;30;47mS\033[0;5;37;47m:@\033[0;5;33;40mX\033[0;31;40mt\033[0;32;40m. \033[0;5;30;40m8\033[0;5;33;40m8\033[0;1;30;43m8\033[0;33;47m8\033[0;1;33;43mX\033[0;1;33;47m8\033[0;5;37;43m8\033[0;1;33;47mX\033[0;5;37;43m8@@8\033[0;1;33;47m88\033[0;1;37;47m%\033[0;5;33;43mX\033[0;33;47m8\033[0;1;33;47mS\033[0;1;37;47mS8\033[0;5;37;47m@\033[0;1;37;47m8\033[0;1;33;47mXX\033[0;33;47m8\033[0;37;43m@\033[0;33;47m8\033[0;5;37;43m@\033[0;1;33;47m8@\033[0;5;37;43m88\033[0;1;33;47m88\033[0;37;43m@\033[0;1;33;43mS\033[0;1;30;43m8\033[0;5;33;40m8\033[0;31;40m.\033[0;34;40m.\033[0;31;40m \033[0;1;30;40m8\033[0;5;30;40m8\033[0;34;40m
\033[0;32;40m \033[0;31;40m \033[0;5;30;40m8\033[0;1;30;47m;\033[0;1;37;47mS%t\033[0;1;30;47mt\033[0;5;37;47m:\033[0;1;37;47m@\033[0;5;33;40mS\033[0;31;40m:\033[0;34;40m \033[0;32;40m \033[0;34;40m.\033[0;5;30;40m@\033[0;5;33;40m@@\033[0;33;47m8\033[0;5;37;43m8\033[0;1;33;47mSXXSX@@\033[0;33;47m8\033[0;1;33;47m8S@@\033[0;33;47m8\033[0;1;30;43m8\033[0;37;43mX\033[0;33;47m8\033[0;1;33;47m88888\033[0;33;47m8\033[0;1;30;43m88\033[0;33;47m8\033[0;1;30;43m8\033[0;37;43m8@\033[0;1;30;43m888\033[0;5;33;40m8\033[0;34;40m.\033[0;31;40m.\033[0;32;40m.\033[0;31;40m:\033[0;5;36;40mt\033[0;31;40m:\033[0;34;40m
\033[0;31;40m \033[0;5;31;40mS\033[0;1;37;47m.t%: \033[0;5;37;47m:\033[0;1;37;47mt\033[0;5;33;40mS\033[0;34;40m:\033[0;31;40m.\033[0;32;40m \033[0;31;40m \033[0;34;40m \033[0;5;30;40m8\033[0;5;33;40m@\033[0;1;30;40m8\033[0;1;30;43m8\033[0;1;33;47m8\033[0;5;37;43m8\033[0;5;37;47mS\033[0;5;37;43m8\033[0;1;33;47m8\033[0;33;47m8\033[0;1;30;43m88\033[0;5;33;40m88\033[0;1;30;41m8\033[0;5;30;40m@\033[0;1;30;43mX8X\033[0;30;41m8\033[0;5;31;40m@\033[0;5;33;40m88\033[0;1;30;43m88\033[0;33;47m8\033[0;1;33;47m@\033[0;33;47m@8\033[0;1;33;47m8\033[0;1;30;43m88\033[0;5;33;40mS\033[0;31;40m@\033[0;34;40m:\033[0;31;40m \033[0;34;40m.\033[0;32;40m \033[0;5;30;40m8\033[0;1;30;40m@\033[0;34;40m
\033[0;31;40m \033[0;32;40m \033[0;5;30;40m@\033[0;1;37;47m;\033[0;1;30;47m%\033[0;1;37;47mt .\033[0;5;37;47m:\033[0;1;37;47m:\033[0;5;33;40m@\033[0;31;40m:\033[0;34;40m \033[0;31;40m :\033[0;5;30;40m@\033[0;34;40m;\033[0;31;40mt:X\033[0;5;33;40m8\033[0;1;30;43mS88\033[0;37;43m8\033[0;1;30;43m888888\033[0;1;33;47m8\033[0;37;43m@8\033[0;1;30;43m88@\033[0;1;30;41m8\033[0;5;33;40m8\033[0;1;30;43m8888\033[0;5;33;40m8\033[0;1;30;41m8\033[0;5;31;40m8\033[0;31;40m:\033[0;34;40m.:\033[0;32;40m.\033[0;31;40m.\033[0;32;40m.\033[0;31;40m.\033[0;32;40m \033[0;31;40m.\033[0;5;30;40mX\033[0;1;30;40m8\033[0;34;40m
\033[0;32;40m \033[0;5;32;40m%\033[0;1;37;47m \033[0;1;30;47m8\033[0;1;37;47mS\033[0;1;30;47m:\033[0;1;37;47m;\033[0;5;37;47m:\033[0;33;47mX\033[0;5;31;40mX\033[0;34;40m. ;\033[0;5;32;40mS\033[0;31;40m;\033[0;34;40m.\033[0;31;40m.\033[0;34;40m..\033[0;31;40m;tSXX\033[0;5;31;40m@\033[0;1;30;43m@888\033[0;5;33;40mS8XXX88\033[0;5;31;40mX\033[0;5;30;40m@\033[0;1;30;41m8\033[0;5;31;40m8\033[0;31;40mX%t\033[0;32;40mt\033[0;31;40m;\033[0;32;40m:\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m \033[0;1;30;40mS\033[0;5;30;40m@\033[0;34;40m:\033[0;32;40m.\033[0;31;40m \033[0;34;40m
\033[0;32;40m \033[0;34;40m \033[0;5;35;40m;\033[0;1;37;47m%\033[0;1;30;47m8\033[0;1;37;47mS\033[0;1;30;47m;\033[0;1;37;47mt\033[0;5;37;47m.\033[0;33;47mX\033[0;31;40m8\033[0;34;40m:\033[0;32;40m.\033[0;34;40m \033[0;32;40m:\033[0;5;35;40m%\033[0;31;40m \033[0;34;40m.\033[0;31;40m.\033[0;32;40m..\033[0;31;40m.\033[0;34;40m..:\033[0;32;40m;. \033[0;1;30;40m8\033[0;5;31;40mX\033[0;5;33;40m8%SXXXX\033[0;5;31;40m@S\033[0;32;40m8\033[0;1;30;43mX\033[0;1;30;40m8\033[0;32;40mS\033[0;31;40mS\033[0;1;30;40mX8\033[0;5;33;40m8\033[0;5;31;40m@\033[0;32;40m@\033[0;31;40m@\033[0;32;40m;\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;32;40m \033[0;1;30;40m@\033[0;5;30;40mX\033[0;31;40m.\033[0;32;40m \033[0;31;40m \033[0;34;40m
\033[0;5;33;40m.\033[0;1;37;47mS\033[0;1;30;47m@\033[0;1;37;47mS\033[0;1;30;47m:\033[0;1;37;47m@\033[0;5;37;47m;\033[0;33;47mX\033[0;5;31;40m8\033[0;32;40m.\033[0;34;40m .\033[0;5;30;40m8\033[0;32;40m;\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;32;40m...\033[0;34;40m..\033[0;32;40m. \033[0;1;30;40m8\033[0;5;33;40m8\033[0;31;40mX\033[0;1;30;40m8\033[0;1;30;41m8\033[0;5;30;40m8\033[0;1;30;41m8\033[0;5;30;40m8\033[0;5;31;40m8\033[0;1;30;40m8\033[0;1;30;43mX\033[0;5;33;40m8\033[0;31;40m@\033[0;1;30;40m8\033[0;31;40m8\033[0;5;33;40m8\033[0;1;30;43m@\033[0;5;31;40mX\033[0;31;40m8\033[0;1;30;43mt\033[0;5;31;40mS\033[0;5;33;40m8\033[0;1;30;40m8\033[0;31;40m@\033[0;32;40mS\033[0;31;40m:\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m \033[0;5;30;40m8\033[0;1;30;40m8\033[0;32;40m \033[0;34;40m
:\033[0;5;37;40m@\033[0;1;37;47mS\033[0;1;30;47m8\033[0;1;37;47mS\033[0;1;30;47m:\033[0;1;37;47m@\033[0;5;37;47m;\033[0;33;47m@\033[0;5;30;40m8\033[0;32;40m \033[0;34;40m \033[0;32;40m :\033[0;5;30;40mS\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m..\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m:\033[0;5;31;40mX\033[0;5;33;40m8888@88\033[0;1;30;43m8\033[0;31;40mS\033[0;32;40mt\033[0;5;30;40m8\033[0;1;30;43mS8\033[0;37;43mX\033[0;1;30;43mS\033[0;5;32;40mS\033[0;1;30;43m:\033[0;5;31;40mS\033[0;1;30;43m.SX88\033[0;5;31;40mS\033[0;32;40m%\033[0;31;40m%\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;32;40m.\033[0;31;40m:\033[0;5;36;40m%\033[0;31;40m. \033[0;34;40m
\033[0;31;40m :\033[0;5;33;40m \033[0;1;37;47m8\033[0;1;30;47m8\033[0;1;37;47m%\033[0;1;30;47m%\033[0;1;37;47m@\033[0;5;37;47m;\033[0;33;47m8\033[0;5;31;40mX\033[0;34;40m .\033[0;5;30;40mS\033[0;31;40m:\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m..\033[0;34;40m.\033[0;31;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m:\033[0;32;40mS\033[0;31;40mX\033[0;5;33;40m8\033[0;31;40m8\033[0;5;33;40m8@SS\033[0;5;31;40mS\033[0;32;40m;\033[0;1;30;40m8\033[0;31;40m8\033[0;1;30;43m8\033[0;37;43m8\033[0;1;33;47m8\033[0;1;30;43m8\033[0;5;33;40m8\033[0;30;41m@\033[0;5;33;40m8\033[0;1;30;43mt\033[0;5;33;40m8\033[0;1;30;43mS\033[0;5;33;40m88\033[0;1;30;40m8\033[0;1;30;43m:\033[0;31;40m@\033[0;5;30;40m@\033[0;31;40mt\033[0;32;40m:\033[0;34;40m.\033[0;32;40m..\033[0;5;30;40m8\033[0;31;40mt\033[0;34;40m.\033[0;32;40m \033[0;34;40m
\033[0;31;40m \033[0;32;40m t\033[0;1;30;47m8\033[0;5;37;47m8\033[0;1;30;47m;\033[0;1;37;47m%\033[0;1;30;47mS\033[0;5;37;47m8;\033[0;33;47m8\033[0;1;30;40m8\033[0;31;40m \033[0;34;40m \033[0;31;40m \033[0;1;30;40m88\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m:\033[0;5;30;40m@\033[0;5;36;40m%\033[0;31;40mX\033[0;5;33;40m8\033[0;1;30;40m8\033[0;5;31;40mX\033[0;5;32;40mS\033[0;5;31;40mSSX\033[0;31;40mX\033[0;32;40mS\033[0;1;30;41m8\033[0;5;33;40m8\033[0;1;30;43m@8888X\033[0;1;33;43m@\033[0;1;30;43m8X\033[0;31;40m@X%%%\033[0;32;40m%\033[0;5;31;40mS\033[0;30;42m8\033[0;1;30;41m8\033[0;32;40m;\033[0;31;40m:\033[0;34;40m:\033[0;31;40m:\033[0;5;30;40m88\033[0;32;40m \033[0;34;40m
\033[0;31;40m \033[0;32;40m \033[0;34;40m.\033[0;31;40m%\033[0;1;30;47mS\033[0;5;37;47mt:8\033[0;1;30;47mX\033[0;5;37;47m8t\033[0;33;47m8\033[0;31;40m@\033[0;34;40m:\033[0;32;40m \033[0;34;40m:\033[0;5;30;40m@8\033[0;32;40m \033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m;\033[0;5;32;40mX\033[0;5;37;40mX\033[0;5;33;40m:\033[0;5;31;40mS\033[0;1;30;43mX%%\033[0;31;40m@\033[0;1;30;43m;\033[0;31;40mX\033[0;32;40m8\033[0;5;33;40m8@8\033[0;1;30;43m%@888\033[0;5;33;40m8\033[0;30;41m8\033[0;31;40mS\033[0;5;30;40m8\033[0;31;40mX\033[0;32;40mX\033[0;5;33;40m88\033[0;1;30;43mS@\033[0;5;33;40m88\033[0;1;30;41m8\033[0;1;30;43m;X88\033[0;5;33;40m8\033[0;31;40mX\033[0;1;30;40m@\033[0;5;32;40mS\033[0;34;40m;\033[0;31;40m.\033[0;32;40m
\033[0;34;40m \033[0;32;40m \033[0;31;40m \033[0;34;40m.\033[0;32;40mt\033[0;1;30;47m%\033[0;5;37;47m::\033[0;1;37;47m@\033[0;1;30;47m%\033[0;5;37;47mX%\033[0;1;30;43m8\033[0;5;34;40mS\033[0;31;40m \033[0;32;40m \033[0;1;30;40m8\033[0;5;33;40mS\033[0;1;30;40mS\033[0;31;40m;\033[0;32;40m:\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;31;40m%\033[0;5;33;40m888\033[0;1;30;43mS\033[0;5;33;40m8@\033[0;1;30;43m@88\033[0;5;33;40m8\033[0;34;40m:\033[0;32;40mX\033[0;5;33;40m8\033[0;5;31;40mS\033[0;5;33;40m8\033[0;1;30;43m%88888\033[0;5;31;40m8\033[0;5;32;40mS\033[0;1;30;41m8\033[0;32;40m@\033[0;31;40mX\033[0;1;30;43m8\033[0;5;33;43m@\033[0;5;37;43m@8@X\033[0;5;33;43m@\033[0;1;33;43m8\033[0;5;33;43m@\033[0;5;37;43m@\033[0;5;33;43m.X\033[0;1;30;43m88\033[0;5;33;40m88\033[0;5;31;40mS\033[0;5;30;40m@\033[0;32;40m.\033[0;34;40m
\033[0;31;40m \033[0;32;40m \033[0;34;40m.\033[0;1;30;40mX\033[0;1;30;47m%\033[0;5;37;47m:.\033[0;1;37;47mS\033[0;1;30;47m;\033[0;5;37;47mX%\033[0;33;47m8\033[0;31;40m@\033[0;32;40m;\033[0;5;30;40m@\033[0;1;30;40mX\033[0;34;40m.\033[0;32;40m:\033[0;34;40m:\033[0;31;40m.\033[0;32;40m.\033[0;31;40m%\033[0;5;33;40m88\033[0;1;30;43mX\033[0;5;33;40m8\033[0;1;30;41m8\033[0;5;33;40mt\033[0;5;37;40m8\033[0;5;33;40m \033[0;5;37;43m8\033[0;5;33;43m@\033[0;1;30;43m8\033[0;5;31;40mS\033[0;5;33;40m:8\033[0;1;30;43mSX@88\033[0;37;43mX\033[0;1;30;43m8888XX\033[0;31;40m%\033[0;1;30;43m8\033[0;5;37;43m.\033[0;1;33;47mS\033[0;5;37;43mX8@@88XX@X\033[0;5;33;43mS\033[0;5;33;40m;\033[0;5;31;40mS\033[0;1;30;43m%8\033[0;5;33;40m8\033[0;5;30;40m8\033[0;1;30;40mX\033[0;34;40m
\033[0;32;40m \033[0;31;40m \033[0;1;30;40m8\033[0;1;37;47m.\033[0;5;37;47m..\033[0;1;37;47m%.\033[0;5;37;47m%@\033[0;1;30;43m8\033[0;5;30;40mXX\033[0;31;40m:\033[0;34;40m:\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m;\033[0;5;33;40m888\033[0;1;30;43mS\033[0;5;33;40m8\033[0;1;30;43mS@888\033[0;5;37;43mX\033[0;5;33;40mS88\033[0;5;31;40mS\033[0;1;30;43mS8\033[0;37;43mX\033[0;1;33;43mt\033[0;1;33;47m8\033[0;5;33;43m%\033[0;37;43m@\033[0;5;37;43mS\033[0;5;33;43mt\033[0;33;47m8\033[0;1;33;43m:\033[0;37;43m@\033[0;5;31;40m8\033[0;5;33;40m8\033[0;1;37;47m8\033[0;5;1;37;43m8\033[0;5;37;47m;\033[0;5;37;43m%\033[0;5;37;47mS\033[0;5;37;43mX888\033[0;37;43mX\033[0;5;33;43mt\033[0;37;43m@\033[0;1;33;43m%\033[0;37;43m8\033[0;1;30;43m@\033[0;1;30;41m8\033[0;1;30;43mX88\033[0;5;31;40mS\033[0;1;30;40m8\033[0;5;30;40m@\033[0;32;40m.\033[0;34;40m
\033[0;31;40m \033[0;5;30;40m8\033[0;1;37;47mt\033[0;5;37;47m..\033[0;1;37;47m:.\033[0;5;37;47m%8\033[0;5;33;40mt\033[0;5;35;40mS\033[0;32;40mt\033[0;34;40m:\033[0;31;40m:\033[0;32;40m.\033[0;34;40m.\033[0;32;40m;\033[0;5;33;40m88\033[0;1;30;43mt888\033[0;37;43mX\033[0;1;33;43mX\033[0;37;43m8\033[0;1;30;43m8\033[0;1;30;47m;\033[0;1;30;43m8\033[0;1;30;40m8\033[0;1;30;41m8\033[0;1;30;43mS@\033[0;37;43mX\033[0;5;33;43mX\033[0;1;33;47m8\033[0;5;33;43mt\033[0;1;33;43m%\033[0;5;37;43mX\033[0;5;33;43m ;\033[0;33;47m8\033[0;5;33;43m \033[0;5;37;43m@\033[0;1;30;43m8\033[0;5;33;40m8 \033[0;5;37;47m \033[0;1;37;47m888\033[0;1;33;47m@\033[0;1;37;47m8\033[0;1;33;47m@X\033[0;5;37;43m8\033[0;1;33;47m8\033[0;33;47m8\033[0;5;33;43m%\033[0;5;33;40m.\033[0;1;30;43m8\033[0;33;41m8\033[0;5;33;40m8\033[0;31;40mX\033[0;5;33;40m8\033[0;1;30;43m8@\033[0;31;40mt\033[0;5;32;40mS\033[0;1;30;40m8\033[0;34;40m.
\033[0;31;40m \033[0;5;35;40mt\033[0;1;37;47m%\033[0;5;37;47m..\033[0;1;37;47m;:\033[0;5;37;47mt\033[0;1;37;47m8\033[0;1;30;43m@\033[0;5;31;40mS\033[0;34;40mt\033[0;31;40m:\033[0;32;40m:\033[0;34;40m.\033[0;31;40m:\033[0;5;33;40m8\033[0;1;30;43mS888\033[0;37;43m@\033[0;1;33;43mX\033[0;33;47m8\033[0;37;43m@\033[0;1;30;43m88\033[0;1;37;47m:\033[0;5;37;47mS\033[0;1;30;47m@\033[0;1;30;43m%8\033[0;5;33;43m@\033[0;33;47m8\033[0;5;33;43m%\033[0;1;33;43mt\033[0;5;37;43m8S\033[0;5;33;43m \033[0;33;47m8\033[0;5;37;43m8\033[0;1;33;47mX\033[0;1;37;47m.\033[0;1;33;47mS\033[0;5;37;43m8\033[0;5;33;40m8\033[0;33;47m8\033[0;5;37;47m.\033[0;1;37;47m8\033[0;5;37;47m :;;;8\033[0;1;37;47m8\033[0;5;37;43m8\033[0;1;33;47m@\033[0;33;47m8\033[0;1;31;43m8\033[0;1;30;43m8\033[0;5;33;40m8\033[0;5;31;40m@\033[0;5;33;40m8\033[0;31;40mX\033[0;1;30;43m88\033[0;5;30;40m8\033[0;31;40mX\033[0;5;36;40mS\033[0;31;40m.
\033[0;34;40m \033[0;31;40m \033[0;32;40m \033[0;5;30;40m8\033[0;1;37;47m8\033[0;1;30;47m;\033[0;1;37;47m;.%\033[0;5;37;47m;\033[0;1;33;47mX\033[0;5;33;40m S\033[0;34;40m ..\033[0;32;40m.\033[0;5;33;40m8\033[0;1;30;43m@88\033[0;37;43m@X\033[0;5;33;43m%\033[0;33;47m8\033[0;5;33;43m;\033[0;1;30;43m8\033[0;5;33;40mS\033[0;1;30;47m;\033[0;5;37;47m%\033[0;1;37;47m8\033[0;1;33;43mS\033[0;37;43m@\033[0;5;37;43mX\033[0;5;33;43mX;\033[0;1;33;47m8\033[0;5;37;43m@8\033[0;1;33;47m@\033[0;1;37;47m;8\033[0;5;37;43m8\033[0;1;33;47m@\033[0;5;37;43m88\033[0;1;33;47m8\033[0;31;43m8\033[0;1;30;43m;\033[0;5;33;43mXS\033[0;5;37;43m8\033[0;1;37;47m8\033[0;5;37;47m ;:.;@\033[0;5;37;43mX8\033[0;37;43m@\033[0;1;30;43m8@\033[0;5;31;40mS\033[0;30;42m8\033[0;1;30;41m8\033[0;5;33;40m8\033[0;1;30;43mX\033[0;1;30;41m8\033[0;32;40m8\033[0;5;31;40m8\033[0;34;40m;
\033[0;32;40m \033[0;5;36;40m:\033[0;1;37;47m@\033[0;1;30;47m8\033[0;1;37;47m:\033[0;1;30;47m:\033[0;1;37;47mt\033[0;5;37;47m%\033[0;1;37;47mS\033[0;5;33;40m;\033[0;5;30;40m8\033[0;31;40m..\033[0;34;40m.\033[0;31;40m;\033[0;1;30;43m88\033[0;37;43m@\033[0;1;33;43m%\033[0;33;47m8\033[0;1;33;43m%\033[0;1;33;47m8\033[0;5;33;43mX\033[0;33;47m8\033[0;1;30;43m8\033[0;1;30;41m8\033[0;5;37;40m8\033[0;1;37;47m@\033[0;1;33;43mX\033[0;33;47m8\033[0;5;33;43m:\033[0;1;33;43mS\033[0;5;37;43m8\033[0;5;33;43m.\033[0;5;37;43m888\033[0;5;37;47m8\033[0;5;37;43m@\033[0;5;37;47m8\033[0;5;37;43m@88\033[0;1;33;43m:\033[0;5;37;43mX\033[0;1;30;43m@X\033[0;5;33;43m@\033[0;37;43m@\033[0;1;33;43m8\033[0;5;33;43mS\033[0;1;33;47m@\033[0;5;37;47m ::.%X\033[0;5;37;43mS\033[0;1;33;43m@\033[0;1;30;43m8\033[0;5;33;40m8\033[0;33;41m8\033[0;5;33;40m8\033[0;32;40m;\033[0;1;30;43m@\033[0;5;33;40m8\033[0;30;41m8\033[0;5;33;40m8\033[0;5;36;40mS\033[0;5;30;40m8
\033[0;34;40m \033[0;32;40m \033[0;1;30;40mX\033[0;32;40m:\033[0;5;33;40m \033[0;5;37;47m@\033[0;1;30;47m@\033[0;1;37;47m;\033[0;1;30;47m:\033[0;1;37;47m;\033[0;5;37;47m;\033[0;1;33;47mX\033[0;1;30;47mX\033[0;5;31;40m8\033[0;34;40m .\033[0;32;40m.\033[0;1;30;40m8\033[0;1;30;43m8\033[0;1;33;43mX\033[0;33;47m8\033[0;5;33;43m%\033[0;33;47m8\033[0;5;33;43m \033[0;5;37;43m@\033[0;1;33;47mS@\033[0;1;30;43m8\033[0;1;30;41m8\033[0;1;30;43m;@\033[0;5;33;43mS\033[0;1;33;47m8\033[0;5;33;43m%\033[0;5;37;43m@\033[0;5;33;43m:\033[0;1;33;47m8\033[0;5;37;43m8\033[0;1;33;47mS\033[0;5;37;43m%X\033[0;5;33;43m ;\033[0;1;33;47m88\033[0;1;37;47mt\033[0;5;37;47m8\033[0;1;33;47m8\033[0;1;30;43m888\033[0;37;43mS\033[0;1;33;43m@\033[0;37;43m8\033[0;5;37;43m;\033[0;5;37;47mt:::;\033[0;1;33;47mS\033[0;5;37;43mX\033[0;1;33;47m8\033[0;1;30;43m@\033[0;5;33;40m8\033[0;1;30;40m8\033[0;31;40m@\033[0;5;33;40m88\033[0;1;30;41m8\033[0;1;30;42m8\033[0;1;30;41m8\033[0;5;30;40m@8
\033[0;34;40m \033[0;32;40m.\033[0;5;35;40m:\033[0;5;33;40m \033[0;5;37;47mtS;8X88\033[0;1;30;47mS\033[0;1;30;40m@\033[0;31;40m \033[0;32;40m .\033[0;5;33;40m8\033[0;1;30;43m8\033[0;37;43m8X\033[0;5;33;43m;t\033[0;37;43m@\033[0;5;37;43m@\033[0;5;37;47mSt\033[0;1;30;43m8X8\033[0;37;43m@\033[0;5;33;43mt\033[0;1;33;43mt\033[0;5;37;43m@\033[0;5;33;43m:\033[0;5;37;43m@8\033[0;1;33;47mX\033[0;5;37;47mt\033[0;1;33;47m@\033[0;1;37;47m@\033[0;1;33;47mSX@\033[0;1;37;47mS\033[0;5;37;47m%%\033[0;5;37;43m8\033[0;1;33;47mX\033[0;1;30;41m8\033[0;1;30;43m@8\033[0;1;33;43mX\033[0;37;43m8\033[0;1;33;47mS\033[0;5;37;47m::.: \033[0;1;33;47m8\033[0;5;33;43mS\033[0;1;30;43m8S\033[0;5;31;40mS\033[0;1;30;43m:\033[0;5;31;40mX\033[0;32;40m8\033[0;1;30;41m8\033[0;32;40m@\033[0;1;30;41m8\033[0;1;30;40m@\033[0;5;32;40m@
\033[0;34;40m \033[0;5;30;40mS\033[0;1;30;47m;\033[0;5;33;40m;\033[0;1;30;47m8\033[0;1;37;47m%\033[0;5;37;47m@@\033[0;1;37;47m8\033[0;1;33;47mX\033[0;1;37;47m \033[0;1;30;47m8\033[0;1;30;41m8\033[0;32;40m .\033[0;31;40m \033[0;5;31;40m@\033[0;1;30;43m@\033[0;1;33;43m%\033[0;33;47m8\033[0;37;43m@\033[0;5;37;43mS\033[0;5;33;43m \033[0;1;33;47mS\033[0;5;37;47m:8\033[0;1;30;43mS8\033[0;37;43m@\033[0;5;33;43m%\033[0;33;47m8\033[0;5;37;43m%\033[0;5;33;43m \033[0;1;33;43m%\033[0;5;37;43m8\033[0;1;33;47mS\033[0;5;37;43m@\033[0;1;37;47m8\033[0;5;37;47mS:;;t:\033[0;5;37;43m8\033[0;5;33;43m \033[0;33;47m8\033[0;31;43m8\033[0;5;33;40m88\033[0;37;43mX\033[0;1;30;43m8\033[0;1;33;47m88\033[0;5;37;47mt::.:\033[0;1;37;47m8\033[0;5;33;43m@\033[0;33;47m8\033[0;5;33;43mS\033[0;5;31;40mX\033[0;1;30;43m%\033[0;5;33;40m8\033[0;5;31;40mS\033[0;33;41m8\033[0;32;40m8\033[0;30;41m8\033[0;32;40m8\033[0;1;30;40m8\033[0;5;35;40m%
\033[0;34;40m \033[0;32;40m \033[0;1;30;40m8\033[0;1;30;47mt\033[0;5;37;40m8\033[0;5;33;40mX\033[0;1;30;47m@S\033[0;37;43m8\033[0;1;30;47m8\033[0;33;47mS\033[0;1;30;47m8\033[0;5;33;40m%\033[0;1;30;40mX\033[0;32;40m \033[0;34;40m \033[0;32;40m ;\033[0;5;31;40m8\033[0;1;30;43m8\033[0;5;33;43mt\033[0;1;33;43m;\033[0;5;37;43m8\033[0;1;37;47m;\033[0;5;37;47m;:\033[0;1;33;47mX\033[0;5;31;40m@\033[0;1;30;43m@\033[0;37;43m@@\033[0;5;33;43m.\033[0;37;43m@\033[0;5;33;43m \033[0;5;37;43m8\033[0;1;33;47m@\033[0;1;37;47m8\033[0;5;37;47mX%:%@@\033[0;5;37;43m8\033[0;1;33;47mX\033[0;37;43mX\033[0;1;30;43m8\033[0;1;30;41m8\033[0;1;30;43mt%S8\033[0;5;37;43mXX@\033[0;1;33;47m8\033[0;1;37;47m8\033[0;5;37;47m :\033[0;5;37;43m;\033[0;1;37;47m8\033[0;5;33;43m@\033[0;1;33;43mS\033[0;33;47m8\033[0;1;30;43mX\033[0;5;33;40m8\033[0;1;30;43mS%\033[0;1;30;40m8\033[0;30;41m8\033[0;32;40m8\033[0;5;31;40m8\033[0;32;40m8\033[0;5;33;40mt
\033[0;34;40m \033[0;32;40m :\033[0;5;37;40m8\033[0;1;37;47m \033[0;5;31;40m8\033[0;1;30;47mX@\033[0;5;33;40mt\033[0;33;47mX\033[0;1;30;47m8\033[0;5;33;40m :\033[0;31;40mX\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;32;40m.\033[0;31;40m%\033[0;5;33;40m8S\033[0;1;30;43m8\033[0;1;31;43m8\033[0;37;43mX\033[0;5;33;43mt\033[0;1;33;47m8\033[0;5;33;43m%\033[0;1;30;43m8\033[0;1;30;41m8\033[0;1;30;43m88\033[0;37;43m@\033[0;1;33;43m:\033[0;1;33;47m8\033[0;5;33;43m;%\033[0;1;33;43mS\033[0;5;33;43m;\033[0;5;37;43m@\033[0;5;33;43m%\033[0;37;43mX\033[0;5;33;43mt\033[0;37;43mS\033[0;1;30;43m8@\033[0;31;43m@\033[0;1;30;43m8\033[0;37;43m8\033[0;33;47m8\033[0;37;43mX\033[0;5;33;43m8\033[0;5;37;43mX\033[0;5;33;43m@\033[0;1;33;43m@\033[0;5;37;43mX\033[0;5;33;43m%\033[0;5;37;43mX\033[0;5;33;43mS\033[0;5;37;43m8@\033[0;1;33;43mS\033[0;33;47m8\033[0;5;33;43m%\033[0;37;43m@\033[0;1;30;43m8\033[0;1;30;41m8\033[0;5;33;40m8\033[0;1;30;43m@\033[0;1;30;40m8\033[0;32;40mX\033[0;31;40m8\033[0;5;33;40m%8\033[0;5;35;40mX
\033[0;34;40m \033[0;32;40m \033[0;31;40m .\033[0;5;35;40m:\033[0;1;37;47m:\033[0;5;33;40m..\033[0;1;30;47m8\033[0;5;33;40mS\033[0;35;47m8\033[0;33;47m8\033[0;5;37;40mt\033[0;5;35;40mX\033[0;1;30;41m8\033[0;32;40m@\033[0;31;40m;\033[0;34;40m.\033[0;32;40m:\033[0;31;40mX\033[0;5;31;40mS\033[0;1;30;43mS8\033[0;37;43m@\033[0;1;30;43m8\033[0;5;33;43mS\033[0;33;47m8\033[0;5;33;43m%\033[0;37;43m@\033[0;1;30;43m%\033[0;5;33;40m8\033[0;1;30;43m8\033[0;1;31;43m8\033[0;33;47m8\033[0;1;30;43m8\033[0;5;33;40m;%\033[0;1;30;41m8\033[0;5;31;40mX\033[0;1;30;43m88\033[0;37;43m@\033[0;1;30;43m8\033[0;1;30;41m@8\033[0;1;30;43mS8\033[0;1;37;47m8\033[0;5;37;47m :\033[0;5;33;43m;S\033[0;5;37;43m88@\033[0;5;33;43m%\033[0;1;33;47m8\033[0;5;33;43mt\033[0;5;37;43m@\033[0;1;33;43mX\033[0;5;33;43mX\033[0;33;47m8\033[0;1;33;43m%\033[0;33;47m8\033[0;1;33;43mt\033[0;1;30;43m8@\033[0;30;41m8\033[0;1;30;43m8\033[0;5;33;40m8\033[0;31;40m;\033[0;5;33;40mX.8\033[0;5;30;40m8
\033[0;34;40m \033[0;31;40m \033[0;34;40m.;\033[0;5;33;40mt\033[0;1;30;47m.\033[0;5;37;40m@\033[0;5;33;40m:\033[0;5;37;40mS\033[0;5;31;40m@\033[0;5;33;40m:\033[0;1;30;47m8\033[0;5;37;40mt\033[0;5;33;40m8\033[0;1;33;43m%\033[0;5;33;41mS\033[0;1;30;40m8\033[0;31;40m.\033[0;32;40m:t\033[0;31;40m8\033[0;5;31;40mS\033[0;1;30;43m%8\033[0;37;43mXX\033[0;1;33;43m%\033[0;37;43m8\033[0;1;30;43mXX\033[0;1;30;41m8\033[0;5;33;40m8\033[0;5;31;40m@\033[0;1;30;41m8\033[0;31;43m8\033[0;1;30;41m8\033[0;31;40m8\033[0;32;40m%\033[0;31;40m%\033[0;5;30;40m8\033[0;30;41m@\033[0;31;40m88\033[0;32;40mX\033[0;1;30;43mS\033[0;5;37;43mX\033[0;1;37;47m8\033[0;5;37;47m \033[0;5;37;43m%\033[0;1;33;47m@88\033[0;5;37;43m88\033[0;1;33;47m@\033[0;5;37;43m8\033[0;5;33;43m;\033[0;1;33;47m8\033[0;5;33;43m%\033[0;33;47m8\033[0;5;33;43mt\033[0;37;43m8@\033[0;5;33;43mt\033[0;5;33;40m \033[0;1;30;43m8\033[0;5;33;40m88\033[0;1;30;43m@\033[0;31;40m@\033[0;5;33;40m@.%\033[0;5;30;40m@\033[0;34;40m.
\033[0;1;30;40m8\033[0;1;30;43m8\033[0;1;33;47m8\033[0;37;43m@\033[0;33;47m88\033[0;1;30;43m8\033[0;5;33;40m:\033[0;1;30;43m8\033[0;37;43m8\033[0;1;30;43m8\033[0;37;43m8\033[0;1;33;43mt\033[0;5;1;31;43m8\033[0;5;31;40m8\033[0;31;40mS\033[0;34;40m.\033[0;31;40m.\033[0;34;40m:\033[0;32;40mS\033[0;1;30;41m8\033[0;5;33;40m8\033[0;33;41m8\033[0;1;30;43m88\033[0;5;33;40m8\033[0;1;30;41m8\033[0;1;30;43m%S@\033[0;1;33;43m8\033[0;1;30;43m8888888\033[0;1;33;43mX\033[0;37;43m8X\033[0;1;30;43m88\033[0;1;33;43mX\033[0;1;33;47m8\033[0;1;37;47m8\033[0;5;37;47m.\033[0;33;47mX\033[0;1;33;47m8S\033[0;5;37;47m;\033[0;5;37;43m@\033[0;5;37;47m%t8\033[0;33;47m@\033[0;37;43mX\033[0;5;33;43m.t\033[0;33;47m8\033[0;5;33;43m:\033[0;1;30;43m8\033[0;37;43m8\033[0;1;30;43m88\033[0;1;30;41mX\033[0;5;33;40m8\033[0;1;30;43m8\033[0;33;47m8\033[0;1;30;43m8\033[0;5;33;40m.\033[0;5;31;40mX\033[0;32;40m.
\033[0;34;40m .\033[0;5;33;40m:\033[0;5;33;43m \033[0;5;33;41m:\033[0;5;37;43m888\033[0;33;47m8\033[0;1;33;47m88\033[0;5;37;43m88\033[0;5;1;31;43m8\033[0;5;37;41m8\033[0;5;33;41m@\033[0;1;30;41m8\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m:%\033[0;5;33;40m8\033[0;1;30;43mX\033[0;5;33;43mS%\033[0;5;37;43m@\033[0;1;33;43m;\033[0;1;30;43m8X8\033[0;5;37;43mt\033[0;5;37;47mt \033[0;5;37;43m:\033[0;5;37;47mX\033[0;5;37;43m8\033[0;1;33;47m@\033[0;5;37;43mS\033[0;1;33;47m8\033[0;5;33;43m8\033[0;1;33;47mX\033[0;5;33;43m8\033[0;5;37;43m8\033[0;5;33;40m8\033[0;1;30;43m8S\033[0;5;33;43mS\033[0;37;43m@\033[0;5;37;43mX\033[0;1;33;47m8\033[0;5;37;43m%X\033[0;1;37;47m8\033[0;5;1;37;43m8\033[0;5;37;47m;::\033[0;1;33;47m8\033[0;1;30;43m8\033[0;37;43m@\033[0;5;33;43m \033[0;33;47m8\033[0;1;31;43m8\033[0;1;30;43m88\033[0;1;30;41m@\033[0;5;32;40mX\033[0;1;30;43m@\033[0;1;33;47m@\033[0;5;33;40m;.\033[0;1;30;40m8\033[0;32;40m
\033[0;34;40m \033[0;32;40m \033[0;31;40m:\033[0;5;30;40m@\033[0;5;35;40m8\033[0;5;33;40mX\033[0;1;31;41m@\033[0;1;31;43m8\033[0;1;31;47m8\033[0;5;33;43m:\033[0;1;31;47m8\033[0;5;37;43m8\033[0;5;1;33;41mX\033[0;5;33;41m \033[0;5;37;41m8\033[0;5;1;33;41m8\033[0;1;30;43m8\033[0;1;30;41mX\033[0;31;40mS\033[0;34;40m:\033[0;32;40m.:\033[0;5;30;40m@\033[0;1;30;43m8\033[0;1;33;47m8@S\033[0;5;37;43m88\033[0;1;30;43m8S\033[0;5;37;43m8\033[0;5;37;47m%\033[0;5;37;43mt\033[0;5;37;47m;\033[0;1;33;47mS\033[0;5;37;43m@\033[0;1;33;47m@88X\033[0;1;33;43m@8\033[0;33;47m8\033[0;1;30;43m%%\033[0;31;40mX\033[0;5;31;40mS\033[0;1;30;43m8\033[0;37;43mS\033[0;5;37;43m@\033[0;1;33;47m8\033[0;1;37;47m@\033[0;5;37;43mSS\033[0;5;37;47m:::t\033[0;5;37;43mX\033[0;33;47m@\033[0;1;30;43m88888\033[0;5;31;40m8X\033[0;1;30;43m%\033[0;33;47m88\033[0;5;33;40m;\033[0;31;40m%\033[0;32;40m.
\033[0;34;40m \033[0;32;40m .\033[0;5;30;40m@\033[0;5;33;40m@\033[0;1;30;43m8\033[0;5;33;40m:\033[0;1;30;47m;\033[0;1;30;43m8\033[0;37;43m@\033[0;33;47m8\033[0;5;37;43m@\033[0;5;1;33;41m8\033[0;1;33;47m@\033[0;5;1;33;41m8\033[0;1;31;43m8\033[0;31;45m8\033[0;1;30;41mS\033[0;31;40m%\033[0;32;40mt\033[0;5;36;40m;\033[0;5;33;40m X\033[0;5;31;40mX\033[0;5;33;40mS;\033[0;33;47m8\033[0;5;37;40m%\033[0;1;30;43m88\033[0;5;33;40m@\033[0;1;30;43m8\033[0;33;47m8\033[0;37;43m@\033[0;1;33;43m%\033[0;33;47m8\033[0;37;43m@\033[0;1;33;43m8\033[0;33;47m8\033[0;1;30;43mX@8\033[0;5;33;40m@8\033[0;30;41m8\033[0;5;32;40mS\033[0;1;30;43m%\033[0;5;33;40m88\033[0;1;30;43mS88\033[0;5;33;43m@\033[0;33;47m@\033[0;5;37;47m::::\033[0;5;37;43mt\033[0;5;37;47mt\033[0;5;33;43m%\033[0;5;31;40m8\033[0;1;30;43m888\033[0;5;31;40m8\033[0;5;33;40m8\033[0;1;30;43m%\033[0;33;47m8\033[0;1;33;47m8\033[0;5;35;40mS\033[0;31;40m:\033[0;32;40m.\033[0;34;40m \033[0;32;40m
\033[0;34;40m :\033[0;5;33;40mS\033[0;5;33;43m \033[0;1;33;47m8\033[0;5;1;31;43m8\033[0;5;33;43m \033[0;5;37;43m8\033[0;5;33;41mX\033[0;1;31;43m8\033[0;5;1;31;43m8\033[0;5;37;41m8\033[0;5;33;41m \033[0;33;47m8\033[0;5;33;41m.\033[0;1;30;43m8\033[0;31;40m@\033[0;34;40mt\033[0;1;30;40m8\033[0;34;40mX\033[0;5;34;40mS\033[0;5;36;40m8;\033[0;5;37;40mS\033[0;1;30;47m@\033[0;5;37;40m8\033[0;5;35;40m \033[0;5;33;40m \033[0;5;35;40m@\033[0;5;30;40m8\033[0;5;31;40m8X\033[0;5;33;40mX.\033[0;5;31;40mS\033[0;5;33;40m88@8\033[0;5;31;40mX\033[0;1;30;43m:\033[0;5;33;40m888\033[0;1;30;43m;@88\033[0;37;43mX\033[0;1;30;43m8\033[0;1;33;43m%\033[0;33;47m8\033[0;1;33;47m8X\033[0;5;37;47m:::;\033[0;5;1;33;47m8\033[0;5;33;43mS\033[0;5;35;40m8\033[0;1;30;43mS\033[0;5;31;40m8\033[0;1;30;43m8\033[0;1;30;41m8\033[0;5;33;40m8\033[0;1;30;43mS\033[0;37;43m@\033[0;1;33;47m@\033[0;5;33;40m.\033[0;1;30;40m8\033[0;31;40m.\033[0;32;40m
\033[0;34;40m \033[0;31;40m :\033[0;5;30;40m8\033[0;31;45m8\033[0;5;33;41mt%S;\033[0;37;43m@\033[0;5;33;41m \033[0;33;47m8\033[0;5;33;41m:\033[0;1;33;43m%\033[0;5;33;41m.\033[0;1;30;43m8\033[0;1;30;41m%\033[0;32;40m:\033[0;34;40m:\033[0;1;30;40m@88\033[0;34;40m@\033[0;1;30;40m8\033[0;1;30;44m8\033[0;5;30;40mX\033[0;5;34;40mS\033[0;5;35;40m:.\033[0;5;34;40mS@\033[0;1;30;44m8\033[0;5;36;40m8\033[0;5;34;40m@\033[0;5;33;40mX\033[0;5;32;40mS\033[0;5;33;40m8\033[0;5;31;40mS\033[0;1;30;43mtS8888\033[0;37;43m8\033[0;33;47m8\033[0;1;33;47mX\033[0;5;37;47m ;\033[0;5;37;43m88\033[0;1;33;47mX\033[0;5;37;43m%\033[0;5;37;47m%\033[0;5;37;43m;\033[0;5;37;47m;\033[0;5;37;43mX\033[0;1;30;47mt\033[0;1;30;43m8X\033[0;5;33;40m88\033[0;1;30;41m8\033[0;1;30;43mt@\033[0;33;47m8\033[0;5;37;43m%\033[0;1;30;47m8\033[0;5;31;40mS\033[0;32;40m.
\033[0;34;40m \033[0;32;40m \033[0;34;40m \033[0;31;40m:\033[0;5;33;40m;\033[0;1;30;43m8\033[0;1;31;43m8\033[0;33;47m8\033[0;5;33;41m%\033[0;37;43m@\033[0;5;33;41m \033[0;1;33;43m.\033[0;5;1;33;41m8\033[0;5;37;41m8\033[0;1;30;43m8\033[0;1;31;41m8\033[0;31;40m8\033[0;34;40m;\033[0;32;40m:.\033[0;34;40m.\033[0;32;40m.:\033[0;34;40mt\033[0;32;40mS\033[0;34;40m%\033[0;32;40m;\033[0;34;40m%\033[0;1;30;40m@\033[0;34;40m%\033[0;32;40m@\033[0;34;40m@\033[0;32;40m%\033[0;5;33;40m88@\033[0;1;30;43m8\033[0;1;33;47m888\033[0;1;37;47m@\033[0;5;37;47m: ..\033[0;5;37;43mt\033[0;1;37;47m88\033[0;5;37;47m :%%t\033[0;5;37;43mX\033[0;1;37;47m8\033[0;5;37;47m.\033[0;33;47m8\033[0;1;30;43m888\033[0;5;31;40m8\033[0;1;30;43mt\033[0;31;40m8\033[0;1;30;43m@8\033[0;5;37;43m8\033[0;1;37;47m8\033[0;5;33;40m:@\033[0;34;40m \033[0;32;40m
\033[0;34;40m \033[0;32;40m \033[0;34;40m .\033[0;31;40m%\033[0;5;35;40mt%\033[0;1;30;41mX8\033[0;33;41mX\033[0;5;33;40m@\033[0;5;31;41m.\033[0;1;30;43m8\033[0;5;31;41m;\033[0;5;33;41m:\033[0;5;33;40m:\033[0;30;41m8\033[0;32;40m:\033[0;34;40m..\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.:\033[0;34;40m:\033[0;31;40m:\033[0;32;40m:..:\033[0;34;40mt\033[0;1;30;40m@\033[0;5;33;40m8\033[0;5;32;40mS\033[0;1;30;43m8\033[0;1;33;47mSS@8\033[0;5;37;43m8\033[0;5;33;43m.\033[0;5;37;43m88\033[0;1;33;47mS\033[0;5;37;47m \033[0;1;33;47mS\033[0;5;37;47m \033[0;5;33;43mS\033[0;1;33;43mX\033[0;1;30;43m%\033[0;5;37;43mX\033[0;33;47m@\033[0;5;37;47m \033[0;1;37;47m8\033[0;5;37;43mS\033[0;5;37;47m.t\033[0;1;30;43m8888\033[0;5;31;40m@\033[0;31;40m@\033[0;1;30;43m8\033[0;37;43mX\033[0;5;37;43m@\033[0;1;37;47m8\033[0;1;30;43m8\033[0;5;33;40mt\033[0;34;40m \033[0;32;40m
\033[0;34;40m .\033[0;32;40m.\033[0;31;40m \033[0;34;40m ;\033[0;1;30;40m8\033[0;5;33;40m \033[0;1;30;47mS\033[0;5;33;40m%%\033[0;5;31;40mX8\033[0;35;41mX\033[0;1;30;43m8\033[0;5;31;41m8\033[0;1;30;41m8\033[0;30;41m8\033[0;32;40m;\033[0;34;40m.\033[0;31;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m:\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m..\033[0;31;40m:\033[0;32;40m:t\033[0;5;33;40m%\033[0;1;30;47m@\033[0;1;33;47m@\033[0;1;37;47m8\033[0;5;37;43m@\033[0;5;37;47m8\033[0;1;33;47mS\033[0;1;37;47m \033[0;1;33;47m@XX\033[0;33;47m8\033[0;5;33;43m8\033[0;33;47m8\033[0;1;33;43mX\033[0;33;47m88\033[0;1;30;43m8\033[0;1;30;47m;\033[0;1;37;47mX\033[0;5;37;47m. \033[0;1;33;47mS\033[0;5;37;47m \033[0;1;33;47mS\033[0;1;30;43m88\033[0;5;33;40m8\033[0;1;30;41m8\033[0;1;30;42m8\033[0;1;30;43mX\033[0;37;43mX\033[0;5;37;43mS\033[0;1;37;47m@\033[0;1;30;43m8\033[0;5;31;40mS\033[0;34;40m.
\033[0;5;30;40m@\033[0;5;37;40m8\033[0;5;33;40m :\033[0;5;30;40m8@\033[0;5;37;40m8\033[0;5;37;47m8\033[0;1;37;47m8\033[0;1;30;47mS\033[0;5;37;40m8\033[0;1;30;47m%.\033[0;33;47m8\033[0;5;37;40m8\033[0;5;33;40mS \033[0;5;31;40mX\033[0;32;40m:\033[0;34;40m:\033[0;32;40m.\033[0;34;40m..\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m:\033[0;1;30;40m8\033[0;1;30;43m8\033[0;33;47m8\033[0;5;37;43m8\033[0;5;37;47mX\033[0;5;37;43mX\033[0;5;37;47mSS\033[0;5;1;37;43m8\033[0;5;37;47m8@%:;:%\033[0;1;37;47m8\033[0;5;37;47m;\033[0;1;37;47m8\033[0;5;37;47m .:\033[0;1;37;47m8\033[0;5;37;47m \033[0;33;47m8\033[0;1;30;43mX\033[0;5;31;40m8\033[0;5;33;40m8\033[0;31;43m8\033[0;1;30;43m8\033[0;1;33;47m8S\033[0;1;30;47m \033[0;5;33;40m8\033[0;31;40m;\033[0;34;40m:
\033[0;5;36;40m \033[0;1;30;47m \033[0;33;47m8\033[0;5;33;40m%tX\033[0;5;37;40m8\033[0;5;33;40m \033[0;5;37;40m88\033[0;33;47m888\033[0;1;30;47mSSt\033[0;1;37;47mS@\033[0;5;33;40m:\033[0;32;40m \033[0;34;40m \033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m;\033[0;5;33;40m8\033[0;1;30;43mSXS\033[0;5;33;43m@XS\033[0;1;33;43mS\033[0;5;33;43m:\033[0;1;33;43m;\033[0;5;33;43m;\033[0;33;47m8\033[0;5;37;43mX\033[0;1;33;47m8\033[0;5;37;43m88\033[0;1;33;47mS\033[0;5;1;37;43m8\033[0;5;37;47mt;\033[0;1;33;47mS\033[0;5;37;47m \033[0;5;37;43mS\033[0;5;33;40mX\033[0;1;30;43mS\033[0;5;33;40m8\033[0;1;30;43m@8\033[0;1;33;47m8\033[0;1;37;47m8\033[0;37;43m8\033[0;5;33;40mX\033[0;34;40m..\033[0;32;40m \033[0;34;40m
\033[0;5;30;40m8\033[0;5;33;40m@8@tt.\033[0;1;30;47mS\033[0;5;33;40m;%8\033[0;33;47m8\033[0;1;33;47m@\033[0;1;30;43m8\033[0;5;33;40mSXXS%;tXX\033[0;1;30;40m8\033[0;31;40m:\033[0;32;40m..\033[0;34;40m.\033[0;31;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;32;40m..\033[0;31;40m@\033[0;1;30;43m8\033[0;1;33;43m8\033[0;33;47m8\033[0;1;30;43m8\033[0;37;43m@@\033[0;5;37;43m@\033[0;37;43mX\033[0;5;37;43mS\033[0;5;33;43mS%\033[0;1;33;43m%\033[0;5;37;43m@\033[0;5;33;43mX\033[0;1;33;47m8\033[0;5;37;43m@\033[0;5;33;43m:%\033[0;1;37;47m8\033[0;5;37;47m \033[0;1;33;47mX\033[0;5;33;40m;\033[0;5;31;40mS\033[0;31;40m8\033[0;1;30;43m8\033[0;37;43m8\033[0;1;33;47mXX\033[0;5;33;40m:\033[0;5;31;40m8\033[0;34;40m.
.\033[0;31;40m;\033[0;34;40mS\033[0;5;30;40m8\033[0;5;31;40m8\033[0;5;33;40mX8@\033[0;1;30;47m@\033[0;5;33;40mS@%%\033[0;5;31;40mS\033[0;5;33;40mS...\033[0;5;30;40m8\033[0;31;40m@\033[0;5;30;40m8\033[0;1;30;40m@\033[0;32;40mt\033[0;31;40m:\033[0;34;40m.\033[0;31;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m:\033[0;32;40m;\033[0;31;40m:\033[0;34;40m:\033[0;32;40m:t\033[0;1;30;40mS8\033[0;1;30;43m%8\033[0;1;33;43m88\033[0;5;33;43m8\033[0;5;37;43m8\033[0;37;43mS\033[0;5;37;43m@\033[0;37;43m8\033[0;5;37;43m@\033[0;1;30;43m8\033[0;5;37;43m@\033[0;1;30;43m8\033[0;5;33;43mX\033[0;33;47m8\033[0;5;37;43m@\033[0;1;33;47m8\033[0;33;47m8\033[0;37;43mX\033[0;1;30;43m8\033[0;5;33;40m8\033[0;1;30;40m8\033[0;1;30;43m8\033[0;1;33;47m8S\033[0;5;37;40m8\033[0;5;31;40m@\033[0;32;40m;\033[0;34;40m
\033[0;31;40m.:\033[0;32;40m \033[0;34;40m :\033[0;5;31;40mX\033[0;5;33;40mt\033[0;1;30;47m8\033[0;5;33;40mt% :\033[0;1;30;40m8\033[0;5;30;40mX\033[0;5;31;40mS\033[0;5;36;40mt\033[0;5;33;40m;\033[0;34;40m:\033[0;32;40m \033[0;31;40m .\033[0;34;40m:\033[0;32;40m:\033[0;34;40m..\033[0;31;40m%\033[0;5;33;40m8\033[0;1;30;43m88\033[0;5;33;40m8\033[0;31;40m:\033[0;34;40m.\033[0;31;40m. .\033[0;34;40m:tS\033[0;5;30;40mS\033[0;5;33;40m888\033[0;1;30;43m@\033[0;5;33;40m88\033[0;5;30;40mX\033[0;5;32;40mS\033[0;31;40m@\033[0;5;33;40m8\033[0;1;30;43mS\033[0;5;33;40m8\033[0;1;30;43mt\033[0;5;33;40m8\033[0;31;40m8\033[0;5;33;40m8\033[0;1;30;43m8\033[0;1;33;47mS\033[0;5;37;40m8\033[0;5;33;40m8\033[0;1;30;40m8\033[0;34;40m .
\033[0;32;40m \033[0;34;40m .t\033[0;5;30;40m@\033[0;5;33;40m@\033[0;1;30;47m@;\033[0;5;33;40m8\033[0;31;40mt\033[0;5;33;40m8\033[0;1;30;43m8\033[0;5;33;40mS\033[0;1;30;40m8\033[0;34;40m:\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;32;40m.\033[0;31;40m;\033[0;5;33;40m8\033[0;1;30;43mX\033[0;37;43mXS\033[0;1;30;43m8@\033[0;5;33;40m88\033[0;1;30;40m8\033[0;31;40m@\033[0;32;40m@\033[0;31;40mt\033[0;32;40m;\033[0;31;40m \033[0;32;40m :\033[0;31;40mt\033[0;32;40mS\033[0;31;40mt\033[0;32;40mt\033[0;31;40m;:\033[0;32;40mt.\033[0;34;40m.\033[0;32;40m:\033[0;5;30;40m8\033[0;1;30;40m8\033[0;32;40mX\033[0;5;31;40m@\033[0;5;33;40m8\033[0;31;40mS\033[0;32;40m;\033[0;5;31;40mS\033[0;34;40m:\033[0;32;40m.\033[0;31;40m \033[0;34;40m
\033[0;32;40m t\033[0;5;33;40mt\033[0;37;43m8\033[0;33;47m88\033[0;37;43m@\033[0;1;30;43m888\033[0;5;31;40mX\033[0;32;40m.\033[0;31;43m8\033[0;1;33;43m:\033[0;5;31;41m@\033[0;1;30;41m@\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.%\033[0;32;40mt\033[0;34;40m;\033[0;5;33;40m8\033[0;1;30;43m@\033[0;5;33;43m@\033[0;5;37;43m@\033[0;5;33;43m8\033[0;5;37;43m8\033[0;5;33;43m8\033[0;1;30;43m8%X\033[0;5;33;43m%\033[0;5;37;43m8\033[0;1;30;43m88X\033[0;1;30;40m8\033[0;32;40mS\033[0;5;30;40m8\033[0;1;30;40m@\033[0;34;40m;\033[0;31;40m:\033[0;32;40m:.\033[0;34;40m..\033[0;32;40m.\033[0;31;40m..:.\033[0;34;40m .:\033[0;1;30;40m@\033[0;5;32;40m%\033[0;31;40m \033[0;34;40m
\033[0;32;40m \033[0;34;40m.\033[0;5;30;40m8\033[0;1;30;43m8\033[0;5;37;43mSS88\033[0;1;33;47m@8\033[0;5;37;43m8\033[0;1;30;43m8X8\033[0;5;33;43m \033[0;35;41m@\033[0;34;40m..\033[0;32;40m \033[0;34;40m.\033[0;32;40m.t\033[0;5;33;40m8\033[0;31;40m@\033[0;32;40mS\033[0;5;33;40m8\033[0;1;30;43m8\033[0;37;43m8X\033[0;1;33;43m8\033[0;33;47m8\033[0;37;43mX\033[0;1;30;43mX\033[0;5;33;40m8\033[0;1;30;40m8\033[0;5;33;40m8\033[0;1;30;43m8\033[0;33;47m88\033[0;5;33;40m%\033[0;1;30;47mt\033[0;1;37;47mS88\033[0;1;30;47mX\033[0;1;30;42m8\033[0;31;40m8\033[0;32;40mS\033[0;31;40m;\033[0;34;40m:\033[0;32;40m.\033[0;31;40m.\033[0;32;40m..\033[0;34;40m..\033[0;32;40m. \033[0;1;30;40mX\033[0;5;31;40mS\033[0;34;40m
\033[0;31;40m :\033[0;5;33;40mt\033[0;37;43mS\033[0;5;1;31;43m8\033[0;5;37;41m8\033[0;5;1;31;43m8\033[0;1;31;47m8\033[0;1;31;43m88\033[0;1;31;47m8\033[0;5;1;31;43m8\033[0;5;33;41m.\033[0;1;30;43m8\033[0;5;31;41m@\033[0;5;33;40m@\033[0;31;40m;\033[0;34;40m.\033[0;32;40m.\033[0;31;40m:%\033[0;1;30;40m8\033[0;5;31;40mX\033[0;5;30;40m@\033[0;30;41m8\033[0;5;33;40m8\033[0;5;31;40m8\033[0;5;33;40m888\033[0;5;32;40mS\033[0;31;40m8\033[0;5;33;40m8\033[0;32;40m@\033[0;31;40mS\033[0;5;32;40m8\033[0;1;37;47mX\033[0;5;37;47m:tX\033[0;1;37;47m@\033[0;37;43m8\033[0;5;33;40m%@\033[0;1;30;42m8\033[0;5;33;40mX.\033[0;1;30;43m8\033[0;5;33;40m8\033[0;32;40m8\033[0;5;33;40mX\033[0;5;32;40m8\033[0;32;40mS\033[0;34;40mt\033[0;32;40m:\033[0;34;40m.\033[0;32;40m.\033[0;34;40m \033[0;1;30;40mX\033[0;5;33;40m%\033[0;34;40m
;\033[0;5;33;40mX\033[0;5;33;41m.\033[0;5;37;43m@\033[0;5;33;41m \033[0;5;37;43m8\033[0;5;1;31;43m8\033[0;1;33;47m8\033[0;1;31;43m8\033[0;37;43m@\033[0;1;33;43m.\033[0;1;31;43m8\033[0;1;30;43m8\033[0;35;41m8\033[0;31;40m@.\033[0;34;40m.\033[0;32;40m.;\033[0;5;32;40m@\033[0;5;33;40m 8\033[0;32;40mX\033[0;5;30;40m@\033[0;5;32;40mX\033[0;1;30;42m8\033[0;31;40m:\033[0;32;40mt%%\033[0;31;40mS\033[0;5;32;40mS\033[0;32;40m@\033[0;5;33;40m%\033[0;1;30;47m 8\033[0;1;30;43m8\033[0;5;33;40mt\033[0;1;30;42m8\033[0;32;40mX\033[0;1;30;42m@\033[0;32;40m;\033[0;31;40m;\033[0;32;40m@\033[0;5;33;40m@\033[0;5;32;40mX8\033[0;1;30;40m8\033[0;32;40m@\033[0;1;30;40m@S\033[0;32;40m:\033[0;31;40m:\033[0;34;40m..\033[0;32;40m.;\033[0;5;34;40mX\033[0;32;40m \033[0;34;40m
\033[0;32;40m.\033[0;31;40m.\033[0;1;30;40m8\033[0;5;33;40m%\033[0;5;33;41m:\033[0;5;37;43mX\033[0;5;33;41m \033[0;5;37;41m8\033[0;5;1;33;41m8\033[0;5;33;41mX:\033[0;1;33;47m8\033[0;5;31;41m \033[0;5;33;41m8\033[0;5;31;40m8\033[0;5;30;40mX\033[0;32;40m:\033[0;31;40m.\033[0;34;40m:\033[0;31;40m..\033[0;32;40mX\033[0;5;32;40m8X8\033[0;34;40m \033[0;31;40m:\033[0;1;30;40mX\033[0;32;40mS\033[0;1;30;40mX\033[0;31;40mS\033[0;1;30;40m@\033[0;1;30;42m8\033[0;5;33;40m88\033[0;5;32;40mX\033[0;5;33;40m S\033[0;5;32;40mX\033[0;5;33;40m%t\033[0;5;35;40m:.\033[0;5;33;40m;t\033[0;5;30;40m8\033[0;32;40mt\033[0;34;40m:..:\033[0;31;40m.\033[0;34;40m..\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m..\033[0;5;30;40m8\033[0;32;40m%\033[0;31;40m.\033[0;34;40m
\033[0;31;40m \033[0;1;30;40mS\033[0;5;33;40mS\033[0;1;31;43m8\033[0;5;1;33;41m8\033[0;37;43m@\033[0;5;33;41m \033[0;1;30;43m8\033[0;1;31;43m88\033[0;5;35;41mS\033[0;1;30;43m8\033[0;1;30;41m8\033[0;5;31;40mS\033[0;5;35;40m:\033[0;1;30;40mX\033[0;32;40m \033[0;31;40m.\033[0;34;40m..\033[0;31;40m:\033[0;32;40m:%\033[0;34;40m;\033[0;32;40m.\033[0;34;40m.\033[0;31;40m:\033[0;32;40m;\033[0;1;30;40m@\033[0;5;33;40mS \033[0;5;37;40m8\033[0;5;33;40mt \033[0;1;30;47mX\033[0;5;33;40m; \033[0;5;37;40m8\033[0;5;33;40m;: ;S8\033[0;1;30;40m8\033[0;31;40mt\033[0;32;40m;\033[0;34;40m:\033[0;31;40m:\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m..\033[0;32;40m.\033[0;31;40m;\033[0;5;35;40mS\033[0;32;40m.\033[0;34;40m
\033[0;31;40m :\033[0;5;31;40mS\033[0;5;33;40m;\033[0;35;41m8\033[0;5;33;41m;\033[0;37;43m@\033[0;5;33;41m \033[0;1;30;43m8\033[0;5;33;41mS\033[0;1;30;43m8\033[0;5;31;41mt\033[0;5;31;40m@S\033[0;5;32;40mS\033[0;5;30;40m8\033[0;34;40m .\033[0;32;40m.\033[0;34;40m..:\033[0;31;40m:\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m:\033[0;31;40m:\033[0;5;30;40m8\033[0;5;37;40m8\033[0;1;37;47m;\033[0;5;33;40m@\033[0;31;40mt\033[0;5;30;40m8\033[0;31;40mt\033[0;5;33;40m8\033[0;31;40m8\033[0;5;33;40m8X.8\033[0;31;40m%\033[0;32;40m:\033[0;34;40m..:.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m..\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m:\033[0;32;40m.\033[0;1;30;40mX@\033[0;32;40m \033[0;34;40m
\033[0;31;40m \033[0;34;40m . \033[0;31;40m%\033[0;5;33;40m:%\033[0;1;30;41mX8@88\033[0;5;36;40mt\033[0;5;31;40mX\033[0;34;40mt\033[0;1;30;40m8@\033[0;34;40m .\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;32;40m.\033[0;34;40m:\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;32;40mt\033[0;5;33;40mX.8\033[0;5;30;40m@\033[0;32;40mt\033[0;34;40m:\033[0;32;40m:\033[0;31;40mt\033[0;5;30;40m@\033[0;5;32;40mS\033[0;1;30;43mt\033[0;5;37;40m8\033[0;1;37;47mt\033[0;5;37;40m8\033[0;5;32;40mX\033[0;31;40m.\033[0;34;40m .\033[0;32;40m.\033[0;31;40m.\033[0;32;40m.\033[0;34;40m..\033[0;32;40m.\033[0;31;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m:..\033[0;5;30;40m8\033[0;1;30;40mS\033[0;32;40m \033[0;34;40m
\033[0;31;40m \033[0;34;40m \033[0;32;40m .\033[0;34;40m.\033[0;1;30;40m@\033[0;5;31;40mX\033[0;31;40m::\033[0;34;40m:\033[0;1;30;40m8\033[0;5;35;40m%\033[0;32;40m..\033[0;5;30;40m8\033[0;1;30;40m8\033[0;34;40m .\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m:\033[0;31;40m:\033[0;32;40m:\033[0;31;40m;\033[0;34;40m:\033[0;31;40mt\033[0;1;30;40m8@\033[0;32;40mX\033[0;5;33;40m::\033[0;31;40m:\033[0;32;40m;\033[0;5;30;40m8\033[0;5;31;40mS\033[0;31;40m;\033[0;1;30;40m8\033[0;1;30;43mS\033[0;5;33;40m@S\033[0;1;30;40m@\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m..\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.:\033[0;32;40m;\033[0;1;30;40m8\033[0;5;30;40m8\033[0;1;30;40m8\033[0;32;40mt\033[0;34;40m:\033[0;31;40m.\033[0;32;40m \033[0;34;40m
\033[0;32;40m \033[0;34;40m :\033[0;5;32;40mS\033[0;34;40m:\033[0;31;40mt\033[0;32;40m%\033[0;5;31;40mS\033[0;1;30;40m8\033[0;32;40m. \033[0;5;30;40mX\033[0;1;30;40mX\033[0;34;40m \033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40mt\033[0;1;30;40m88\033[0;5;31;40m@\033[0;32;40m@\033[0;31;40mt\033[0;32;40m%\033[0;1;30;40m8\033[0;5;32;40mX\033[0;5;33;40m8\033[0;5;30;40m8\033[0;1;30;40m@\033[0;31;40m::\033[0;32;40m..;\033[0;31;40mtt\033[0;5;33;40m%X\033[0;31;40m%\033[0;32;40mt\033[0;31;40m;\033[0;34;40m:\033[0;32;40m:\033[0;31;40m:\033[0;32;40m:\033[0;31;40m;\033[0;1;30;40m@\033[0;32;40mX\033[0;5;30;40m8\033[0;1;30;40m8\033[0;5;33;40m8\033[0;5;32;40mX\033[0;5;33;40m88XX%\033[0;1;30;40m8\033[0;31;40m:\033[0;1;30;40m@\033[0;5;30;40mX\033[0;31;40m;\033[0;32;40m.\033[0;34;40m.\033[0;32;40m \033[0;34;40m
\033[0;32;40m \033[0;34;40m \033[0;32;40m \033[0;31;40m:\033[0;5;30;40mX\033[0;1;30;40m@\033[0;5;31;40mS\033[0;32;40mS\033[0;5;31;40m@\033[0;1;30;40m@\033[0;32;40m. \033[0;5;33;40m%\033[0;34;40mt\033[0;32;40m.:\033[0;31;40m:;\033[0;1;30;40m8\033[0;31;40m@\033[0;1;30;42m8\033[0;31;40mX\033[0;1;30;40m8\033[0;32;40mt\033[0;1;30;40m8\033[0;5;30;40m@\033[0;31;40mS\033[0;1;30;40m@\033[0;5;33;40mX:\033[0;1;30;40m8\033[0;32;40mt\033[0;5;30;40m8\033[0;34;40m.\033[0;31;40m:\033[0;32;40m:\033[0;34;40m:\033[0;32;40m:\033[0;31;40m;\033[0;32;40m%\033[0;1;30;40mX\033[0;31;40m;\033[0;32;40mS\033[0;5;33;40m%%@ttS8888\033[0;31;40m;\033[0;32;40mS\033[0;31;40m%\033[0;5;30;40m8\033[0;5;33;40m8\033[0;32;40m..\033[0;31;40m \033[0;34;40m;\033[0;5;33;40mt\033[0;34;40m. \033[0;32;40m \033[0;34;40m
\033[0;32;40m \033[0;34;40m ;\033[0;5;30;40mS\033[0;31;40m;\033[0;32;40m%\033[0;31;40mS\033[0;5;32;40mS\033[0;1;30;40m8\033[0;31;40m.\033[0;34;40m;\033[0;5;32;40mS\033[0;31;40m%\033[0;32;40mt\033[0;31;40m%\033[0;34;40m;\033[0;32;40mX\033[0;31;40m8\033[0;32;40m@\033[0;31;40m8\033[0;1;30;40m8\033[0;5;31;40mS\033[0;32;40m:\033[0;31;40m;\033[0;32;40mSt\033[0;31;40m:\033[0;1;30;40mS\033[0;32;40mS\033[0;31;40mt\033[0;34;40m;\033[0;31;40mt\033[0;32;40m%\033[0;31;40mt\033[0;32;40mt\033[0;1;30;40mS\033[0;31;40mt\033[0;1;30;40mXX8\033[0;5;30;40mXX\033[0;5;33;40m8\033[0;5;30;40mS\033[0;1;30;40m8\033[0;1;30;43m8\033[0;5;33;40m88\033[0;5;31;40mS\033[0;32;40m;\033[0;31;40m:\033[0;32;40m%\033[0;1;30;40mX\033[0;34;40m:\033[0;32;40m:\033[0;34;40m \033[0;32;40m \033[0;31;40m .\033[0;32;40mt\033[0;5;35;40mt\033[0;1;30;40m8\033[0;32;40m \033[0;34;40m
\033[0;32;40m.\033[0;31;40m;\033[0;5;30;40m@\033[0;31;40mt\033[0;1;30;40m8\033[0;31;40m;\033[0;1;30;41m8\033[0;5;30;40m8\033[0;5;32;40mS\033[0;1;30;40m8\033[0;31;40m;\033[0;34;40m::\033[0;32;40m:\033[0;31;40m;\033[0;1;30;40m88\033[0;31;40m8\033[0;5;32;40mS\033[0;31;40m@\033[0;32;40m@\033[0;31;40m;\033[0;32;40m;\033[0;31;40mt\033[0;34;40mt\033[0;32;40m:\033[0;34;40m:\033[0;31;40m;\033[0;34;40m;\033[0;32;40mt\033[0;34;40m;\033[0;31;40mt\033[0;34;40mt\033[0;32;40m;\033[0;34;40m:\033[0;32;40m:\033[0;34;40m.\033[0;32;40m.\033[0;31;40m:\033[0;32;40m:\033[0;31;40m;\033[0;32;40m8\033[0;1;30;43mS\033[0;31;40m@\033[0;1;30;40m@\033[0;32;40mS\033[0;5;30;40m8@\033[0;32;40mt\033[0;34;40m:\033[0;31;40m:\033[0;34;40m.\033[0;31;40m.\033[0;34;40m. .\033[0;32;40m.\033[0;34;40m.\033[0;1;30;40m8\033[0;5;30;40mS\033[0;32;40m:\033[0;34;40m.\033[0;32;40m \033[0;34;40m
\033[0;32;40m:\033[0;5;36;40mt\033[0;31;40mt\033[0;32;40mS%\033[0;5;30;40mX\033[0;1;30;40m8\033[0;31;40mS\033[0;5;30;40m8\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m:\033[0;32;40m.\033[0;31;40m;\033[0;32;40mt\033[0;31;40mt\033[0;32;40m@\033[0;31;40m8\033[0;32;40mt\033[0;31;40m:\033[0;34;40m;\033[0;32;40m:\033[0;31;40m:\033[0;34;40m.\033[0;31;40m.\033[0;34;40m:\033[0;32;40m;\033[0;31;40m;\033[0;32;40m;\033[0;34;40m;\033[0;32;40m;\033[0;31;40m;\033[0;34;40m:\033[0;31;40m.\033[0;32;40m.\033[0;31;40m.\033[0;32;40m.\033[0;31;40m.::\033[0;34;40m:\033[0;32;40m%\033[0;31;40m;\033[0;34;40m;\033[0;31;40m;\033[0;32;40m;\033[0;31;40m;\033[0;34;40m:\033[0;31;40m.\033[0;34;40m..\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m;\033[0;1;30;40m8\033[0;32;40m;\033[0;5;35;40mS\033[0;32;40m \033[0;34;40m
.\033[0;31;40m:\033[0;5;30;40mS\033[0;5;31;40mS\033[0;31;43m8\033[0;5;33;40m8\033[0;5;31;40m@\033[0;5;30;40m8\033[0;32;40mS\033[0;5;34;40mS\033[0;31;40m .\033[0;32;40m:\033[0;31;40m%\033[0;32;40m%\033[0;31;40m;\033[0;34;40m:\033[0;31;40m:;\033[0;32;40mt\033[0;34;40m:\033[0;31;40m.\033[0;32;40m.\033[0;34;40m:\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m;\033[0;32;40m:\033[0;34;40m;\033[0;31;40m;\033[0;34;40m:\033[0;32;40m:\033[0;31;40m.\033[0;34;40m..\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m..\033[0;31;40m:\033[0;34;40m;\033[0;32;40mt\033[0;31;40mt\033[0;32;40mt\033[0;34;40m;\033[0;31;40m;\033[0;32;40m:\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m:\033[0;31;40mt\033[0;32;40mt\033[0;5;30;40mX\033[0;32;40m:\033[0;34;40m \033[0;32;40m \033[0;34;40m
\033[0;32;40m :\033[0;5;31;40m%\033[0;5;32;40mS\033[0;31;40mX\033[0;32;40mt\033[0;1;30;40m8\033[0;5;31;40mS\033[0;5;30;40m8\033[0;5;33;40m%\033[0;5;30;40m8\033[0;31;40m.\033[0;32;40m;\033[0;1;30;40mX\033[0;32;40m;\033[0;31;40m:\033[0;34;40m:\033[0;32;40m:.\033[0;34;40m:\033[0;31;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m:\033[0;34;40m:\033[0;32;40m::\033[0;31;40m:\033[0;34;40m:\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m:\033[0;34;40m;\033[0;32;40m:\033[0;31;40m;\033[0;34;40m:\033[0;32;40m:\033[0;34;40m:\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m:\033[0;31;40mt\033[0;5;30;40m8\033[0;1;30;40m88\033[0;31;40m \033[0;34;40m
\033[0;32;40m \033[0;31;40m:\033[0;5;33;40m \033[0;1;30;40m8\033[0;31;40m;\033[0;32;40mX\033[0;5;31;40m8\033[0;5;30;40m8\033[0;1;30;43m8\033[0;5;33;40m88\033[0;1;30;40mX\033[0;31;40m;\033[0;32;40m:\033[0;31;40m:\033[0;34;40m:.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m...\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m:\033[0;32;40m;\033[0;34;40mt\033[0;32;40mt\033[0;31;40mt\033[0;32;40m:\033[0;34;40m:\033[0;32;40m:\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;32;40m:\033[0;34;40m;\033[0;1;30;40m8\033[0;31;40mS\033[0;5;36;40mX\033[0;31;40m \033[0;34;40m
\033[0;1;30;40m8\033[0;5;33;40m.\033[0;32;40mS\033[0;5;31;40m@\033[0;1;30;40m8\033[0;32;40m8\033[0;5;31;40m8\033[0;5;33;40mX\033[0;31;40mt\033[0;5;30;40mX\033[0;32;40m.\033[0;34;40m:\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m..\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;32;40m.\033[0;31;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m:\033[0;31;40m;\033[0;32;40m;\033[0;31;40m;\033[0;34;40m;\033[0;32;40m:\033[0;31;40m;\033[0;34;40m:\033[0;32;40m:\033[0;34;40m.\033[0;31;40m.\033[0;34;40m..\033[0;31;40m.\033[0;32;40m:\033[0;31;40mX\033[0;32;40m%\033[0;5;31;40m@\033[0;34;40m.\033[0;32;40m \033[0;31;40m \033[0;34;40m
\033[0;32;40m t\033[0;5;33;40mt\033[0;5;35;40m;\033[0;5;33;40m8;%t :.\033[0;31;40m;\033[0;32;40m:\033[0;34;40m:\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m..\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m:\033[0;34;40m::\033[0;32;40m:\033[0;34;40m:\033[0;32;40m:\033[0;31;40m:\033[0;34;40m:\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m:\033[0;1;30;40mS\033[0;34;40m:\033[0;5;30;40m8\033[0;1;30;40m8\033[0;32;40m \033[0;31;40m \033[0;32;40m \033[0;34;40m
\033[0;32;40m \033[0;31;40m.\033[0;34;40m.\033[0;31;40m ;\033[0;5;36;40mt\033[0;5;33;40m:\033[0;1;37;47m;\033[0;1;30;47m@\033[0;5;33;40m8\033[0;34;40m.\033[0;5;30;40mS\033[0;34;40m;\033[0;32;40m:\033[0;31;40m.\033[0;34;40m..\033[0;31;40m.\033[0;34;40m..\033[0;31;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m:\033[0;31;40m:\033[0;32;40m.\033[0;31;40m:\033[0;34;40m.\033[0;31;40m:\033[0;34;40m:\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;32;40m.\033[0;34;40mt\033[0;5;30;40mX\033[0;32;40m \033[0;34;40m
\033[0;31;40m \033[0;34;40m \033[0;31;40m:\033[0;1;30;40m8\033[0;5;31;40m@\033[0;32;40mt\033[0;31;40m.;\033[0;5;32;40mS\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m..\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m:\033[0;34;40m:\033[0;31;40m:\033[0;32;40m:\033[0;34;40m:\033[0;32;40m:\033[0;31;40m:\033[0;34;40m:\033[0;32;40m:\033[0;34;40m:\033[0;32;40m:\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m:\033[0;31;40m;\033[0;5;36;40m%\033[0;31;40m.\033[0;32;40m \033[0;34;40m
\033[0;31;40m \033[0;34;40m \033[0;31;40m \033[0;32;40m \033[0;31;40m..\033[0;5;30;40mS\033[0;31;40mt\033[0;34;40m:.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m..\033[0;32;40m..\033[0;31;40m:\033[0;34;40m:\033[0;32;40m;\033[0;34;40m:\033[0;31;40m;\033[0;32;40m:\033[0;34;40m:\033[0;32;40m:\033[0;31;40m:\033[0;34;40m:\033[0;32;40m:\033[0;31;40m:\033[0;34;40m:\033[0;32;40m:\033[0;31;40m:\033[0;34;40m:\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;1;30;40m8@\033[0;31;40m \033[0;32;40m \033[0;34;40m
\033[0;31;40m \033[0;32;40m \033[0;5;30;40m88\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m..\033[0;32;40m:\033[0;31;40m:\033[0;34;40m:\033[0;32;40m:\033[0;34;40m:\033[0;31;40m:\033[0;32;40m:\033[0;34;40m:\033[0;32;40m:\033[0;31;40m.\033[0;34;40m.\033[0;32;40m:\033[0;31;40m:\033[0;34;40m:\033[0;32;40m:\033[0;34;40m:\033[0;32;40m:\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;32;40m.\033[0;31;40m.\033[0;34;40m.\033[0;31;40m;\033[0;5;30;40mS\033[0;32;40m \033[0;34;40m
"""
def cmd_frogarian(output):
    """Write the frogarian ASCII-art banner to *output*."""
    banner = frogarian
    output.writeln(banner)
| 686.182796
| 1,013
| 0.652354
| 18,759
| 63,815
| 2.219148
| 0.00741
| 0.395301
| 0.286195
| 0.103413
| 0.986308
| 0.983257
| 0.980158
| 0.967787
| 0.934709
| 0.877441
| 0
| 0.584033
| 0.019964
| 63,815
| 92
| 1,014
| 693.641304
| 0.081594
| 0
| 0
| 0
| 0
| 0.955056
| 0.998778
| 0.937098
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011236
| false
| 0
| 0
| 0
| 0.011236
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 15
|
096fff60e3d214645c0f8a31772f52702b1ec93d
| 8,746
|
py
|
Python
|
firebot/modules/fire.py
|
vikas04599/Fire-X
|
527d57c29785f36e1b07aa739f3ac7c969cc916c
|
[
"MIT"
] | 20
|
2021-08-16T18:49:49.000Z
|
2022-02-23T08:35:57.000Z
|
firebot/modules/fire.py
|
elizamusic/Fire-X
|
1ec6ba73b3033ad03e7859fcf6917fc6aff89efc
|
[
"MIT"
] | null | null | null |
firebot/modules/fire.py
|
elizamusic/Fire-X
|
1ec6ba73b3033ad03e7859fcf6917fc6aff89efc
|
[
"MIT"
] | 93
|
2021-08-16T19:16:20.000Z
|
2022-03-31T02:02:06.000Z
|
import requests
from firebot import CMD_HELP
from firebot.Configs import Config
from firebot.utils import admin_cmd
def _format_standings(league):
    """Build the HTML standings caption for the top five teams of *league*.

    *league* is the ``league`` object from the api-football standings
    response; only its first standings group is read.  To keep the output
    byte-compatible with the previous implementation, the leading team's
    block includes a "Played:-" line and a lowercase "win:-" label, while
    the remaining teams show only Win/Draw/Lose.
    """
    lines = [
        f"<b>{league.get('name')}</b>",
        f"<b>Country:- {league.get('country')}",
        f"season = {league.get('season')}",
        "Standings",
    ]
    table = league.get("standings")[0]
    for position, entry in enumerate(table[:5]):
        team = entry.get("team")
        record = entry.get("all")  # aggregate played/win/draw/lose record
        lines.append(f"Rank:- {entry.get('rank')}")
        lines.append(f"Name:- {team.get('name')}")
        lines.append(f"points:- {entry.get('points')}")
        if position == 0:
            # Only the leader shows games played (and a lowercase "win"
            # label) — preserved from the original caption format.
            lines.append(f"Played:- {record.get('played')}")
            lines.append(f"win:- {record.get('win')}")
        else:
            lines.append(f"Win:- {record.get('win')}")
        lines.append(f"Draw:- {record.get('draw')}")
        lines.append(f"Lose:- {record.get('lose')}")
    # Original caption closed the <b> tag right after the last value and
    # ended with a trailing newline.
    return "\n".join(lines) + "</b>\n"


@fire.on(admin_cmd(pattern="fpl"))
async def _(event):
    """Handle ``.fpl``: post standings for league 39, season 2020.

    Requires ``Config.FOOTBALL_API_KEY`` (a rapidapi api-football-beta
    key); otherwise edits the triggering message with an error and stops.
    On success it sends the formatted top-five table together with the
    league logo, then deletes the triggering message.
    """
    if event.fwd_from:
        return
    if Config.FOOTBALL_API_KEY is None:
        await event.edit(
            "Need to get an API key from https://rapidapi.com/api-sports/api/api-football-beta\nModule stopping!"
        )
        return
    response = requests.request(
        "GET",
        "https://api-football-beta.p.rapidapi.com/standings",
        headers={
            "x-rapidapi-key": Config.FOOTBALL_API_KEY,
            "x-rapidapi-host": "api-football-beta.p.rapidapi.com",
        },
        params={"season": "2020", "league": "39"},
    )
    league = response.json().get("response")[0].get("league")
    caption = _format_standings(league)
    await borg.send_message(
        event.chat_id,
        caption,
        parse_mode="HTML",
        file=league.get("logo"),
        force_document=False,
        silent=True,
    )
    await event.delete()
@fire.on(admin_cmd(pattern="ffl"))
async def _(event):
if event.fwd_from:
return
if Config.FOOTBALL_API_KEY is None:
await event.edit(
"Need to get an API key from https://rapidapi.com/api-sports/api/api-football-beta\nModule stopping!"
)
return
appo = Config.FOOTBALL_API_KEY
url = "https://api-football-beta.p.rapidapi.com/standings"
querystring = {"season": "2020", "league": "61"}
headers = {
"x-rapidapi-key": appo,
"x-rapidapi-host": "api-football-beta.p.rapidapi.com",
}
response = requests.request("GET", url, headers=headers, params=querystring)
a = response.json()
b = a.get("response")
c = b[0]
d = c.get("league")
e = d.get("name")
f = d.get("country")
logo = d.get("logo")
season = d.get("season")
g = d.get("standings")
h = g[0]
i = h[0]
rank = i.get("rank")
k = i.get("team")
nomo = k.get("name")
pont = i.get("points")
kk = i.get("all")
pl = kk.get("played")
wein = kk.get("win")
yqw = kk.get("draw")
pol = kk.get("lose")
nex = h[1]
new = nex.get("rank")
np = nex.get("team")
nee = np.get("name")
popo = nex.get("points")
oloq = nex.get("all")
oloq.get("played")
wein1 = oloq.get("win")
yqw1 = oloq.get("draw")
pol1 = oloq.get("lose")
nex2 = h[2]
new2 = nex2.get("rank")
np2 = nex2.get("team")
nee2 = np2.get("name")
popo2 = nex2.get("points")
oloq2 = nex2.get("all")
oloq2.get("played")
wein2 = oloq2.get("win")
yqw2 = oloq2.get("draw")
pol2 = oloq2.get("lose")
nex3 = h[3]
new3 = nex3.get("rank")
np3 = nex3.get("team")
nee3 = np3.get("name")
popo3 = nex3.get("points")
oloq3 = nex3.get("all")
oloq3.get("played")
wein3 = oloq3.get("win")
yqw3 = oloq3.get("draw")
pol3 = oloq3.get("lose")
nex4 = h[4]
new4 = nex4.get("rank")
np4 = nex4.get("team")
nee4 = np4.get("name")
popo4 = nex4.get("points")
oloq4 = nex4.get("all")
oloq4.get("played")
wein4 = oloq4.get("win")
yqw4 = oloq4.get("draw")
pol4 = oloq4.get("lose")
caption = f"""<b>{e}</b>
<b>Country:- {f}
season = {season}
Standings
Rank:- {rank}
Name:- {nomo}
points:- {pont}
Played:- {pl}
win:- {wein}
Draw:- {yqw}
Lose:- {pol}
Rank:- {new}
Name:- {nee}
points:- {popo}
Win:- {wein1}
Draw:- {yqw1}
Lose:- {pol1}
Rank:- {new2}
Name:- {nee2}
points:- {popo2}
Win:- {wein2}
Draw:- {yqw2}
Lose:- {pol2}
Rank:- {new3}
Name:- {nee3}
points:- {popo3}
Win:- {wein3}
Draw:- {yqw3}
Lose:- {pol3}
Rank:- {new4}
Name:- {nee4}
points:- {popo4}
Win:- {wein4}
Draw:- {yqw4}
Lose:- {pol4}</b>
"""
await borg.send_message(
event.chat_id,
caption,
parse_mode="HTML",
file=logo,
force_document=False,
silent=True,
)
await event.delete()
@fire.on(admin_cmd(pattern="fuefa$"))
async def _(event):
if event.fwd_from:
return
if Config.FOOTBALL_API_KEY is None:
await event.edit(
"Need to get an API key from https://rapidapi.com/api-sports/api/api-football-beta\nModule stopping!"
)
return
appo = Config.FOOTBALL_API_KEY
url = "https://api-football-beta.p.rapidapi.com/standings"
querystring = {"season": "2020", "league": "2"}
headers = {
"x-rapidapi-key": appo,
"x-rapidapi-host": "api-football-beta.p.rapidapi.com",
}
response = requests.request("GET", url, headers=headers, params=querystring)
a = response.json()
b = a.get("response")
c = b[0]
d = c.get("league")
e = d.get("name")
f = d.get("country")
logo = d.get("logo")
season = d.get("season")
g = d.get("standings")
h = g[0]
i = h[0]
rank = i.get("rank")
k = i.get("team")
nomo = k.get("name")
pont = i.get("points")
kk = i.get("all")
pl = kk.get("played")
wein = kk.get("win")
yqw = kk.get("draw")
pol = kk.get("lose")
nex = h[1]
new = nex.get("rank")
np = nex.get("team")
nee = np.get("name")
popo = nex.get("points")
oloq = nex.get("all")
oloq.get("played")
wein1 = oloq.get("win")
yqw1 = oloq.get("draw")
pol1 = oloq.get("lose")
nex2 = h[2]
new2 = nex2.get("rank")
np2 = nex2.get("team")
nee2 = np2.get("name")
popo2 = nex2.get("points")
oloq2 = nex2.get("all")
oloq2.get("played")
wein2 = oloq2.get("win")
yqw2 = oloq2.get("draw")
pol2 = oloq2.get("lose")
nex3 = h[3]
new3 = nex3.get("rank")
np3 = nex3.get("team")
nee3 = np3.get("name")
popo3 = nex3.get("points")
oloq3 = nex3.get("all")
oloq3.get("played")
wein3 = oloq3.get("win")
yqw3 = oloq3.get("draw")
pol3 = oloq3.get("lose")
caption = f"""<b>{e}</b>
<b>Country:- {f}
season = {season}
Standings
Rank:- {rank}
Name:- {nomo}
points:- {pont}
Played:- {pl}
win:- {wein}
Draw:- {yqw}
Lose:- {pol}
Rank:- {new}
Name:- {nee}
points:- {popo}
Win:- {wein1}
Draw:- {yqw1}
Lose:- {pol1}
Rank:- {new2}
Name:- {nee2}
points:- {popo2}
Win:- {wein2}
Draw:- {yqw2}
Lose:- {pol2}
Rank:- {new3}
Name:- {nee3}
points:- {popo3}
Win:- {wein3}
Draw:- {yqw3}
Lose:- {pol3}
"""
await borg.send_message(
event.chat_id,
caption,
parse_mode="HTML",
file=logo,
force_document=False,
silent=True,
)
await event.delete()
# Register the help text for this module's three standings commands.
CMD_HELP.update(
    {
        "football": (
            "**Football**"
            "\n\n**Syntax : **`.fpl`"
            "\n**Usage :** Shows Premier League's Standings."
            "\n\n**Syntax : **`.ffl`"
            "\n**Usage :** Shows French Ligue1 Standings."
            "\n\n**Syntax : **`.fuefa`"
            "\n**Usage :** Shows UEFA championship Standings."
        )
    }
)
| 21.974874
| 113
| 0.553968
| 1,227
| 8,746
| 3.919315
| 0.124694
| 0.024745
| 0.028072
| 0.024953
| 0.941776
| 0.937409
| 0.937409
| 0.937409
| 0.937409
| 0.937409
| 0
| 0.037434
| 0.242511
| 8,746
| 397
| 114
| 22.030227
| 0.688453
| 0
| 0
| 0.917808
| 0
| 0.008219
| 0.320832
| 0.010976
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.010959
| 0
| 0.027397
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
097bd776de1c31fc8cf95962fd3af688cc33c8a4
| 5,944
|
py
|
Python
|
tests/gpsd_fake.py
|
tfeldmann/gpsdclient
|
11d9feddfb6d8e7ece71daaa02c4c8d16218dfb6
|
[
"MIT"
] | 12
|
2021-08-13T16:31:43.000Z
|
2022-02-15T19:45:33.000Z
|
tests/gpsd_fake.py
|
tfeldmann/python-gpsdclient
|
11d9feddfb6d8e7ece71daaa02c4c8d16218dfb6
|
[
"MIT"
] | null | null | null |
tests/gpsd_fake.py
|
tfeldmann/python-gpsdclient
|
11d9feddfb6d8e7ece71daaa02c4c8d16218dfb6
|
[
"MIT"
] | 3
|
2021-08-19T17:50:18.000Z
|
2021-08-23T19:44:06.000Z
|
import time
import socket
import threading
VERSION_HEADER = b'{"class":"VERSION","release":"3.17","rev":"3.17","proto_major":3,"proto_minor":12}\n'
WATCH_COMMAND = b'?WATCH={"enable":true,"json":true}\n'
GPSD_OUTPUT = """
{"class":"DEVICES","devices":[{"class":"DEVICE","path":"/dev/ttyO4","driver":"NMEA0183","activated":"2021-08-13T09:12:40.028Z","flags":1,"native":0,"bps":9600,"parity":"N","stopbits":1,"cycle":1.00}]}
{"class":"WATCH","enable":true,"json":true,"nmea":false,"raw":0,"scaled":false,"timing":false,"split24":false,"pps":false}
{"class":"SKY","device":"/dev/ttyO4","xdop":0.54,"ydop":0.77,"vdop":0.85,"tdop":1.00,"hdop":0.89,"gdop":2.12,"pdop":1.23,"satellites":[{"PRN":27,"el":84,"az":141,"ss":0,"used":false},{"PRN":8,"el":60,"az":294,"ss":16,"used":true},{"PRN":10,"el":60,"az":109,"ss":16,"used":true},{"PRN":23,"el":40,"az":59,"ss":17,"used":false},{"PRN":16,"el":33,"az":188,"ss":26,"used":true},{"PRN":21,"el":28,"az":256,"ss":16,"used":false},{"PRN":18,"el":12,"az":69,"ss":26,"used":true},{"PRN":7,"el":9,"az":288,"ss":0,"used":false},{"PRN":30,"el":9,"az":321,"ss":32,"used":true},{"PRN":15,"el":8,"az":28,"ss":0,"used":false},{"PRN":26,"el":6,"az":175,"ss":0,"used":false},{"PRN":1,"el":3,"az":251,"ss":0,"used":false},{"PRN":32,"el":2,"az":133,"ss":0,"used":false},{"PRN":13,"el":1,"az":2,"ss":0,"used":false},{"PRN":138,"el":0,"az":0,"ss":0,"used":false},{"PRN":83,"el":66,"az":321,"ss":0,"used":false},{"PRN":82,"el":50,"az":68,"ss":0,"used":false},{"PRN":67,"el":43,"az":98,"ss":19,"used":true},{"PRN":73,"el":35,"az":261,"ss":17,"used":true},{"PRN":74,"el":29,"az":320,"ss":21,"used":true},{"PRN":66,"el":27,"az":33,"ss":30,"used":true},{"PRN":68,"el":17,"az":150,"ss":0,"used":false},{"PRN":84,"el":12,"az":279,"ss":0,"used":false},{"PRN":80,"el":11,"az":215,"ss":0,"used":false},{"PRN":81,"el":5,"az":88,"ss":0,"used":false}]}
{"class":"TPV","device":"/dev/ttyO4","mode":3,"ept":0.005,"lat":51.813280233,"lon":6.550214200,"alt":30.393,"epx":8.171,"epy":11.499,"epv":19.550,"track":12.4500,"speed":0.000,"climb":0.000,"eps":23.00,"epc":39.10}
{"class":"SKY","device":"/dev/ttyO4","xdop":0.54,"ydop":0.77,"vdop":0.85,"tdop":1.00,"hdop":0.89,"gdop":2.12,"pdop":1.23,"satellites":[{"PRN":27,"el":84,"az":141,"ss":0,"used":false},{"PRN":8,"el":60,"az":294,"ss":16,"used":true},{"PRN":10,"el":60,"az":109,"ss":16,"used":true},{"PRN":23,"el":40,"az":59,"ss":17,"used":false},{"PRN":16,"el":33,"az":188,"ss":26,"used":true},{"PRN":21,"el":28,"az":256,"ss":16,"used":false},{"PRN":18,"el":12,"az":69,"ss":26,"used":true},{"PRN":7,"el":9,"az":288,"ss":0,"used":false},{"PRN":30,"el":9,"az":321,"ss":33,"used":true},{"PRN":15,"el":8,"az":28,"ss":0,"used":false},{"PRN":26,"el":6,"az":175,"ss":0,"used":false},{"PRN":1,"el":3,"az":251,"ss":0,"used":false},{"PRN":32,"el":2,"az":133,"ss":0,"used":false},{"PRN":13,"el":1,"az":2,"ss":0,"used":false},{"PRN":138,"el":0,"az":0,"ss":0,"used":false},{"PRN":83,"el":66,"az":321,"ss":0,"used":false},{"PRN":82,"el":50,"az":68,"ss":0,"used":false},{"PRN":67,"el":43,"az":98,"ss":19,"used":true},{"PRN":73,"el":35,"az":261,"ss":16,"used":true},{"PRN":74,"el":29,"az":320,"ss":21,"used":true},{"PRN":66,"el":27,"az":33,"ss":30,"used":true},{"PRN":68,"el":17,"az":150,"ss":0,"used":false},{"PRN":84,"el":12,"az":279,"ss":0,"used":false},{"PRN":80,"el":11,"az":215,"ss":0,"used":false},{"PRN":81,"el":5,"az":88,"ss":0,"used":false}]}
{"class":"TPV","device":"/dev/ttyO4","mode":3,"time":"2021-08-13T09:12:41.000Z","ept":0.005,"lat":51.813280233,"lon":6.550214200,"alt":30.393,"epx":8.171,"epy":11.499,"epv":19.550,"track":12.4500,"speed":0.000,"climb":0.000,"eps":23.00,"epc":39.10}
{"class":"SKY","device":"/dev/ttyO4","xdop":0.54,"ydop":0.77,"vdop":0.85,"tdop":1.00,"hdop":0.89,"gdop":2.12,"pdop":1.22,"satellites":[{"PRN":27,"el":84,"az":141,"ss":0,"used":false},{"PRN":8,"el":60,"az":294,"ss":16,"used":true},{"PRN":10,"el":60,"az":109,"ss":17,"used":true},{"PRN":23,"el":40,"az":59,"ss":17,"used":false},{"PRN":16,"el":33,"az":188,"ss":26,"used":true},{"PRN":21,"el":28,"az":256,"ss":16,"used":false},{"PRN":18,"el":12,"az":69,"ss":26,"used":true},{"PRN":7,"el":9,"az":288,"ss":0,"used":false},{"PRN":30,"el":9,"az":321,"ss":33,"used":true},{"PRN":15,"el":8,"az":28,"ss":0,"used":false},{"PRN":26,"el":6,"az":175,"ss":0,"used":false},{"PRN":1,"el":3,"az":251,"ss":0,"used":false},{"PRN":32,"el":2,"az":133,"ss":0,"used":false},{"PRN":13,"el":1,"az":2,"ss":0,"used":false},{"PRN":138,"el":0,"az":0,"ss":0,"used":false},{"PRN":83,"el":66,"az":321,"ss":0,"used":false},{"PRN":82,"el":50,"az":68,"ss":0,"used":false},{"PRN":67,"el":43,"az":98,"ss":19,"used":true},{"PRN":73,"el":35,"az":261,"ss":15,"used":true},{"PRN":74,"el":29,"az":320,"ss":21,"used":true},{"PRN":66,"el":27,"az":33,"ss":30,"used":true},{"PRN":68,"el":17,"az":150,"ss":0,"used":false},{"PRN":84,"el":12,"az":279,"ss":0,"used":false},{"PRN":80,"el":11,"az":215,"ss":0,"used":false},{"PRN":81,"el":5,"az":88,"ss":0,"used":false}]}
{"class":"TPV","device":"/dev/ttyO4","mode":3,"time":"2021-08-13T09:12:42.000Z","ept":0.005,"lat":51.813280233,"lon":6.550214200,"alt":30.393,"epx":8.171,"epy":11.499,"epv":19.550,"track":12.4500,"speed":0.000,"climb":0.000,"eps":23.00,"epc":39.10}
"""
def fake_gpsd_server():
    """Serve one fake gpsd session on 127.0.0.1:2947.

    Accepts a single client, sends the VERSION banner, and — if the client
    replies with the expected ``?WATCH`` command — streams GPSD_OUTPUT in
    small chunks with short pauses to mimic gpsd's trickle of JSON lines.

    Fixes over the original: uses sendall() so partial sends cannot drop
    data, and closes both the client and the listening socket on exit.
    """
    addr = ("127.0.0.1", 2947)
    if hasattr(socket, "create_server"):
        # Python 3.8+: portable reuse handling in one call.
        sock = socket.create_server(address=addr, reuse_port=True)
    else:
        sock = socket.socket()
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
        sock.bind(addr)
        sock.listen(1)
    try:
        client, _ = sock.accept()
        with client:
            client.sendall(VERSION_HEADER)
            if client.recv(100) == WATCH_COMMAND:
                # Dribble the fixture out in 120-byte chunks so the client
                # exercises its line-reassembly path.
                n = 120
                payload = GPSD_OUTPUT.encode("utf-8")
                for start in range(0, len(payload), n):
                    client.sendall(payload[start : start + n])
                    time.sleep(0.01)
    finally:
        sock.close()
if __name__ == "__main__":
server = threading.Thread(target=fake_gpsd_server)
server.start()
| 144.97561
| 1,316
| 0.571332
| 1,156
| 5,944
| 2.91436
| 0.172145
| 0.128228
| 0.160285
| 0.149599
| 0.777679
| 0.762244
| 0.762244
| 0.762244
| 0.762244
| 0.762244
| 0
| 0.163586
| 0.03533
| 5,944
| 40
| 1,317
| 148.6
| 0.423962
| 0
| 0
| 0
| 0
| 0.257143
| 0.865411
| 0.858008
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028571
| false
| 0
| 0.085714
| 0
| 0.114286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
09e5e4fba0a0e7bc33cec446742f0c7a59664689
| 21,091
|
py
|
Python
|
sdk/python/pulumi_aws/iot/authorizer.py
|
rapzo/pulumi-aws
|
390a098221315d98a54ba97d1559e750dc3053b7
|
[
"ECL-2.0",
"Apache-2.0"
] | 260
|
2018-06-18T14:57:00.000Z
|
2022-03-29T11:41:03.000Z
|
sdk/python/pulumi_aws/iot/authorizer.py
|
rapzo/pulumi-aws
|
390a098221315d98a54ba97d1559e750dc3053b7
|
[
"ECL-2.0",
"Apache-2.0"
] | 1,154
|
2018-06-19T20:38:20.000Z
|
2022-03-31T19:48:16.000Z
|
sdk/python/pulumi_aws/iot/authorizer.py
|
rapzo/pulumi-aws
|
390a098221315d98a54ba97d1559e750dc3053b7
|
[
"ECL-2.0",
"Apache-2.0"
] | 115
|
2018-06-28T03:20:27.000Z
|
2022-03-29T11:41:06.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['AuthorizerArgs', 'Authorizer']
@pulumi.input_type
class AuthorizerArgs:
    # NOTE(review): tfgen-generated input type — regenerate instead of
    # hand-editing; changes here are overwritten by the next codegen run.
    def __init__(__self__, *,
                 authorizer_function_arn: pulumi.Input[str],
                 name: Optional[pulumi.Input[str]] = None,
                 signing_disabled: Optional[pulumi.Input[bool]] = None,
                 status: Optional[pulumi.Input[str]] = None,
                 token_key_name: Optional[pulumi.Input[str]] = None,
                 token_signing_public_keys: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
        """
        The set of arguments for constructing a Authorizer resource.
        :param pulumi.Input[str] authorizer_function_arn: The ARN of the authorizer's Lambda function.
        :param pulumi.Input[str] name: The name of the authorizer.
        :param pulumi.Input[bool] signing_disabled: Specifies whether AWS IoT validates the token signature in an authorization request. Default: `false`.
        :param pulumi.Input[str] status: The status of Authorizer request at creation. Valid values: `ACTIVE`, `INACTIVE`. Default: `ACTIVE`.
        :param pulumi.Input[str] token_key_name: The name of the token key used to extract the token from the HTTP headers. This value is required if signing is enabled in your authorizer.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] token_signing_public_keys: The public keys used to verify the digital signature returned by your custom authentication service. This value is required if signing is enabled in your authorizer.
        """
        pulumi.set(__self__, "authorizer_function_arn", authorizer_function_arn)
        # Optional fields are stored only when explicitly provided, so unset
        # values stay absent rather than becoming explicit Nones.
        if name is not None:
            pulumi.set(__self__, "name", name)
        if signing_disabled is not None:
            pulumi.set(__self__, "signing_disabled", signing_disabled)
        if status is not None:
            pulumi.set(__self__, "status", status)
        if token_key_name is not None:
            pulumi.set(__self__, "token_key_name", token_key_name)
        if token_signing_public_keys is not None:
            pulumi.set(__self__, "token_signing_public_keys", token_signing_public_keys)

    @property
    @pulumi.getter(name="authorizerFunctionArn")
    def authorizer_function_arn(self) -> pulumi.Input[str]:
        """
        The ARN of the authorizer's Lambda function.
        """
        return pulumi.get(self, "authorizer_function_arn")

    @authorizer_function_arn.setter
    def authorizer_function_arn(self, value: pulumi.Input[str]):
        pulumi.set(self, "authorizer_function_arn", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the authorizer.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="signingDisabled")
    def signing_disabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Specifies whether AWS IoT validates the token signature in an authorization request. Default: `false`.
        """
        return pulumi.get(self, "signing_disabled")

    @signing_disabled.setter
    def signing_disabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "signing_disabled", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[str]]:
        """
        The status of Authorizer request at creation. Valid values: `ACTIVE`, `INACTIVE`. Default: `ACTIVE`.
        """
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "status", value)

    @property
    @pulumi.getter(name="tokenKeyName")
    def token_key_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the token key used to extract the token from the HTTP headers. This value is required if signing is enabled in your authorizer.
        """
        return pulumi.get(self, "token_key_name")

    @token_key_name.setter
    def token_key_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "token_key_name", value)

    @property
    @pulumi.getter(name="tokenSigningPublicKeys")
    def token_signing_public_keys(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        The public keys used to verify the digital signature returned by your custom authentication service. This value is required if signing is enabled in your authorizer.
        """
        return pulumi.get(self, "token_signing_public_keys")

    @token_signing_public_keys.setter
    def token_signing_public_keys(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "token_signing_public_keys", value)
@pulumi.input_type
class _AuthorizerState:
    # NOTE(review): tfgen-generated state type — differs from AuthorizerArgs
    # in that every field (including the output-only `arn`) is optional,
    # because it describes observed state rather than construction input.
    def __init__(__self__, *,
                 arn: Optional[pulumi.Input[str]] = None,
                 authorizer_function_arn: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 signing_disabled: Optional[pulumi.Input[bool]] = None,
                 status: Optional[pulumi.Input[str]] = None,
                 token_key_name: Optional[pulumi.Input[str]] = None,
                 token_signing_public_keys: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
        """
        Input properties used for looking up and filtering Authorizer resources.
        :param pulumi.Input[str] arn: The ARN of the authorizer.
        :param pulumi.Input[str] authorizer_function_arn: The ARN of the authorizer's Lambda function.
        :param pulumi.Input[str] name: The name of the authorizer.
        :param pulumi.Input[bool] signing_disabled: Specifies whether AWS IoT validates the token signature in an authorization request. Default: `false`.
        :param pulumi.Input[str] status: The status of Authorizer request at creation. Valid values: `ACTIVE`, `INACTIVE`. Default: `ACTIVE`.
        :param pulumi.Input[str] token_key_name: The name of the token key used to extract the token from the HTTP headers. This value is required if signing is enabled in your authorizer.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] token_signing_public_keys: The public keys used to verify the digital signature returned by your custom authentication service. This value is required if signing is enabled in your authorizer.
        """
        if arn is not None:
            pulumi.set(__self__, "arn", arn)
        if authorizer_function_arn is not None:
            pulumi.set(__self__, "authorizer_function_arn", authorizer_function_arn)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if signing_disabled is not None:
            pulumi.set(__self__, "signing_disabled", signing_disabled)
        if status is not None:
            pulumi.set(__self__, "status", status)
        if token_key_name is not None:
            pulumi.set(__self__, "token_key_name", token_key_name)
        if token_signing_public_keys is not None:
            pulumi.set(__self__, "token_signing_public_keys", token_signing_public_keys)

    @property
    @pulumi.getter
    def arn(self) -> Optional[pulumi.Input[str]]:
        """
        The ARN of the authorizer.
        """
        return pulumi.get(self, "arn")

    @arn.setter
    def arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "arn", value)

    @property
    @pulumi.getter(name="authorizerFunctionArn")
    def authorizer_function_arn(self) -> Optional[pulumi.Input[str]]:
        """
        The ARN of the authorizer's Lambda function.
        """
        return pulumi.get(self, "authorizer_function_arn")

    @authorizer_function_arn.setter
    def authorizer_function_arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "authorizer_function_arn", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the authorizer.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="signingDisabled")
    def signing_disabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Specifies whether AWS IoT validates the token signature in an authorization request. Default: `false`.
        """
        return pulumi.get(self, "signing_disabled")

    @signing_disabled.setter
    def signing_disabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "signing_disabled", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[str]]:
        """
        The status of Authorizer request at creation. Valid values: `ACTIVE`, `INACTIVE`. Default: `ACTIVE`.
        """
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "status", value)

    @property
    @pulumi.getter(name="tokenKeyName")
    def token_key_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the token key used to extract the token from the HTTP headers. This value is required if signing is enabled in your authorizer.
        """
        return pulumi.get(self, "token_key_name")

    @token_key_name.setter
    def token_key_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "token_key_name", value)

    @property
    @pulumi.getter(name="tokenSigningPublicKeys")
    def token_signing_public_keys(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        The public keys used to verify the digital signature returned by your custom authentication service. This value is required if signing is enabled in your authorizer.
        """
        return pulumi.get(self, "token_signing_public_keys")

    @token_signing_public_keys.setter
    def token_signing_public_keys(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "token_signing_public_keys", value)
class Authorizer(pulumi.CustomResource):
    # NOTE(review): tfgen-generated resource class — regenerate instead of
    # hand-editing; changes here are overwritten by the next codegen run.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 authorizer_function_arn: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 signing_disabled: Optional[pulumi.Input[bool]] = None,
                 status: Optional[pulumi.Input[str]] = None,
                 token_key_name: Optional[pulumi.Input[str]] = None,
                 token_signing_public_keys: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 __props__=None):
        """
        Creates and manages an AWS IoT Authorizer.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_aws as aws

        example = aws.iot.Authorizer("example",
            authorizer_function_arn=aws_lambda_function["example"]["arn"],
            signing_disabled=False,
            status="ACTIVE",
            token_key_name="Token-Header",
            token_signing_public_keys={
                "Key1": (lambda path: open(path).read())("test-fixtures/iot-authorizer-signing-key.pem"),
            })
        ```

        ## Import

        IOT Authorizers can be imported using the name, e.g.

        ```sh
        $ pulumi import aws:iot/authorizer:Authorizer example example
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] authorizer_function_arn: The ARN of the authorizer's Lambda function.
        :param pulumi.Input[str] name: The name of the authorizer.
        :param pulumi.Input[bool] signing_disabled: Specifies whether AWS IoT validates the token signature in an authorization request. Default: `false`.
        :param pulumi.Input[str] status: The status of Authorizer request at creation. Valid values: `ACTIVE`, `INACTIVE`. Default: `ACTIVE`.
        :param pulumi.Input[str] token_key_name: The name of the token key used to extract the token from the HTTP headers. This value is required if signing is enabled in your authorizer.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] token_signing_public_keys: The public keys used to verify the digital signature returned by your custom authentication service. This value is required if signing is enabled in your authorizer.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: AuthorizerArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Creates and manages an AWS IoT Authorizer.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_aws as aws

        example = aws.iot.Authorizer("example",
            authorizer_function_arn=aws_lambda_function["example"]["arn"],
            signing_disabled=False,
            status="ACTIVE",
            token_key_name="Token-Header",
            token_signing_public_keys={
                "Key1": (lambda path: open(path).read())("test-fixtures/iot-authorizer-signing-key.pem"),
            })
        ```

        ## Import

        IOT Authorizers can be imported using the name, e.g.

        ```sh
        $ pulumi import aws:iot/authorizer:Authorizer example example
        ```

        :param str resource_name: The name of the resource.
        :param AuthorizerArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above based on the actual
        # positional/keyword arguments supplied by the caller.
        resource_args, opts = _utilities.get_resource_args_opts(AuthorizerArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 authorizer_function_arn: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 signing_disabled: Optional[pulumi.Input[bool]] = None,
                 status: Optional[pulumi.Input[str]] = None,
                 token_key_name: Optional[pulumi.Input[str]] = None,
                 token_signing_public_keys: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # When opts.id is set, the engine looks up an existing resource and
        # the pre-built __props__ is used; otherwise build props from args.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = AuthorizerArgs.__new__(AuthorizerArgs)
            if authorizer_function_arn is None and not opts.urn:
                raise TypeError("Missing required property 'authorizer_function_arn'")
            __props__.__dict__["authorizer_function_arn"] = authorizer_function_arn
            __props__.__dict__["name"] = name
            __props__.__dict__["signing_disabled"] = signing_disabled
            __props__.__dict__["status"] = status
            __props__.__dict__["token_key_name"] = token_key_name
            __props__.__dict__["token_signing_public_keys"] = token_signing_public_keys
            # Output-only property; populated by the provider after create.
            __props__.__dict__["arn"] = None
        super(Authorizer, __self__).__init__(
            'aws:iot/authorizer:Authorizer',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            arn: Optional[pulumi.Input[str]] = None,
            authorizer_function_arn: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            signing_disabled: Optional[pulumi.Input[bool]] = None,
            status: Optional[pulumi.Input[str]] = None,
            token_key_name: Optional[pulumi.Input[str]] = None,
            token_signing_public_keys: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None) -> 'Authorizer':
        """
        Get an existing Authorizer resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] arn: The ARN of the authorizer.
        :param pulumi.Input[str] authorizer_function_arn: The ARN of the authorizer's Lambda function.
        :param pulumi.Input[str] name: The name of the authorizer.
        :param pulumi.Input[bool] signing_disabled: Specifies whether AWS IoT validates the token signature in an authorization request. Default: `false`.
        :param pulumi.Input[str] status: The status of Authorizer request at creation. Valid values: `ACTIVE`, `INACTIVE`. Default: `ACTIVE`.
        :param pulumi.Input[str] token_key_name: The name of the token key used to extract the token from the HTTP headers. This value is required if signing is enabled in your authorizer.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] token_signing_public_keys: The public keys used to verify the digital signature returned by your custom authentication service. This value is required if signing is enabled in your authorizer.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _AuthorizerState.__new__(_AuthorizerState)
        __props__.__dict__["arn"] = arn
        __props__.__dict__["authorizer_function_arn"] = authorizer_function_arn
        __props__.__dict__["name"] = name
        __props__.__dict__["signing_disabled"] = signing_disabled
        __props__.__dict__["status"] = status
        __props__.__dict__["token_key_name"] = token_key_name
        __props__.__dict__["token_signing_public_keys"] = token_signing_public_keys
        return Authorizer(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter
    def arn(self) -> pulumi.Output[str]:
        """
        The ARN of the authorizer.
        """
        return pulumi.get(self, "arn")

    @property
    @pulumi.getter(name="authorizerFunctionArn")
    def authorizer_function_arn(self) -> pulumi.Output[str]:
        """
        The ARN of the authorizer's Lambda function.
        """
        return pulumi.get(self, "authorizer_function_arn")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name of the authorizer.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="signingDisabled")
    def signing_disabled(self) -> pulumi.Output[Optional[bool]]:
        """
        Specifies whether AWS IoT validates the token signature in an authorization request. Default: `false`.
        """
        return pulumi.get(self, "signing_disabled")

    @property
    @pulumi.getter
    def status(self) -> pulumi.Output[Optional[str]]:
        """
        The status of Authorizer request at creation. Valid values: `ACTIVE`, `INACTIVE`. Default: `ACTIVE`.
        """
        return pulumi.get(self, "status")

    @property
    @pulumi.getter(name="tokenKeyName")
    def token_key_name(self) -> pulumi.Output[Optional[str]]:
        """
        The name of the token key used to extract the token from the HTTP headers. This value is required if signing is enabled in your authorizer.
        """
        return pulumi.get(self, "token_key_name")

    @property
    @pulumi.getter(name="tokenSigningPublicKeys")
    def token_signing_public_keys(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        The public keys used to verify the digital signature returned by your custom authentication service. This value is required if signing is enabled in your authorizer.
        """
        return pulumi.get(self, "token_signing_public_keys")
| 45.85
| 253
| 0.660187
| 2,545
| 21,091
| 5.242829
| 0.072299
| 0.083265
| 0.076594
| 0.061006
| 0.871618
| 0.854231
| 0.84149
| 0.832796
| 0.828974
| 0.824402
| 0
| 0.000187
| 0.241003
| 21,091
| 459
| 254
| 45.949891
| 0.833333
| 0.355602
| 0
| 0.746988
| 1
| 0
| 0.103745
| 0.049421
| 0
| 0
| 0
| 0
| 0
| 1
| 0.160643
| false
| 0.004016
| 0.02008
| 0
| 0.277108
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3a4150826f9e1e33e949280ee3fb6c4f39c8bd36
| 137
|
py
|
Python
|
Hackerrank/Python/Introduction/aritmetic_operator.py
|
ritwik1503/Competitive-Coding-1
|
ffefe5f8b299c623af1ef01bf024af339401de0b
|
[
"MIT"
] | 29
|
2016-09-02T04:48:59.000Z
|
2016-09-08T18:13:05.000Z
|
Hackerrank/Python/Introduction/aritmetic_operator.py
|
ritwik1503/Competitive-Coding-1
|
ffefe5f8b299c623af1ef01bf024af339401de0b
|
[
"MIT"
] | 2
|
2016-09-02T05:20:02.000Z
|
2016-10-13T06:31:31.000Z
|
Hackerrank/Python/Introduction/aritmetic_operator.py
|
ritwik1503/Competitive-Coding-1
|
ffefe5f8b299c623af1ef01bf024af339401de0b
|
[
"MIT"
] | 7
|
2017-04-01T20:07:03.000Z
|
2020-10-16T12:28:54.000Z
|
# Enter your code here. Read input from STDIN. Print output to STDOUT
# Fixed: the original used Python 2 `raw_input` and `print` statements,
# which are syntax/name errors on Python 3. Reads two integers from stdin
# and prints their sum, difference and product, one per line.
a = int(input())
b = int(input())
print(a + b)
print(a - b)
print(a * b)
| 22.833333
| 69
| 0.737226
| 29
| 137
| 3.413793
| 0.551724
| 0.181818
| 0.212121
| 0.242424
| 0.212121
| 0.212121
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145985
| 137
| 6
| 70
| 22.833333
| 0.846154
| 0.489051
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 0
| 0
| null | null | 0
| 0
| null | null | 0.6
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
28d8f124820c4b014de7de195e3ea6d1144efb95
| 8,418
|
py
|
Python
|
disvae/models/decoders.py
|
gokceneraslan/disentangling-vae
|
83f5841cfc834805f3ee05e2675dc0540850c2bd
|
[
"MIT"
] | null | null | null |
disvae/models/decoders.py
|
gokceneraslan/disentangling-vae
|
83f5841cfc834805f3ee05e2675dc0540850c2bd
|
[
"MIT"
] | null | null | null |
disvae/models/decoders.py
|
gokceneraslan/disentangling-vae
|
83f5841cfc834805f3ee05e2675dc0540850c2bd
|
[
"MIT"
] | null | null | null |
"""
Module containing the decoders.
"""
import numpy as np
import torch
from torch import nn
class DecoderConv2D(nn.Module):
    def __init__(self, img_size, latent_dim=10, kernel_size=4, num_layers=6, stride=2, padding=1, hidden_dim=256, hidden_channels=32):
        r"""Decoder of the model proposed in [1].

        Maps a latent vector to an image through three fully connected layers
        followed by ``num_layers`` transposed convolutions (each doubling the
        spatial size for the default stride=2 / kernel=4 / padding=1 setup).

        Parameters
        ----------
        img_size : tuple of ints
            Size of images. E.g. (1, 32, 32) or (3, 64, 64).
            img_size[1] and img_size[2] are assumed divisible by 2**num_layers.
        latent_dim : int
            Dimensionality of latent output.
        kernel_size : int
            Side of the (square) transposed-convolution kernels.
        num_layers : int
            Number of transposed-convolution layers.
        stride, padding : int
            Passed to every nn.ConvTranspose2d.
        hidden_dim : int
            Width of the fully connected layers.
        hidden_channels : int
            Channel count of the intermediate transposed convolutions.

        Model Architecture (transposed for decoder)
        ------------
        - 4 convolutional layers (each with 32 channels), (4 x 4 kernel), (stride of 2)
        - 2 fully connected layers (each of 256 units)
        - Latent distribution:
            - 1 fully connected layer of 20 units (log variance and mean for 10 Gaussians)

        References:
            [1] Burgess, Christopher P., et al. "Understanding disentangling in
            $\beta$-VAE." arXiv preprint arXiv:1804.03599 (2018).
        """
        super().__init__()
        # Layer parameters
        self.hidden_channels = hidden_channels
        self.kernel_size = kernel_size
        self.hidden_dim = hidden_dim
        self.latent_dim = latent_dim
        self.num_layers = num_layers
        self.img_size = img_size
        # Shape required to start transpose convs
        self.reshape = (self.hidden_channels, img_size[1]//(2**num_layers), img_size[2]//(2**num_layers))
        n_chan = self.img_size[0]
        # Fully connected layers
        self.lin1 = nn.Linear(self.latent_dim, self.hidden_dim)
        self.lin2 = nn.Linear(self.hidden_dim, self.hidden_dim)
        # BUGFIX: np.product is a deprecated alias removed in NumPy 2.0 — use np.prod.
        self.lin3 = nn.Linear(self.hidden_dim, int(np.prod(self.reshape)))
        # Convolutional layers
        cnn_kwargs = dict(stride=stride, padding=padding)
        self.convt_layers = nn.ModuleList()
        for i in range(num_layers):
            # Last layer maps back to the image channel count.
            target_channels = self.hidden_channels if i < (num_layers-1) else n_chan
            convt_layer = nn.ConvTranspose2d(self.hidden_channels, target_channels, self.kernel_size, **cnn_kwargs)
            self.convt_layers.append(convt_layer)

    def forward(self, z):
        """Decode latent batch ``z`` of shape (batch, latent_dim) into images."""
        batch_size = z.size(0)
        # Fully connected layers with ReLu activations
        x = torch.relu(self.lin1(z))
        x = torch.relu(self.lin2(x))
        x = torch.relu(self.lin3(x))
        x = x.view(batch_size, *self.reshape)
        # Convolutional layers
        for i, convt_layer in enumerate(self.convt_layers):
            if i < (self.num_layers-1):
                x = torch.relu(convt_layer(x))
            else:
                # Sigmoid activation for final conv layer
                x = torch.sigmoid(convt_layer(x))
        return x
class CondDecoderConv2D(nn.Module):
    def __init__(self, img_size, cond_dim, latent_dim=10, kernel_size=4, num_layers=6, stride=2, padding=1, hidden_dim=256, hidden_channels=32):
        r"""Conditional decoder of the model proposed in [1].

        Identical to DecoderConv2D except that the latent vector ``z`` is
        concatenated with a condition vector ``y`` before the first fully
        connected layer.

        Parameters
        ----------
        img_size : tuple of ints
            Size of images. E.g. (1, 32, 32) or (3, 64, 64).
            img_size[1] and img_size[2] are assumed divisible by 2**num_layers.
        cond_dim : int
            Dimensionality of the condition vector.
        latent_dim : int
            Dimensionality of latent output.
        kernel_size, num_layers, stride, padding : int
            Transposed-convolution configuration.
        hidden_dim : int
            Width of the fully connected layers.
        hidden_channels : int
            Channel count of the intermediate transposed convolutions.

        References:
            [1] Burgess, Christopher P., et al. "Understanding disentangling in
            $\beta$-VAE." arXiv preprint arXiv:1804.03599 (2018).
        """
        super().__init__()
        # Layer parameters
        self.hidden_channels = hidden_channels
        self.kernel_size = kernel_size
        self.hidden_dim = hidden_dim
        self.cond_dim = cond_dim
        self.latent_dim = latent_dim
        self.num_layers = num_layers
        self.img_size = img_size
        # Shape required to start transpose convs
        self.reshape = (self.hidden_channels, img_size[1]//(2**num_layers), img_size[2]//(2**num_layers))
        n_chan = self.img_size[0]
        # Fully connected layers; lin1 consumes the concatenated (z, y) vector.
        self.lin1 = nn.Linear(self.latent_dim + self.cond_dim, self.hidden_dim)
        self.lin2 = nn.Linear(self.hidden_dim, self.hidden_dim)
        # BUGFIX: np.product is a deprecated alias removed in NumPy 2.0 — use np.prod.
        self.lin3 = nn.Linear(self.hidden_dim, int(np.prod(self.reshape)))
        # Convolutional layers
        cnn_kwargs = dict(stride=stride, padding=padding)
        self.convt_layers = nn.ModuleList()
        for i in range(num_layers):
            # Last layer maps back to the image channel count.
            target_channels = self.hidden_channels if i < (num_layers-1) else n_chan
            convt_layer = nn.ConvTranspose2d(self.hidden_channels, target_channels, self.kernel_size, **cnn_kwargs)
            self.convt_layers.append(convt_layer)

    def forward(self, *, z, y):
        """Decode latent ``z`` conditioned on ``y`` (both (batch, dim) tensors)."""
        batch_size = z.size(0)
        # Fully connected layers with ReLu activations
        x = torch.relu(self.lin1(torch.cat([z, y], dim=-1)))
        x = torch.relu(self.lin2(x))
        x = torch.relu(self.lin3(x))
        x = x.view(batch_size, *self.reshape)
        # Convolutional layers
        for i, convt_layer in enumerate(self.convt_layers):
            if i < (self.num_layers-1):
                x = torch.relu(convt_layer(x))
            else:
                # Sigmoid activation for final conv layer
                x = torch.sigmoid(convt_layer(x))
        return x
class DecoderFC(nn.Module):
    """Fully connected decoder: a stack of ReLU layers plus a linear head.

    The head output is passed through the activation named by
    ``output_activation`` ('sigmoid', 'relu', 'softplus' or 'exp');
    any other value (including the default 'linear') leaves it untouched.
    """

    def __init__(self, output_dim, latent_dim=10, hidden_dim=256, num_layers=1, output_activation='linear'):
        super(DecoderFC, self).__init__()
        self.output_activation = output_activation

        # Layer parameters
        self.hidden_dim = hidden_dim
        self.output_dim = output_dim
        self.num_layers = num_layers

        # Hidden stack: the first layer consumes the latent code, the rest
        # map hidden_dim -> hidden_dim.
        widths = [latent_dim] + [hidden_dim] * num_layers
        self.fc_layers = nn.ModuleList(
            nn.Linear(n_in, n_out) for n_in, n_out in zip(widths[:-1], widths[1:]))
        self.output_layer = nn.Linear(hidden_dim, self.output_dim)

    def forward(self, x):
        # Every hidden layer uses a ReLU activation.
        for fc in self.fc_layers:
            x = torch.relu(fc(x))
        x = self.output_layer(x)

        # Optional output non-linearity; unknown names act as identity.
        activations = {'sigmoid': torch.sigmoid,
                       'relu': torch.relu,
                       'softplus': torch.nn.functional.softplus,
                       'exp': torch.exp}
        final_act = activations.get(self.output_activation)
        return final_act(x) if final_act is not None else x
class CondDecoderFC(nn.Module):
    """Condition-aware fully connected decoder.

    The latent code ``z`` is concatenated with the condition vector ``y``
    before the first layer; with ``concat_all_layers=True`` the condition is
    re-appended after every hidden layer (and hence before the output layer).
    The head output goes through the activation named by ``output_activation``
    ('sigmoid', 'relu', 'softplus' or 'exp'); other values are identity.
    """

    def __init__(self, output_dim, cond_dim, latent_dim=10, hidden_dim=256, num_layers=1, output_activation='linear', concat_all_layers=False):
        super().__init__()
        self.output_activation = output_activation
        self.concat_all_layers = concat_all_layers

        # Layer parameters
        self.hidden_dim = hidden_dim
        self.cond_dim = cond_dim
        self.output_dim = output_dim
        self.num_layers = num_layers

        # Each non-first layer (and the head) sees cond_dim extra inputs when
        # the condition is re-appended after every hidden layer.
        extra_hidden = cond_dim if self.concat_all_layers else 0
        layer_inputs = [latent_dim + cond_dim if i == 0 else hidden_dim + extra_hidden
                        for i in range(num_layers)]
        self.fc_layers = nn.ModuleList(nn.Linear(n_in, hidden_dim) for n_in in layer_inputs)
        self.output_layer = nn.Linear(hidden_dim + extra_hidden, self.output_dim)

    def forward(self, *, z, y):
        x = torch.cat([z, y], dim=-1)

        # Hidden layers with ReLU; optionally re-append the condition.
        for fc in self.fc_layers:
            x = torch.relu(fc(x))
            if self.concat_all_layers:
                x = torch.cat([x, y], dim=-1)
        x = self.output_layer(x)

        # Optional output non-linearity; unknown names act as identity.
        activations = {'sigmoid': torch.sigmoid,
                       'relu': torch.relu,
                       'softplus': torch.nn.functional.softplus,
                       'exp': torch.exp}
        final_act = activations.get(self.output_activation)
        return final_act(x) if final_act is not None else x
| 36.441558
| 144
| 0.608458
| 1,100
| 8,418
| 4.456364
| 0.128182
| 0.047736
| 0.034476
| 0.020808
| 0.933701
| 0.930845
| 0.897797
| 0.868829
| 0.858833
| 0.842105
| 0
| 0.02541
| 0.28938
| 8,418
| 231
| 145
| 36.441558
| 0.794049
| 0.222381
| 0
| 0.76378
| 0
| 0
| 0.008957
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.062992
| false
| 0
| 0.023622
| 0
| 0.149606
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c961af5541767f9d4137f4b6ca1db89cad3ff8c1
| 37,510
|
py
|
Python
|
src/tt_personal_messages/tt_personal_messages/tests/test_handlers.py
|
al-arz/the-tale
|
542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5
|
[
"BSD-3-Clause"
] | 1
|
2020-04-02T11:51:20.000Z
|
2020-04-02T11:51:20.000Z
|
src/tt_personal_messages/tt_personal_messages/tests/test_handlers.py
|
devapromix/the-tale
|
2a10efd3270734f8cf482b4cfbc5353ef8f0494c
|
[
"BSD-3-Clause"
] | null | null | null |
src/tt_personal_messages/tt_personal_messages/tests/test_handlers.py
|
devapromix/the-tale
|
2a10efd3270734f8cf482b4cfbc5353ef8f0494c
|
[
"BSD-3-Clause"
] | null | null | null |
import datetime
import asyncio
from aiohttp import test_utils
from tt_protocol.protocol import personal_messages_pb2
from tt_web import utils
from tt_web import exceptions
from tt_web import postgresql as db
from .. import objects
from .. import protobuf
from .. import relations
from .. import operations
from . import helpers
async def load_protocol_messages_by_bodies(bodies):
    """Fetch the stored message whose body contains each given fragment and
    return the corresponding protobuf messages, in the order of ``bodies``."""
    messages = []
    for body in bodies:
        # LIKE pattern: the stored body only needs to contain the fragment.
        rows = await db.sql('SELECT * FROM messages WHERE body LIKE %(body)s',
                            {'body': '%{}%'.format(body)})
        message = operations.message_from_row(rows[0])
        messages.append(protobuf.from_message(message))
    return messages
class Mixin(object):
    """Shared fixture: seeds the database with two personal messages."""

    async def _send(self, sender_id, recipients_ids, body):
        # Post one send-message request and verify the expected response type.
        request = await self.client.post('/send-message',
                                         data=personal_messages_pb2.SendMessageRequest(sender_id=sender_id,
                                                                                       recipients_ids=recipients_ids,
                                                                                       body=body).SerializeToString())
        await self.check_answer(request, personal_messages_pb2.SendMessageResponse)

    async def create_messages(self):
        # message 1: 1 -> [2, 3]; message 2: 2 -> [3]
        await self._send(sender_id=1, recipients_ids=[2, 3], body='message 1')
        await self._send(sender_id=2, recipients_ids=[3], body='message 2')
class NewMessagesNumberTests(Mixin, helpers.BaseTests):
    """Tests for the /new-messages-number handler."""

    async def _new_messages_number(self, account_id):
        # Query the unread-message counter for one account.
        request = await self.client.post('/new-messages-number',
                                         data=personal_messages_pb2.NewMessagesNumberRequest(account_id=account_id).SerializeToString())
        data = await self.check_answer(request, personal_messages_pb2.NewMessagesNumberResponse)
        return data.number

    @test_utils.unittest_run_loop
    async def test_no_messages(self):
        self.assertEqual(await self._new_messages_number(1), 0)

    @test_utils.unittest_run_loop
    async def test_has_messages(self):
        await self.create_messages()
        # Account 1 only sent; accounts 2 and 3 received 1 and 2 messages.
        self.assertEqual(await self._new_messages_number(1), 0)
        self.assertEqual(await self._new_messages_number(2), 1)
        self.assertEqual(await self._new_messages_number(3), 2)
class GetContactsTests(Mixin, helpers.BaseTests):
    """Tests for the /get-contacts handler."""

    async def _contacts(self, account_id):
        # Request the contact list of the given account.
        request = await self.client.post('/get-contacts',
                                         data=personal_messages_pb2.GetContactsRequest(account_id=account_id).SerializeToString())
        data = await self.check_answer(request, personal_messages_pb2.GetContactsResponse)
        return data.accounts_ids

    @test_utils.unittest_run_loop
    async def test_no_contacts(self):
        self.assertEqual(await self._contacts(1), [])

    @test_utils.unittest_run_loop
    async def test_has_contacts(self):
        await self.create_messages()
        # Every participant of a message becomes a contact of the others.
        self.assertCountEqual(await self._contacts(1), [2, 3])
        self.assertCountEqual(await self._contacts(2), [1, 3])
        self.assertCountEqual(await self._contacts(3), [1, 2])
class ReadMessagesTests(Mixin, helpers.BaseTests):
    """Tests for the /read-messages handler."""

    @test_utils.unittest_run_loop
    async def test_no_messages(self):
        request = await self.client.post('/read-messages', data=personal_messages_pb2.ReadMessagesRequest(account_id=1).SerializeToString())
        await self.check_answer(request, personal_messages_pb2.ReadMessagesResponse)
        # BUGFIX: this request previously posted a ReadMessagesRequest to
        # /read-messages while checking a NewMessagesNumberResponse; to verify
        # the unread counter it must query /new-messages-number instead.
        request = await self.client.post('/new-messages-number', data=personal_messages_pb2.NewMessagesNumberRequest(account_id=1).SerializeToString())
        data = await self.check_answer(request, personal_messages_pb2.NewMessagesNumberResponse)
        self.assertEqual(data.number, 0)

    @test_utils.unittest_run_loop
    async def test_has_messages(self):
        await self.create_messages()
        # Mark everything as read for all three accounts.
        request = await self.client.post('/read-messages', data=personal_messages_pb2.ReadMessagesRequest(account_id=1).SerializeToString())
        await self.check_answer(request, personal_messages_pb2.ReadMessagesResponse)
        request = await self.client.post('/read-messages', data=personal_messages_pb2.ReadMessagesRequest(account_id=2).SerializeToString())
        await self.check_answer(request, personal_messages_pb2.ReadMessagesResponse)
        request = await self.client.post('/read-messages', data=personal_messages_pb2.ReadMessagesRequest(account_id=3).SerializeToString())
        await self.check_answer(request, personal_messages_pb2.ReadMessagesResponse)
        # After reading, no account has unread messages left.
        request = await self.client.post('/new-messages-number', data=personal_messages_pb2.NewMessagesNumberRequest(account_id=1).SerializeToString())
        data = await self.check_answer(request, personal_messages_pb2.NewMessagesNumberResponse)
        self.assertEqual(data.number, 0)
        request = await self.client.post('/new-messages-number', data=personal_messages_pb2.NewMessagesNumberRequest(account_id=2).SerializeToString())
        data = await self.check_answer(request, personal_messages_pb2.NewMessagesNumberResponse)
        self.assertEqual(data.number, 0)
        request = await self.client.post('/new-messages-number', data=personal_messages_pb2.NewMessagesNumberRequest(account_id=3).SerializeToString())
        data = await self.check_answer(request, personal_messages_pb2.NewMessagesNumberResponse)
        self.assertEqual(data.number, 0)
class SendMessageTests(Mixin, helpers.BaseTests):
    """Tests for the /send-message handler: verifies the rows written to the
    messages, visibilities and conversations tables."""
    @test_utils.unittest_run_loop
    async def test_send(self):
        await self.create_messages()
        # Raw message rows as seeded by Mixin.create_messages.
        result = await db.sql('SELECT sender, recipients, body FROM messages')
        self.assertEqual({(row['sender'], tuple(row['recipients']), row['body']) for row in result},
                         {(1, (2, 3), 'message 1'),
                          (2, (3,), 'message 2')})
        # One visibility row per (participant, message) pair, all initially visible.
        result = await db.sql('SELECT v.account as account, m.body as body, v.visible as visible FROM visibilities AS v JOIN messages AS m ON v.message=m.id')
        self.assertCountEqual([dict(row) for row in result],
                              [{'account': 1, 'body': 'message 1', 'visible': True},
                               {'account': 2, 'body': 'message 1', 'visible': True},
                               {'account': 3, 'body': 'message 1', 'visible': True},
                               {'account': 2, 'body': 'message 2', 'visible': True},
                               {'account': 3, 'body': 'message 2', 'visible': True}])
        # One conversation row per (sender, recipient) pair of each message.
        result = await db.sql('SELECT c.account_1 as account_1, c.account_2 as account_2, m.body as body FROM conversations AS c JOIN messages AS m ON c.message=m.id')
        self.assertCountEqual([dict(row) for row in result],
                              [{'account_1': 1, 'account_2': 2, 'body': 'message 1'},
                               {'account_1': 1, 'account_2': 3, 'body': 'message 1'},
                               {'account_1': 2, 'account_2': 3, 'body': 'message 2'}])
class HideMessageTests(Mixin, helpers.BaseTests):
    """Tests for the /hide-message handler."""

    @test_utils.unittest_run_loop
    async def test_hide(self):
        await self.create_messages()
        # Map message bodies to their ids.
        # BUGFIX: the query previously selected the "body" column twice.
        result = await db.sql('SELECT id, body FROM messages')
        messages_ids = {row['body']: row['id'] for row in result}
        # Hide both messages for account 1.
        request = await self.client.post('/hide-message', data=personal_messages_pb2.HideMessageRequest(account_id=1, message_id=messages_ids['message 1']).SerializeToString())
        await self.check_answer(request, personal_messages_pb2.HideMessageResponse)
        request = await self.client.post('/hide-message', data=personal_messages_pb2.HideMessageRequest(account_id=1, message_id=messages_ids['message 2']).SerializeToString())
        await self.check_answer(request, personal_messages_pb2.HideMessageResponse)
        # Hiding an unknown message id must still answer successfully (no-op).
        request = await self.client.post('/hide-message', data=personal_messages_pb2.HideMessageRequest(account_id=1, message_id=1000500).SerializeToString())
        await self.check_answer(request, personal_messages_pb2.HideMessageResponse)
        # Hide both messages for account 2 as well.
        request = await self.client.post('/hide-message', data=personal_messages_pb2.HideMessageRequest(account_id=2, message_id=messages_ids['message 1']).SerializeToString())
        await self.check_answer(request, personal_messages_pb2.HideMessageResponse)
        request = await self.client.post('/hide-message', data=personal_messages_pb2.HideMessageRequest(account_id=2, message_id=messages_ids['message 2']).SerializeToString())
        await self.check_answer(request, personal_messages_pb2.HideMessageResponse)
        # Only existing (participant, message) visibility rows flip to False;
        # account 1 has no row for message 2, so nothing is added for it.
        result = await db.sql('SELECT v.account as account, m.body as body, v.visible as visible FROM visibilities AS v JOIN messages AS m ON v.message=m.id')
        self.assertCountEqual([dict(row) for row in result],
                              [{'account': 1, 'body': 'message 1', 'visible': False},
                               {'account': 2, 'body': 'message 1', 'visible': False},
                               {'account': 3, 'body': 'message 1', 'visible': True},
                               {'account': 2, 'body': 'message 2', 'visible': False},
                               {'account': 3, 'body': 'message 2', 'visible': True}])
class HideAllMessagesTests(Mixin, helpers.BaseTests):
    """Tests for the /hide-all-messages handler."""
    @test_utils.unittest_run_loop
    async def test_hide(self):
        await self.create_messages()
        # Hide everything for account 3 (a recipient of both messages).
        request = await self.client.post('/hide-all-messages', data=personal_messages_pb2.HideAllMessagesRequest(account_id=3).SerializeToString())
        await self.check_answer(request, personal_messages_pb2.HideAllMessagesResponse)
        # Only account 3's visibility rows become False; others are untouched.
        result = await db.sql('SELECT v.account as account, m.body as body, v.visible as visible FROM visibilities AS v JOIN messages AS m ON v.message=m.id')
        self.assertCountEqual([dict(row) for row in result],
                              [{'account': 1, 'body': 'message 1', 'visible': True},
                               {'account': 2, 'body': 'message 1', 'visible': True},
                               {'account': 3, 'body': 'message 1', 'visible': False},
                               {'account': 2, 'body': 'message 2', 'visible': True},
                               {'account': 3, 'body': 'message 2', 'visible': False}])
class HideConversationTests(Mixin, helpers.BaseTests):
    """Tests for the /hide-conversation handler."""
    @test_utils.unittest_run_loop
    async def test_hide(self):
        # Seed three messages; NOTE(review): the send responses are not
        # verified here, unlike in Mixin.create_messages.
        request = await self.client.post('/send-message', data=personal_messages_pb2.SendMessageRequest(sender_id=1,
                                                                                                        recipients_ids=[2, 3],
                                                                                                        body='message 1').SerializeToString())
        request = await self.client.post('/send-message', data=personal_messages_pb2.SendMessageRequest(sender_id=2,
                                                                                                        recipients_ids=[3],
                                                                                                        body='message 2').SerializeToString())
        request = await self.client.post('/send-message', data=personal_messages_pb2.SendMessageRequest(sender_id=3,
                                                                                                        recipients_ids=[2, 1],
                                                                                                        body='message 3').SerializeToString())
        # Hide the conversation between accounts 1 and 3, from 1's side.
        request = await self.client.post('/hide-conversation', data=personal_messages_pb2.HideConversationRequest(account_id=1, partner_id=3).SerializeToString())
        await self.check_answer(request, personal_messages_pb2.HideConversationResponse)
        # Messages 1 and 3 involve both 1 and 3, so they become invisible for
        # account 1 only; message 2 (2 -> [3]) and other accounts are untouched.
        result = await db.sql('SELECT v.account as account, m.body as body, v.visible as visible FROM visibilities AS v JOIN messages AS m ON v.message=m.id')
        self.assertCountEqual([dict(row) for row in result],
                              [{'account': 1, 'body': 'message 1', 'visible': False},
                               {'account': 2, 'body': 'message 1', 'visible': True},
                               {'account': 3, 'body': 'message 1', 'visible': True},
                               {'account': 2, 'body': 'message 2', 'visible': True},
                               {'account': 3, 'body': 'message 2', 'visible': True},
                               {'account': 1, 'body': 'message 3', 'visible': False},
                               {'account': 2, 'body': 'message 3', 'visible': True},
                               {'account': 3, 'body': 'message 3', 'visible': True},])
class RemoveOldMessagesTests(Mixin, helpers.BaseTests):
    """Tests for the /remove-old-messages handler."""
    async def prepair_data(self):
        """Seed three messages and back-date them (message 1: two days ago,
        message 2: one day ago, message 3: now); return message 2's timestamp
        for use as the removal barrier."""
        request = await self.client.post('/send-message', data=personal_messages_pb2.SendMessageRequest(sender_id=1,
                                                                                                        recipients_ids=[2, 3],
                                                                                                        body='message 1').SerializeToString())
        await self.check_answer(request, personal_messages_pb2.SendMessageResponse)
        request = await self.client.post('/send-message', data=personal_messages_pb2.SendMessageRequest(sender_id=2,
                                                                                                        recipients_ids=[3],
                                                                                                        body='message 2').SerializeToString())
        await self.check_answer(request, personal_messages_pb2.SendMessageResponse)
        request = await self.client.post('/send-message', data=personal_messages_pb2.SendMessageRequest(sender_id=3,
                                                                                                        recipients_ids=[1, 2],
                                                                                                        body='message 3').SerializeToString())
        await self.check_answer(request, personal_messages_pb2.SendMessageResponse)
        now = datetime.datetime.now()
        time_1 = now - datetime.timedelta(days=2)
        time_2 = now - datetime.timedelta(days=1)
        time_3 = now
        # Back-date the rows directly in the database.
        await db.sql("UPDATE messages SET created_at=%(time)s WHERE body='message 1'", {'time': time_1})
        await db.sql("UPDATE messages SET created_at=%(time)s WHERE body='message 2'", {'time': time_2})
        await db.sql("UPDATE messages SET created_at=%(time)s WHERE body='message 3'", {'time': time_3})
        return time_2
    @test_utils.unittest_run_loop
    async def test_remove__all(self):
        barrier = await self.prepair_data()
        # All accounts listed: messages 1 and 2 fall below barrier+1s and go away.
        request = await self.client.post('/remove-old-messages', data=personal_messages_pb2.RemoveOldMessagesRequest(accounts_ids=[1, 2, 3],
                                                                                                                    barrier=barrier.timestamp()+1).SerializeToString())
        await self.check_answer(request, personal_messages_pb2.RemoveOldMessagesResponse)
        result = await db.sql('SELECT body FROM messages')
        self.assertEqual({row['body'] for row in result},
                         {'message 3'})
    @test_utils.unittest_run_loop
    async def test_remove(self):
        barrier = await self.prepair_data()
        # Account 2 is excluded; message 2 (sent by 2) survives the cleanup.
        request = await self.client.post('/remove-old-messages', data=personal_messages_pb2.RemoveOldMessagesRequest(accounts_ids=[1, 3],
                                                                                                                    barrier=barrier.timestamp()+1).SerializeToString())
        await self.check_answer(request, personal_messages_pb2.RemoveOldMessagesResponse)
        result = await db.sql('SELECT body FROM messages')
        self.assertEqual({row['body'] for row in result},
                         {'message 2', 'message 3'})
class GetMessagesTests(Mixin, helpers.BaseTests):
    """Tests for the /get-messages handler: owner-type, text, offset and
    limit filters.  Expected results list newer messages first."""
    async def prepair_data(self):
        """Seed four messages with prefixed bodies so the text filter can
        match one message ('3') or a group ('b message')."""
        request = await self.client.post('/send-message', data=personal_messages_pb2.SendMessageRequest(sender_id=1,
                                                                                                        recipients_ids=[2, 3],
                                                                                                        body='a message 1').SerializeToString())
        await self.check_answer(request, personal_messages_pb2.SendMessageResponse)
        request = await self.client.post('/send-message', data=personal_messages_pb2.SendMessageRequest(sender_id=2,
                                                                                                        recipients_ids=[3],
                                                                                                        body='a message 2').SerializeToString())
        await self.check_answer(request, personal_messages_pb2.SendMessageResponse)
        request = await self.client.post('/send-message', data=personal_messages_pb2.SendMessageRequest(sender_id=3,
                                                                                                        recipients_ids=[1, 2],
                                                                                                        body='b message 3').SerializeToString())
        await self.check_answer(request, personal_messages_pb2.SendMessageResponse)
        request = await self.client.post('/send-message', data=personal_messages_pb2.SendMessageRequest(sender_id=1,
                                                                                                        recipients_ids=[2, 4],
                                                                                                        body='b message 4').SerializeToString())
        await self.check_answer(request, personal_messages_pb2.SendMessageResponse)
    @test_utils.unittest_run_loop
    async def test_no_messages(self):
        await self.prepair_data()
        # Unknown account: empty result regardless of owner type.
        request = await self.client.post('/get-messages', data=personal_messages_pb2.GetMessagesRequest(account_id=666,
                                                                                                        type=relations.OWNER_TYPE.random().protocol_value,
                                                                                                        text=None,
                                                                                                        offset=None,
                                                                                                        limit=None).SerializeToString())
        data = await self.check_answer(request, personal_messages_pb2.GetMessagesResponse)
        self.assertEqual(data.total, 0)
        self.assertEqual(list(data.messages), [])
    @test_utils.unittest_run_loop
    async def test_sender_type(self):
        await self.prepair_data()
        # Account 1 sent messages 1 and 4.
        request = await self.client.post('/get-messages', data=personal_messages_pb2.GetMessagesRequest(account_id=1,
                                                                                                        type=relations.OWNER_TYPE.SENDER.protocol_value,
                                                                                                        text=None,
                                                                                                        offset=None,
                                                                                                        limit=None).SerializeToString())
        data = await self.check_answer(request, personal_messages_pb2.GetMessagesResponse)
        self.assertEqual(data.total, 2)
        self.assertEqual(list(data.messages), await load_protocol_messages_by_bodies(['message 4', 'message 1']))
    @test_utils.unittest_run_loop
    async def test_recipient_type(self):
        await self.prepair_data()
        # Account 2 received messages 1, 3 and 4.
        request = await self.client.post('/get-messages', data=personal_messages_pb2.GetMessagesRequest(account_id=2,
                                                                                                        type=relations.OWNER_TYPE.RECIPIENT.protocol_value,
                                                                                                        text=None,
                                                                                                        offset=None,
                                                                                                        limit=None).SerializeToString())
        data = await self.check_answer(request, personal_messages_pb2.GetMessagesResponse)
        self.assertEqual(data.total, 3)
        self.assertEqual(list(data.messages), await load_protocol_messages_by_bodies(['message 4', 'message 3', 'message 1']))
    @test_utils.unittest_run_loop
    async def test_text_filter__single(self):
        await self.prepair_data()
        # '3' only matches 'b message 3'.
        request = await self.client.post('/get-messages', data=personal_messages_pb2.GetMessagesRequest(account_id=2,
                                                                                                        type=relations.OWNER_TYPE.RECIPIENT.protocol_value,
                                                                                                        text='3',
                                                                                                        offset=None,
                                                                                                        limit=None).SerializeToString())
        data = await self.check_answer(request, personal_messages_pb2.GetMessagesResponse)
        self.assertEqual(data.total, 1)
        self.assertEqual(list(data.messages), await load_protocol_messages_by_bodies(['message 3']))
    @test_utils.unittest_run_loop
    async def test_text_filter__multiple(self):
        await self.prepair_data()
        # 'b message' matches messages 3 and 4.
        request = await self.client.post('/get-messages', data=personal_messages_pb2.GetMessagesRequest(account_id=2,
                                                                                                        type=relations.OWNER_TYPE.RECIPIENT.protocol_value,
                                                                                                        text='b message',
                                                                                                        offset=None,
                                                                                                        limit=None).SerializeToString())
        data = await self.check_answer(request, personal_messages_pb2.GetMessagesResponse)
        self.assertEqual(data.total, 2)
        self.assertEqual(list(data.messages), await load_protocol_messages_by_bodies(['message 4', 'message 3']))
    @test_utils.unittest_run_loop
    async def test_offset(self):
        await self.prepair_data()
        # Offset skips the newest message; total still counts all matches.
        request = await self.client.post('/get-messages', data=personal_messages_pb2.GetMessagesRequest(account_id=2,
                                                                                                        type=relations.OWNER_TYPE.RECIPIENT.protocol_value,
                                                                                                        text=None,
                                                                                                        offset=1,
                                                                                                        limit=None).SerializeToString())
        data = await self.check_answer(request, personal_messages_pb2.GetMessagesResponse)
        self.assertEqual(data.total, 3)
        self.assertEqual(list(data.messages), await load_protocol_messages_by_bodies(['message 3', 'message 1']))
    @test_utils.unittest_run_loop
    async def test_limit(self):
        await self.prepair_data()
        # Limit truncates the page; total still counts all matches.
        request = await self.client.post('/get-messages', data=personal_messages_pb2.GetMessagesRequest(account_id=2,
                                                                                                        type=relations.OWNER_TYPE.RECIPIENT.protocol_value,
                                                                                                        text=None,
                                                                                                        offset=None,
                                                                                                        limit=2).SerializeToString())
        data = await self.check_answer(request, personal_messages_pb2.GetMessagesResponse)
        self.assertEqual(data.total, 3)
        self.assertEqual(list(data.messages), await load_protocol_messages_by_bodies(['message 4', 'message 3']))
class GetConversationTests(Mixin, helpers.BaseTests):
    """Tests for the /get-conversation handler: a conversation is symmetric
    in (account_id, partner_id); newer messages come first."""
    async def prepair_data(self):
        """Seed five messages; 1<->2 appear together in messages 1, 4 and 5."""
        request = await self.client.post('/send-message', data=personal_messages_pb2.SendMessageRequest(sender_id=1,
                                                                                                        recipients_ids=[2, 3],
                                                                                                        body='a message 1').SerializeToString())
        await self.check_answer(request, personal_messages_pb2.SendMessageResponse)
        request = await self.client.post('/send-message', data=personal_messages_pb2.SendMessageRequest(sender_id=2,
                                                                                                        recipients_ids=[3],
                                                                                                        body='a message 2').SerializeToString())
        await self.check_answer(request, personal_messages_pb2.SendMessageResponse)
        request = await self.client.post('/send-message', data=personal_messages_pb2.SendMessageRequest(sender_id=3,
                                                                                                        recipients_ids=[1, 2],
                                                                                                        body='b message 3').SerializeToString())
        await self.check_answer(request, personal_messages_pb2.SendMessageResponse)
        request = await self.client.post('/send-message', data=personal_messages_pb2.SendMessageRequest(sender_id=2,
                                                                                                        recipients_ids=[1, 4],
                                                                                                        body='b message 4').SerializeToString())
        await self.check_answer(request, personal_messages_pb2.SendMessageResponse)
        request = await self.client.post('/send-message', data=personal_messages_pb2.SendMessageRequest(sender_id=1,
                                                                                                        recipients_ids=[2, 4],
                                                                                                        body='b message 5').SerializeToString())
        await self.check_answer(request, personal_messages_pb2.SendMessageResponse)
    @test_utils.unittest_run_loop
    async def test_no_messages(self):
        await self.prepair_data()
        # Unknown account: empty conversation.
        request = await self.client.post('/get-conversation', data=personal_messages_pb2.GetConversationRequest(account_id=666,
                                                                                                                partner_id=1,
                                                                                                                text=None,
                                                                                                                offset=None,
                                                                                                                limit=None).SerializeToString())
        data = await self.check_answer(request, personal_messages_pb2.GetConversationResponse)
        self.assertEqual(data.total, 0)
        self.assertEqual(list(data.messages), [])
    @test_utils.unittest_run_loop
    async def test_reversed(self):
        await self.prepair_data()
        # Swapping account and partner must yield the same conversation.
        request = await self.client.post('/get-conversation', data=personal_messages_pb2.GetConversationRequest(account_id=1,
                                                                                                                partner_id=2,
                                                                                                                text=None,
                                                                                                                offset=None,
                                                                                                                limit=None).SerializeToString())
        data = await self.check_answer(request, personal_messages_pb2.GetConversationResponse)
        self.assertEqual(data.total, 3)
        self.assertEqual(list(data.messages), await load_protocol_messages_by_bodies(['message 5', 'message 4', 'message 1']))
        request = await self.client.post('/get-conversation', data=personal_messages_pb2.GetConversationRequest(account_id=2,
                                                                                                                partner_id=1,
                                                                                                                text=None,
                                                                                                                offset=None,
                                                                                                                limit=None).SerializeToString())
        data = await self.check_answer(request, personal_messages_pb2.GetConversationResponse)
        self.assertEqual(data.total, 3)
        self.assertEqual(list(data.messages), await load_protocol_messages_by_bodies(['message 5', 'message 4', 'message 1']))
    @test_utils.unittest_run_loop
    async def test_text_filter__single(self):
        await self.prepair_data()
        # '4' only matches 'b message 4'.
        request = await self.client.post('/get-conversation', data=personal_messages_pb2.GetConversationRequest(account_id=2,
                                                                                                                partner_id=1,
                                                                                                                text='4',
                                                                                                                offset=None,
                                                                                                                limit=None).SerializeToString())
        data = await self.check_answer(request, personal_messages_pb2.GetConversationResponse)
        self.assertEqual(data.total, 1)
        self.assertEqual(list(data.messages), await load_protocol_messages_by_bodies(['message 4']))
    @test_utils.unittest_run_loop
    async def test_text_filter__multiple(self):
        await self.prepair_data()
        # 'b message' matches messages 4 and 5 within the 1<->2 conversation.
        request = await self.client.post('/get-conversation', data=personal_messages_pb2.GetConversationRequest(account_id=2,
                                                                                                                partner_id=1,
                                                                                                                text='b message',
                                                                                                                offset=None,
                                                                                                                limit=None).SerializeToString())
        data = await self.check_answer(request, personal_messages_pb2.GetConversationResponse)
        self.assertEqual(data.total, 2)
        self.assertEqual(list(data.messages), await load_protocol_messages_by_bodies(['message 5', 'message 4']))
    @test_utils.unittest_run_loop
    async def test_offset(self):
        await self.prepair_data()
        # Offset skips the newest message; total still counts all matches.
        request = await self.client.post('/get-conversation', data=personal_messages_pb2.GetConversationRequest(account_id=2,
                                                                                                                partner_id=1,
                                                                                                                text=None,
                                                                                                                offset=1,
                                                                                                                limit=None).SerializeToString())
        data = await self.check_answer(request, personal_messages_pb2.GetConversationResponse)
        self.assertEqual(data.total, 3)
        self.assertEqual(list(data.messages), await load_protocol_messages_by_bodies(['message 4', 'message 1']))
    @test_utils.unittest_run_loop
    async def test_limit(self):
        await self.prepair_data()
        # Limit truncates the page; total still counts all matches.
        request = await self.client.post('/get-conversation', data=personal_messages_pb2.GetConversationRequest(account_id=2,
                                                                                                                partner_id=1,
                                                                                                                text=None,
                                                                                                                offset=None,
                                                                                                                limit=2).SerializeToString())
        data = await self.check_answer(request, personal_messages_pb2.GetConversationResponse)
        self.assertEqual(data.total, 3)
        self.assertEqual(list(data.messages), await load_protocol_messages_by_bodies(['message 5', 'message 4']))
class GetMessageTests(Mixin, helpers.BaseTests):
    """Tests for the /get-message handler."""

    async def prepair_data(self):
        # Seed a single message 1 -> [2] and remember its id.
        request = await self.client.post('/send-message',
                                         data=personal_messages_pb2.SendMessageRequest(sender_id=1,
                                                                                       recipients_ids=[2],
                                                                                       body='a message 1').SerializeToString())
        data = await self.check_answer(request, personal_messages_pb2.SendMessageResponse)
        self.message_id = data.message_id

    async def _request_message(self, account_id):
        # Fetch the seeded message on behalf of the given account.
        request = await self.client.post('/get-message',
                                         data=personal_messages_pb2.GetMessageRequest(account_id=account_id,
                                                                                      message_id=self.message_id).SerializeToString())
        return await self.check_answer(request, personal_messages_pb2.GetMessageResponse)

    @test_utils.unittest_run_loop
    async def test_sender(self):
        await self.prepair_data()
        data = await self._request_message(account_id=1)
        self.assertEqual(data.message.body, 'a message 1')

    @test_utils.unittest_run_loop
    async def test_recipient(self):
        await self.prepair_data()
        data = await self._request_message(account_id=2)
        self.assertEqual(data.message.body, 'a message 1')

    @test_utils.unittest_run_loop
    async def test_no_relation(self):
        await self.prepair_data()
        # An uninvolved account must not see the message at all.
        data = await self._request_message(account_id=3)
        self.assertFalse(data.HasField('message'))
| 61.694079
| 176
| 0.524713
| 3,328
| 37,510
| 5.715445
| 0.049279
| 0.066716
| 0.11787
| 0.069397
| 0.928185
| 0.912781
| 0.902844
| 0.900794
| 0.900794
| 0.89233
| 0
| 0.017611
| 0.389949
| 37,510
| 607
| 177
| 61.795717
| 0.813617
| 0
| 0
| 0.732394
| 0
| 0.011737
| 0.084913
| 0
| 0
| 0
| 0
| 0
| 0.119718
| 1
| 0
| false
| 0
| 0.028169
| 0
| 0.061033
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a33f744b88675ffa9c34624a74bb034985698408
| 146
|
py
|
Python
|
creacion_de_aplicaciones/streamlit_app/views/__init__.py
|
soytupadrrre/Master_Python_Eip
|
c4774209d7dd15584233fe5d4cc01b1434c9316b
|
[
"MIT"
] | null | null | null |
creacion_de_aplicaciones/streamlit_app/views/__init__.py
|
soytupadrrre/Master_Python_Eip
|
c4774209d7dd15584233fe5d4cc01b1434c9316b
|
[
"MIT"
] | null | null | null |
creacion_de_aplicaciones/streamlit_app/views/__init__.py
|
soytupadrrre/Master_Python_Eip
|
c4774209d7dd15584233fe5d4cc01b1434c9316b
|
[
"MIT"
] | null | null | null |
from views import eliminar
from views import actualizar
from views import ver_datos
from views import machine_learning
from views import insertar
| 24.333333
| 34
| 0.863014
| 22
| 146
| 5.636364
| 0.454545
| 0.362903
| 0.604839
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136986
| 146
| 5
| 35
| 29.2
| 0.984127
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6e75cf8202c8fd4a99f4c9bb04a8442cf6dcfda4
| 101
|
py
|
Python
|
boa3_test/test_sc/bytes_test/BytesToStrWithBuiltin.py
|
hal0x2328/neo3-boa
|
6825a3533384cb01660773050719402a9703065b
|
[
"Apache-2.0"
] | 25
|
2020-07-22T19:37:43.000Z
|
2022-03-08T03:23:55.000Z
|
boa3_test/test_sc/bytes_test/BytesToStrWithBuiltin.py
|
hal0x2328/neo3-boa
|
6825a3533384cb01660773050719402a9703065b
|
[
"Apache-2.0"
] | 419
|
2020-04-23T17:48:14.000Z
|
2022-03-31T13:17:45.000Z
|
boa3_test/test_sc/bytes_test/BytesToStrWithBuiltin.py
|
hal0x2328/neo3-boa
|
6825a3533384cb01660773050719402a9703065b
|
[
"Apache-2.0"
] | 15
|
2020-05-21T21:54:24.000Z
|
2021-11-18T06:17:24.000Z
|
from boa3.builtin import public
@public
def bytes_to_str() -> str:
    """Neo3-boa smart-contract entry point: convert b'123' to str.

    NOTE(review): `bytes.to_str` exists only in the boa3 compiler's builtin
    set, not in CPython — this file is compiled to NeoVM bytecode, not
    executed directly by Python.
    """
    return bytes.to_str(b'123')
| 14.428571
| 31
| 0.712871
| 17
| 101
| 4.058824
| 0.705882
| 0.202899
| 0.289855
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.047619
| 0.168317
| 101
| 6
| 32
| 16.833333
| 0.77381
| 0
| 0
| 0
| 0
| 0
| 0.029703
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
6e8eb8bd74ffc7f0d7a567dc9f45c59172107357
| 13,942
|
py
|
Python
|
src/utils.py
|
fofferhorn/SC2ML
|
436e9b9883c78505cfecaaf1fe1abe9ec630dd88
|
[
"MIT"
] | null | null | null |
src/utils.py
|
fofferhorn/SC2ML
|
436e9b9883c78505cfecaaf1fe1abe9ec630dd88
|
[
"MIT"
] | null | null | null |
src/utils.py
|
fofferhorn/SC2ML
|
436e9b9883c78505cfecaaf1fe1abe9ec630dd88
|
[
"MIT"
] | null | null | null |
import numpy as np
import os
import math
from tensorflow.keras.utils import normalize
from tensorflow.keras import metrics
def load_data_with_game_crossover(data_path, train, validation, test, seed = None, maxes_path = None, normalized_data_path = None):
    """Load every data point from the .npy replay files in `data_path`,
    pool them across games, normalize, and split into train/validation/test.

    Args:
        data_path: directory containing one .npy file per replay.
        train, validation: split fractions of the pooled data points.
        test: kept for interface compatibility but never read — the test
            split is whatever remains after train and validation.
        seed: if given, seeds numpy's RNG and shuffles the pooled points
            (so points of one game may land in different splits).
        maxes_path: optional path to precomputed per-feature maxima used
            by min-max normalization.
        normalized_data_path: optional .npy file with already-normalized
            features; when given, normalization is skipped and it is loaded.

    Returns:
        Six numpy arrays: train_data, train_labels, validation_data,
        validation_labels, test_data, test_labels. Each 248-wide data
        point is split into 194 feature and 54 label columns.
    """
    print('Loading data...')

    # Make list of the paths to all the replays
    data_paths = []
    for file in os.listdir(data_path):
        file_path = os.path.join(data_path, file)
        if os.path.isfile(file_path) and file.lower().endswith('.npy'):
            data_paths.append(file_path)

    # Pool the data points of every game together (crossover between games).
    data_and_labels = []
    for path in data_paths:
        for data_point in np.load(path):
            data_and_labels.append(data_point)

    if seed is not None:
        np.random.seed(seed)
        np.random.shuffle(data_and_labels)

    data = []
    labels = []
    for data_point in data_and_labels:  # (removed a no-op [:len(...)] slice)
        if len(data_point) == 248:  # skip malformed rows
            data.append(data_point[:-54])
            labels.append(data_point[-54:])

    print('Data loaded.')
    print('Normalizing data...')
    if normalized_data_path is not None:
        data = np.load(normalized_data_path)
    else:
        # BUGFIX: min_max_norm builds and returns a new normalized list —
        # it does not mutate its argument — so the result must be assigned
        # back, otherwise `data` stays unnormalized.
        data = min_max_norm(data, maxes_path)
    print('Data normalized.')

    print('Splitting data...')
    train_end = int(len(data) * train)
    validation_end = int(len(data) * (train + validation))

    train_data = []
    train_labels = []
    for index in range(train_end):
        train_data.append(data[index])
        train_labels.append(labels[index])

    validation_data = []
    validation_labels = []
    for index in range(train_end, validation_end):
        validation_data.append(data[index])
        validation_labels.append(labels[index])

    test_data = []
    test_labels = []
    for index in range(validation_end, len(data)):
        test_data.append(data[index])
        test_labels.append(labels[index])
    print('Data split.')

    print('_____________________________________________________________________________________')
    print('Data meta data')
    print('{:20s} {:7d}'.format('# of games', len(data_paths)))
    print('{:20s} {:7d}'.format('# of data points', len(data_and_labels)))
    print('Split seed: ' + str(seed))
    print('-------------------------------------------------------------------------------------')
    print('| {:25s} | {:25s} | {:25s} |'.format('Data', '# data points', '# data point dimensions'))
    print('|---------------------------|---------------------------|---------------------------|')
    print('| {:25s} | {:25d} | {:25d} |'.format('train_data shape', len(train_data), len(train_data[0])))
    print('| {:25s} | {:25d} | {:25d} |'.format('train_labels shape', len(train_labels), len(train_labels[0])))
    print('| {:25s} | {:25d} | {:25d} |'.format('validation_data shape', len(validation_data), len(validation_data[0])))
    print('| {:25s} | {:25d} | {:25d} |'.format('validation_labels shape', len(validation_labels), len(validation_labels[0])))
    print('| {:25s} | {:25d} | {:25d} |'.format('test_data shape', len(test_data), len(test_data[0])))
    print('| {:25s} | {:25d} | {:25d} |'.format('test_labels shape', len(test_labels), len(test_labels[0])))
    print('-------------------------------------------------------------------------------------')

    return np.array(train_data), np.array(train_labels), np.array(validation_data), np.array(validation_labels), np.array(test_data), np.array(test_labels)
def load_data_without_game_crossover(data_path, train, validation, test, seed = None, maxes_path = None, normalized_data_path = None):
    """Load replay data and split it WITHOUT mixing one game across splits:
    whole games (files) are assigned to train/validation/test.

    Args:
        data_path: directory of per-replay .npy files.
        train, validation: split fractions applied at *file* granularity.
        test: accepted for interface symmetry but never read (the test
            split is the remainder).
        seed: if given, seeds numpy's RNG and shuffles the file list.
        maxes_path: only referenced by commented-out min-max code below —
            currently unused.
        normalized_data_path: optional pre-normalized .npy to load instead
            of performing L2 normalization.

    Returns:
        Six numpy arrays: train/validation/test features (194 columns)
        and labels (54 columns).
    """
    print('Loading data...')
    # Make list of the paths to all the replays
    data_paths = []
    for file in os.listdir(data_path):
        file_path = os.path.join(data_path, file)
        if os.path.isfile(file_path) and file.lower().endswith('.npy'):
            data_paths.append(file_path)
    # Shuffle at game granularity so each game's points stay in one split.
    if seed is not None:
        np.random.seed(seed)
        np.random.shuffle(data_paths)
    train_end = int(len(data_paths) * train)
    validation_end = int(len(data_paths) * (train + validation))
    train_paths = []
    for index in range(train_end):
        train_paths.append(data_paths[index])
    validation_paths = []
    for index in range(train_end, validation_end):
        validation_paths.append(data_paths[index])
    # First pass: count the valid (248-wide) points per split so the split
    # boundaries in the flat `data` list can be computed.
    amount_train_data_points = 0
    for path in train_paths:
        for data_point in np.load(path):
            if len(data_point) == 248:
                amount_train_data_points += 1
    amount_validation_data_points = 0
    for path in validation_paths:
        for data_point in np.load(path):
            if len(data_point) == 248:
                amount_validation_data_points += 1
    # Second pass: load all points, splitting features (194) from labels (54).
    data = []
    labels = []
    for path in data_paths:
        for data_point in np.load(path):
            if len(data_point) == 248:
                data.append(data_point[:-54])
                labels.append(data_point[-54:])
    print('Data loaded.')
    if normalized_data_path is not None:
        print('Loading normalized data')
        data = np.load(normalized_data_path)
        print('Loaded normalized data')
    else:
        print('Performing L2 normalization...')
        data = normalize(data, axis=-1, order=2)
        print('L2 normalization done.')
        # print('Performing min-max normalization...')
        # min_max_norm(data, maxes_path)
        # print('Min-max normalization done.')
    print('Splitting data...')
    train_end = amount_train_data_points
    validation_end = amount_train_data_points + amount_validation_data_points
    train_data = []
    train_labels = []
    for index in range(train_end):
        train_data.append(data[index])
        train_labels.append(labels[index])
    validation_data = []
    validation_labels = []
    for index in range(train_end, validation_end):
        validation_data.append(data[index])
        validation_labels.append(labels[index])
    test_data = []
    test_labels = []
    for index in range(validation_end, len(data)):
        test_data.append(data[index])
        test_labels.append(labels[index])
    print('Data split.')
    print('_____________________________________________________________________________________')
    print('Data meta data')
    print('{:20s} {:7d}'.format('# of games', len(data_paths)))
    print('{:20s} {:7d}'.format('# of data points', len(data)))
    print('Split seed: ' + str(seed))
    print('-------------------------------------------------------------------------------------')
    print('| {:25s} | {:25s} | {:25s} |'.format('Data', '# data points', '# data point dimensions'))
    print('|---------------------------|---------------------------|---------------------------|')
    print('| {:25s} | {:25d} | {:25d} |'.format('train_data shape', len(train_data), len(train_data[0])))
    print('| {:25s} | {:25d} | {:25d} |'.format('train_labels shape', len(train_labels), len(train_labels[0])))
    print('| {:25s} | {:25d} | {:25d} |'.format('validation_data shape', len(validation_data), len(validation_data[0])))
    print('| {:25s} | {:25d} | {:25d} |'.format('validation_labels shape', len(validation_labels), len(validation_labels[0])))
    print('| {:25s} | {:25d} | {:25d} |'.format('test_data shape', len(test_data), len(test_data[0])))
    print('| {:25s} | {:25d} | {:25d} |'.format('test_labels shape', len(test_labels), len(test_labels[0])))
    print('-------------------------------------------------------------------------------------')
    return np.array(train_data), np.array(train_labels), np.array(validation_data), np.array(validation_labels), np.array(test_data), np.array(test_labels)
def load_data_part_of_game(data_path, train, validation, test, time_start, time_end, maxes_path = None, seed = None, ):
    """Like load_data_without_game_crossover, but keep only the data points
    whose first feature (presumably a game-time field — TODO confirm) lies
    in [time_start, time_end).

    Args:
        data_path: directory of per-replay .npy files.
        train, validation: split fractions applied at file granularity.
        test: accepted but never read (the test split is the remainder).
        time_start, time_end: half-open filter range on data_point[0].
        maxes_path: only referenced by commented-out min-max code — unused.
        seed: if given, seeds numpy's RNG and shuffles the file list.

    Returns:
        Six numpy arrays (train/validation/test features and labels), or
        six empty arrays when no point survives the time filter.
    """
    print('Loading data...')
    # Make list of the paths to all the replays
    data_paths = []
    for file in os.listdir(data_path):
        file_path = os.path.join(data_path, file)
        if os.path.isfile(file_path) and file.lower().endswith('.npy'):
            data_paths.append(file_path)
    if seed is not None:
        np.random.seed(seed)
        np.random.shuffle(data_paths)
    train_end = int(len(data_paths) * train)
    validation_end = int(len(data_paths) * (train + validation))
    train_paths = []
    for index in range(train_end):
        train_paths.append(data_paths[index])
    validation_paths = []
    for index in range(train_end, validation_end):
        validation_paths.append(data_paths[index])
    # Count surviving points per split so the flat-list boundaries are known.
    amount_train_data_points = 0
    for path in train_paths:
        for data_point in np.load(path):
            if len(data_point) == 248 and time_start <= data_point[0] < time_end:
                amount_train_data_points += 1
    amount_validation_data_points = 0
    for path in validation_paths:
        for data_point in np.load(path):
            if len(data_point) == 248 and time_start <= data_point[0] < time_end:
                amount_validation_data_points += 1
    data = []
    labels = []
    for path in data_paths:
        for data_point in np.load(path):
            if len(data_point) == 248 and time_start <= data_point[0] < time_end:
                data.append(data_point[:-54])
                labels.append(data_point[-54:])
    print('Data loaded.')
    # Nothing in range: return six empty arrays instead of crashing below.
    if len(data) == 0:
        return np.array([]), np.array([]), np.array([]), np.array([]), np.array([]), np.array([]),
    print('Performing L2 normalization...')
    data = normalize(data, axis=-1, order=2)
    print('L2 normalization done.')
    # print('Performing min-max normalization...')
    # min_max_norm(data, maxes_path)
    # print('Min-max normalization done.')
    print('Splitting data...')
    train_end = amount_train_data_points
    validation_end = amount_train_data_points + amount_validation_data_points
    train_data = []
    train_labels = []
    for index in range(train_end):
        train_data.append(data[index])
        train_labels.append(labels[index])
    validation_data = []
    validation_labels = []
    for index in range(train_end, validation_end):
        validation_data.append(data[index])
        validation_labels.append(labels[index])
    test_data = []
    test_labels = []
    for index in range(validation_end, len(data)):
        test_data.append(data[index])
        test_labels.append(labels[index])
    print('Data split.')
    print('_____________________________________________________________________________________')
    print('Data meta data')
    print('{:20s} {:7d}'.format('# of games', len(data_paths)))
    print('{:20s} {:7d}'.format('# of data points', len(data)))
    print('Split seed: ' + str(seed))
    print('-------------------------------------------------------------------------------------')
    print('| {:25s} | {:25s} | {:25s} |'.format('Data', '# data points', '# data point dimensions'))
    print('|---------------------------|---------------------------|---------------------------|')
    print('| {:25s} | {:25d} | {:25d} |'.format('train_data shape', len(train_data), len(train_data[0])))
    print('| {:25s} | {:25d} | {:25d} |'.format('train_labels shape', len(train_labels), len(train_labels[0])))
    print('| {:25s} | {:25d} | {:25d} |'.format('validation_data shape', len(validation_data), len(validation_data[0])))
    print('| {:25s} | {:25d} | {:25d} |'.format('validation_labels shape', len(validation_labels), len(validation_labels[0])))
    print('| {:25s} | {:25d} | {:25d} |'.format('test_data shape', len(test_data), len(test_data[0])))
    print('| {:25s} | {:25d} | {:25d} |'.format('test_labels shape', len(test_labels), len(test_labels[0])))
    print('-------------------------------------------------------------------------------------')
    return np.array(train_data), np.array(train_labels), np.array(validation_data), np.array(validation_labels), np.array(test_data), np.array(test_labels)
def min_max_norm(data, maxes_path = None):
    """Min-max normalize `data` column-wise and return a new list of rows.

    Args:
        data: 2-D sequence of numeric rows, all of the same width.
        maxes_path: optional path to a text file of per-column maxima (as
            written by np.savetxt). When None, the maxima are computed from
            `data` and saved to 'maxes.txt' in the working directory.

    Returns:
        A new list of normalized rows. Columns whose maximum is 0, and
        values that normalize to NaN, are emitted as 0.0. As a side effect
        the result is also saved to 'normalized_data.npy'.
    """
    if maxes_path is None:
        print('Finding maxes for normalization...')
        maxes = []
        for i in range(len(data[0])):
            col_max = 0.0  # renamed from `max` to stop shadowing the builtin
            for j in range(len(data)):
                if data[j][i] > col_max:
                    col_max = data[j][i]
            maxes.append(col_max)
        # (a discarded `np.array(maxes)` call was removed; savetxt takes the
        # plain list directly)
        np.savetxt('maxes.txt', maxes)
        print('Maxes found for normalization.')
    else:
        print('Loading maxes for normalization...')
        maxes = np.loadtxt(maxes_path)
        print('Maxes loaded for normalization.')

    print('Normalizing data...')
    norm_data = []
    for i in range(len(data)):
        norm_point = []
        for j in range(len(data[i])):
            if maxes[j] == 0.0:
                # Avoid division by zero for all-zero columns.
                norm_point.append(0.0)
            else:
                norm_value = data[i][j] / maxes[j]
                # NaN inputs would otherwise poison the normalized output.
                if math.isnan(norm_value):
                    norm_point.append(0.0)
                else:
                    norm_point.append(norm_value)
        norm_data.append(norm_point)

    np.save('normalized_data.npy', norm_data)
    print('Normalized data.')
    return norm_data
def top_3_categorical_accuracy(y_true, y_pred):
    """Keras metric: fraction of samples whose true class is in the model's
    top-3 predictions (named wrapper around top_k_categorical_accuracy)."""
    return metrics.top_k_categorical_accuracy(y_true, y_pred, k=3)
def top_1_categorical_accuracy(y_true, y_pred):
    """Keras metric: plain top-1 accuracy expressed via
    top_k_categorical_accuracy with k=1."""
    return metrics.top_k_categorical_accuracy(y_true, y_pred, k=1)
if __name__ == "__main__":
    # Sanity-check the pipeline: run a split, then validate the persisted
    # normalized data for row width and value range.
    load_data_without_game_crossover('extracted_actions', 0.7, 0.15, 0.15)
    data = np.load('normalized_data.npy')
    for i in range(len(data)):
        if len(data[i]) != 194:
            print('LENGTH ERROR AT ' + str(i) + ' ACTUAL LENGTH IS ' + str(len(data[i])))
        # BUGFIX: `has_printed` was reset inside the inner loop and tested
        # before ever being set, so the range check below could never fire.
        # Initialize it once per row and report at most one range error per row.
        has_printed = False
        for j in range(len(data[i])):
            if not has_printed and (0.0 > data[i][j] or data[i][j] > 1.0):
                print('RANGE ERROR IN ' + str(data[i]))
                has_printed = True
| 40.88563
| 155
| 0.590303
| 1,721
| 13,942
| 4.421267
| 0.075537
| 0.029439
| 0.026022
| 0.033119
| 0.866211
| 0.85136
| 0.824156
| 0.801551
| 0.801551
| 0.801551
| 0
| 0.022131
| 0.212452
| 13,942
| 341
| 156
| 40.885631
| 0.670856
| 0.025176
| 0
| 0.778598
| 0
| 0
| 0.215874
| 0.075099
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02214
| false
| 0
| 0.01845
| 0.00738
| 0.066421
| 0.280443
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6ea50a348130b073638c97bc5ea3f255f830e19b
| 101
|
py
|
Python
|
pymdmix_solvent/__init__.py
|
mdmix4/pymdmix-solvent
|
043c193d43c59675e5ba13ff4d338de668565c35
|
[
"MIT"
] | null | null | null |
pymdmix_solvent/__init__.py
|
mdmix4/pymdmix-solvent
|
043c193d43c59675e5ba13ff4d338de668565c35
|
[
"MIT"
] | 1
|
2021-02-22T19:17:31.000Z
|
2021-02-22T19:17:31.000Z
|
pymdmix_solvent/__init__.py
|
mdmix4/pymdmix-solvent
|
043c193d43c59675e5ba13ff4d338de668565c35
|
[
"MIT"
] | null | null | null |
from pymdmix_solvent.solvent import SolventPlugin
def get_plugin_class():
    """Entry point for the pymdmix plugin loader: return the plugin class
    this package provides."""
    return SolventPlugin
| 16.833333
| 49
| 0.821782
| 12
| 101
| 6.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138614
| 101
| 5
| 50
| 20.2
| 0.91954
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
42c5df270b33af1e6a8a0d273ce863611e883f09
| 61
|
py
|
Python
|
beunique/__init__.py
|
Dara-Ekanth/todo_custom_package
|
f1585db03a1977dcdadc60d196b90f978f11ee27
|
[
"MIT"
] | null | null | null |
beunique/__init__.py
|
Dara-Ekanth/todo_custom_package
|
f1585db03a1977dcdadc60d196b90f978f11ee27
|
[
"MIT"
] | null | null | null |
beunique/__init__.py
|
Dara-Ekanth/todo_custom_package
|
f1585db03a1977dcdadc60d196b90f978f11ee27
|
[
"MIT"
] | null | null | null |
from .first import data_collection
from .first import repeat
| 20.333333
| 34
| 0.836066
| 9
| 61
| 5.555556
| 0.666667
| 0.36
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131148
| 61
| 2
| 35
| 30.5
| 0.943396
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6e1afb9beb3d2bb1ebdb2f74ca9450877c15fc0b
| 185
|
py
|
Python
|
tests/blocks/test_empty.py
|
cariad/comprehemd
|
668c9cb5b757f0fd09ffb8e73f70d432294ad394
|
[
"MIT"
] | null | null | null |
tests/blocks/test_empty.py
|
cariad/comprehemd
|
668c9cb5b757f0fd09ffb8e73f70d432294ad394
|
[
"MIT"
] | 4
|
2021-11-27T09:00:53.000Z
|
2021-11-30T16:25:45.000Z
|
tests/blocks/test_empty.py
|
cariad/comprehemd
|
668c9cb5b757f0fd09ffb8e73f70d432294ad394
|
[
"MIT"
] | null | null | null |
from comprehemd.blocks import EmptyBlock
def test_repr() -> None:
    # repr() of an EmptyBlock is exactly the class name.
    assert repr(EmptyBlock()) == "EmptyBlock"
def test_str() -> None:
    # str() of an EmptyBlock matches its repr.
    assert str(EmptyBlock()) == "EmptyBlock"
| 18.5
| 45
| 0.681081
| 21
| 185
| 5.904762
| 0.52381
| 0.209677
| 0.274194
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172973
| 185
| 9
| 46
| 20.555556
| 0.810458
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.4
| true
| 0
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
288600d0aa1f0513f79d228665953ddf9e00a33c
| 45,692
|
py
|
Python
|
QBank/qbank/views.py
|
aboekelh/OpenDSA
|
e1ab8118da59c813e1196b240d5c9a4851de2626
|
[
"MIT"
] | 1
|
2019-03-08T20:08:12.000Z
|
2019-03-08T20:08:12.000Z
|
QBank/qbank/views.py
|
aboekelh/OpenDSA
|
e1ab8118da59c813e1196b240d5c9a4851de2626
|
[
"MIT"
] | null | null | null |
QBank/qbank/views.py
|
aboekelh/OpenDSA
|
e1ab8118da59c813e1196b240d5c9a4851de2626
|
[
"MIT"
] | null | null | null |
# Create your views here.
from django.shortcuts import render_to_response, get_object_or_404
from django.core.context_processors import csrf
from django.utils.encoding import smart_str
from django.template import RequestContext, loader,Context
from django.forms.formsets import formset_factory, BaseFormSet
from django.core.management.base import BaseCommand, CommandError
from django.forms.models import inlineformset_factory, BaseInlineFormSet
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth.decorators import login_required
from qbank.forms import *
from qbank.models import *
import mimetypes
from django.core.servers.basehttp import FileWrapper
import csv
import os
#@login_required
def index(request):
    """Add-problem view: on POST, validate and save a Problem together with
    its template, answer, hints, variables, scripts and choices; otherwise
    render empty forms on add.html."""
    # This class is used to make empty formset forms required
    # NOTE(review): `empty_permitted = True` actually *allows* forms to stay
    # empty — the opposite of the comment above. Confirm the intended flag.
    class RequiredFormSet(BaseFormSet):
        def __init__(self, *args, **kwargs):
            super(RequiredFormSet, self).__init__(*args, **kwargs)
            for form in self.forms:
                form.empty_permitted = True

    # Up to 10 dynamically-added forms per related model.
    HintFormSet = formset_factory(HintForm, max_num = 10, formset = RequiredFormSet)
    VariableFormSet = formset_factory(VariableForm, max_num = 10, formset = RequiredFormSet)
    ChoiceFormSet = formset_factory(ChoiceForm, max_num = 10, formset = RequiredFormSet)
    ScriptFormSet = formset_factory(ScriptForm, max_num = 10, formset = RequiredFormSet)

    if request.method == 'POST': # If the form has been submitted...
        problem_form = ProblemForm(request.POST)
        problem_template_form = ProblemTemplateForm(request.POST, prefix='template')
        answer_form = AnswerForm(request.POST, prefix='answer')
        hint_formset = HintFormSet(request.POST, request.FILES, prefix='hints')
        variable_formset = VariableFormSet(request.POST, request.FILES, prefix='variables')
        choice_formset = ChoiceFormSet(request.POST, request.FILES, prefix='choices')
        script_formset = ScriptFormSet(request.POST, request.FILES, prefix='scripts')
        if problem_form.is_valid() and problem_template_form.is_valid() and choice_formset.is_valid() and hint_formset.is_valid() and variable_formset.is_valid() and answer_form.is_valid() and script_formset.is_valid():
            # Save the parent Problem first so the children can point at it.
            problem = problem_form.save()
            problem_template = problem_template_form.save(commit = False)
            problem_template.problem = problem
            problem_template.save()
            answer = answer_form.save(commit = False)
            answer.problem = problem
            answer.save()
            for form in hint_formset.forms:
                hint = form.save(commit = False)
                hint.problem = problem
                hint.save()
            for form in variable_formset.forms:
                variable = form.save(commit = False)
                variable.problem = problem
                variable.save()
            for form in script_formset.forms:
                script = form.save(commit = False)
                script.problem = problem
                script.save()
            for form in choice_formset.forms:
                choice = form.save(commit = False)
                choice.problem = problem
                choice.save() # Redirect to a 'success' page
            return HttpResponseRedirect('/qbank/problems/')
    else:
        # GET: fresh, empty forms. (An invalid POST falls through to the
        # render below with the bound forms so errors are displayed.)
        problem_form = ProblemForm()
        choice_formset = ChoiceFormSet(prefix='choices')
        problem_template_form = ProblemTemplateForm(prefix='template')
        answer_form = AnswerForm(prefix='answer')
        script_formset = ScriptFormSet(prefix='scripts')
        variable_formset = VariableFormSet(prefix='variables')
        hint_formset = HintFormSet(prefix='hints')
    c = {'problem_form' : problem_form,
         'choice_formset' : choice_formset,
         'problem_template_form' : problem_template_form,
         'answer_form': answer_form,
         'variable_formset' : variable_formset,
         'script_formset' : script_formset,
         'hint_formset' : hint_formset,
         }
    c.update(csrf(request))
    return render_to_response('add.html', c)
#@login_required
def edit(request, problem_id):
    """Edit view for an existing Problem: builds inline formsets capped at
    the number of existing related objects and saves submitted changes."""
    problem = get_object_or_404(Problem, id=problem_id)

    # Lets dynamically-added blank forms pass validation.
    class RequiredFormSet(BaseFormSet):
        def __init__(self, *args, **kwargs):
            super(RequiredFormSet, self).__init__(*args, **kwargs)
            for form in self.forms:
                form.empty_permitted = True

    # Inline formset with the delete checkbox disabled.
    class MyInline(BaseInlineFormSet):
        def __init__(self, *args, **kwargs):
            super(MyInline, self).__init__(*args, **kwargs)
            self.can_delete = False

    # Cap each inline formset at the count of already-existing objects.
    maxpt = max(0, len(ProblemTemplate.objects.filter(problem=problem)))
    ProblemTemplateInlineFormSet = inlineformset_factory(Problem, ProblemTemplate, max_num=maxpt)
    maxa = max(0, len(Answer.objects.filter(problem=problem)))
    AnswerInlineFormSet = inlineformset_factory(Problem, Answer, max_num =maxa)
    maxv = max(0, len(Variable.objects.filter(problem=problem)))
    VariableInlineFormSet = inlineformset_factory(Problem, Variable, max_num=maxv)
    maxc = max(0, len(CommonIntroduction.objects.filter(problem=problem)))
    CommonIntroductionFormSet = inlineformset_factory(Problem, CommonIntroduction, max_num =maxc )
    maxch = max(0, len(Choice.objects.filter(problem=problem)))
    ChoiceInlineFormSet = inlineformset_factory(Problem, Choice, max_num=maxch, formset=MyInline)
    maxh = max(0, len(Hint.objects.filter(problem=problem)))
    HintInlineFormSet = inlineformset_factory(Problem, Hint, max_num=maxh)
    maxs = max(0, len(Script.objects.filter(problem=problem)))
    ScriptInlineFormSet = inlineformset_factory(Problem, Script, max_num=maxs)

    if request.method == 'POST':
        problem_form =ProblemForm(request.POST, instance=problem)
        problem_template_formset = ProblemTemplateInlineFormSet(request.POST,request.FILES, instance=problem, prefix='templates')
        # NOTE(review): the POST branch constructs CommonIntroductionForm
        # while the GET branch below uses CommonIntroductionFormSet — likely
        # a bug; confirm which one the template expects.
        common_introduction_formset = CommonIntroductionForm(request.POST, request.FILES, prefix='common_intro', instance =problem)
        answer_formset = AnswerInlineFormSet(request.POST, instance=problem, prefix='answer')
        hint_formset = HintInlineFormSet(request.POST, request.FILES, instance=problem, prefix='hints')
        choice_formset = ChoiceInlineFormSet(request.POST, request.FILES, instance=problem, prefix='choices')
        script_formset = ScriptInlineFormSet(request.POST, request.FILES, instance=problem, prefix='scripts')
        variable_formset = VariableInlineFormSet(request.POST, request.FILES,instance=problem, prefix='variables')
        if problem_form.is_valid() and variable_formset.is_valid() and problem_template_formset.is_valid() and choice_formset.is_valid() and hint_formset.is_valid() and answer_formset.is_valid() and script_formset.is_valid() and common_introduction_formset.is_valid() :
            problem = problem_form.save()
            # NOTE(review): the four save(commit = False) results below are
            # discarded, so answer/introduction/template/variable edits are
            # never written to the database — confirm whether plain .save()
            # was intended.
            answer_formset.save(commit = False)
            common_introduction_formset.save(commit = False)
            problem_template_formset.save(commit =False)
            variable_formset.save(commit = False)
            for form in hint_formset.forms:
                hint = form.save(commit = False)
                hint.problem = problem
                hint.save()
            for form in script_formset.forms:
                script = form.save(commit = False)
                script.problem = problem
                script.save()
            for form in choice_formset.forms:
                choice = form.save(commit = False)
                choice.problem = problem
                choice.save()
            return HttpResponseRedirect('/qbank/problems/')
    else:
        # GET: formsets bound to the existing objects. (An invalid POST
        # falls through to the render below so errors are displayed.)
        problem_form = ProblemForm(instance=problem)
        problem_template_formset = ProblemTemplateInlineFormSet( instance=problem, prefix='templates')
        choice_formset = ChoiceInlineFormSet(instance=problem, prefix='choices')
        answer_formset = AnswerInlineFormSet(instance=problem, prefix='answer')
        script_formset = ScriptInlineFormSet(instance=problem, prefix='scripts')
        variable_formset = VariableInlineFormSet(instance=problem, prefix='variables')
        common_introduction_formset = CommonIntroductionFormSet(instance=problem, prefix='common_intro')
        hint_formset = HintInlineFormSet( instance=problem, prefix='hints')
    c = {
        'problem_form' : problem_form,
        'choice_formset' : choice_formset,
        'problem_template_formset' :problem_template_formset,
        'answer_formset': answer_formset,
        'variable_formset' : variable_formset,
        'script_formset' : script_formset,
        'common_introduction_formset' : common_introduction_formset,
        'hint_formset' : hint_formset,
        }
    c.update(csrf(request))
    return render_to_response('edit.html', c)
def splashpage(request):
    """Redirect to the splash page (relative redirect to 'splashpage')."""
    return HttpResponseRedirect('splashpage')
def problems(request):
    """List every problem in the bank on problems.html."""
    all_problems = Problem.objects.all()
    return render_to_response(
        'problems.html',
        Context({'problems': all_problems}),
        context_instance=RequestContext(request),
    )
def ka_error(request, problem_id):
    """Render the Khan-Academy-export error page for one problem.

    Raises Http404 when no problem with `problem_id` exists.
    """
    # (removed an unused `problems = Problem.objects.all()` query that was
    # executed and discarded)
    p = get_object_or_404(Problem, id=problem_id)
    context = Context({'p': p})
    return render_to_response('ka_error.html', context)
def export(request):
    """Render the export page listing all problems."""
    every_problem = Problem.objects.all()
    ctx = Context({'problems': every_problem})
    return render_to_response('export.html', ctx)
def problems_Summary(request):
    """Render the problem summary page for all problems."""
    every_problem = Problem.objects.all()
    ctx = Context({'problems': every_problem})
    return render_to_response('problems_Summary.html', ctx)
def ka_details(request, problem_id):
    """Export one problem as a templated Khan-Academy exercise HTML file
    under /home/OpenDSA/exercises/ and render its details page.

    Raises Http404 when the problem does not exist.
    """
    p = get_object_or_404(Problem, id=problem_id)
    q = ProblemTemplate.objects.get(problem = p)
    # v = Variable.objects.get(problem = p)
    s = Answer.objects.get(problem = p)
    # c = Choice.objects.get(problem = p)
    h = p.hint_set.all()
    destination = open('/home/OpenDSA/exercises/'+p.title+'.html', 'wb+')
    # BUGFIX: the accumulator was named `str`, shadowing the builtin;
    # renamed to `markup` (behavior unchanged).
    markup = "<!DOCTYPE html>"+"\n"+"<html data-require=\"math math-format word-problems spin\"><head>"+"\n"+"<title>"+"\n"+p.title+"\n"+"</title>"+"\n"+"<script src=\"../../lib/jquery.min.js\">"+"\n"+"</script>"+"\n"+"<script src=\"../../lib/jquery-ui.min.js\">"+"\n"+"</script>"+"\n"+"<script>urlBaseOverride = \"../../ODSAkhan-exercises/\";</script>"+"\n"+"<script src=\"../../lib/khan-exercise-min.js\">"+"\n"+"</script>"+"\n"+"<script type=\"text/javascript\"src=\"http://cdn.mathjax.org/mathjax/1.1-latest/MathJax.js?config=http://algoviz.org/OpenDSA/ODSAkhan-exercises/KAthJax-77111459c7d82564a705f9c5480e2c88.js\">"+"\n"+"</script>"+"\n"
    for scr in p.script_set.all():
        markup += "<p>"
        markup += scr.script
        markup += "</p>"
        markup += "\n"
    markup += "</head>"+"\n"+"<body>"+"\n"+"<div class=\"exercise\">"+"\n"+"<div class=\"vars\">"+"\n"
    for t in p.variable_set.all():
        markup += "<var id=\""
        markup += t.var_id
        markup += "\">"
        markup += t.var_value
        markup += "</var>"
        markup += "\n"
    markup += "</div>"+"\n"+" <div class=\"problems\">"+"\n"
    # NOTE(review): the source had lost its indentation; "spin" templates
    # are assumed to wrap the question in a problem div — confirm grouping.
    if "spin" in q.question:
        markup += "<div id =\"problem\">"
        markup += q.question
        markup += "</div>"+"\n"
    markup += "<div class=\"solution\""
    if "spin" not in q.question:
        markup += "data-type=\"custom\""
    markup += ">"+"\n"
    markup += s.solution
    markup += "</div>"
    markup += "\n"
    # Open the choices list only when at least one non-empty choice exists.
    for c in p.choice_set.all():
        if c.choice == "":
            break
        else:
            markup += "<ul class =\"choices\">"
            break
    for c in p.choice_set.all():
        if not c.choice == "":
            markup += "<li><var>"
            markup += c.choice
            markup += "</var></li>"
            markup += "\n"
    for c in p.choice_set.all():
        if c.choice == "":
            break
        else:
            markup += "</ul>"
            break
    markup += "<div class=\"hints\">"
    markup += "\n"
    for h in p.hint_set.all():
        markup += "<p>"
        markup += h.hint
        markup += "</p>"
        markup += "\n"
    markup += "</div>"+"\n"
    if "spin" in q.question:
        markup += "</div>"
    markup += "</div>"+"\n"+"</div>"+"\n"+"</body>"+"\n"+"</html>"+"\n"
    destination.write(bytes(markup))
    destination.close()
    context = Context({
        'p':p,
        'title':p.title,
        'question':q,
        'solution':s,
        # 'choice':c,
        # 'hint':h
    })
    return render_to_response('ka_details.html', context)
#@login_required
def simple_details(request, problem_id):
    """Export one problem as a simple (non-templated) Khan-Academy exercise
    HTML file under /home/OpenDSA/exercises/ and render its details page.

    Raises Http404 when the problem does not exist.
    """
    p = get_object_or_404(Problem, id=problem_id)
    q = ProblemTemplate.objects.get(problem = p)
    # v = Variable.objects.get(problem = p)
    s = Answer.objects.get(problem = p)
    # c = Choice.objects.get(problem = p)
    h = p.hint_set.all()
    destination = open("/home/OpenDSA/exercises/"+p.title+".html", 'wb+')
    # BUGFIX: the accumulator was named `str`, shadowing the builtin;
    # renamed to `markup` (behavior unchanged).
    markup ="<!DOCTYPE html><html data-require=\"math math-format word-problems spin\"><head>"+"\n"+"<title>"+"\n"+p.title+"</title>"+"\n"+"<script src=\"../../lib/jquery.min.js\">"+"\n"+"</script>"+"\n"+"<script src=\"../../lib/jquery-ui.min.js\">"+"\n"+"</script>"+"\n"+"<script>urlBaseOverride = \"../../ODSAkhan-exercises/\";</script>"+"\n"+"<script src=\"../../lib/khan-exercise-min.js\">"+"\n"+"</script>"+"\n"+"</head><body><div class=\"exercise\"><div class=\"vars\"></div><div class=\"problems\"><div id=\"problem-type-or-description\"><p class=\"question\">"
    markup += q.question
    markup += "</p>"
    markup += "<div class=\"solution\"><var>\""
    markup += s.solution
    markup += "\"</var></div>"
    markup += "<ul class =\"choices\">"
    for c in p.choice_set.all():
        markup += "<li><var>\""
        markup += c.choice
        markup += "\"</var></li>"
    markup += "</ul>"
    markup += "<div class=\"hints\">"
    for h in p.hint_set.all():
        markup += "<p>\""
        markup += h.hint
        markup += "\"</p>"
    markup += "</div>"
    markup += "</div></div></div></body>"+"\n"+"<script type=\"text/javascript\"src=\"http://cdn.mathjax.org/mathjax/1.1-latest/MathJax.js?config=http://algoviz.org/OpenDSA/ODSAkhan-exercises/KAthJax-77111459c7d82564a705f9c5480e2c88.js\">"+"\n"+"</script>"+"</html>"
    destination.write(bytes(markup))
    destination.close()
    context = Context({
        'p':p,
        'title':p.title,
        'question':q,
        'solution':s,
        # 'choice':c,
        # 'hint':h
    })
    return render_to_response('simple_details.html', context)
def edit_ka(request, problem_id):
    """Edit a Khan-Academy style problem and all of its related objects
    (template, answer, variables, common introduction, choices, hints,
    scripts).

    GET renders bound forms for the existing rows; POST validates all of
    them and, on success, saves and redirects to the problem list.  On
    validation failure the bound (error-carrying) forms are re-rendered.
    """
    problem = get_object_or_404(Problem, id=problem_id)

    class MyInline(BaseInlineFormSet):
        # Suppress the DELETE checkbox on the choices formset.
        def __init__(self, *args, **kwargs):
            super(MyInline, self).__init__(*args, **kwargs)
            self.can_delete = False

    # Size each formset to the number of existing related rows.  count()
    # replaces the original len(...filter(...)) (which fetched every row)
    # and the redundant max(0, ...) (a count is never negative).
    ProblemTemplateInlineFormSet = inlineformset_factory(
        Problem, ProblemTemplate,
        max_num=ProblemTemplate.objects.filter(problem=problem).count())
    AnswerInlineFormSet = inlineformset_factory(
        Problem, Answer, max_num=Answer.objects.filter(problem=problem).count())
    VariableInlineFormSet = inlineformset_factory(
        Problem, Variable, max_num=Variable.objects.filter(problem=problem).count())
    CommonIntroductionFormSet = inlineformset_factory(
        Problem, CommonIntroduction,
        max_num=CommonIntroduction.objects.filter(problem=problem).count())
    ChoiceInlineFormSet = inlineformset_factory(
        Problem, Choice, formset=MyInline,
        max_num=Choice.objects.filter(problem=problem).count())
    HintInlineFormSet = inlineformset_factory(
        Problem, Hint, max_num=Hint.objects.filter(problem=problem).count())
    ScriptInlineFormSet = inlineformset_factory(
        Problem, Script, max_num=Script.objects.filter(problem=problem).count())
    if request.method == 'POST':
        problem_form = ProblemForm(request.POST, instance=problem)
        problem_template_formset = ProblemTemplateInlineFormSet(
            request.POST, request.FILES, instance=problem, prefix='templates')
        # BUG FIX: the original instantiated CommonIntroductionForm here (a
        # single ModelForm, handed a Problem as its instance) while the GET
        # branch and the template use CommonIntroductionFormSet.
        common_introduction_formset = CommonIntroductionFormSet(
            request.POST, request.FILES, instance=problem, prefix='common_intro')
        answer_formset = AnswerInlineFormSet(request.POST, instance=problem, prefix='answer')
        hint_formset = HintInlineFormSet(request.POST, request.FILES, instance=problem, prefix='hints')
        choice_formset = ChoiceInlineFormSet(request.POST, request.FILES, instance=problem, prefix='choices')
        script_formset = ScriptInlineFormSet(request.POST, request.FILES, instance=problem, prefix='scripts')
        variable_formset = VariableInlineFormSet(request.POST, request.FILES, instance=problem, prefix='variables')
        if (problem_form.is_valid() and variable_formset.is_valid()
                and problem_template_formset.is_valid() and choice_formset.is_valid()
                and hint_formset.is_valid() and answer_formset.is_valid()
                and script_formset.is_valid() and common_introduction_formset.is_valid()):
            problem = problem_form.save()
            # BUG FIX: the original called save(commit=False) and discarded
            # the returned unsaved instances, so edits to answers, the
            # common introduction, templates and variables were silently
            # lost.  save() actually persists them.
            answer_formset.save()
            common_introduction_formset.save()
            problem_template_formset.save()
            variable_formset.save()
            for form in hint_formset.forms:
                hint = form.save(commit=False)
                hint.problem = problem
                hint.save()
            for form in script_formset.forms:
                script = form.save(commit=False)
                script.problem = problem
                script.save()
            for form in choice_formset.forms:
                choice = form.save(commit=False)
                choice.problem = problem
                choice.save()
            return HttpResponseRedirect('/qbank/problems/')
    else:
        problem_form = ProblemForm(instance=problem)
        problem_template_formset = ProblemTemplateInlineFormSet(instance=problem, prefix='templates')
        choice_formset = ChoiceInlineFormSet(instance=problem, prefix='choices')
        answer_formset = AnswerInlineFormSet(instance=problem, prefix='answer')
        script_formset = ScriptInlineFormSet(instance=problem, prefix='scripts')
        variable_formset = VariableInlineFormSet(instance=problem, prefix='variables')
        common_introduction_formset = CommonIntroductionFormSet(instance=problem, prefix='common_intro')
        hint_formset = HintInlineFormSet(instance=problem, prefix='hints')
    c = {
        'problem_form': problem_form,
        'choice_formset': choice_formset,
        'problem_template_formset': problem_template_formset,
        'answer_formset': answer_formset,
        'variable_formset': variable_formset,
        'script_formset': script_formset,
        'common_introduction_formset': common_introduction_formset,
        'hint_formset': hint_formset,
    }
    c.update(csrf(request))
    return render_to_response('edit.html', c)
def edit_simple(request, problem_id):
    """Edit a 'simple' (multiple-choice) problem: its template, answer,
    choices and hints.

    GET renders bound forms; POST validates, saves, and redirects to the
    problem list; on validation failure the bound forms are re-rendered.
    """
    problem = get_object_or_404(Problem, id=problem_id)

    class MyInline(BaseInlineFormSet):
        # Suppress the DELETE checkbox on the choices formset.
        def __init__(self, *args, **kwargs):
            super(MyInline, self).__init__(*args, **kwargs)
            self.can_delete = False

    # count() replaces len(filter(...)) (avoids fetching rows) and the
    # redundant max(0, ...).
    ProblemTemplateInlineFormSet = inlineformset_factory(
        Problem, ProblemTemplate,
        max_num=ProblemTemplate.objects.filter(problem=problem).count())
    AnswerInlineFormSet = inlineformset_factory(
        Problem, Answer, max_num=Answer.objects.filter(problem=problem).count())
    ChoiceInlineFormSet = inlineformset_factory(
        Problem, Choice, formset=MyInline,
        max_num=Choice.objects.filter(problem=problem).count())
    HintInlineFormSet = inlineformset_factory(
        Problem, Hint, max_num=Hint.objects.filter(problem=problem).count())
    if request.method == 'POST':
        problem_form = ProblemForm(request.POST, instance=problem)
        problem_template_formset = ProblemTemplateInlineFormSet(
            request.POST, request.FILES, instance=problem, prefix='templates')
        answer_formset = AnswerInlineFormSet(request.POST, instance=problem, prefix='answer')
        hint_formset = HintInlineFormSet(request.POST, request.FILES, instance=problem, prefix='hints')
        choice_formset = ChoiceInlineFormSet(request.POST, request.FILES, instance=problem, prefix='choices')
        if (problem_form.is_valid() and problem_template_formset.is_valid()
                and choice_formset.is_valid() and hint_formset.is_valid()
                and answer_formset.is_valid()):
            problem = problem_form.save()
            # BUG FIX: the original called save(commit=False) and discarded
            # the result, so edits to the answer and the template were
            # silently lost.  save() actually persists them.
            answer_formset.save()
            problem_template_formset.save()
            for form in hint_formset.forms:
                hint = form.save(commit=False)
                hint.problem = problem
                hint.save()
            for form in choice_formset.forms:
                choice = form.save(commit=False)
                choice.problem = problem
                choice.save()
            return HttpResponseRedirect('/qbank/problems/')
    else:
        problem_form = ProblemForm(instance=problem)
        problem_template_formset = ProblemTemplateInlineFormSet(instance=problem, prefix='templates')
        choice_formset = ChoiceInlineFormSet(instance=problem, prefix='choices')
        answer_formset = AnswerInlineFormSet(instance=problem, prefix='answer')
        hint_formset = HintInlineFormSet(instance=problem, prefix='hints')
    c = {
        'problem_form': problem_form,
        'choice_formset': choice_formset,
        'problem_template_formset': problem_template_formset,
        'answer_formset': answer_formset,
        'hint_formset': hint_formset,
    }
    c.update(csrf(request))
    return render_to_response('simple.html', c)
def edit_list(request, problem_id):
    """Edit a 'list' problem: its template, answer, variables and hints.

    GET renders bound forms; POST validates, saves, and redirects to the
    problem list; on validation failure the bound forms are re-rendered.
    """
    problem = get_object_or_404(Problem, id=problem_id)

    # count() replaces len(filter(...)) (avoids fetching rows) and the
    # redundant max(0, ...).
    ProblemTemplateInlineFormSet = inlineformset_factory(
        Problem, ProblemTemplate,
        max_num=ProblemTemplate.objects.filter(problem=problem).count())
    AnswerInlineFormSet = inlineformset_factory(
        Problem, Answer, max_num=Answer.objects.filter(problem=problem).count())
    VariableInlineFormSet = inlineformset_factory(
        Problem, Variable, max_num=Variable.objects.filter(problem=problem).count())
    HintInlineFormSet = inlineformset_factory(
        Problem, Hint, max_num=Hint.objects.filter(problem=problem).count())
    if request.method == 'POST':
        problem_form = ProblemForm(request.POST, instance=problem)
        problem_template_formset = ProblemTemplateInlineFormSet(
            request.POST, request.FILES, instance=problem, prefix='templates')
        answer_formset = AnswerInlineFormSet(request.POST, instance=problem, prefix='answer')
        hint_formset = HintInlineFormSet(request.POST, request.FILES, instance=problem, prefix='hints')
        variable_formset = VariableInlineFormSet(request.POST, request.FILES, instance=problem, prefix='variables')
        if (problem_form.is_valid() and variable_formset.is_valid()
                and problem_template_formset.is_valid() and hint_formset.is_valid()
                and answer_formset.is_valid()):
            problem = problem_form.save()
            # BUG FIX: the original called save(commit=False) and discarded
            # the result, so edits to the answer, template and variables
            # were silently lost.  save() actually persists them.
            answer_formset.save()
            problem_template_formset.save()
            variable_formset.save()
            for form in hint_formset.forms:
                hint = form.save(commit=False)
                hint.problem = problem
                hint.save()
            return HttpResponseRedirect('/qbank/problems/')
    else:
        problem_form = ProblemForm(instance=problem)
        problem_template_formset = ProblemTemplateInlineFormSet(instance=problem, prefix='templates')
        answer_formset = AnswerInlineFormSet(instance=problem, prefix='answer')
        variable_formset = VariableInlineFormSet(instance=problem, prefix='variables')
        hint_formset = HintInlineFormSet(instance=problem, prefix='hints')
    c = {
        'problem_form': problem_form,
        'problem_template_formset': problem_template_formset,
        'answer_formset': answer_formset,
        'variable_formset': variable_formset,
        'hint_formset': hint_formset,
    }
    c.update(csrf(request))
    return render_to_response('list.html', c, context_instance=RequestContext(request))
def edit_range(request, problem_id):
    """Edit a 'range' problem: its template, answer and variables.

    GET renders bound forms; POST validates, saves, and redirects to the
    problem list; on validation failure the bound forms are re-rendered.
    """
    problem = get_object_or_404(Problem, id=problem_id)

    # count() replaces len(filter(...)) (avoids fetching rows) and the
    # redundant max(0, ...).
    ProblemTemplateInlineFormSet = inlineformset_factory(
        Problem, ProblemTemplate,
        max_num=ProblemTemplate.objects.filter(problem=problem).count())
    AnswerInlineFormSet = inlineformset_factory(
        Problem, Answer, max_num=Answer.objects.filter(problem=problem).count())
    VariableInlineFormSet = inlineformset_factory(
        Problem, Variable, max_num=Variable.objects.filter(problem=problem).count())
    if request.method == 'POST':
        problem_form = ProblemForm(request.POST, instance=problem)
        problem_template_formset = ProblemTemplateInlineFormSet(
            request.POST, request.FILES, instance=problem, prefix='templates')
        answer_formset = AnswerInlineFormSet(request.POST, instance=problem, prefix='answer')
        variable_formset = VariableInlineFormSet(request.POST, request.FILES,
                                                 instance=problem, prefix='variables')
        if (problem_form.is_valid() and variable_formset.is_valid()
                and problem_template_formset.is_valid() and answer_formset.is_valid()):
            problem = problem_form.save()
            # BUG FIX: the original called save(commit=False) and discarded
            # the result, so edits to the answer and the template were
            # silently lost.  save() actually persists them.
            answer_formset.save()
            problem_template_formset.save()
            for form in variable_formset.forms:
                variable = form.save(commit=False)
                variable.problem = problem
                variable.save()
            return HttpResponseRedirect('/qbank/problems/')
    else:
        problem_form = ProblemForm(instance=problem)
        problem_template_formset = ProblemTemplateInlineFormSet(instance=problem, prefix='templates')
        answer_formset = AnswerInlineFormSet(instance=problem, prefix='answer')
        variable_formset = VariableInlineFormSet(instance=problem, prefix='variables')
    c = {
        'problem_form': problem_form,
        'problem_template_formset': problem_template_formset,
        'answer_formset': answer_formset,
        'variable_formset': variable_formset,
    }
    c.update(csrf(request))
    return render_to_response('range.html', c)
def edit_summative(request, problem_id):
    """Edit a 'summative' problem: its common introduction and the list
    of component problem templates.

    GET renders bound forms; POST validates, saves, and redirects to the
    problem list; on validation failure the bound forms are re-rendered.
    """
    problem = get_object_or_404(Problem, id=problem_id)
    problems = Problem.objects.all()  # shown in the template's picker

    # count() replaces len(filter(...)) (avoids fetching rows) and the
    # redundant max(0, ...).
    ProblemTemplateInlineFormSet = inlineformset_factory(
        Problem, ProblemTemplate,
        max_num=ProblemTemplate.objects.filter(problem=problem).count())
    CommonIntroductionFormSet = inlineformset_factory(
        Problem, CommonIntroduction,
        max_num=CommonIntroduction.objects.filter(problem=problem).count())
    if request.method == 'POST':
        problem_form = ProblemForm(request.POST, instance=problem)
        problem_template_formset = ProblemTemplateInlineFormSet(
            request.POST, request.FILES, instance=problem, prefix='templates')
        common_introduction_formset = CommonIntroductionFormSet(
            request.POST, request.FILES, instance=problem, prefix='common_intro')
        if (problem_form.is_valid() and problem_template_formset.is_valid()
                and common_introduction_formset.is_valid()):
            problem = problem_form.save()
            # BUG FIX: the original called save(commit=False) and discarded
            # the result, so edits to the common introduction were silently
            # lost.  save() actually persists them.
            common_introduction_formset.save()
            for form in problem_template_formset.forms:
                problem_template = form.save(commit=False)
                problem_template.problem = problem
                problem_template.save()
            return HttpResponseRedirect('/qbank/problems/')
    else:
        problem_form = ProblemForm(instance=problem)
        problem_template_formset = ProblemTemplateInlineFormSet(instance=problem, prefix='templates')
        common_introduction_formset = CommonIntroductionFormSet(instance=problem, prefix='common_intro')
    c = {
        'problem_form': problem_form,
        'problem_template_formset': problem_template_formset,
        'common_introduction_formset': common_introduction_formset,
        'problems': problems,
    }
    c.update(csrf(request))
    return render_to_response('summative.html', c)
#@login_required
def summative_details(request, problem_id):
    """Generate the exercise HTML file for a 'summative' problem (one
    <div class="exercise"> per component template) and render its detail
    page.

    Side effect: writes /home/OpenDSA/exercises/<title>.html.
    """
    p = get_object_or_404(Problem, id=problem_id)
    q = ProblemTemplate.objects.filter(problem=p)
    # Collect markup fragments and join once: fixes the original's
    # quadratic `+=` concatenation and its shadowing of the builtin `str`.
    parts = [
        '<!DOCTYPE html>\n<html data-require="math math-format word-problems spin">\n'
        '<head>\n<title>\n', p.title, '</title>\n'
        '<script src="../../lib/jquery.min.js">\n</script>\n'
        '<script src="../../lib/jquery-ui.min.js">\n</script>\n'
        '<script>urlBaseOverride = "../../ODSAkhan-exercises/";</script>\n'
        '<script src="../../lib/khan-exercise-min.js">\n</script>\n'
        '</head>\n<body>\n',
    ]
    for c in p.problemtemplate_set.all():
        parts.extend(['<div class="exercise" data-name="/qbank/exercises/',
                      c.question, '"></div>\n'])
    parts.append(
        '</body>\n'
        '<script type="text/javascript"src="http://cdn.mathjax.org/mathjax/1.1-latest/MathJax.js?config=http://algoviz.org/OpenDSA/ODSAkhan-exercises/KAthJax-77111459c7d82564a705f9c5480e2c88.js">\n'
        '</script></html>'
    )
    html = ''.join(parts)
    # `with` guarantees the handle is closed even if write() fails.
    with open('/home/OpenDSA/exercises/' + p.title + '.html', 'wb+') as destination:
        destination.write(html.encode('utf-8'))
    context = Context({
        'p': p,
        'title': p.title,
        'question': q,
    })
    return render_to_response('summative_details.html', context)
#@login_required
def ka_gen(request, problem_id):
    """Generate a small wrapper HTML page (<title>_View.html) that embeds
    the problem's exercise via a data-name reference, and render the
    generation page.

    Side effect: writes /home/OpenDSA/temp/<title>_View.html.
    """
    p = get_object_or_404(Problem, id=problem_id)
    q = ProblemTemplate.objects.filter(problem=p)
    # Collect markup fragments and join once: fixes the original's
    # quadratic `+=` concatenation and its shadowing of the builtin `str`.
    html = ''.join([
        '<!DOCTYPE html>\n<html data-require="math math-format word-problems spin">\n'
        '<head>\n<title>\n', p.title, '</title>\n'
        '<script src="../../lib/jquery.min.js">\n</script>\n'
        '<script src="../../lib/jquery-ui.min.js">\n</script>\n'
        '<script>urlBaseOverride = "../../ODSAkhan-exercises/";</script>\n'
        '<script src="../../lib/khan-exercise-min.js">\n</script>\n'
        '</head>\n<body>\n'
        '<div class="exercise" data-name="/qbank/exercises/', p.title, '">\n</div>'
        '</body>\n'
        '<script type="text/javascript" src="http://cdn.mathjax.org/mathjax/1.1-latest/MathJax.js?config=http://algoviz.org/OpenDSA/ODSAkhan-exercises/KAthJax-77111459c7d82564a705f9c5480e2c88.js">'
        '</script></html>',
    ])
    # `with` guarantees the handle is closed even if write() fails.
    with open('/home/OpenDSA/temp/' + p.title + '_View.html', 'wb+') as destination:
        destination.write(html.encode('utf-8'))
    context = Context({
        'p': p,
        'title': p.title,
        'question': q,
    })
    return render_to_response('ka_gen.html', context)
#@login_required
def range_details(request, problem_id):
    """Generate the exercise HTML file for a 'range' problem (question
    over randomized numeric variables) and render its detail page.

    Side effect: writes /home/OpenDSA/exercises/<title>.html.
    """
    p = get_object_or_404(Problem, id=problem_id)
    q = ProblemTemplate.objects.get(problem=p)
    s = Answer.objects.get(problem=p)
    # Collect markup fragments and join once: fixes the original's
    # quadratic `+=` concatenation and its shadowing of the builtin `str`.
    parts = [
        '<!DOCTYPE html>\n<html data-require="math math-format word-problems spin">\n'
        '<head>\n<title>\n', p.title, '</title>\n'
        '<script src="../../lib/jquery.min.js">\n</script>\n'
        '<script src="../../lib/jquery-ui.min.js">\n</script>\n'
        '<script>urlBaseOverride = "../../ODSAkhan-exercises/";</script>\n'
        '<script src="../../lib/khan-exercise-min.js">\n</script>\n'
        '</head>\n<body>\n<div class="exercise"><div class="vars">',
    ]
    for t in p.variable_set.all():
        parts.extend(['<var id="', t.var_id, '">', t.var_value, '</var>\n'])
    parts.extend([
        '</div>\n<div class="problems"> \n<div id="problem-type-or-description">\n'
        '<p class="problem">\n<p class="question">',
        q.question,
        '</p>\n<div class="solution"',
    ])
    if "log" in q.question:
        # BUG FIX: the original emitted the attribute with no separating
        # space, producing the malformed '<div class="solution"data-forms=...'.
        parts.append(' data-forms="log"')
    parts.extend([
        '>\n<var>', s.solution,
        '</var>\n</div>\n</div>\n</div>\n</div>\n</body>\n'
        '<script type="text/javascript"src="http://cdn.mathjax.org/mathjax/1.1-latest/MathJax.js?config=http://algoviz.org/OpenDSA/ODSAkhan-exercises/KAthJax-77111459c7d82564a705f9c5480e2c88.js">\n'
        '</script></html>',
    ])
    html = ''.join(parts)
    # `with` guarantees the handle is closed even if write() fails.
    with open('/home/OpenDSA/exercises/' + p.title + '.html', 'wb+') as destination:
        destination.write(html.encode('utf-8'))
    context = Context({
        'p': p,
        'title': p.title,
        'question': q,
        'solution': s,
    })
    return render_to_response('range_details.html', context)
#@login_required
def list_details(request, problem_id):
    """Generate the exercise HTML file for a 'list' problem and render
    its detail page.

    The answer string is a comma-separated list; each *unique* answer
    becomes a var A1..Ak and a choice entry.  Each problem variable is a
    comma-separated list too; a random index x1..xn is drawn per variable
    and the mixed-radix expression INDEX = c_n*x_n + ... + x_last selects
    the matching entry of ANSWER.

    Side effect: writes /home/OpenDSA/exercises/<title>.html.
    """
    p = get_object_or_404(Problem, id=problem_id)
    q = ProblemTemplate.objects.get(problem=p)
    s = Answer.objects.get(problem=p)
    # Collect markup fragments and join once: fixes the original's
    # quadratic `+=` concatenation and its shadowing of the builtin `str`.
    parts = [
        '<!DOCTYPE html>\n<html data-require="math math-format word-problems spin">\n'
        '<head>\n<title>\n', p.title, '</title>\n'
        '<script src="../../lib/jquery.min.js">\n</script>\n'
        '<script src="../../lib/jquery-ui.min.js">\n</script>\n'
        '<script>urlBaseOverride = "../../ODSAkhan-exercises/";</script>\n'
        '<script src="../../lib/khan-exercise-min.js">\n</script>\n'
        '</head>\n<body>\n<div class="exercise">\n<div class="vars">\n',
    ]
    solution_list = s.solution.split(",")
    # One <var id="A<i>"> per unique answer value.
    index = 1
    ans_uniq = []
    for t in solution_list:
        if t not in ans_uniq:
            ans_uniq.append(t)
            parts.extend(['<var id="', "A%d" % index, '">', t, '</var>\n'])
            index = index + 1
    # One <var> holding each variable's value list, plus a randRange index
    # x<i> over its length.
    count = 0
    var_count_array = []
    for t in p.variable_set.all():
        parts.extend(['<var id="', t.var_id, '">[', t.var_value, ']</var>\n'])
        var_elements = t.var_value.split(",")
        var_count_array.append(len(var_elements))
        parts.extend(['<var id="', "x%d" % (count + 1),
                      '">randRange(0,%d' % (len(var_elements) - 1), ')</var>\n'])
        count = count + 1
    # Build the mixed-radix index expression, e.g. "6*x1+3*x2+x3".
    eq = "x%d" % len(var_count_array)
    var_count = count - 1
    coef = 1
    while var_count > 0:
        coef = coef * var_count_array[var_count]
        eq = "%d" % coef + "*x" + "%d" % var_count + "+" + eq
        var_count = var_count - 1
    parts.extend(['<var id ="INDEX">', eq, '</var>\n'])
    parts.extend(['<var id="ANSWER">[', s.solution, ']</var>\n'])
    parts.extend([
        '</div>\n<div class="problems"> \n<div id="problem-type-or-description">\n'
        '<p class="problem">\n<p class="question">',
        q.question,
        '</p>\n<div class="solution"><var>ANSWER[INDEX]</var>\n</div>\n'
        '<ul class ="choices" data-category="true">',
    ])
    # One choice per unique answer, referencing the A<i> vars above.
    num = 1
    answer_unique = []
    for t in solution_list:
        if t not in answer_unique:
            answer_unique.append(t)
            parts.extend(['<li><var>', "A%d" % num, '</var></li>'])
            num = num + 1
    parts.append('</ul><div class="hints">')
    for h in p.hint_set.all():
        parts.extend(['<p>"', h.hint, '"</p>'])
    parts.append(
        '</div></div>\n</div>\n</div>\n</body>\n'
        '<script type="text/javascript"src="http://cdn.mathjax.org/mathjax/1.1-latest/MathJax.js?config=http://algoviz.org/OpenDSA/ODSAkhan-exercises/KAthJax-77111459c7d82564a705f9c5480e2c88.js">\n'
        '</script></html>'
    )
    html = ''.join(parts)
    # `with` guarantees the handle is closed even if write() fails.
    with open('/home/OpenDSA/exercises/' + p.title + '.html', 'wb+') as destination:
        destination.write(html.encode('utf-8'))
    context = Context({
        'p': p,
        'title': p.title,
        'question': q,
        'solution': s,
    })
    return render_to_response('list_details.html', context,
                              context_instance=RequestContext(request))
#@login_required
def write_file(request, problem_id):
    """Export one problem and all of its related objects as a CSV
    attachment named <title>.csv (one KEY,value row per field)."""
    p = get_object_or_404(Problem, id=problem_id)
    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="' + p.title + '.csv"'
    writer = csv.writer(response)
    for prob in Problem.objects.filter(id=problem_id):
        writer.writerow(['TITLE', prob.title])
        writer.writerow(['DIFFICULTY', prob.difficulty_level])
        for tmpl in ProblemTemplate.objects.filter(problem=prob):
            writer.writerow(['QUESTION', tmpl.question])
        for var in prob.variable_set.all():
            writer.writerow(['VAR_NAME', var.var_id])
            writer.writerow(['VAR_VALUE', var.var_value])
            writer.writerow(['ATTR_INFO', var.attribute])
        for scr in prob.script_set.all():
            writer.writerow(['SCRIPT', scr.script])
        for opt in prob.choice_set.all():
            writer.writerow(['CHOICE', opt.choice])
        for tip in prob.hint_set.all():
            writer.writerow(['HINT', tip.hint])
        # A problem may legitimately lack these one-to-one rows; skip the
        # row rather than fail.
        try:
            intro = CommonIntroduction.objects.get(problem=prob)
            writer.writerow(['COMMON_INTRO', intro.common_intro])
        except CommonIntroduction.DoesNotExist:
            pass
        try:
            ans = Answer.objects.get(problem=prob)
            writer.writerow(['SOLUTION', ans.solution])
        except Answer.DoesNotExist:
            pass
    return response
#@login_required
def delete(request, problem_id):
    """Delete the problem with the given id and return to the problem
    list.

    Uses get_object_or_404 (like every other view in this module) so an
    unknown id yields a 404 instead of an unhandled DoesNotExist (500).
    """
    p = get_object_or_404(Problem, id=problem_id)
    p.delete()
    return HttpResponseRedirect('/qbank/problems/')
#@login_required
def simple(request):
    """Create a new 'simple' (multiple-choice) problem.

    GET shows blank forms; POST validates the problem form plus its
    template, answer, hint and choice formsets, saves everything, and
    redirects to the problem list.
    """
    # Allow extra formset rows to be left blank without failing validation
    # (see http://stackoverflow.com/questions/2406537/django-formsets-make-first-required/4951032#4951032).
    class RequiredFormSet(BaseFormSet):
        def __init__(self, *args, **kwargs):
            super(RequiredFormSet, self).__init__(*args, **kwargs)
            for form in self.forms:
                form.empty_permitted = True

    HintFormSet = formset_factory(HintForm, max_num=10, formset=RequiredFormSet)
    ChoiceFormSet = formset_factory(ChoiceForm, max_num=10, formset=RequiredFormSet)
    if request.method == 'POST':
        problem_form = SimpleProblemForm(request.POST)
        problem_template_form = ProblemTemplateForm(request.POST, prefix='template')
        answer_form = AnswerForm(request.POST, prefix='answer')
        hint_formset = HintFormSet(request.POST, request.FILES, prefix='hints')
        choice_formset = ChoiceFormSet(request.POST, request.FILES, prefix='choices')
        all_valid = (problem_form.is_valid() and problem_template_form.is_valid()
                     and choice_formset.is_valid() and hint_formset.is_valid()
                     and answer_form.is_valid())
        if all_valid:
            problem = problem_form.save()
            template = problem_template_form.save(commit=False)
            template.problem = problem
            template.save()
            answer = answer_form.save(commit=False)
            answer.problem = problem
            answer.save()
            # Attach every hint and choice to the freshly created problem.
            for child in hint_formset.forms + choice_formset.forms:
                obj = child.save(commit=False)
                obj.problem = problem
                obj.save()
            return HttpResponseRedirect('/qbank/problems/')
    else:
        problem_form = SimpleProblemForm()
        choice_formset = ChoiceFormSet(prefix='choices')
        problem_template_form = ProblemTemplateForm(prefix='template')
        answer_form = AnswerForm(prefix='answer')
        hint_formset = HintFormSet(prefix='hints')
    c = {
        'problem_form': problem_form,
        'choice_formset': choice_formset,
        'problem_template_form': problem_template_form,
        'answer_form': answer_form,
        'hint_formset': hint_formset,
    }
    c.update(csrf(request))
    return render_to_response('simple.html', c)
#@login_required
def list(request):
    """Create a new 'list' problem (question over list-valued variables).

    NOTE(review): the view name shadows the builtin list(); it is kept
    unchanged because URL configuration refers to it by this name.
    """
    # Allow extra formset rows to be left blank without failing validation
    # (see http://stackoverflow.com/questions/2406537/django-formsets-make-first-required/4951032#4951032).
    class RequiredFormSet(BaseFormSet):
        def __init__(self, *args, **kwargs):
            super(RequiredFormSet, self).__init__(*args, **kwargs)
            for form in self.forms:
                form.empty_permitted = True

    VariableFormSet = formset_factory(VariableForm, max_num=10, formset=RequiredFormSet)
    HintFormSet = formset_factory(HintForm, max_num=10, formset=RequiredFormSet)
    if request.method == 'POST':
        problem_form = ListProblemForm(request.POST)
        problem_template_form = ProblemTemplateForm(request.POST, prefix='template')
        answer_form = AnswerForm(request.POST, prefix='answer')
        variable_formset = VariableFormSet(request.POST, request.FILES, prefix='variables')
        hint_formset = HintFormSet(request.POST, request.FILES, prefix='hints')
        all_valid = (problem_form.is_valid() and problem_template_form.is_valid()
                     and variable_formset.is_valid() and hint_formset.is_valid()
                     and answer_form.is_valid())
        if all_valid:
            problem = problem_form.save()
            template = problem_template_form.save(commit=False)
            template.problem = problem
            template.save()
            answer = answer_form.save(commit=False)
            answer.problem = problem
            answer.save()
            # Attach every variable and hint to the freshly created problem.
            for child in variable_formset.forms + hint_formset.forms:
                obj = child.save(commit=False)
                obj.problem = problem
                obj.save()
            return HttpResponseRedirect('/qbank/problems/')
    else:
        problem_form = ListProblemForm()
        problem_template_form = ProblemTemplateForm(prefix='template')
        answer_form = AnswerForm(prefix='answer')
        variable_formset = VariableFormSet(prefix='variables')
        hint_formset = HintFormSet(prefix='hints')
    c = {
        'problem_form': problem_form,
        'problem_template_form': problem_template_form,
        'answer_form': answer_form,
        'variable_formset': variable_formset,
        'hint_formset': hint_formset,
    }
    c.update(csrf(request))
    return render_to_response('list.html', c)
#@login_required
def range(request):
    """Create a new 'range' problem (question over randomized numeric
    variables).

    NOTE(review): the view name shadows the builtin range(); it is kept
    unchanged because URL configuration refers to it by this name.
    """
    # Allow extra formset rows to be left blank without failing validation
    # (see http://stackoverflow.com/questions/2406537/django-formsets-make-first-required/4951032#4951032).
    class RequiredFormSet(BaseFormSet):
        def __init__(self, *args, **kwargs):
            super(RequiredFormSet, self).__init__(*args, **kwargs)
            for form in self.forms:
                form.empty_permitted = True

    VariableFormSet = formset_factory(VariableForm, max_num=10, formset=RequiredFormSet)
    if request.method == 'POST':
        problem_form = RangeProblemForm(request.POST)
        problem_template_form = ProblemTemplateForm(request.POST, prefix='template')
        answer_form = AnswerForm(request.POST, prefix='answer')
        variable_formset = VariableFormSet(request.POST, request.FILES, prefix='variables')
        all_valid = (problem_form.is_valid() and problem_template_form.is_valid()
                     and variable_formset.is_valid() and answer_form.is_valid())
        if all_valid:
            problem = problem_form.save()
            template = problem_template_form.save(commit=False)
            template.problem = problem
            template.save()
            answer = answer_form.save(commit=False)
            answer.problem = problem
            answer.save()
            # Attach every variable to the freshly created problem.
            for child in variable_formset.forms:
                obj = child.save(commit=False)
                obj.problem = problem
                obj.save()
            return HttpResponseRedirect('/qbank/problems/')
    else:
        problem_form = RangeProblemForm()
        problem_template_form = ProblemTemplateForm(prefix='template')
        answer_form = AnswerForm(prefix='answer')
        variable_formset = VariableFormSet(prefix='variables')
    c = {
        'problem_form': problem_form,
        'problem_template_form': problem_template_form,
        'answer_form': answer_form,
        'variable_formset': variable_formset,
    }
    c.update(csrf(request))
    return render_to_response('range.html', c)
def summative(request):
    """Create a new 'summative' problem from a common introduction plus a
    set of component problem templates.
    """
    # Allow extra formset rows to be left blank without failing validation
    # (see http://stackoverflow.com/questions/2406537/django-formsets-make-first-required/4951032#4951032).
    class RequiredFormSet(BaseFormSet):
        def __init__(self, *args, **kwargs):
            super(RequiredFormSet, self).__init__(*args, **kwargs)
            for form in self.forms:
                form.empty_permitted = True

    problems = Problem.objects.all()  # shown in the template's picker
    ProblemTemplateFormSet = formset_factory(ProblemTemplateForm, max_num=10,
                                             formset=RequiredFormSet)
    if request.method == 'POST':
        problem_form = SummativeProblemForm(request.POST)
        common_introduction_form = CommonIntroductionForm(request.POST, prefix='common_intro')
        problem_template_formset = ProblemTemplateFormSet(request.POST, request.FILES,
                                                          prefix='templates')
        all_valid = (problem_form.is_valid() and common_introduction_form.is_valid()
                     and problem_template_formset.is_valid())
        if all_valid:
            problem = problem_form.save()
            intro = common_introduction_form.save(commit=False)
            intro.problem = problem
            intro.save()
            # Attach every component template to the freshly created problem.
            for child in problem_template_formset.forms:
                obj = child.save(commit=False)
                obj.problem = problem
                obj.save()
            return HttpResponseRedirect('/qbank/problems/')
    else:
        problem_form = SummativeProblemForm()
        common_introduction_form = CommonIntroductionForm(prefix='common_intro')
        problem_template_formset = ProblemTemplateFormSet(prefix='templates')
    c = {
        'problem_form': problem_form,
        'common_introduction_form': common_introduction_form,
        'problem_template_formset': problem_template_formset,
        'problems': problems,
    }
    c.update(csrf(request))
    return render_to_response('summative.html', c)
def d(request, problem_id):
    """Serve the generated exercise HTML file for a problem as a file
    download; redirect to the error page if the file cannot be opened.
    """
    p = get_object_or_404(Problem, id=problem_id)
    file_path = "/home/OpenDSA/exercises/" + p.title + ".html"
    try:
        # open() instead of the Python-2-only file() builtin; the bare
        # except (which also swallowed programming errors) is narrowed to
        # I/O failures, the only thing opening a file can raise here.
        file_wrapper = FileWrapper(open(file_path, 'rb'))
    except (IOError, OSError):
        return HttpResponseRedirect('ka_error/')
    # BUG FIX: guess_type() returns a (type, encoding) tuple; the original
    # passed the whole tuple as content_type, yielding a malformed header.
    file_mimetype = mimetypes.guess_type(file_path)[0]
    response = HttpResponse(file_wrapper, content_type=file_mimetype)
    response['X-Sendfile'] = file_path
    response['Content-Length'] = os.stat(file_path).st_size
    # BUG FIX: the original appended a stray '/' to the download filename.
    response['Content-Disposition'] = 'attachment; filename=%s' % smart_str(p.title)
    return response
def server_error(request, template_name='500.html'):
    """Render the site-wide 500 error page with full request context."""
    context = RequestContext(request)
    return render_to_response(template_name, context_instance=context)
| 37.544782
| 638
| 0.715727
| 5,682
| 45,692
| 5.579021
| 0.05051
| 0.037981
| 0.032461
| 0.023943
| 0.888139
| 0.868896
| 0.850126
| 0.838801
| 0.832429
| 0.814858
| 0
| 0.008974
| 0.124464
| 45,692
| 1,216
| 639
| 37.575658
| 0.783427
| 0.047054
| 0
| 0.764838
| 0
| 0
| 0.120773
| 0.016839
| 0.033595
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.016797
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
95f003e80e1eb39d8a08d62776abc4b0ed16320a
| 62,530
|
py
|
Python
|
napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 64
|
2016-10-20T15:47:18.000Z
|
2021-11-11T11:57:32.000Z
|
napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 126
|
2016-10-05T10:36:14.000Z
|
2019-05-15T08:43:23.000Z
|
napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 63
|
2016-11-07T15:23:08.000Z
|
2021-09-22T14:41:16.000Z
|
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
# On Python 3, alias the builtins module under the PY2 name and map the
# removed `long` type onto `int` so the generated code below can use
# both names unconditionally. (Indentation restored — it was stripped.)
if six.PY3:
    import builtins as __builtin__

    long = int
elif six.PY2:
    import __builtin__
from . import state
from . import router_lsa
from . import network_lsa
from . import summary_lsa
from . import as_external_lsa
from . import nssa_external_lsa
from . import opaque_lsa
# Auto-generated by the pyangbind PythonClass plugin — do not hand-edit logic.
# Fix applied: the file's leading indentation was stripped (invalid Python);
# this restores the generator's canonical layout with code otherwise unchanged.
class lsa(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa-types/lsa-type/lsas/lsa. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: List of the LSAs of a specified type in the
    LSDB for the specified area
    """

    __slots__ = (
        "_path_helper",
        "_extmethods",
        "__link_state_id",
        "__state",
        "__router_lsa",
        "__network_lsa",
        "__summary_lsa",
        "__as_external_lsa",
        "__nssa_external_lsa",
        "__opaque_lsa",
    )

    _yang_name = "lsa"
    _pybind_generated_by = "container"

    def __init__(self, *args, **kwargs):
        self._path_helper = False

        self._extmethods = False

        self.__link_state_id = YANGDynClass(
            base=six.text_type,
            is_leaf=True,
            yang_name="link-state-id",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            is_keyval=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="leafref",
            is_config=False,
        )
        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )
        self.__router_lsa = YANGDynClass(
            base=router_lsa.router_lsa,
            is_container="container",
            yang_name="router-lsa",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )
        self.__network_lsa = YANGDynClass(
            base=network_lsa.network_lsa,
            is_container="container",
            yang_name="network-lsa",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )
        self.__summary_lsa = YANGDynClass(
            base=summary_lsa.summary_lsa,
            is_container="container",
            yang_name="summary-lsa",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )
        self.__as_external_lsa = YANGDynClass(
            base=as_external_lsa.as_external_lsa,
            is_container="container",
            yang_name="as-external-lsa",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )
        self.__nssa_external_lsa = YANGDynClass(
            base=nssa_external_lsa.nssa_external_lsa,
            is_container="container",
            yang_name="nssa-external-lsa",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )
        self.__opaque_lsa = YANGDynClass(
            base=opaque_lsa.opaque_lsa,
            is_container="container",
            yang_name="opaque-lsa",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "protocols",
                "protocol",
                "ospfv2",
                "areas",
                "area",
                "lsdb",
                "lsa-types",
                "lsa-type",
                "lsas",
                "lsa",
            ]

    def _get_link_state_id(self):
        """
        Getter method for link_state_id, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/link_state_id (leafref)

        YANG Description: Reference to the Link State ID of the LSA
        """
        return self.__link_state_id

    def _set_link_state_id(self, v, load=False):
        """
        Setter method for link_state_id, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/link_state_id (leafref)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_link_state_id is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_link_state_id() directly.

        YANG Description: Reference to the Link State ID of the LSA
        """
        parent = getattr(self, "_parent", None)
        if parent is not None and load is False:
            raise AttributeError(
                "Cannot set keys directly when" + " within an instantiated list"
            )

        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=six.text_type,
                is_leaf=True,
                yang_name="link-state-id",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                is_keyval=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="leafref",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """link_state_id must be of a type compatible with leafref""",
                    "defined-type": "leafref",
                    "generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="link-state-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=False)""",
                }
            )

        self.__link_state_id = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_link_state_id(self):
        self.__link_state_id = YANGDynClass(
            base=six.text_type,
            is_leaf=True,
            yang_name="link-state-id",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            is_keyval=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="leafref",
            is_config=False,
        )

    def _get_state(self):
        """
        Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/state (container)

        YANG Description: Operational state parameters relating to all
        LSA types
        """
        return self.__state

    def _set_state(self, v, load=False):
        """
        Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/state (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_state is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_state() directly.

        YANG Description: Operational state parameters relating to all
        LSA types
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=state.state,
                is_container="container",
                yang_name="state",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """state must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )

        self.__state = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_state(self):
        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

    def _get_router_lsa(self):
        """
        Getter method for router_lsa, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/router_lsa (container)

        YANG Description: Contents of the router LSA
        """
        return self.__router_lsa

    def _set_router_lsa(self, v, load=False):
        """
        Setter method for router_lsa, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/router_lsa (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_router_lsa is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_router_lsa() directly.

        YANG Description: Contents of the router LSA
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=router_lsa.router_lsa,
                is_container="container",
                yang_name="router-lsa",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """router_lsa must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=router_lsa.router_lsa, is_container='container', yang_name="router-lsa", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )

        self.__router_lsa = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_router_lsa(self):
        self.__router_lsa = YANGDynClass(
            base=router_lsa.router_lsa,
            is_container="container",
            yang_name="router-lsa",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

    def _get_network_lsa(self):
        """
        Getter method for network_lsa, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/network_lsa (container)

        YANG Description: Contents of the network LSA
        """
        return self.__network_lsa

    def _set_network_lsa(self, v, load=False):
        """
        Setter method for network_lsa, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/network_lsa (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_network_lsa is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_network_lsa() directly.

        YANG Description: Contents of the network LSA
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=network_lsa.network_lsa,
                is_container="container",
                yang_name="network-lsa",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """network_lsa must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=network_lsa.network_lsa, is_container='container', yang_name="network-lsa", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )

        self.__network_lsa = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_network_lsa(self):
        self.__network_lsa = YANGDynClass(
            base=network_lsa.network_lsa,
            is_container="container",
            yang_name="network-lsa",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

    def _get_summary_lsa(self):
        """
        Getter method for summary_lsa, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/summary_lsa (container)

        YANG Description: Contents of the summary LSA
        """
        return self.__summary_lsa

    def _set_summary_lsa(self, v, load=False):
        """
        Setter method for summary_lsa, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/summary_lsa (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_summary_lsa is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_summary_lsa() directly.

        YANG Description: Contents of the summary LSA
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=summary_lsa.summary_lsa,
                is_container="container",
                yang_name="summary-lsa",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """summary_lsa must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=summary_lsa.summary_lsa, is_container='container', yang_name="summary-lsa", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )

        self.__summary_lsa = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_summary_lsa(self):
        self.__summary_lsa = YANGDynClass(
            base=summary_lsa.summary_lsa,
            is_container="container",
            yang_name="summary-lsa",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

    def _get_as_external_lsa(self):
        """
        Getter method for as_external_lsa, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/as_external_lsa (container)

        YANG Description: Contents of the AS External LSA
        """
        return self.__as_external_lsa

    def _set_as_external_lsa(self, v, load=False):
        """
        Setter method for as_external_lsa, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/as_external_lsa (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_as_external_lsa is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_as_external_lsa() directly.

        YANG Description: Contents of the AS External LSA
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=as_external_lsa.as_external_lsa,
                is_container="container",
                yang_name="as-external-lsa",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """as_external_lsa must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=as_external_lsa.as_external_lsa, is_container='container', yang_name="as-external-lsa", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )

        self.__as_external_lsa = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_as_external_lsa(self):
        self.__as_external_lsa = YANGDynClass(
            base=as_external_lsa.as_external_lsa,
            is_container="container",
            yang_name="as-external-lsa",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

    def _get_nssa_external_lsa(self):
        """
        Getter method for nssa_external_lsa, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/nssa_external_lsa (container)

        YANG Description: Contents of the NSSA External LSA
        """
        return self.__nssa_external_lsa

    def _set_nssa_external_lsa(self, v, load=False):
        """
        Setter method for nssa_external_lsa, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/nssa_external_lsa (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_nssa_external_lsa is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_nssa_external_lsa() directly.

        YANG Description: Contents of the NSSA External LSA
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=nssa_external_lsa.nssa_external_lsa,
                is_container="container",
                yang_name="nssa-external-lsa",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """nssa_external_lsa must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=nssa_external_lsa.nssa_external_lsa, is_container='container', yang_name="nssa-external-lsa", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )

        self.__nssa_external_lsa = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_nssa_external_lsa(self):
        self.__nssa_external_lsa = YANGDynClass(
            base=nssa_external_lsa.nssa_external_lsa,
            is_container="container",
            yang_name="nssa-external-lsa",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

    def _get_opaque_lsa(self):
        """
        Getter method for opaque_lsa, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa (container)

        YANG Description: Contents of the opaque LSA
        """
        return self.__opaque_lsa

    def _set_opaque_lsa(self, v, load=False):
        """
        Setter method for opaque_lsa, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_opaque_lsa is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_opaque_lsa() directly.

        YANG Description: Contents of the opaque LSA
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=opaque_lsa.opaque_lsa,
                is_container="container",
                yang_name="opaque-lsa",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """opaque_lsa must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=opaque_lsa.opaque_lsa, is_container='container', yang_name="opaque-lsa", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )

        self.__opaque_lsa = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_opaque_lsa(self):
        self.__opaque_lsa = YANGDynClass(
            base=opaque_lsa.opaque_lsa,
            is_container="container",
            yang_name="opaque-lsa",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

    link_state_id = __builtin__.property(_get_link_state_id)
    state = __builtin__.property(_get_state)
    router_lsa = __builtin__.property(_get_router_lsa)
    network_lsa = __builtin__.property(_get_network_lsa)
    summary_lsa = __builtin__.property(_get_summary_lsa)
    as_external_lsa = __builtin__.property(_get_as_external_lsa)
    nssa_external_lsa = __builtin__.property(_get_nssa_external_lsa)
    opaque_lsa = __builtin__.property(_get_opaque_lsa)

    _pyangbind_elements = OrderedDict(
        [
            ("link_state_id", link_state_id),
            ("state", state),
            ("router_lsa", router_lsa),
            ("network_lsa", network_lsa),
            ("summary_lsa", summary_lsa),
            ("as_external_lsa", as_external_lsa),
            ("nssa_external_lsa", nssa_external_lsa),
            ("opaque_lsa", opaque_lsa),
        ]
    )
from . import state
from . import router_lsa
from . import network_lsa
from . import summary_lsa
from . import as_external_lsa
from . import nssa_external_lsa
from . import opaque_lsa
class lsa(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa-types/lsa-type/lsas/lsa. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: List of the LSAs of a specified type in the
LSDB for the specified area
"""
__slots__ = (
"_path_helper",
"_extmethods",
"__link_state_id",
"__state",
"__router_lsa",
"__network_lsa",
"__summary_lsa",
"__as_external_lsa",
"__nssa_external_lsa",
"__opaque_lsa",
)
_yang_name = "lsa"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__link_state_id = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="link-state-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=False,
)
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__router_lsa = YANGDynClass(
base=router_lsa.router_lsa,
is_container="container",
yang_name="router-lsa",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__network_lsa = YANGDynClass(
base=network_lsa.network_lsa,
is_container="container",
yang_name="network-lsa",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__summary_lsa = YANGDynClass(
base=summary_lsa.summary_lsa,
is_container="container",
yang_name="summary-lsa",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__as_external_lsa = YANGDynClass(
base=as_external_lsa.as_external_lsa,
is_container="container",
yang_name="as-external-lsa",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__nssa_external_lsa = YANGDynClass(
base=nssa_external_lsa.nssa_external_lsa,
is_container="container",
yang_name="nssa-external-lsa",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__opaque_lsa = YANGDynClass(
base=opaque_lsa.opaque_lsa,
is_container="container",
yang_name="opaque-lsa",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"ospfv2",
"areas",
"area",
"lsdb",
"lsa-types",
"lsa-type",
"lsas",
"lsa",
]
def _get_link_state_id(self):
"""
Getter method for link_state_id, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/link_state_id (leafref)
YANG Description: Reference to the Link State ID of the LSA
"""
return self.__link_state_id
def _set_link_state_id(self, v, load=False):
"""
Setter method for link_state_id, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/link_state_id (leafref)
If this variable is read-only (config: false) in the
source YANG file, then _set_link_state_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_link_state_id() directly.
YANG Description: Reference to the Link State ID of the LSA
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError(
"Cannot set keys directly when" + " within an instantiated list"
)
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=six.text_type,
is_leaf=True,
yang_name="link-state-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """link_state_id must be of a type compatible with leafref""",
"defined-type": "leafref",
"generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="link-state-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=False)""",
}
)
self.__link_state_id = t
if hasattr(self, "_set"):
self._set()
def _unset_link_state_id(self):
self.__link_state_id = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="link-state-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=False,
)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/state (container)
YANG Description: Operational state parameters relating to all
LSA types
"""
return self.__state
    def _set_state(self, v, load=False):
        """
        Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/state (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_state is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_state() directly.
        YANG Description: Operational state parameters relating to all
        LSA types
        """
        # Unwrap pyangbind "unified type" wrappers to the raw value first.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        # Validate/coerce by wrapping in YANGDynClass; this also registers
        # the node's path so it participates in the data tree.
        try:
            t = YANGDynClass(
                v,
                base=state.state,
                is_container="container",
                yang_name="state",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        # On type mismatch, raise a structured error whose "generated-type"
        # text mirrors the YANGDynClass call above.
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """state must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )
        self.__state = t
        # Propagate the change upwards if a change hook is registered.
        if hasattr(self, "_set"):
            self._set()
def _unset_state(self):
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
    def _get_router_lsa(self):
        """
        Getter method for router_lsa, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/router_lsa (container)
        YANG Description: Contents of the router LSA
        """
        # Value lives in a name-mangled private slot managed by _set_router_lsa().
        return self.__router_lsa
    def _set_router_lsa(self, v, load=False):
        """
        Setter method for router_lsa, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/router_lsa (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_router_lsa is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_router_lsa() directly.
        YANG Description: Contents of the router LSA
        """
        # Unwrap pyangbind "unified type" wrappers to the raw value first.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        # Validate/coerce by wrapping in YANGDynClass; this also registers
        # the node's path so it participates in the data tree.
        try:
            t = YANGDynClass(
                v,
                base=router_lsa.router_lsa,
                is_container="container",
                yang_name="router-lsa",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        # On type mismatch, raise a structured error whose "generated-type"
        # text mirrors the YANGDynClass call above.
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """router_lsa must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=router_lsa.router_lsa, is_container='container', yang_name="router-lsa", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )
        self.__router_lsa = t
        # Propagate the change upwards if a change hook is registered.
        if hasattr(self, "_set"):
            self._set()
def _unset_router_lsa(self):
self.__router_lsa = YANGDynClass(
base=router_lsa.router_lsa,
is_container="container",
yang_name="router-lsa",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
    def _get_network_lsa(self):
        """
        Getter method for network_lsa, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/network_lsa (container)
        YANG Description: Contents of the network LSA
        """
        # Value lives in a name-mangled private slot managed by _set_network_lsa().
        return self.__network_lsa
    def _set_network_lsa(self, v, load=False):
        """
        Setter method for network_lsa, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/network_lsa (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_network_lsa is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_network_lsa() directly.
        YANG Description: Contents of the network LSA
        """
        # Unwrap pyangbind "unified type" wrappers to the raw value first.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        # Validate/coerce by wrapping in YANGDynClass; this also registers
        # the node's path so it participates in the data tree.
        try:
            t = YANGDynClass(
                v,
                base=network_lsa.network_lsa,
                is_container="container",
                yang_name="network-lsa",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        # On type mismatch, raise a structured error whose "generated-type"
        # text mirrors the YANGDynClass call above.
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """network_lsa must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=network_lsa.network_lsa, is_container='container', yang_name="network-lsa", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )
        self.__network_lsa = t
        # Propagate the change upwards if a change hook is registered.
        if hasattr(self, "_set"):
            self._set()
def _unset_network_lsa(self):
self.__network_lsa = YANGDynClass(
base=network_lsa.network_lsa,
is_container="container",
yang_name="network-lsa",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
    def _get_summary_lsa(self):
        """
        Getter method for summary_lsa, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/summary_lsa (container)
        YANG Description: Contents of the summary LSA
        """
        # Value lives in a name-mangled private slot managed by _set_summary_lsa().
        return self.__summary_lsa
    def _set_summary_lsa(self, v, load=False):
        """
        Setter method for summary_lsa, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/summary_lsa (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_summary_lsa is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_summary_lsa() directly.
        YANG Description: Contents of the summary LSA
        """
        # Unwrap pyangbind "unified type" wrappers to the raw value first.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        # Validate/coerce by wrapping in YANGDynClass; this also registers
        # the node's path so it participates in the data tree.
        try:
            t = YANGDynClass(
                v,
                base=summary_lsa.summary_lsa,
                is_container="container",
                yang_name="summary-lsa",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        # On type mismatch, raise a structured error whose "generated-type"
        # text mirrors the YANGDynClass call above.
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """summary_lsa must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=summary_lsa.summary_lsa, is_container='container', yang_name="summary-lsa", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )
        self.__summary_lsa = t
        # Propagate the change upwards if a change hook is registered.
        if hasattr(self, "_set"):
            self._set()
def _unset_summary_lsa(self):
self.__summary_lsa = YANGDynClass(
base=summary_lsa.summary_lsa,
is_container="container",
yang_name="summary-lsa",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
    def _get_as_external_lsa(self):
        """
        Getter method for as_external_lsa, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/as_external_lsa (container)
        YANG Description: Contents of the AS External LSA
        """
        # Value lives in a name-mangled private slot managed by _set_as_external_lsa().
        return self.__as_external_lsa
    def _set_as_external_lsa(self, v, load=False):
        """
        Setter method for as_external_lsa, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/as_external_lsa (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_as_external_lsa is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_as_external_lsa() directly.
        YANG Description: Contents of the AS External LSA
        """
        # Unwrap pyangbind "unified type" wrappers to the raw value first.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        # Validate/coerce by wrapping in YANGDynClass; this also registers
        # the node's path so it participates in the data tree.
        try:
            t = YANGDynClass(
                v,
                base=as_external_lsa.as_external_lsa,
                is_container="container",
                yang_name="as-external-lsa",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        # On type mismatch, raise a structured error whose "generated-type"
        # text mirrors the YANGDynClass call above.
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """as_external_lsa must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=as_external_lsa.as_external_lsa, is_container='container', yang_name="as-external-lsa", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )
        self.__as_external_lsa = t
        # Propagate the change upwards if a change hook is registered.
        if hasattr(self, "_set"):
            self._set()
def _unset_as_external_lsa(self):
self.__as_external_lsa = YANGDynClass(
base=as_external_lsa.as_external_lsa,
is_container="container",
yang_name="as-external-lsa",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
    def _get_nssa_external_lsa(self):
        """
        Getter method for nssa_external_lsa, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/nssa_external_lsa (container)
        YANG Description: Contents of the NSSA External LSA
        """
        # Value lives in a name-mangled private slot managed by _set_nssa_external_lsa().
        return self.__nssa_external_lsa
    def _set_nssa_external_lsa(self, v, load=False):
        """
        Setter method for nssa_external_lsa, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/nssa_external_lsa (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_nssa_external_lsa is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_nssa_external_lsa() directly.
        YANG Description: Contents of the NSSA External LSA
        """
        # Unwrap pyangbind "unified type" wrappers to the raw value first.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        # Validate/coerce by wrapping in YANGDynClass; this also registers
        # the node's path so it participates in the data tree.
        try:
            t = YANGDynClass(
                v,
                base=nssa_external_lsa.nssa_external_lsa,
                is_container="container",
                yang_name="nssa-external-lsa",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        # On type mismatch, raise a structured error whose "generated-type"
        # text mirrors the YANGDynClass call above.
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """nssa_external_lsa must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=nssa_external_lsa.nssa_external_lsa, is_container='container', yang_name="nssa-external-lsa", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )
        self.__nssa_external_lsa = t
        # Propagate the change upwards if a change hook is registered.
        if hasattr(self, "_set"):
            self._set()
def _unset_nssa_external_lsa(self):
self.__nssa_external_lsa = YANGDynClass(
base=nssa_external_lsa.nssa_external_lsa,
is_container="container",
yang_name="nssa-external-lsa",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
    def _get_opaque_lsa(self):
        """
        Getter method for opaque_lsa, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa (container)
        YANG Description: Contents of the opaque LSA
        """
        # Value lives in a name-mangled private slot managed by _set_opaque_lsa().
        return self.__opaque_lsa
    def _set_opaque_lsa(self, v, load=False):
        """
        Setter method for opaque_lsa, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_opaque_lsa is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_opaque_lsa() directly.
        YANG Description: Contents of the opaque LSA
        """
        # Unwrap pyangbind "unified type" wrappers to the raw value first.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        # Validate/coerce by wrapping in YANGDynClass; this also registers
        # the node's path so it participates in the data tree.
        try:
            t = YANGDynClass(
                v,
                base=opaque_lsa.opaque_lsa,
                is_container="container",
                yang_name="opaque-lsa",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        # On type mismatch, raise a structured error whose "generated-type"
        # text mirrors the YANGDynClass call above.
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """opaque_lsa must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=opaque_lsa.opaque_lsa, is_container='container', yang_name="opaque-lsa", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )
        self.__opaque_lsa = t
        # Propagate the change upwards if a change hook is registered.
        if hasattr(self, "_set"):
            self._set()
def _unset_opaque_lsa(self):
self.__opaque_lsa = YANGDynClass(
base=opaque_lsa.opaque_lsa,
is_container="container",
yang_name="opaque-lsa",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
link_state_id = __builtin__.property(_get_link_state_id)
state = __builtin__.property(_get_state)
router_lsa = __builtin__.property(_get_router_lsa)
network_lsa = __builtin__.property(_get_network_lsa)
summary_lsa = __builtin__.property(_get_summary_lsa)
as_external_lsa = __builtin__.property(_get_as_external_lsa)
nssa_external_lsa = __builtin__.property(_get_nssa_external_lsa)
opaque_lsa = __builtin__.property(_get_opaque_lsa)
_pyangbind_elements = OrderedDict(
[
("link_state_id", link_state_id),
("state", state),
("router_lsa", router_lsa),
("network_lsa", network_lsa),
("summary_lsa", summary_lsa),
("as_external_lsa", as_external_lsa),
("nssa_external_lsa", nssa_external_lsa),
("opaque_lsa", opaque_lsa),
]
)
| 41.35582
| 411
| 0.611099
| 6,828
| 62,530
| 5.322203
| 0.029145
| 0.06852
| 0.050083
| 0.056302
| 0.989681
| 0.983709
| 0.983709
| 0.983709
| 0.983709
| 0.983709
| 0
| 0.001223
| 0.293907
| 62,530
| 1,511
| 412
| 41.38319
| 0.821843
| 0.199824
| 0
| 0.892308
| 0
| 0.013675
| 0.265074
| 0.094112
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044444
| false
| 0
| 0.024786
| 0
| 0.108547
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
25037e84ed23e71963e5bef603a9efa45be1675a
| 30,042
|
py
|
Python
|
sdk/python/pulumi_akamai/trafficmanagement/gtm_datacenter.py
|
pulumi/pulumi-akamai
|
85f933ccf2f61738b3074a13fa718132280f8364
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2021-01-21T15:22:12.000Z
|
2021-08-25T14:15:29.000Z
|
sdk/python/pulumi_akamai/trafficmanagement/gtm_datacenter.py
|
pulumi/pulumi-akamai
|
85f933ccf2f61738b3074a13fa718132280f8364
|
[
"ECL-2.0",
"Apache-2.0"
] | 59
|
2020-08-13T14:39:36.000Z
|
2022-03-31T15:19:48.000Z
|
sdk/python/pulumi_akamai/trafficmanagement/gtm_datacenter.py
|
pulumi/pulumi-akamai
|
85f933ccf2f61738b3074a13fa718132280f8364
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['GtmDatacenterArgs', 'GtmDatacenter']
@pulumi.input_type
class GtmDatacenterArgs:
    # Input-argument bag for creating a GtmDatacenter resource. All
    # properties except ``domain`` are optional; getters/setters delegate to
    # pulumi.get/pulumi.set so the Pulumi runtime can track the values.
    def __init__(__self__, *,
                 domain: pulumi.Input[str],
                 city: Optional[pulumi.Input[str]] = None,
                 clone_of: Optional[pulumi.Input[int]] = None,
                 cloud_server_host_header_override: Optional[pulumi.Input[bool]] = None,
                 cloud_server_targeting: Optional[pulumi.Input[bool]] = None,
                 continent: Optional[pulumi.Input[str]] = None,
                 country: Optional[pulumi.Input[str]] = None,
                 default_load_object: Optional[pulumi.Input['GtmDatacenterDefaultLoadObjectArgs']] = None,
                 latitude: Optional[pulumi.Input[float]] = None,
                 longitude: Optional[pulumi.Input[float]] = None,
                 nickname: Optional[pulumi.Input[str]] = None,
                 state_or_province: Optional[pulumi.Input[str]] = None,
                 wait_on_complete: Optional[pulumi.Input[bool]] = None):
        """
        The set of arguments for constructing a GtmDatacenter resource.

        Only ``domain`` is required; every other argument is stored only
        when a non-None value is supplied.
        """
        pulumi.set(__self__, "domain", domain)
        if city is not None:
            pulumi.set(__self__, "city", city)
        if clone_of is not None:
            pulumi.set(__self__, "clone_of", clone_of)
        if cloud_server_host_header_override is not None:
            pulumi.set(__self__, "cloud_server_host_header_override", cloud_server_host_header_override)
        if cloud_server_targeting is not None:
            pulumi.set(__self__, "cloud_server_targeting", cloud_server_targeting)
        if continent is not None:
            pulumi.set(__self__, "continent", continent)
        if country is not None:
            pulumi.set(__self__, "country", country)
        if default_load_object is not None:
            pulumi.set(__self__, "default_load_object", default_load_object)
        if latitude is not None:
            pulumi.set(__self__, "latitude", latitude)
        if longitude is not None:
            pulumi.set(__self__, "longitude", longitude)
        if nickname is not None:
            pulumi.set(__self__, "nickname", nickname)
        if state_or_province is not None:
            pulumi.set(__self__, "state_or_province", state_or_province)
        if wait_on_complete is not None:
            pulumi.set(__self__, "wait_on_complete", wait_on_complete)
    @property
    @pulumi.getter
    def domain(self) -> pulumi.Input[str]:
        """The ``domain`` input (required)."""
        return pulumi.get(self, "domain")
    @domain.setter
    def domain(self, value: pulumi.Input[str]):
        pulumi.set(self, "domain", value)
    @property
    @pulumi.getter
    def city(self) -> Optional[pulumi.Input[str]]:
        """The ``city`` input, if set."""
        return pulumi.get(self, "city")
    @city.setter
    def city(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "city", value)
    @property
    @pulumi.getter(name="cloneOf")
    def clone_of(self) -> Optional[pulumi.Input[int]]:
        """The ``clone_of`` input (wire name ``cloneOf``), if set."""
        return pulumi.get(self, "clone_of")
    @clone_of.setter
    def clone_of(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "clone_of", value)
    @property
    @pulumi.getter(name="cloudServerHostHeaderOverride")
    def cloud_server_host_header_override(self) -> Optional[pulumi.Input[bool]]:
        """The ``cloud_server_host_header_override`` input, if set."""
        return pulumi.get(self, "cloud_server_host_header_override")
    @cloud_server_host_header_override.setter
    def cloud_server_host_header_override(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "cloud_server_host_header_override", value)
    @property
    @pulumi.getter(name="cloudServerTargeting")
    def cloud_server_targeting(self) -> Optional[pulumi.Input[bool]]:
        """The ``cloud_server_targeting`` input, if set."""
        return pulumi.get(self, "cloud_server_targeting")
    @cloud_server_targeting.setter
    def cloud_server_targeting(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "cloud_server_targeting", value)
    @property
    @pulumi.getter
    def continent(self) -> Optional[pulumi.Input[str]]:
        """The ``continent`` input, if set."""
        return pulumi.get(self, "continent")
    @continent.setter
    def continent(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "continent", value)
    @property
    @pulumi.getter
    def country(self) -> Optional[pulumi.Input[str]]:
        """The ``country`` input, if set."""
        return pulumi.get(self, "country")
    @country.setter
    def country(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "country", value)
    @property
    @pulumi.getter(name="defaultLoadObject")
    def default_load_object(self) -> Optional[pulumi.Input['GtmDatacenterDefaultLoadObjectArgs']]:
        """The ``default_load_object`` input, if set."""
        return pulumi.get(self, "default_load_object")
    @default_load_object.setter
    def default_load_object(self, value: Optional[pulumi.Input['GtmDatacenterDefaultLoadObjectArgs']]):
        pulumi.set(self, "default_load_object", value)
    @property
    @pulumi.getter
    def latitude(self) -> Optional[pulumi.Input[float]]:
        """The ``latitude`` input, if set."""
        return pulumi.get(self, "latitude")
    @latitude.setter
    def latitude(self, value: Optional[pulumi.Input[float]]):
        pulumi.set(self, "latitude", value)
    @property
    @pulumi.getter
    def longitude(self) -> Optional[pulumi.Input[float]]:
        """The ``longitude`` input, if set."""
        return pulumi.get(self, "longitude")
    @longitude.setter
    def longitude(self, value: Optional[pulumi.Input[float]]):
        pulumi.set(self, "longitude", value)
    @property
    @pulumi.getter
    def nickname(self) -> Optional[pulumi.Input[str]]:
        """The ``nickname`` input, if set."""
        return pulumi.get(self, "nickname")
    @nickname.setter
    def nickname(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "nickname", value)
    @property
    @pulumi.getter(name="stateOrProvince")
    def state_or_province(self) -> Optional[pulumi.Input[str]]:
        """The ``state_or_province`` input (wire name ``stateOrProvince``), if set."""
        return pulumi.get(self, "state_or_province")
    @state_or_province.setter
    def state_or_province(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "state_or_province", value)
    @property
    @pulumi.getter(name="waitOnComplete")
    def wait_on_complete(self) -> Optional[pulumi.Input[bool]]:
        """The ``wait_on_complete`` input (wire name ``waitOnComplete``), if set."""
        return pulumi.get(self, "wait_on_complete")
    @wait_on_complete.setter
    def wait_on_complete(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "wait_on_complete", value)
@pulumi.input_type
class _GtmDatacenterState:
    # State bag used when looking up / filtering existing GtmDatacenter
    # resources. All properties are optional; unlike GtmDatacenterArgs it
    # also carries provider-populated fields (datacenter_id, ping_*,
    # score_penalty, servermonitor_*, virtual).
    def __init__(__self__, *,
                 city: Optional[pulumi.Input[str]] = None,
                 clone_of: Optional[pulumi.Input[int]] = None,
                 cloud_server_host_header_override: Optional[pulumi.Input[bool]] = None,
                 cloud_server_targeting: Optional[pulumi.Input[bool]] = None,
                 continent: Optional[pulumi.Input[str]] = None,
                 country: Optional[pulumi.Input[str]] = None,
                 datacenter_id: Optional[pulumi.Input[int]] = None,
                 default_load_object: Optional[pulumi.Input['GtmDatacenterDefaultLoadObjectArgs']] = None,
                 domain: Optional[pulumi.Input[str]] = None,
                 latitude: Optional[pulumi.Input[float]] = None,
                 longitude: Optional[pulumi.Input[float]] = None,
                 nickname: Optional[pulumi.Input[str]] = None,
                 ping_interval: Optional[pulumi.Input[int]] = None,
                 ping_packet_size: Optional[pulumi.Input[int]] = None,
                 score_penalty: Optional[pulumi.Input[int]] = None,
                 servermonitor_liveness_count: Optional[pulumi.Input[int]] = None,
                 servermonitor_load_count: Optional[pulumi.Input[int]] = None,
                 servermonitor_pool: Optional[pulumi.Input[str]] = None,
                 state_or_province: Optional[pulumi.Input[str]] = None,
                 virtual: Optional[pulumi.Input[bool]] = None,
                 wait_on_complete: Optional[pulumi.Input[bool]] = None):
        """
        Input properties used for looking up and filtering GtmDatacenter resources.

        Every argument is optional; a value is stored only when it is not None.
        """
        if city is not None:
            pulumi.set(__self__, "city", city)
        if clone_of is not None:
            pulumi.set(__self__, "clone_of", clone_of)
        if cloud_server_host_header_override is not None:
            pulumi.set(__self__, "cloud_server_host_header_override", cloud_server_host_header_override)
        if cloud_server_targeting is not None:
            pulumi.set(__self__, "cloud_server_targeting", cloud_server_targeting)
        if continent is not None:
            pulumi.set(__self__, "continent", continent)
        if country is not None:
            pulumi.set(__self__, "country", country)
        if datacenter_id is not None:
            pulumi.set(__self__, "datacenter_id", datacenter_id)
        if default_load_object is not None:
            pulumi.set(__self__, "default_load_object", default_load_object)
        if domain is not None:
            pulumi.set(__self__, "domain", domain)
        if latitude is not None:
            pulumi.set(__self__, "latitude", latitude)
        if longitude is not None:
            pulumi.set(__self__, "longitude", longitude)
        if nickname is not None:
            pulumi.set(__self__, "nickname", nickname)
        if ping_interval is not None:
            pulumi.set(__self__, "ping_interval", ping_interval)
        if ping_packet_size is not None:
            pulumi.set(__self__, "ping_packet_size", ping_packet_size)
        if score_penalty is not None:
            pulumi.set(__self__, "score_penalty", score_penalty)
        if servermonitor_liveness_count is not None:
            pulumi.set(__self__, "servermonitor_liveness_count", servermonitor_liveness_count)
        if servermonitor_load_count is not None:
            pulumi.set(__self__, "servermonitor_load_count", servermonitor_load_count)
        if servermonitor_pool is not None:
            pulumi.set(__self__, "servermonitor_pool", servermonitor_pool)
        if state_or_province is not None:
            pulumi.set(__self__, "state_or_province", state_or_province)
        if virtual is not None:
            pulumi.set(__self__, "virtual", virtual)
        if wait_on_complete is not None:
            pulumi.set(__self__, "wait_on_complete", wait_on_complete)
    @property
    @pulumi.getter
    def city(self) -> Optional[pulumi.Input[str]]:
        """The ``city`` state value, if set."""
        return pulumi.get(self, "city")
    @city.setter
    def city(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "city", value)
    @property
    @pulumi.getter(name="cloneOf")
    def clone_of(self) -> Optional[pulumi.Input[int]]:
        """The ``clone_of`` state value (wire name ``cloneOf``), if set."""
        return pulumi.get(self, "clone_of")
    @clone_of.setter
    def clone_of(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "clone_of", value)
    @property
    @pulumi.getter(name="cloudServerHostHeaderOverride")
    def cloud_server_host_header_override(self) -> Optional[pulumi.Input[bool]]:
        """The ``cloud_server_host_header_override`` state value, if set."""
        return pulumi.get(self, "cloud_server_host_header_override")
    @cloud_server_host_header_override.setter
    def cloud_server_host_header_override(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "cloud_server_host_header_override", value)
    @property
    @pulumi.getter(name="cloudServerTargeting")
    def cloud_server_targeting(self) -> Optional[pulumi.Input[bool]]:
        """The ``cloud_server_targeting`` state value, if set."""
        return pulumi.get(self, "cloud_server_targeting")
    @cloud_server_targeting.setter
    def cloud_server_targeting(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "cloud_server_targeting", value)
    @property
    @pulumi.getter
    def continent(self) -> Optional[pulumi.Input[str]]:
        """The ``continent`` state value, if set."""
        return pulumi.get(self, "continent")
    @continent.setter
    def continent(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "continent", value)
    @property
    @pulumi.getter
    def country(self) -> Optional[pulumi.Input[str]]:
        """The ``country`` state value, if set."""
        return pulumi.get(self, "country")
    @country.setter
    def country(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "country", value)
    @property
    @pulumi.getter(name="datacenterId")
    def datacenter_id(self) -> Optional[pulumi.Input[int]]:
        """The ``datacenter_id`` state value (wire name ``datacenterId``), if set."""
        return pulumi.get(self, "datacenter_id")
    @datacenter_id.setter
    def datacenter_id(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "datacenter_id", value)
    @property
    @pulumi.getter(name="defaultLoadObject")
    def default_load_object(self) -> Optional[pulumi.Input['GtmDatacenterDefaultLoadObjectArgs']]:
        """The ``default_load_object`` state value, if set."""
        return pulumi.get(self, "default_load_object")
    @default_load_object.setter
    def default_load_object(self, value: Optional[pulumi.Input['GtmDatacenterDefaultLoadObjectArgs']]):
        pulumi.set(self, "default_load_object", value)
    @property
    @pulumi.getter
    def domain(self) -> Optional[pulumi.Input[str]]:
        """The ``domain`` state value, if set."""
        return pulumi.get(self, "domain")
    @domain.setter
    def domain(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "domain", value)
    @property
    @pulumi.getter
    def latitude(self) -> Optional[pulumi.Input[float]]:
        """The ``latitude`` state value, if set."""
        return pulumi.get(self, "latitude")
    @latitude.setter
    def latitude(self, value: Optional[pulumi.Input[float]]):
        pulumi.set(self, "latitude", value)
    @property
    @pulumi.getter
    def longitude(self) -> Optional[pulumi.Input[float]]:
        """The ``longitude`` state value, if set."""
        return pulumi.get(self, "longitude")
    @longitude.setter
    def longitude(self, value: Optional[pulumi.Input[float]]):
        pulumi.set(self, "longitude", value)
    @property
    @pulumi.getter
    def nickname(self) -> Optional[pulumi.Input[str]]:
        """The ``nickname`` state value, if set."""
        return pulumi.get(self, "nickname")
    @nickname.setter
    def nickname(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "nickname", value)
    @property
    @pulumi.getter(name="pingInterval")
    def ping_interval(self) -> Optional[pulumi.Input[int]]:
        """The ``ping_interval`` state value (wire name ``pingInterval``), if set."""
        return pulumi.get(self, "ping_interval")
    @ping_interval.setter
    def ping_interval(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "ping_interval", value)
    @property
    @pulumi.getter(name="pingPacketSize")
    def ping_packet_size(self) -> Optional[pulumi.Input[int]]:
        """The ``ping_packet_size`` state value (wire name ``pingPacketSize``), if set."""
        return pulumi.get(self, "ping_packet_size")
    @ping_packet_size.setter
    def ping_packet_size(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "ping_packet_size", value)
    @property
    @pulumi.getter(name="scorePenalty")
    def score_penalty(self) -> Optional[pulumi.Input[int]]:
        """The ``score_penalty`` state value (wire name ``scorePenalty``), if set."""
        return pulumi.get(self, "score_penalty")
    @score_penalty.setter
    def score_penalty(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "score_penalty", value)
    @property
    @pulumi.getter(name="servermonitorLivenessCount")
    def servermonitor_liveness_count(self) -> Optional[pulumi.Input[int]]:
        """The ``servermonitor_liveness_count`` state value, if set."""
        return pulumi.get(self, "servermonitor_liveness_count")
    @servermonitor_liveness_count.setter
    def servermonitor_liveness_count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "servermonitor_liveness_count", value)
    @property
    @pulumi.getter(name="servermonitorLoadCount")
    def servermonitor_load_count(self) -> Optional[pulumi.Input[int]]:
        """The ``servermonitor_load_count`` state value, if set."""
        return pulumi.get(self, "servermonitor_load_count")
    @servermonitor_load_count.setter
    def servermonitor_load_count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "servermonitor_load_count", value)
    @property
    @pulumi.getter(name="servermonitorPool")
    def servermonitor_pool(self) -> Optional[pulumi.Input[str]]:
        """The ``servermonitor_pool`` state value (wire name ``servermonitorPool``), if set."""
        return pulumi.get(self, "servermonitor_pool")
    @servermonitor_pool.setter
    def servermonitor_pool(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "servermonitor_pool", value)
    @property
    @pulumi.getter(name="stateOrProvince")
    def state_or_province(self) -> Optional[pulumi.Input[str]]:
        """The ``state_or_province`` state value (wire name ``stateOrProvince``), if set."""
        return pulumi.get(self, "state_or_province")
    @state_or_province.setter
    def state_or_province(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "state_or_province", value)
    @property
    @pulumi.getter
    def virtual(self) -> Optional[pulumi.Input[bool]]:
        """The ``virtual`` state value, if set."""
        return pulumi.get(self, "virtual")
    @virtual.setter
    def virtual(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "virtual", value)
    @property
    @pulumi.getter(name="waitOnComplete")
    def wait_on_complete(self) -> Optional[pulumi.Input[bool]]:
        """The ``wait_on_complete`` state value (wire name ``waitOnComplete``), if set."""
        return pulumi.get(self, "wait_on_complete")
    @wait_on_complete.setter
    def wait_on_complete(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "wait_on_complete", value)
# Emitted once at import time: this module path is a deprecated alias.
warnings.warn("""akamai.trafficmanagement.GtmDatacenter has been deprecated in favor of akamai.GtmDatacenter""", DeprecationWarning)
class GtmDatacenter(pulumi.CustomResource):
    """Deprecated alias of ``akamai.GtmDatacenter``.

    Auto-generated provider resource class. The ``warnings.warn`` call in the
    class body runs when the class body is executed, i.e. once at import time;
    ``pulumi.log.warn`` in ``_internal_init`` additionally fires on every
    instantiation.
    """
    warnings.warn("""akamai.trafficmanagement.GtmDatacenter has been deprecated in favor of akamai.GtmDatacenter""", DeprecationWarning)

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 city: Optional[pulumi.Input[str]] = None,
                 clone_of: Optional[pulumi.Input[int]] = None,
                 cloud_server_host_header_override: Optional[pulumi.Input[bool]] = None,
                 cloud_server_targeting: Optional[pulumi.Input[bool]] = None,
                 continent: Optional[pulumi.Input[str]] = None,
                 country: Optional[pulumi.Input[str]] = None,
                 default_load_object: Optional[pulumi.Input[pulumi.InputType['GtmDatacenterDefaultLoadObjectArgs']]] = None,
                 domain: Optional[pulumi.Input[str]] = None,
                 latitude: Optional[pulumi.Input[float]] = None,
                 longitude: Optional[pulumi.Input[float]] = None,
                 nickname: Optional[pulumi.Input[str]] = None,
                 state_or_province: Optional[pulumi.Input[str]] = None,
                 wait_on_complete: Optional[pulumi.Input[bool]] = None,
                 __props__=None):
        """
        Create a GtmDatacenter resource with the given unique name, props, and options.
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: GtmDatacenterArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Create a GtmDatacenter resource with the given unique name, props, and options.
        :param str resource_name: The name of the resource.
        :param GtmDatacenterArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: either a single
        # GtmDatacenterArgs bag or individual keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(GtmDatacenterArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       city: Optional[pulumi.Input[str]] = None,
                       clone_of: Optional[pulumi.Input[int]] = None,
                       cloud_server_host_header_override: Optional[pulumi.Input[bool]] = None,
                       cloud_server_targeting: Optional[pulumi.Input[bool]] = None,
                       continent: Optional[pulumi.Input[str]] = None,
                       country: Optional[pulumi.Input[str]] = None,
                       default_load_object: Optional[pulumi.Input[pulumi.InputType['GtmDatacenterDefaultLoadObjectArgs']]] = None,
                       domain: Optional[pulumi.Input[str]] = None,
                       latitude: Optional[pulumi.Input[float]] = None,
                       longitude: Optional[pulumi.Input[float]] = None,
                       nickname: Optional[pulumi.Input[str]] = None,
                       state_or_province: Optional[pulumi.Input[str]] = None,
                       wait_on_complete: Optional[pulumi.Input[bool]] = None,
                       __props__=None):
        # Per-instantiation deprecation notice (distinct from the import-time one).
        pulumi.log.warn("""GtmDatacenter is deprecated: akamai.trafficmanagement.GtmDatacenter has been deprecated in favor of akamai.GtmDatacenter""")
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            # NOTE: mutates the caller-supplied opts; standard generated pattern.
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (no engine id given): build the argument
            # bag from the individual keyword arguments.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = GtmDatacenterArgs.__new__(GtmDatacenterArgs)
            __props__.__dict__["city"] = city
            __props__.__dict__["clone_of"] = clone_of
            __props__.__dict__["cloud_server_host_header_override"] = cloud_server_host_header_override
            __props__.__dict__["cloud_server_targeting"] = cloud_server_targeting
            __props__.__dict__["continent"] = continent
            __props__.__dict__["country"] = country
            __props__.__dict__["default_load_object"] = default_load_object
            # 'domain' is the only required input; it may be omitted only when
            # the resource is being rehydrated from an existing URN.
            if domain is None and not opts.urn:
                raise TypeError("Missing required property 'domain'")
            __props__.__dict__["domain"] = domain
            __props__.__dict__["latitude"] = latitude
            __props__.__dict__["longitude"] = longitude
            __props__.__dict__["nickname"] = nickname
            __props__.__dict__["state_or_province"] = state_or_province
            __props__.__dict__["wait_on_complete"] = wait_on_complete
            # Output-only properties start as None; the engine fills them in.
            __props__.__dict__["datacenter_id"] = None
            __props__.__dict__["ping_interval"] = None
            __props__.__dict__["ping_packet_size"] = None
            __props__.__dict__["score_penalty"] = None
            __props__.__dict__["servermonitor_liveness_count"] = None
            __props__.__dict__["servermonitor_load_count"] = None
            __props__.__dict__["servermonitor_pool"] = None
            __props__.__dict__["virtual"] = None
        super(GtmDatacenter, __self__).__init__(
            'akamai:trafficmanagement/gtmDatacenter:GtmDatacenter',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            city: Optional[pulumi.Input[str]] = None,
            clone_of: Optional[pulumi.Input[int]] = None,
            cloud_server_host_header_override: Optional[pulumi.Input[bool]] = None,
            cloud_server_targeting: Optional[pulumi.Input[bool]] = None,
            continent: Optional[pulumi.Input[str]] = None,
            country: Optional[pulumi.Input[str]] = None,
            datacenter_id: Optional[pulumi.Input[int]] = None,
            default_load_object: Optional[pulumi.Input[pulumi.InputType['GtmDatacenterDefaultLoadObjectArgs']]] = None,
            domain: Optional[pulumi.Input[str]] = None,
            latitude: Optional[pulumi.Input[float]] = None,
            longitude: Optional[pulumi.Input[float]] = None,
            nickname: Optional[pulumi.Input[str]] = None,
            ping_interval: Optional[pulumi.Input[int]] = None,
            ping_packet_size: Optional[pulumi.Input[int]] = None,
            score_penalty: Optional[pulumi.Input[int]] = None,
            servermonitor_liveness_count: Optional[pulumi.Input[int]] = None,
            servermonitor_load_count: Optional[pulumi.Input[int]] = None,
            servermonitor_pool: Optional[pulumi.Input[str]] = None,
            state_or_province: Optional[pulumi.Input[str]] = None,
            virtual: Optional[pulumi.Input[bool]] = None,
            wait_on_complete: Optional[pulumi.Input[bool]] = None) -> 'GtmDatacenter':
        """
        Get an existing GtmDatacenter resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        # Merging the id into opts routes __init__ down the "lookup existing
        # resource" path rather than the "create" path.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _GtmDatacenterState.__new__(_GtmDatacenterState)

        __props__.__dict__["city"] = city
        __props__.__dict__["clone_of"] = clone_of
        __props__.__dict__["cloud_server_host_header_override"] = cloud_server_host_header_override
        __props__.__dict__["cloud_server_targeting"] = cloud_server_targeting
        __props__.__dict__["continent"] = continent
        __props__.__dict__["country"] = country
        __props__.__dict__["datacenter_id"] = datacenter_id
        __props__.__dict__["default_load_object"] = default_load_object
        __props__.__dict__["domain"] = domain
        __props__.__dict__["latitude"] = latitude
        __props__.__dict__["longitude"] = longitude
        __props__.__dict__["nickname"] = nickname
        __props__.__dict__["ping_interval"] = ping_interval
        __props__.__dict__["ping_packet_size"] = ping_packet_size
        __props__.__dict__["score_penalty"] = score_penalty
        __props__.__dict__["servermonitor_liveness_count"] = servermonitor_liveness_count
        __props__.__dict__["servermonitor_load_count"] = servermonitor_load_count
        __props__.__dict__["servermonitor_pool"] = servermonitor_pool
        __props__.__dict__["state_or_province"] = state_or_province
        __props__.__dict__["virtual"] = virtual
        __props__.__dict__["wait_on_complete"] = wait_on_complete
        return GtmDatacenter(resource_name, opts=opts, __props__=__props__)

    # ------------------------------------------------------------------
    # Output property accessors: each simply reads the named output from
    # pulumi's resource state.
    # ------------------------------------------------------------------

    @property
    @pulumi.getter
    def city(self) -> pulumi.Output[Optional[str]]:
        return pulumi.get(self, "city")

    @property
    @pulumi.getter(name="cloneOf")
    def clone_of(self) -> pulumi.Output[Optional[int]]:
        return pulumi.get(self, "clone_of")

    @property
    @pulumi.getter(name="cloudServerHostHeaderOverride")
    def cloud_server_host_header_override(self) -> pulumi.Output[Optional[bool]]:
        return pulumi.get(self, "cloud_server_host_header_override")

    @property
    @pulumi.getter(name="cloudServerTargeting")
    def cloud_server_targeting(self) -> pulumi.Output[Optional[bool]]:
        return pulumi.get(self, "cloud_server_targeting")

    @property
    @pulumi.getter
    def continent(self) -> pulumi.Output[Optional[str]]:
        return pulumi.get(self, "continent")

    @property
    @pulumi.getter
    def country(self) -> pulumi.Output[Optional[str]]:
        return pulumi.get(self, "country")

    @property
    @pulumi.getter(name="datacenterId")
    def datacenter_id(self) -> pulumi.Output[int]:
        return pulumi.get(self, "datacenter_id")

    @property
    @pulumi.getter(name="defaultLoadObject")
    def default_load_object(self) -> pulumi.Output[Optional['outputs.GtmDatacenterDefaultLoadObject']]:
        return pulumi.get(self, "default_load_object")

    @property
    @pulumi.getter
    def domain(self) -> pulumi.Output[str]:
        return pulumi.get(self, "domain")

    @property
    @pulumi.getter
    def latitude(self) -> pulumi.Output[Optional[float]]:
        return pulumi.get(self, "latitude")

    @property
    @pulumi.getter
    def longitude(self) -> pulumi.Output[Optional[float]]:
        return pulumi.get(self, "longitude")

    @property
    @pulumi.getter
    def nickname(self) -> pulumi.Output[Optional[str]]:
        return pulumi.get(self, "nickname")

    @property
    @pulumi.getter(name="pingInterval")
    def ping_interval(self) -> pulumi.Output[int]:
        return pulumi.get(self, "ping_interval")

    @property
    @pulumi.getter(name="pingPacketSize")
    def ping_packet_size(self) -> pulumi.Output[int]:
        return pulumi.get(self, "ping_packet_size")

    @property
    @pulumi.getter(name="scorePenalty")
    def score_penalty(self) -> pulumi.Output[int]:
        return pulumi.get(self, "score_penalty")

    @property
    @pulumi.getter(name="servermonitorLivenessCount")
    def servermonitor_liveness_count(self) -> pulumi.Output[int]:
        return pulumi.get(self, "servermonitor_liveness_count")

    @property
    @pulumi.getter(name="servermonitorLoadCount")
    def servermonitor_load_count(self) -> pulumi.Output[int]:
        return pulumi.get(self, "servermonitor_load_count")

    @property
    @pulumi.getter(name="servermonitorPool")
    def servermonitor_pool(self) -> pulumi.Output[str]:
        return pulumi.get(self, "servermonitor_pool")

    @property
    @pulumi.getter(name="stateOrProvince")
    def state_or_province(self) -> pulumi.Output[Optional[str]]:
        return pulumi.get(self, "state_or_province")

    @property
    @pulumi.getter
    def virtual(self) -> pulumi.Output[bool]:
        return pulumi.get(self, "virtual")

    @property
    @pulumi.getter(name="waitOnComplete")
    def wait_on_complete(self) -> pulumi.Output[Optional[bool]]:
        return pulumi.get(self, "wait_on_complete")
| 42.312676
| 151
| 0.664337
| 3,366
| 30,042
| 5.596851
| 0.055556
| 0.089336
| 0.147248
| 0.064228
| 0.872286
| 0.844418
| 0.812039
| 0.784224
| 0.753543
| 0.682096
| 0
| 0.000043
| 0.223021
| 30,042
| 709
| 152
| 42.372355
| 0.807043
| 0.037914
| 0
| 0.763889
| 1
| 0
| 0.130747
| 0.052243
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0.001736
| 0.012153
| 0.095486
| 0.28125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
25082e580fea7531da6990239f4077f3eaf0ce55
| 30,007
|
py
|
Python
|
python/dlxapi/api/plans_api.py
|
dlens/dlxapi
|
189a6519240ce625d7a9cdb89e305a335d2aa045
|
[
"MIT"
] | null | null | null |
python/dlxapi/api/plans_api.py
|
dlens/dlxapi
|
189a6519240ce625d7a9cdb89e305a335d2aa045
|
[
"MIT"
] | 1
|
2020-08-20T17:31:43.000Z
|
2020-08-20T17:31:43.000Z
|
python/dlxapi/api/plans_api.py
|
dlens/dlxapi
|
189a6519240ce625d7a9cdb89e305a335d2aa045
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Decision Lens API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from dlxapi.api_client import ApiClient
class PlansApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def export_whats_in(self, portfolio_id, portfolio_plan_ids, export_format, start_date, end_date, **kwargs): # noqa: E501
"""Export Whats In Comparison # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.export_whats_in(portfolio_id, portfolio_plan_ids, export_format, start_date, end_date, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str portfolio_id: portfolioId (required)
:param str portfolio_plan_ids: portfolioPlanIds to be exported (required)
:param object export_format: export format type (XLSX) (required)
:param int start_date: Start time period for which the grades are computed for. (required)
:param int end_date: End time period for which the grades are computed for. (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.export_whats_in_with_http_info(portfolio_id, portfolio_plan_ids, export_format, start_date, end_date, **kwargs) # noqa: E501
else:
(data) = self.export_whats_in_with_http_info(portfolio_id, portfolio_plan_ids, export_format, start_date, end_date, **kwargs) # noqa: E501
return data
    def export_whats_in_with_http_info(self, portfolio_id, portfolio_plan_ids, export_format, start_date, end_date, **kwargs):  # noqa: E501
        """Export Whats In Comparison  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.export_whats_in_with_http_info(portfolio_id, portfolio_plan_ids, export_format, start_date, end_date, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str portfolio_id: portfolioId (required)
        :param str portfolio_plan_ids: portfolioPlanIds to be exported (required)
        :param object export_format: export format type (XLSX) (required)
        :param int start_date: Start time period for which the grades are computed for. (required)
        :param int end_date: End time period for which the grades are computed for. (required)
        :return: str
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['portfolio_id', 'portfolio_plan_ids', 'export_format', 'start_date', 'end_date']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() captures the named parameters (plus 'kwargs') by name at
        # this point; extra kwargs are validated against all_params, merged
        # in, and the raw 'kwargs' entry is then dropped.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method export_whats_in" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'portfolio_id' is set
        if self.api_client.client_side_validation and ('portfolio_id' not in params or
                                                       params['portfolio_id'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `portfolio_id` when calling `export_whats_in`")  # noqa: E501
        # verify the required parameter 'portfolio_plan_ids' is set
        if self.api_client.client_side_validation and ('portfolio_plan_ids' not in params or
                                                       params['portfolio_plan_ids'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `portfolio_plan_ids` when calling `export_whats_in`")  # noqa: E501
        # verify the required parameter 'export_format' is set
        if self.api_client.client_side_validation and ('export_format' not in params or
                                                       params['export_format'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `export_format` when calling `export_whats_in`")  # noqa: E501
        # verify the required parameter 'start_date' is set
        if self.api_client.client_side_validation and ('start_date' not in params or
                                                       params['start_date'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `start_date` when calling `export_whats_in`")  # noqa: E501
        # verify the required parameter 'end_date' is set
        if self.api_client.client_side_validation and ('end_date' not in params or
                                                       params['end_date'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `end_date` when calling `export_whats_in`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        # All inputs travel as query parameters (snake_case -> camelCase).
        query_params = []
        if 'portfolio_id' in params:
            query_params.append(('portfolioId', params['portfolio_id']))  # noqa: E501
        if 'portfolio_plan_ids' in params:
            query_params.append(('portfolioPlanIds', params['portfolio_plan_ids']))  # noqa: E501
        if 'export_format' in params:
            query_params.append(('exportFormat', params['export_format']))  # noqa: E501
        if 'start_date' in params:
            query_params.append(('startDate', params['start_date']))  # noqa: E501
        if 'end_date' in params:
            query_params.append(('endDate', params['end_date']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`: the endpoint returns an XLSX spreadsheet.
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'])  # noqa: E501

        # Authentication setting
        auth_settings = ['OAuth2']  # noqa: E501

        return self.api_client.call_api(
            '/portfolioPlans/whatsin/export', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='str',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_grades_for_portfolio_plan(self, id, **kwargs): # noqa: E501
"""Retrieves grades for portfolio plan. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_grades_for_portfolio_plan(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Portfolio Plan id (required)
:param int start_date: Start time period for which the grades are computed for.
:param int end_date: End time period for which the grades are computed for.
:param bool generate_report: Generates grades report. Default is false.
:return: PortfolioPlanGrades
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_grades_for_portfolio_plan_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_grades_for_portfolio_plan_with_http_info(id, **kwargs) # noqa: E501
return data
    def get_grades_for_portfolio_plan_with_http_info(self, id, **kwargs):  # noqa: E501
        """Retrieves grades for portfolio plan.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_grades_for_portfolio_plan_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: Portfolio Plan id (required)
        :param int start_date: Start time period for which the grades are computed for.
        :param int end_date: End time period for which the grades are computed for.
        :param bool generate_report: Generates grades report. Default is false.
        :return: PortfolioPlanGrades
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['id', 'start_date', 'end_date', 'generate_report']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() captures the named parameters (plus 'kwargs') by name at
        # this point; extra kwargs are validated against all_params, merged
        # in, and the raw 'kwargs' entry is then dropped.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_grades_for_portfolio_plan" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if self.api_client.client_side_validation and ('id' not in params or
                                                       params['id'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `id` when calling `get_grades_for_portfolio_plan`")  # noqa: E501

        collection_formats = {}

        # 'id' is interpolated into the URL path; dates and the report flag
        # travel as query parameters.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        query_params = []
        if 'start_date' in params:
            query_params.append(('startDate', params['start_date']))  # noqa: E501
        if 'end_date' in params:
            query_params.append(('endDate', params['end_date']))  # noqa: E501
        if 'generate_report' in params:
            query_params.append(('generateReport', params['generate_report']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['OAuth2']  # noqa: E501

        return self.api_client.call_api(
            '/portfolioPlans/{id}/grades', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='PortfolioPlanGrades',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_insights_for_portfolio_plan(self, id, start_date, end_date, **kwargs): # noqa: E501
"""Retrieves insights for portfolio plan. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_insights_for_portfolio_plan(id, start_date, end_date, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Portfolio Plan id (required)
:param int start_date: Start time period for which the grades are computed for. (required)
:param int end_date: End time period for which the grades are computed for. (required)
:return: PortfolioPlanInsights
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_insights_for_portfolio_plan_with_http_info(id, start_date, end_date, **kwargs) # noqa: E501
else:
(data) = self.get_insights_for_portfolio_plan_with_http_info(id, start_date, end_date, **kwargs) # noqa: E501
return data
    def get_insights_for_portfolio_plan_with_http_info(self, id, start_date, end_date, **kwargs):  # noqa: E501
        """Retrieves insights for portfolio plan.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_insights_for_portfolio_plan_with_http_info(id, start_date, end_date, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: Portfolio Plan id (required)
        :param int start_date: Start time period for which the grades are computed for. (required)
        :param int end_date: End time period for which the grades are computed for. (required)
        :return: PortfolioPlanInsights
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['id', 'start_date', 'end_date']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() captures the named parameters (plus 'kwargs') by name at
        # this point; extra kwargs are validated against all_params, merged
        # in, and the raw 'kwargs' entry is then dropped.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_insights_for_portfolio_plan" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if self.api_client.client_side_validation and ('id' not in params or
                                                       params['id'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `id` when calling `get_insights_for_portfolio_plan`")  # noqa: E501
        # verify the required parameter 'start_date' is set
        if self.api_client.client_side_validation and ('start_date' not in params or
                                                       params['start_date'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `start_date` when calling `get_insights_for_portfolio_plan`")  # noqa: E501
        # verify the required parameter 'end_date' is set
        if self.api_client.client_side_validation and ('end_date' not in params or
                                                       params['end_date'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `end_date` when calling `get_insights_for_portfolio_plan`")  # noqa: E501

        collection_formats = {}

        # 'id' is interpolated into the URL path; the date range travels as
        # query parameters.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        query_params = []
        if 'start_date' in params:
            query_params.append(('startDate', params['start_date']))  # noqa: E501
        if 'end_date' in params:
            query_params.append(('endDate', params['end_date']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['OAuth2']  # noqa: E501

        return self.api_client.call_api(
            '/portfolioPlans/{id}/insights', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='PortfolioPlanInsights',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_value_insights_for_portfolio_plan(self, id, start_date, end_date, **kwargs): # noqa: E501
"""Retrieves value insights for portfolio plan. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_value_insights_for_portfolio_plan(id, start_date, end_date, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Portfolio Plan id (required)
:param int start_date: Start time period for which the grades are computed for. (required)
:param int end_date: End time period for which the grades are computed for. (required)
:return: PortfolioPlanInsights
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_value_insights_for_portfolio_plan_with_http_info(id, start_date, end_date, **kwargs) # noqa: E501
else:
(data) = self.get_value_insights_for_portfolio_plan_with_http_info(id, start_date, end_date, **kwargs) # noqa: E501
return data
    def get_value_insights_for_portfolio_plan_with_http_info(self, id, start_date, end_date, **kwargs):  # noqa: E501
        """Retrieves value insights for portfolio plan.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_value_insights_for_portfolio_plan_with_http_info(id, start_date, end_date, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: Portfolio Plan id (required)
        :param int start_date: Start time period for which the grades are computed for. (required)
        :param int end_date: End time period for which the grades are computed for. (required)
        :return: PortfolioPlanInsights
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['id', 'start_date', 'end_date']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() captures the named parameters (plus 'kwargs') by name at
        # this point; extra kwargs are validated against all_params, merged
        # in, and the raw 'kwargs' entry is then dropped.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_value_insights_for_portfolio_plan" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if self.api_client.client_side_validation and ('id' not in params or
                                                       params['id'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `id` when calling `get_value_insights_for_portfolio_plan`")  # noqa: E501
        # verify the required parameter 'start_date' is set
        if self.api_client.client_side_validation and ('start_date' not in params or
                                                       params['start_date'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `start_date` when calling `get_value_insights_for_portfolio_plan`")  # noqa: E501
        # verify the required parameter 'end_date' is set
        if self.api_client.client_side_validation and ('end_date' not in params or
                                                       params['end_date'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `end_date` when calling `get_value_insights_for_portfolio_plan`")  # noqa: E501

        collection_formats = {}

        # 'id' is interpolated into the URL path; the date range travels as
        # query parameters.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        query_params = []
        if 'start_date' in params:
            query_params.append(('startDate', params['start_date']))  # noqa: E501
        if 'end_date' in params:
            query_params.append(('endDate', params['end_date']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['OAuth2']  # noqa: E501

        return self.api_client.call_api(
            '/portfolioPlans/{id}/insights/value', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='PortfolioPlanInsights',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_whats_in(self, portfolio_id, portfolio_plan_ids, start_date, end_date, max_limit, **kwargs): # noqa: E501
"""Retrieves portfolioPlans with projects for WhatsIn # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_whats_in(portfolio_id, portfolio_plan_ids, start_date, end_date, max_limit, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str portfolio_id: portfolioId (required)
:param str portfolio_plan_ids: portfolioPlanIds for which projects are returned (required)
:param int start_date: Start time period for which the grades are computed for. (required)
:param int end_date: End time period for which the grades are computed for. (required)
:param int max_limit: Maximum limit for project; 0 < number of returned project(is variable, NOT fixed) < maxLimit (required)
:param int offset: Pagination offset
:return: WhatsIn
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_whats_in_with_http_info(portfolio_id, portfolio_plan_ids, start_date, end_date, max_limit, **kwargs) # noqa: E501
else:
(data) = self.get_whats_in_with_http_info(portfolio_id, portfolio_plan_ids, start_date, end_date, max_limit, **kwargs) # noqa: E501
return data
    def get_whats_in_with_http_info(self, portfolio_id, portfolio_plan_ids, start_date, end_date, max_limit, **kwargs):  # noqa: E501
        """Retrieves portfolioPlans with projects for WhatsIn  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_whats_in_with_http_info(portfolio_id, portfolio_plan_ids, start_date, end_date, max_limit, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str portfolio_id: portfolioId (required)
        :param str portfolio_plan_ids: portfolioPlanIds for which projects are returned (required)
        :param int start_date: Start time period for which the grades are computed for. (required)
        :param int end_date: End time period for which the grades are computed for. (required)
        :param int max_limit: Maximum limit for project; 0 < number of returned project(is variable, NOT fixed) < maxLimit (required)
        :param int offset: Pagination offset
        :return: WhatsIn
        If the method is called asynchronously,
        returns the request thread.
        """
        # Accepted keyword arguments: documented query parameters plus the
        # transport-control flags forwarded to api_client.call_api below.
        all_params = ['portfolio_id', 'portfolio_plan_ids', 'start_date', 'end_date', 'max_limit', 'offset'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots the named parameters (and 'kwargs') into a dict
        # so all arguments can be handled uniformly from here on.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_whats_in" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'portfolio_id' is set
        if self.api_client.client_side_validation and ('portfolio_id' not in params or
                                                       params['portfolio_id'] is None): # noqa: E501
            raise ValueError("Missing the required parameter `portfolio_id` when calling `get_whats_in`") # noqa: E501
        # verify the required parameter 'portfolio_plan_ids' is set
        if self.api_client.client_side_validation and ('portfolio_plan_ids' not in params or
                                                       params['portfolio_plan_ids'] is None): # noqa: E501
            raise ValueError("Missing the required parameter `portfolio_plan_ids` when calling `get_whats_in`") # noqa: E501
        # verify the required parameter 'start_date' is set
        if self.api_client.client_side_validation and ('start_date' not in params or
                                                       params['start_date'] is None): # noqa: E501
            raise ValueError("Missing the required parameter `start_date` when calling `get_whats_in`") # noqa: E501
        # verify the required parameter 'end_date' is set
        if self.api_client.client_side_validation and ('end_date' not in params or
                                                       params['end_date'] is None): # noqa: E501
            raise ValueError("Missing the required parameter `end_date` when calling `get_whats_in`") # noqa: E501
        # verify the required parameter 'max_limit' is set
        if self.api_client.client_side_validation and ('max_limit' not in params or
                                                       params['max_limit'] is None): # noqa: E501
            raise ValueError("Missing the required parameter `max_limit` when calling `get_whats_in`") # noqa: E501
        collection_formats = {}
        path_params = {}
        # All request parameters travel in the query string (camelCased).
        query_params = []
        if 'portfolio_id' in params:
            query_params.append(('portfolioId', params['portfolio_id'])) # noqa: E501
        if 'portfolio_plan_ids' in params:
            query_params.append(('portfolioPlanIds', params['portfolio_plan_ids'])) # noqa: E501
        if 'start_date' in params:
            query_params.append(('startDate', params['start_date'])) # noqa: E501
        if 'end_date' in params:
            query_params.append(('endDate', params['end_date'])) # noqa: E501
        if 'max_limit' in params:
            query_params.append(('maxLimit', params['max_limit'])) # noqa: E501
        if 'offset' in params:
            query_params.append(('offset', params['offset'])) # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
        # Authentication setting
        auth_settings = ['OAuth2'] # noqa: E501
        # Delegate the actual HTTP work to the generated ApiClient.
        return self.api_client.call_api(
            '/portfolioPlans/whatsin', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='WhatsIn', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 49.031046
| 151
| 0.629653
| 3,622
| 30,007
| 4.959139
| 0.056046
| 0.047211
| 0.037858
| 0.025832
| 0.95819
| 0.947946
| 0.945774
| 0.935865
| 0.932691
| 0.931745
| 0
| 0.015517
| 0.2848
| 30,007
| 611
| 152
| 49.111293
| 0.821444
| 0.336455
| 0
| 0.762611
| 1
| 0
| 0.219919
| 0.049664
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032641
| false
| 0
| 0.011869
| 0
| 0.091988
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2522d9abf7f4fcc6d398258759e255fc064f6901
| 15,340
|
py
|
Python
|
verification/tests.py
|
cielavenir/checkio-task-painting-wall
|
3502ac1d9f795b4e5c8fa46739e9fa438d16e026
|
[
"MIT"
] | null | null | null |
verification/tests.py
|
cielavenir/checkio-task-painting-wall
|
3502ac1d9f795b4e5c8fa46739e9fa438d16e026
|
[
"MIT"
] | 1
|
2017-06-21T18:00:21.000Z
|
2017-06-21T18:00:21.000Z
|
verification/tests.py
|
CheckIO-to-German/Painting-Wall
|
737e07e1b3b2176ea3fa999b916c30992b397506
|
[
"MIT"
] | 1
|
2017-06-21T17:57:21.000Z
|
2017-06-21T17:57:21.000Z
|
# TESTS is a dict with all your tests.
# Keys of this dict are the categories' names.
# Each test is a dict with:
#   "input" -- input data for the user function
#   "answer" -- the expected answer
#   "explanation" -- optional key, used for additional info in animation.
# NOTE(review): from the examples, "input" appears to be
# [required_amount, list_of_[start, end]_intervals] and answer -1 marks an
# impossible case -- confirm against the task description.
TESTS = {
    "1. Small By Hand 1 (Example)": [
        {
            "input": [5, [[1, 5], [11, 15], [2, 14], [21, 25]]],
            "answer": 1,
            "explanation": [25, [5]]
        },
        {
            "input": [6, [[1, 5], [11, 15], [2, 14], [21, 25]]],
            "answer": 2,
            "explanation": [25, [5, 10]]
        },
        {
            "input": [11, [[1, 5], [11, 15], [2, 14], [21, 25]]],
            "answer": 3,
            "explanation": [25, [5, 10, 15]]
        },
        {
            "input": [16, [[1, 5], [11, 15], [2, 14], [21, 25]]],
            "answer": 4,
            "explanation": [25, [5, 10, 15, 20]]
        },
        {
            "input": [21, [[1, 5], [11, 15], [2, 14], [21, 25]]],
            "answer": -1,
            "explanation": [25, [5, 10, 15, 20]]
        }
    ],
    "2. Small By Hand 2": [
        {
            "input": [5, [[1, 2], [20, 30], [25, 28], [5, 10], [4, 21], [1, 6]]],
            "answer": 2,
            "explanation": [30, [2, 13]]
        },
        {
            "input": [15, [[1, 2], [20, 30], [25, 28], [5, 10], [4, 21], [1, 6]]],
            "answer": 4,
            "explanation": [30, [2, 13, 13, 19]]
        },
        {
            "input": [20, [[1, 2], [20, 30], [25, 28], [5, 10], [4, 21], [1, 6]]],
            "answer": 5,
            "explanation": [30, [2, 13, 13, 19, 29]]
        },
        {
            "input": [30, [[1, 2], [20, 30], [25, 28], [5, 10], [4, 21], [1, 6]]],
            "answer": 6,
            "explanation": [30, [2, 13, 13, 19, 29, 30]]
        },
        {
            "input": [35, [[1, 2], [20, 30], [25, 28], [5, 10], [4, 21], [1, 6]]],
            "answer": -1,
            "explanation": [30, [2, 13, 13, 19, 29, 30]]
        }
    ],
    "3. Small Generated 1": [
        {
            "input": [1000, [[8598, 9442], [4221, 4432], [4864, 5415], [1315, 1960], [9577, 10482], [8147, 8346],
                             [6063, 6836],
                             [24, 606], [6170, 7131], [1397, 2020], [4690, 5651], [5267, 5464], [8422, 8886],
                             [5547, 5738],
                             [5722, 6511], [6605, 6905], [1321, 2242], [9335, 9993], [1626, 1887], [4699, 4926]]],
            "answer": 2
        },
        {
            "input": [5000, [[8598, 9442], [4221, 4432], [4864, 5415], [1315, 1960], [9577, 10482], [8147, 8346],
                             [6063, 6836],
                             [24, 606], [6170, 7131], [1397, 2020], [4690, 5651], [5267, 5464], [8422, 8886],
                             [5547, 5738],
                             [5722, 6511], [6605, 6905], [1321, 2242], [9335, 9993], [1626, 1887], [4699, 4926]]],
            "answer": 9
        },
        {
            "input": [5400, [[8598, 9442], [4221, 4432], [4864, 5415], [1315, 1960], [9577, 10482], [8147, 8346],
                             [6063, 6836],
                             [24, 606], [6170, 7131], [1397, 2020], [4690, 5651], [5267, 5464], [8422, 8886],
                             [5547, 5738],
                             [5722, 6511], [6605, 6905], [1321, 2242], [9335, 9993], [1626, 1887], [4699, 4926]]],
            "answer": 11
        },
        {
            "input": [5700, [[8598, 9442], [4221, 4432], [4864, 5415], [1315, 1960], [9577, 10482], [8147, 8346],
                             [6063, 6836],
                             [24, 606], [6170, 7131], [1397, 2020], [4690, 5651], [5267, 5464], [8422, 8886],
                             [5547, 5738],
                             [5722, 6511], [6605, 6905], [1321, 2242], [9335, 9993], [1626, 1887], [4699, 4926]]],
            "answer": 14
        },
        {
            "input": [6000, [[8598, 9442], [4221, 4432], [4864, 5415], [1315, 1960], [9577, 10482], [8147, 8346],
                             [6063, 6836],
                             [24, 606], [6170, 7131], [1397, 2020], [4690, 5651], [5267, 5464], [8422, 8886],
                             [5547, 5738],
                             [5722, 6511], [6605, 6905], [1321, 2242], [9335, 9993], [1626, 1887], [4699, 4926]]],
            "answer": 15
        },
        {
            "input": [6500, [[8598, 9442], [4221, 4432], [4864, 5415], [1315, 1960], [9577, 10482], [8147, 8346],
                             [6063, 6836],
                             [24, 606], [6170, 7131], [1397, 2020], [4690, 5651], [5267, 5464], [8422, 8886],
                             [5547, 5738],
                             [5722, 6511], [6605, 6905], [1321, 2242], [9335, 9993], [1626, 1887], [4699, 4926]]],
            "answer": -1
        }
    ],
    "4. Small Generated 2": [
        {
            "input": [30000,
                      [[53013, 58178], [66996, 70770], [46244, 50076], [69373, 79267], [3343, 9935], [80414, 82602],
                       [61293, 68007], [50771, 53974], [34296, 43518], [92413, 100031], [17305, 19487], [84654, 87021],
                       [17333, 21892], [93387, 99456], [92406, 97098], [37781, 42924], [98927, 100960], [86738, 89338],
                       [48177, 52067], [28524, 32583]]],
            "answer": 6
        },
        {
            "input": [57000,
                      [[53013, 58178], [66996, 70770], [46244, 50076], [69373, 79267], [3343, 9935], [80414, 82602],
                       [61293, 68007], [50771, 53974], [34296, 43518], [92413, 100031], [17305, 19487], [84654, 87021],
                       [17333, 21892], [93387, 99456], [92406, 97098], [37781, 42924], [98927, 100960], [86738, 89338],
                       [48177, 52067], [28524, 32583]]],
            "answer": 11
        },
        {
            "input": [68000,
                      [[53013, 58178], [66996, 70770], [46244, 50076], [69373, 79267], [3343, 9935], [80414, 82602],
                       [61293, 68007], [50771, 53974], [34296, 43518], [92413, 100031], [17305, 19487], [84654, 87021],
                       [17333, 21892], [93387, 99456], [92406, 97098], [37781, 42924], [98927, 100960], [86738, 89338],
                       [48177, 52067], [28524, 32583]]],
            "answer": 20
        },
        {
            "input": [70000,
                      [[53013, 58178], [66996, 70770], [46244, 50076], [69373, 79267], [3343, 9935], [80414, 82602],
                       [61293, 68007], [50771, 53974], [34296, 43518], [92413, 100031], [17305, 19487], [84654, 87021],
                       [17333, 21892], [93387, 99456], [92406, 97098], [37781, 42924], [98927, 100960], [86738, 89338],
                       [48177, 52067], [28524, 32583]]],
            "answer": -1
        }
    ],
    "5. Large By Hand": [
        {
            "input": [10000000000000, [[183456789012345, 193456789078479], [163456789034827, 173456789028737],
                                       [103456789038198, 113456789073490], [123456789073249, 203456789073621]]],
            "answer": 1
        },
        {
            "input": [20000000000000, [[183456789012345, 193456789078479], [163456789034827, 173456789028737],
                                       [103456789038198, 113456789073490], [123456789073249, 203456789073621]]],
            "answer": 2
        },
        {
            "input": [30000000000000, [[183456789012345, 193456789078479], [163456789034827, 173456789028737],
                                       [103456789038198, 113456789073490], [123456789073249, 203456789073621]]],
            "answer": 3
        },
        {
            "input": [50000000000000, [[183456789012345, 193456789078479], [163456789034827, 173456789028737],
                                       [103456789038198, 113456789073490], [123456789073249, 203456789073621]]],
            "answer": 4
        },
        {
            "input": [100000000000000, [[183456789012345, 193456789078479], [163456789034827, 173456789028737],
                                        [103456789038198, 113456789073490], [123456789073249, 203456789073621]]],
            "answer": -1
        }
    ],
    "6. Large Generated 1": [
        {
            "input": [464578738600000, [[618092831212219, 692920328043160], [784541961979258, 849250239149590],
                                        [153212679000591, 170403813123184], [397402071388980, 494330805397679],
                                        [947688880896887, 1031315325383029], [935301256930565, 986287435347942],
                                        [15780649536663, 68473689252834], [526775205748614, 588991194047996],
                                        [655862607385363, 673910180521194], [773094918999390, 841619268667280],
                                        [155080217158458, 163523868832795], [619408007658049, 621341345909422],
                                        [187242353118947, 253984595396399], [620306694433414, 710742874379812],
                                        [268250344952342, 301251875692343], [493812097589401, 585287020679206],
                                        [660549666775455, 686382635470395], [953288849899107, 981074697686018],
                                        [752283881298002, 800681840076545], [628408208573475, 664503152611721]]],
            "answer": 8
        },
        {
            "input": [623032679900000, [[618092831212219, 692920328043160], [784541961979258, 849250239149590],
                                        [153212679000591, 170403813123184], [397402071388980, 494330805397679],
                                        [947688880896887, 1031315325383029], [935301256930565, 986287435347942],
                                        [15780649536663, 68473689252834], [526775205748614, 588991194047996],
                                        [655862607385363, 673910180521194], [773094918999390, 841619268667280],
                                        [155080217158458, 163523868832795], [619408007658049, 621341345909422],
                                        [187242353118947, 253984595396399], [620306694433414, 710742874379812],
                                        [268250344952342, 301251875692343], [493812097589401, 585287020679206],
                                        [660549666775455, 686382635470395], [953288849899107, 981074697686018],
                                        [752283881298002, 800681840076545], [628408208573475, 664503152611721]]],
            "answer": 16
        },
        {
            "input": [650000000000000, [[618092831212219, 692920328043160], [784541961979258, 849250239149590],
                                        [153212679000591, 170403813123184], [397402071388980, 494330805397679],
                                        [947688880896887, 1031315325383029], [935301256930565, 986287435347942],
                                        [15780649536663, 68473689252834], [526775205748614, 588991194047996],
                                        [655862607385363, 673910180521194], [773094918999390, 841619268667280],
                                        [155080217158458, 163523868832795], [619408007658049, 621341345909422],
                                        [187242353118947, 253984595396399], [620306694433414, 710742874379812],
                                        [268250344952342, 301251875692343], [493812097589401, 585287020679206],
                                        [660549666775455, 686382635470395], [953288849899107, 981074697686018],
                                        [752283881298002, 800681840076545], [628408208573475, 664503152611721]]],
            "answer": -1
        }
    ],
    "7. Large Generated 2": [
        {
            "input": [409810512978000, [[858310018365524, 902063077244091], [932665378449117, 1028409672338264],
                                        [882165278163239, 957945652291761], [155331862264691, 231608087199557],
                                        [309323812898016, 328794059405147], [311727991597994, 391226174154816],
                                        [826415306967097, 893972043882819], [170753995991478, 221100797836809],
                                        [472995836315594, 478902758061898], [779003863306990, 822734502976504],
                                        [539843675072188, 554844466580541], [977564633426502, 991018537238369],
                                        [889461015856698, 901719104033374], [268288887276466, 292053591549963],
                                        [87698520389374, 109261297832598], [650723837467456, 729926149124749],
                                        [627448683684809, 644021001384284], [264317870081369, 322309330307873],
                                        [238729907671924, 290743490959244], [938382837602825, 955450166170994]]],
            "answer": 10
        },
        {
            "input": [612742616513000, [[858310018365524, 902063077244091], [932665378449117, 1028409672338264],
                                        [882165278163239, 957945652291761], [155331862264691, 231608087199557],
                                        [309323812898016, 328794059405147], [311727991597994, 391226174154816],
                                        [826415306967097, 893972043882819], [170753995991478, 221100797836809],
                                        [472995836315594, 478902758061898], [779003863306990, 822734502976504],
                                        [539843675072188, 554844466580541], [977564633426502, 991018537238369],
                                        [889461015856698, 901719104033374], [268288887276466, 292053591549963],
                                        [87698520389374, 109261297832598], [650723837467456, 729926149124749],
                                        [627448683684809, 644021001384284], [264317870081369, 322309330307873],
                                        [238729907671924, 290743490959244], [938382837602825, 955450166170994]]],
            "answer": 19
        },
        {
            "input": [620000000000000, [[858310018365524, 902063077244091], [932665378449117, 1028409672338264],
                                        [882165278163239, 957945652291761], [155331862264691, 231608087199557],
                                        [309323812898016, 328794059405147], [311727991597994, 391226174154816],
                                        [826415306967097, 893972043882819], [170753995991478, 221100797836809],
                                        [472995836315594, 478902758061898], [779003863306990, 822734502976504],
                                        [539843675072188, 554844466580541], [977564633426502, 991018537238369],
                                        [889461015856698, 901719104033374], [268288887276466, 292053591549963],
                                        [87698520389374, 109261297832598], [650723837467456, 729926149124749],
                                        [627448683684809, 644021001384284], [264317870081369, 322309330307873],
                                        [238729907671924, 290743490959244], [938382837602825, 955450166170994]]],
            "answer": -1
        }
    ],
}
| 59.688716
| 119
| 0.469296
| 1,035
| 15,340
| 6.955556
| 0.243478
| 0.008751
| 0.010001
| 0.013335
| 0.896236
| 0.893735
| 0.887345
| 0.884289
| 0.877622
| 0.817197
| 0
| 0.687899
| 0.388005
| 15,340
| 256
| 120
| 59.921875
| 0.078931
| 0.016167
| 0
| 0.502008
| 0
| 0
| 0.039308
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2566957eaa777484fe80f2ccdcfb875a2a0ad3e4
| 3,165
|
py
|
Python
|
carbondesign/tests/test_breadcrumb.py
|
dozymoe/django-carbondesign
|
34aed0cfdccfa90fcb5bf2bbd347229815f1417b
|
[
"MIT"
] | null | null | null |
carbondesign/tests/test_breadcrumb.py
|
dozymoe/django-carbondesign
|
34aed0cfdccfa90fcb5bf2bbd347229815f1417b
|
[
"MIT"
] | null | null | null |
carbondesign/tests/test_breadcrumb.py
|
dozymoe/django-carbondesign
|
34aed0cfdccfa90fcb5bf2bbd347229815f1417b
|
[
"MIT"
] | null | null | null |
# pylint:disable=missing-module-docstring,missing-class-docstring,missing-function-docstring
from .base import compare_template, SimpleTestCase
class BreadcrumbTest(SimpleTestCase):
    """Markup test for a bare {% Breadcrumb %} tag pair."""
    maxDiff = None

    def test_rendered(self):
        source = """
{% load carbondesign %}
{% Breadcrumb %}
{% endBreadcrumb %}
"""
        want = """
<nav class="bx--breadcrumb" aria-label="breadcrumb">
</nav>
"""
        outcome = compare_template(source, want)
        self.assertEqual(*outcome)
class BreadcrumbItemTest(SimpleTestCase):
    """Markup test for a bare {% BreadcrumbItem %} tag pair."""
    maxDiff = None

    def test_rendered(self):
        source = """
{% load carbondesign %}
{% BreadcrumbItem %}
{% endBreadcrumbItem %}
"""
        want = """
<div class="bx--breadcrumb-item">
  <a href="#" class="bx--link">
  </a>
</div>
"""
        outcome = compare_template(source, want)
        self.assertEqual(*outcome)
class BreadcrumbHtmlTest(SimpleTestCase):
    """Markup tests for breadcrumbs populated with items."""
    maxDiff = None

    def test_breadcrumb(self):
        # Three items, default options.
        source = """
{% load carbondesign %}
{% Breadcrumb %}
  {% BreadcrumbItem href="#" %}
    Breadcrumb 1
  {% endBreadcrumbItem %}
  {% BreadcrumbItem href="#" %}
    Breadcrumb 2
  {% endBreadcrumbItem %}
  {% BreadcrumbItem href="#" %}
    Breadcrumb 3
  {% endBreadcrumbItem %}
{% endBreadcrumb %}
"""
        want = """
<nav class="bx--breadcrumb" aria-label="breadcrumb">
  <div class="bx--breadcrumb-item">
    <a href="#" class="bx--link">
    Breadcrumb 1
    </a>
  </div>
  <div class="bx--breadcrumb-item">
    <a href="#" class="bx--link">
    Breadcrumb 2
    </a>
  </div>
  <div class="bx--breadcrumb-item">
    <a href="#" class="bx--link">
    Breadcrumb 3
    </a>
  </div>
</nav>
"""
        outcome = compare_template(source, want)
        self.assertEqual(*outcome)

    def test_current_page(self):
        # current=True on the tag and on the last item.
        source = """
{% load carbondesign %}
{% Breadcrumb current=True %}
  {% BreadcrumbItem href="#" %}
    Breadcrumb 1
  {% endBreadcrumbItem %}
  {% BreadcrumbItem href="#" %}
    Breadcrumb 2
  {% endBreadcrumbItem %}
  {% BreadcrumbItem href="#" current=True %}
    Breadcrumb 3
  {% endBreadcrumbItem %}
{% endBreadcrumb %}
"""
        want = """
<nav class="bx--breadcrumb bx--breadcrumb--no-trailing-slash" aria-label="breadcrumb">
  <div class="bx--breadcrumb-item">
    <a href="#" class="bx--link">
    Breadcrumb 1
    </a>
  </div>
  <div class="bx--breadcrumb-item">
    <a href="#" class="bx--link">
    Breadcrumb 2
    </a>
  </div>
  <div class="bx--breadcrumb-item bx--breadcrumb-item--current">
    <a href="#" class="bx--link" aria-current="page">
    Breadcrumb 3
    </a>
  </div>
</nav>
"""
        outcome = compare_template(source, want)
        self.assertEqual(*outcome)

    def test_enabled(self):
        # Single item, default options.
        source = """
{% load carbondesign %}
{% Breadcrumb %}
  {% BreadcrumbItem href="#" %}
    Breadcrumb 1
  {% endBreadcrumbItem %}
{% endBreadcrumb %}
"""
        want = """
<nav class="bx--breadcrumb" aria-label="breadcrumb">
  <div class="bx--breadcrumb-item">
    <a href="#" class="bx--link">
    Breadcrumb 1
    </a>
  </div>
</nav>
"""
        outcome = compare_template(source, want)
        self.assertEqual(*outcome)
| 22.132867
| 92
| 0.623381
| 316
| 3,165
| 6.205696
| 0.155063
| 0.071392
| 0.104029
| 0.081591
| 0.828659
| 0.779704
| 0.779704
| 0.779704
| 0.779704
| 0.63845
| 0
| 0.005512
| 0.197472
| 3,165
| 142
| 93
| 22.288732
| 0.766535
| 0.028436
| 0
| 0.896
| 0
| 0.008
| 0.637813
| 0.15262
| 0
| 0
| 0
| 0
| 0.04
| 1
| 0.04
| false
| 0
| 0.008
| 0
| 0.096
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6c24f5d637670567b40650668e8f1b3bc6e6014b
| 4,294
|
py
|
Python
|
mrfd/trainer/losses.py
|
ivineetm007/Fall-detection
|
d18ac5f1e26a04ed8492b2e215a91c51b55f32dd
|
[
"MIT"
] | 13
|
2020-11-14T06:26:04.000Z
|
2022-03-30T10:46:22.000Z
|
mrfd/trainer/losses.py
|
ivineetm007/Fall-detection
|
d18ac5f1e26a04ed8492b2e215a91c51b55f32dd
|
[
"MIT"
] | 5
|
2020-11-14T06:27:10.000Z
|
2021-03-11T23:20:14.000Z
|
mrfd/trainer/losses.py
|
ivineetm007/Fall-detection
|
d18ac5f1e26a04ed8492b2e215a91c51b55f32dd
|
[
"MIT"
] | 4
|
2020-09-06T20:56:23.000Z
|
2021-11-25T05:46:59.000Z
|
from keras import backend as K
import numpy as np
#--------------------------------------------
#Keras loss functions for back propagation
#--------------------------------------------
def ROI_mean_squared_error_loss(mask):
    '''
    ROI masked reconstruction loss.

    Returns a Keras loss comparing target and prediction only inside the
    ROI mask, normalised by the total mask weight.
    '''
    # Normalisation is fixed at closure-creation time.
    total_mask = K.sum(mask)

    def mse_loss(y_true, y_pred):
        masked_err = K.square(y_true * mask - y_pred * mask)
        return K.sum(masked_err) / total_mask

    return mse_loss
def ROI_diff_temporal_loss(mask, diff_mask):
    """
    ROI masked difference-frames reconstruction loss.

    Compares the differences of consecutive frames (axis 1) of the masked
    target and prediction, normalised by the difference-mask weight.
    """
    norm = K.sum(diff_mask)

    def diff_temporal_loss(y_true, y_pred):
        masked_pred = y_pred * mask
        masked_true = y_true * mask
        # Consecutive-frame deltas along axis 1.
        delta_pred = masked_pred[:, 1:] - masked_pred[:, :-1]
        delta_true = masked_true[:, 1:] - masked_true[:, :-1]
        return K.sum(K.square(delta_true - delta_pred)) / norm

    return diff_temporal_loss
def ROI_diff_mse_joint_loss(mask,diff_mask,lamdba_S,lamdba_T):
    """
    Combined loss for thermal autoencoder-
    Weighted sum of the ROI-masked MSE (spatial) term and the ROI-masked
    difference-frame (temporal) term.
    NOTE(review): this definition is shadowed by a byte-identical
    redefinition of ROI_diff_mse_joint_loss below in this module, so this
    copy is dead code; consider deleting one of the two.
    NOTE(review): 'lamdba_S'/'lamdba_T' keep the original misspelling of
    'lambda' because renaming would break keyword callers.
    """
    diff_mask_sum=K.sum(diff_mask)
    mask_sum=K.sum(mask)
    def mse_loss(y_true,y_pred):
        # Spatial term: squared error inside the ROI, normalised by mask weight.
        return K.sum(K.square(y_true*mask-y_pred*mask))/mask_sum
    def diff_temporal_loss(y_true,y_pred):
        pred=y_pred*mask
        true=y_true*mask
        # Temporal term: differences of consecutive frames along axis 1.
        pred_diff=pred[:,1:]-pred[:,:-1]
        true_diff=true[:,1:]-true[:,:-1]
        return K.sum(K.square(true_diff-pred_diff))/diff_mask_sum
    def joint_loss(y_true,y_pred):
        return lamdba_S*mse_loss(y_true,y_pred)+lamdba_T*diff_temporal_loss(y_true,y_pred)
    return joint_loss
def ROI_diff_mse_joint_loss(mask, diff_mask, lamdba_S, lamdba_T):
    """
    Joint loss for diff ROI 3DCAE.

    Weighted sum of the ROI-masked reconstruction (spatial) term and the
    ROI-masked consecutive-frame difference (temporal) term.
    """
    spatial_norm = K.sum(mask)
    temporal_norm = K.sum(diff_mask)

    def mse_loss(y_true, y_pred):
        # Spatial term, normalised by the mask weight.
        return K.sum(K.square(y_true * mask - y_pred * mask)) / spatial_norm

    def diff_temporal_loss(y_true, y_pred):
        masked_pred = y_pred * mask
        masked_true = y_true * mask
        delta_pred = masked_pred[:, 1:] - masked_pred[:, :-1]
        delta_true = masked_true[:, 1:] - masked_true[:, :-1]
        return K.sum(K.square(delta_true - delta_pred)) / temporal_norm

    def joint_loss(y_true, y_pred):
        return lamdba_S * mse_loss(y_true, y_pred) + lamdba_T * diff_temporal_loss(y_true, y_pred)

    return joint_loss
#--------------------------------------------
#python loss functions for numpy arrays
#--------------------------------------------
def wind_mean_squared_error(y_true, y_pred, win_length, img_height, img_width, channels):
    '''
    Per-frame reconstruction error.

    Flattens every frame of each window to a vector and returns the mean
    squared error per frame, shape (num_windows, win_length).
    '''
    frame_size = img_height * img_width * channels
    pred = y_pred.reshape(len(y_pred), win_length, frame_size)
    true = y_true.reshape(len(y_true), win_length, frame_size)
    # Mean over the flattened pixel axis.
    return np.mean((true - pred) ** 2, axis=2)
def wind_ROI_mean_squared_error(mask, y_true, y_pred, win_length, img_height, img_width, channels):
    '''
    ROI masked reconstruction error.

    Same as wind_mean_squared_error but restricted to the ROI mask and
    normalised per frame by the number of masked elements.
    '''
    frame_size = img_height * img_width * channels
    masked_true = (y_true * mask).reshape(len(y_true), win_length, frame_size)
    masked_pred = (y_pred * mask).reshape(len(y_pred), win_length, frame_size)
    flat_mask = mask.reshape(len(y_true), win_length, frame_size)
    # Sum of squared errors per frame, divided by that frame's mask weight.
    sq_err = np.sum((masked_true - masked_pred) ** 2, axis=2)
    return sq_err / np.sum(flat_mask, axis=2)
def wind_ROI_diff_temporal_loss(mask, diff_mask, y_true, y_pred, win_length, img_height, img_width, channels):
    '''
    ROI masked difference-frames reconstruction loss.

    Compares the deltas of consecutive masked frames; result has shape
    (num_windows, win_length - 1), normalised per delta by the
    difference-mask weight.
    '''
    frame_size = img_height * img_width * channels
    masked_true = (y_true * mask).reshape(len(y_true), win_length, frame_size)
    masked_pred = (y_pred * mask).reshape(len(y_pred), win_length, frame_size)
    flat_diff_mask = diff_mask.reshape(len(y_true), win_length - 1, frame_size)
    # Consecutive-frame deltas along the window axis.
    delta_true = masked_true[:, 1:] - masked_true[:, :-1]
    delta_pred = masked_pred[:, 1:] - masked_pred[:, :-1]
    sq_err = np.sum((delta_true - delta_pred) ** 2, axis=2)
    return sq_err / np.sum(flat_diff_mask, axis=2)
| 34.910569
| 115
| 0.669539
| 713
| 4,294
| 3.71108
| 0.085554
| 0.075586
| 0.049887
| 0.064248
| 0.812169
| 0.781935
| 0.752834
| 0.710129
| 0.710129
| 0.698035
| 0
| 0.01168
| 0.162552
| 4,294
| 122
| 116
| 35.196721
| 0.724138
| 0.192361
| 0
| 0.695652
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.217391
| false
| 0
| 0.028986
| 0.072464
| 0.463768
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6c5e8b0844a58f6c47b2e67ff75fde67253c33aa
| 1,556
|
py
|
Python
|
Tests/test_image_getpixel.py
|
radicalgraphics/Pillow
|
9d22c16d539f6e0356d64849b84f2feec6787179
|
[
"Python-2.0"
] | null | null | null |
Tests/test_image_getpixel.py
|
radicalgraphics/Pillow
|
9d22c16d539f6e0356d64849b84f2feec6787179
|
[
"Python-2.0"
] | null | null | null |
Tests/test_image_getpixel.py
|
radicalgraphics/Pillow
|
9d22c16d539f6e0356d64849b84f2feec6787179
|
[
"Python-2.0"
] | null | null | null |
from tester import *
from PIL import Image
def color(mode):
    # Build a distinct per-band test colour: 1 for single-band modes,
    # otherwise the tuple (1, 2, ..., nbands).
    nbands = Image.getmodebands(mode)
    return 1 if nbands == 1 else tuple(range(1, nbands + 1))
def test_pixel():
    # putpixel/getpixel round-trip for every supported mode.
    def pixel(mode):
        c = color(mode)
        im = Image.new(mode, (1, 1), None)
        im.putpixel((0, 0), c)
        return im.getpixel((0, 0))

    cases = [
        ("1", 1),
        ("L", 1),
        ("LA", (1, 2)),
        ("I", 1),
        ("I;16", 1),
        ("I;16B", 1),
        ("F", 1.0),
        ("P", 1),
        ("PA", (1, 2)),
        ("RGB", (1, 2, 3)),
        ("RGBA", (1, 2, 3, 4)),
        ("RGBX", (1, 2, 3, 4)),
        ("CMYK", (1, 2, 3, 4)),
        ("YCbCr", (1, 2, 3)),
    ]
    for mode, expected in cases:
        assert_equal(pixel(mode), expected)
def test_image():
    # Image.new initial-colour / getpixel round-trip for every supported mode.
    def pixel(mode):
        im = Image.new(mode, (1, 1), color(mode))
        return im.getpixel((0, 0))

    cases = [
        ("1", 1),
        ("L", 1),
        ("LA", (1, 2)),
        ("I", 1),
        ("I;16", 1),
        ("I;16B", 1),
        ("F", 1.0),
        ("P", 1),
        ("PA", (1, 2)),
        ("RGB", (1, 2, 3)),
        ("RGBA", (1, 2, 3, 4)),
        ("RGBX", (1, 2, 3, 4)),
        ("CMYK", (1, 2, 3, 4)),
        ("YCbCr", (1, 2, 3)),
    ]
    for mode, expected in cases:
        assert_equal(pixel(mode), expected)
| 26.827586
| 49
| 0.562982
| 247
| 1,556
| 3.425101
| 0.174089
| 0.364066
| 0.529551
| 0.241135
| 0.775414
| 0.775414
| 0.775414
| 0.728132
| 0.728132
| 0.728132
| 0
| 0.069536
| 0.22365
| 1,556
| 57
| 50
| 27.298246
| 0.630795
| 0
| 0
| 0.695652
| 0
| 0
| 0.048938
| 0
| 0
| 0
| 0
| 0
| 0.608696
| 1
| 0.108696
| false
| 0
| 0.043478
| 0
| 0.23913
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
6c81426e4ae20ffe864e8d28fa584ae516c3339f
| 170
|
py
|
Python
|
build_you/models/__init__.py
|
bostud/build_you
|
258a336a82a1da9efc102770f5d8bf83abc13379
|
[
"MIT"
] | null | null | null |
build_you/models/__init__.py
|
bostud/build_you
|
258a336a82a1da9efc102770f5d8bf83abc13379
|
[
"MIT"
] | null | null | null |
build_you/models/__init__.py
|
bostud/build_you
|
258a336a82a1da9efc102770f5d8bf83abc13379
|
[
"MIT"
] | null | null | null |
from build_you.models.company import Company
from build_you.models.user import User
from build_you.models.object import BuildObject
from build_you.database import Base
| 24.285714
| 47
| 0.852941
| 27
| 170
| 5.222222
| 0.407407
| 0.255319
| 0.340426
| 0.382979
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105882
| 170
| 6
| 48
| 28.333333
| 0.927632
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
66b0199c08e27b67711fc64a1c97ec8704ba1758
| 6,409
|
py
|
Python
|
z2/part2/interactive/jm/random_fuzzy_arrows_1/416133383.py
|
kozakusek/ipp-2020-testy
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
[
"MIT"
] | 1
|
2020-04-16T12:13:47.000Z
|
2020-04-16T12:13:47.000Z
|
z2/part2/interactive/jm/random_fuzzy_arrows_1/416133383.py
|
kozakusek/ipp-2020-testy
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
[
"MIT"
] | 18
|
2020-03-06T17:50:15.000Z
|
2020-05-19T14:58:30.000Z
|
z2/part2/interactive/jm/random_fuzzy_arrows_1/416133383.py
|
kozakusek/ipp-2020-testy
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
[
"MIT"
] | 18
|
2020-03-06T17:45:13.000Z
|
2020-06-09T19:18:31.000Z
|
from part1 import (
gamma_board,
gamma_busy_fields,
gamma_delete,
gamma_free_fields,
gamma_golden_move,
gamma_golden_possible,
gamma_move,
gamma_new,
)
"""
scenario: test_random_actions
uuid: 416133383
"""
"""
random actions, total chaos
"""
board = gamma_new(8, 5, 4, 13)
assert board is not None
assert gamma_move(board, 1, 2, 2) == 1
assert gamma_golden_possible(board, 4) == 1
assert gamma_move(board, 1, 6, 3) == 1
assert gamma_move(board, 1, 6, 3) == 0
assert gamma_golden_move(board, 1, 2, 5) == 0
assert gamma_move(board, 2, 2, 7) == 0
assert gamma_move(board, 2, 1, 0) == 1
assert gamma_move(board, 3, 4, 6) == 0
assert gamma_move(board, 4, 4, 1) == 1
assert gamma_move(board, 4, 1, 1) == 1
assert gamma_move(board, 1, 0, 3) == 1
assert gamma_move(board, 2, 2, 6) == 0
assert gamma_move(board, 2, 4, 2) == 1
assert gamma_move(board, 3, 4, 1) == 0
assert gamma_move(board, 4, 1, 2) == 1
assert gamma_move(board, 1, 3, 5) == 0
assert gamma_move(board, 1, 1, 3) == 1
assert gamma_busy_fields(board, 1) == 4
assert gamma_golden_move(board, 1, 1, 4) == 0
assert gamma_move(board, 2, 1, 4) == 1
assert gamma_golden_possible(board, 2) == 1
board229852308 = gamma_board(board)
assert board229852308 is not None
assert board229852308 == (".2......\n"
"11....1.\n"
".41.2...\n"
".4..4...\n"
".2......\n")
del board229852308
board229852308 = None
assert gamma_move(board, 3, 4, 2) == 0
assert gamma_move(board, 3, 3, 3) == 1
assert gamma_move(board, 4, 2, 0) == 1
assert gamma_move(board, 4, 0, 3) == 0
assert gamma_move(board, 2, 1, 7) == 0
assert gamma_move(board, 3, 1, 2) == 0
assert gamma_move(board, 3, 3, 3) == 0
assert gamma_move(board, 4, 3, 4) == 1
assert gamma_move(board, 1, 1, 0) == 0
assert gamma_move(board, 1, 2, 4) == 1
assert gamma_move(board, 2, 3, 5) == 0
assert gamma_golden_move(board, 2, 4, 2) == 0
assert gamma_move(board, 3, 7, 2) == 1
assert gamma_free_fields(board, 3) == 25
assert gamma_move(board, 4, 3, 2) == 1
assert gamma_move(board, 1, 0, 3) == 0
assert gamma_move(board, 1, 5, 4) == 1
assert gamma_move(board, 2, 0, 7) == 0
assert gamma_move(board, 3, 0, 1) == 1
assert gamma_move(board, 3, 2, 1) == 1
assert gamma_move(board, 1, 1, 3) == 0
assert gamma_move(board, 2, 4, 0) == 1
assert gamma_move(board, 2, 3, 0) == 1
assert gamma_move(board, 3, 1, 7) == 0
assert gamma_free_fields(board, 3) == 19
assert gamma_move(board, 1, 4, 6) == 0
assert gamma_move(board, 1, 6, 2) == 1
assert gamma_move(board, 2, 1, 7) == 0
assert gamma_move(board, 2, 4, 3) == 1
assert gamma_move(board, 3, 1, 7) == 0
assert gamma_move(board, 3, 0, 1) == 0
assert gamma_move(board, 4, 1, 4) == 0
assert gamma_busy_fields(board, 4) == 6
assert gamma_move(board, 1, 4, 6) == 0
assert gamma_golden_possible(board, 1) == 1
assert gamma_move(board, 2, 6, 3) == 0
assert gamma_move(board, 2, 0, 1) == 0
assert gamma_move(board, 3, 0, 7) == 0
assert gamma_move(board, 3, 3, 0) == 0
assert gamma_move(board, 4, 3, 2) == 0
assert gamma_busy_fields(board, 4) == 6
assert gamma_golden_possible(board, 1) == 1
assert gamma_move(board, 2, 0, 6) == 0
assert gamma_move(board, 2, 6, 1) == 1
assert gamma_busy_fields(board, 2) == 7
assert gamma_move(board, 3, 4, 7) == 0
assert gamma_move(board, 3, 4, 1) == 0
assert gamma_golden_possible(board, 3) == 1
assert gamma_move(board, 4, 4, 0) == 0
assert gamma_move(board, 2, 3, 5) == 0
assert gamma_move(board, 2, 4, 0) == 0
assert gamma_move(board, 3, 4, 4) == 1
assert gamma_move(board, 3, 3, 0) == 0
board982380107 = gamma_board(board)
assert board982380107 is not None
assert board982380107 == (".21431..\n"
"11.32.1.\n"
".4142.13\n"
"343.4.2.\n"
".2422...\n")
del board982380107
board982380107 = None
assert gamma_move(board, 4, 1, 4) == 0
assert gamma_move(board, 1, 4, 3) == 0
assert gamma_golden_possible(board, 1) == 1
board930247084 = gamma_board(board)
assert board930247084 is not None
assert board930247084 == (".21431..\n"
"11.32.1.\n"
".4142.13\n"
"343.4.2.\n"
".2422...\n")
del board930247084
board930247084 = None
assert gamma_move(board, 2, 3, 4) == 0
assert gamma_move(board, 3, 6, 1) == 0
assert gamma_move(board, 3, 6, 1) == 0
assert gamma_free_fields(board, 3) == 15
assert gamma_move(board, 4, 3, 7) == 0
assert gamma_free_fields(board, 4) == 15
assert gamma_move(board, 1, 3, 1) == 1
assert gamma_move(board, 1, 5, 2) == 1
assert gamma_busy_fields(board, 1) == 9
assert gamma_move(board, 2, 3, 7) == 0
assert gamma_move(board, 2, 2, 4) == 0
assert gamma_move(board, 3, 5, 0) == 1
assert gamma_move(board, 3, 5, 2) == 0
assert gamma_busy_fields(board, 3) == 6
assert gamma_move(board, 4, 0, 4) == 1
assert gamma_move(board, 1, 0, 7) == 0
assert gamma_free_fields(board, 2) == 11
assert gamma_move(board, 3, 1, 7) == 0
assert gamma_move(board, 4, 3, 0) == 0
assert gamma_move(board, 4, 2, 1) == 0
assert gamma_free_fields(board, 4) == 11
assert gamma_move(board, 1, 0, 6) == 0
assert gamma_move(board, 1, 3, 3) == 0
assert gamma_move(board, 2, 1, 5) == 0
assert gamma_move(board, 3, 3, 4) == 0
assert gamma_move(board, 3, 0, 3) == 0
assert gamma_move(board, 4, 0, 0) == 1
assert gamma_move(board, 1, 4, 6) == 0
assert gamma_move(board, 1, 6, 0) == 1
assert gamma_move(board, 2, 1, 7) == 0
assert gamma_move(board, 2, 0, 1) == 0
assert gamma_free_fields(board, 2) == 9
assert gamma_move(board, 3, 1, 4) == 0
assert gamma_move(board, 3, 4, 3) == 0
assert gamma_move(board, 4, 4, 7) == 0
assert gamma_move(board, 1, 4, 7) == 0
assert gamma_move(board, 1, 5, 3) == 1
assert gamma_move(board, 2, 3, 2) == 0
assert gamma_move(board, 2, 6, 4) == 1
assert gamma_move(board, 3, 4, 7) == 0
assert gamma_move(board, 3, 4, 4) == 0
assert gamma_busy_fields(board, 3) == 6
assert gamma_free_fields(board, 3) == 7
assert gamma_move(board, 4, 2, 1) == 0
assert gamma_move(board, 4, 5, 0) == 0
assert gamma_move(board, 1, 0, 7) == 0
assert gamma_move(board, 2, 0, 7) == 0
assert gamma_move(board, 3, 2, 0) == 0
assert gamma_move(board, 4, 1, 4) == 0
assert gamma_move(board, 1, 3, 7) == 0
assert gamma_busy_fields(board, 1) == 11
assert gamma_move(board, 2, 3, 2) == 0
assert gamma_move(board, 2, 1, 3) == 0
assert gamma_move(board, 3, 7, 3) == 1
assert gamma_move(board, 4, 1, 5) == 0
assert gamma_move(board, 4, 2, 1) == 0
assert gamma_golden_possible(board, 4) == 1
gamma_delete(board)
| 33.207254
| 46
| 0.653144
| 1,188
| 6,409
| 3.372896
| 0.042929
| 0.367856
| 0.404292
| 0.539057
| 0.855253
| 0.847018
| 0.735962
| 0.48016
| 0.380085
| 0.366359
| 0
| 0.134924
| 0.1824
| 6,409
| 192
| 47
| 33.380208
| 0.629771
| 0
| 0
| 0.287356
| 0
| 0
| 0.02373
| 0
| 0
| 0
| 0
| 0
| 0.810345
| 1
| 0
| false
| 0
| 0.005747
| 0
| 0.005747
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
66b4a3587fb504fd849220535a0a7fb859c90a62
| 635
|
py
|
Python
|
python/lib/uppercase.py
|
AndongTuomaining/stringine
|
8beb47eb14129ee2ea1ba2f145ae45d1ed6d9cea
|
[
"MIT"
] | 3
|
2020-07-08T16:38:10.000Z
|
2020-07-14T16:56:37.000Z
|
python/lib/uppercase.py
|
AndongTuomaining/stringine
|
8beb47eb14129ee2ea1ba2f145ae45d1ed6d9cea
|
[
"MIT"
] | null | null | null |
python/lib/uppercase.py
|
AndongTuomaining/stringine
|
8beb47eb14129ee2ea1ba2f145ae45d1ed6d9cea
|
[
"MIT"
] | 2
|
2021-09-03T19:21:23.000Z
|
2021-09-15T20:21:56.000Z
|
def uppercaseStart(string, number):
import os, sys
sys.path.append(os.path.dirname(__file__))
from substring import substringStart
from replace import main
String2 = substringStart(string, number)
String = main(string, String2, String2.upper())
return String
def uppercaseEnd(string, number):
String = string[::-1]
import os, sys
sys.path.append(os.path.dirname(__file__))
from substring import substringStart
from replace import main
String2 = substringStart(String, number)
String = main(String, String2, String2.upper())
return String[::-1]
| 27.608696
| 52
| 0.674016
| 72
| 635
| 5.833333
| 0.291667
| 0.114286
| 0.128571
| 0.066667
| 0.833333
| 0.833333
| 0.833333
| 0.833333
| 0.833333
| 0.833333
| 0
| 0.01636
| 0.229921
| 635
| 23
| 53
| 27.608696
| 0.842536
| 0
| 0
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.117647
| false
| 0
| 0.352941
| 0
| 0.588235
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
dd1e075c119a9d85b2c84f2daca530378ec1f833
| 4,447
|
py
|
Python
|
tests/_java_imports_test.py
|
xgouchet/AutoMergeTool
|
d63c057440a99e868e5eb25720f8d89640112f04
|
[
"Apache-2.0"
] | 41
|
2017-04-10T10:12:32.000Z
|
2022-02-11T09:34:43.000Z
|
tests/_java_imports_test.py
|
xgouchet/AutoMergeTool
|
d63c057440a99e868e5eb25720f8d89640112f04
|
[
"Apache-2.0"
] | 14
|
2017-02-17T09:58:57.000Z
|
2018-02-12T14:38:51.000Z
|
tests/_java_imports_test.py
|
xgouchet/ArachneMergeTool
|
d63c057440a99e868e5eb25720f8d89640112f04
|
[
"Apache-2.0"
] | 5
|
2017-04-11T13:03:20.000Z
|
2021-06-23T08:41:10.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import unittest
from automergetool.solvers.java_imports import *
JI_PATH = 'tests/data/java_imports/{0}.java'
class SolverTest(unittest.TestCase):
def test_is_import(self):
"""Test matching imports"""
# Given a Java Solver and imports
solver = JavaImportSolver()
path = JI_PATH.format("imports_only")
with open(path) as f:
for line in f:
self.assertTrue(solver.is_import_line(line))
def test_is_not_import(self):
"""Test non matching imports"""
# Given a Java Solver and imports
solver = JavaImportSolver()
path = JI_PATH.format("no_imports")
with open(path) as f:
for line in f:
self.assertFalse(solver.is_import_line(line))
def test_compare_imports(self):
"""Test comparing java imports"""
# Given a Java Solver
solver = JavaImportSolver()
fake_import = "import java.util.ArrayList;"
other_fake_import = " import java.util.ArrayList ;"
# When handling the conflict
same = solver.are_imports_the_same(fake_import, other_fake_import)
incompatible = solver.are_imports_incompatible(fake_import, other_fake_import)
# Then check the comparison
self.assertTrue(same)
self.assertFalse(incompatible)
def test_compare_different_imports(self):
"""Test comparing java imports"""
# Given a Java Solver
solver = JavaImportSolver()
fake_import = " import java.util.ArrayList;"
other_fake_import = "import\t java \t.util . LinkedList ;"
# When handling the conflict
same = solver.are_imports_the_same(fake_import, other_fake_import)
# Then check the comparison
self.assertFalse(same)
def test_compare_static_imports(self):
"""Test comparing java imports"""
# Given a Java Solver
solver = JavaImportSolver()
fake_import = "import static java.util.Collections.emptyList;"
other_fake_import = "import \tstatic \t java . util. Collections. emptyList ; "
# When handling the conflict
same = solver.are_imports_the_same(fake_import, other_fake_import)
incompatible = solver.are_imports_incompatible(fake_import, other_fake_import)
# Then check the comparison
self.assertTrue(same)
self.assertFalse(incompatible)
def test_compare_different_static_imports(self):
"""Test comparing java imports"""
# Given a Java Solver
solver = JavaImportSolver()
fake_import = "import static java.util.Collections.emptyList;"
other_fake_import = " import \tstatic \t java . util. Collections. \t singletonList ; "
# When handling the conflict
same = solver.are_imports_the_same(fake_import, other_fake_import)
# Then check the comparison
self.assertFalse(same)
# noinspection PyUnresolvedReferences
def test_path_arguments_shorts(self):
# Given
b = "b"
l = "l"
r = "r"
m = "m"
# When
parsed = parse_arguments(['-b', b, '-m', m, '-l', l, '-r', r])
self.assertEqual(parsed.base, b)
self.assertEqual(parsed.local, l)
self.assertEqual(parsed.remote, r)
self.assertEqual(parsed.merged, m)
# noinspection PyUnresolvedReferences
def test_path_arguments_long(self):
# Given
b = "b"
l = "l"
r = "r"
m = "m"
# When
parsed = parse_arguments(['--base', b, '--merged', m, '--local', l, '--remote', r])
self.assertEqual(parsed.base, b)
self.assertEqual(parsed.local, l)
self.assertEqual(parsed.remote, r)
self.assertEqual(parsed.merged, m)
def test_missing_arguments(self):
b = "b"
l = "l"
r = "r"
m = "m"
with self.assertRaises(SystemExit) as context:
parse_arguments(['--base', b, '--merged', m, '--remote', r])
def test_unknown_argument(self):
b = "b"
l = "l"
r = "r"
m = "m"
with self.assertRaises(SystemExit) as context:
parse_arguments(
['--base', b, '--merged', m, '--local', l, '--remote', r, '--kamoulox', '-p'])
if __name__ == '__main__':
unittest.main()
| 31.316901
| 109
| 0.602878
| 509
| 4,447
| 5.078585
| 0.19057
| 0.077369
| 0.058027
| 0.039458
| 0.847969
| 0.847969
| 0.799613
| 0.777176
| 0.777176
| 0.777176
| 0
| 0.000947
| 0.287835
| 4,447
| 141
| 110
| 31.539007
| 0.815283
| 0.146841
| 0
| 0.641975
| 0
| 0
| 0.150187
| 0.025655
| 0
| 0
| 0
| 0
| 0.222222
| 1
| 0.123457
| false
| 0
| 0.407407
| 0
| 0.54321
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
dd2de55f26289dc4091ce642f58c0f7e36459e7e
| 126
|
py
|
Python
|
gsfpy/gsfNavigationError.py
|
irewolepeter/gsfpy_USM_Implementation
|
c4614ac3f7d833eb86ea38c7708108b130f96612
|
[
"MIT"
] | 7
|
2020-07-01T07:12:19.000Z
|
2022-01-20T20:39:57.000Z
|
gsfpy/gsfNavigationError.py
|
irewolepeter/gsfpy_USM_Implementation
|
c4614ac3f7d833eb86ea38c7708108b130f96612
|
[
"MIT"
] | 36
|
2020-06-23T09:10:15.000Z
|
2022-03-22T10:27:58.000Z
|
gsfpy/gsfNavigationError.py
|
irewolepeter/gsfpy_USM_Implementation
|
c4614ac3f7d833eb86ea38c7708108b130f96612
|
[
"MIT"
] | 2
|
2021-02-07T13:21:52.000Z
|
2021-06-24T19:16:16.000Z
|
from gsfpy import mirror_default_gsf_version_submodule
mirror_default_gsf_version_submodule(globals(), "gsfNavigationError")
| 31.5
| 69
| 0.888889
| 15
| 126
| 6.933333
| 0.666667
| 0.25
| 0.307692
| 0.442308
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 126
| 3
| 70
| 42
| 0.87395
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
dd6934be468373da7713fd6d0a1cb534590dae98
| 20,858
|
py
|
Python
|
tests/test_iterators.py
|
Gemicai/Gemicai
|
1ce3be768979acc7251b4108a59292cba99624d1
|
[
"MIT"
] | 5
|
2020-11-16T11:06:51.000Z
|
2021-02-23T04:54:30.000Z
|
tests/test_iterators.py
|
Gemicai/Gemicai
|
1ce3be768979acc7251b4108a59292cba99624d1
|
[
"MIT"
] | 1
|
2021-08-24T16:21:30.000Z
|
2021-08-24T16:21:30.000Z
|
tests/test_iterators.py
|
Gemicai/Gemicai
|
1ce3be768979acc7251b4108a59292cba99624d1
|
[
"MIT"
] | 1
|
2021-02-23T04:54:31.000Z
|
2021-02-23T04:54:31.000Z
|
import os
import gemicai.data_iterators as test
import torchvision
import unittest
raw_dicom_directory = os.path.join("..", "examples", "dicom", "CT")
raw_dicom_file_path = os.path.join(raw_dicom_directory, "325261597578315993471860132776680.dcm.gz")
wrong_dicom_file_path = os.path.join("..", "000001.gemset")
dicom_directory = os.path.join("..", "examples", "gemset", "CT")
dicom_data_set = os.path.join(dicom_directory, "000001.gemset")
class TestPickledDicomoDataSet(unittest.TestCase):
def test_init_correct_usage(self):
dataset = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
self.assertIsInstance(dataset, test.PickledDicomoDataSet)
def test_init_wrong_label_counter_type(self):
with self.assertRaises(TypeError):
test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={},
label_counter_type=TestPickledDicomoDataSet)
def test_init_invalid_file_path(self):
with self.assertRaises(FileNotFoundError):
test.PickledDicomoDataSet(wrong_dicom_file_path, ["CT"], constraints={})
def test_init_file_has_wrong_type(self):
dataset = test.PickledDicomoDataSet(raw_dicom_file_path, ["CT"], constraints={})
with self.assertRaises(test.gem.pickle.UnpicklingError):
next(iter(dataset))
def test_init_wrong_labels_type(self):
with self.assertRaises(TypeError):
dataset = test.PickledDicomoDataSet(dicom_data_set, {"CT"}, constraints={})
def test_init_wrong_constraints_type(self):
with self.assertRaises(TypeError):
dataset = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints=[])
def test_iter(self):
dataset = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
dataset = iter(dataset)
self.assertIsInstance(dataset, test.PickledDicomoDataSet)
def test_next(self):
dataset = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
data = next(iter(dataset))
self.assertIsInstance(data, list)
def test_len(self):
dataset = iter(test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={}))
self.assertEqual(len(dataset), 0)
next(dataset)
self.assertEqual(len(dataset), 1)
next(dataset)
next(dataset)
self.assertEqual(len(dataset), 3)
def test_from_file_apply_invalid_transformation(self):
with self.assertRaises(Exception):
next(iter(test.PickledDicomoDataSet(dicom_data_set, ["CT"], transform=[], constraints={})))
def test_from_file_apply_valid_transformation(self):
t1 = torchvision.transforms.Compose([
torchvision.transforms.ToPILImage(),
torchvision.transforms.Resize((244, 244)),
torchvision.transforms.ToTensor()
])
data = next(iter(test.PickledDicomoDataSet(dicom_data_set, ["CT"], transform=t1, constraints={})))
self.assertIsInstance(data, list)
def test_subset_correct_usage(self):
data = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
subset = data.subset({"Modality": "asd"})
with self.assertRaises(StopIteration):
next(iter(subset))
def test_subset_wrong_constraint_type(self):
data = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
with self.assertRaises(TypeError):
subset = data.subset(("Modality", 1))
def test_can_be_parallelized(self):
data = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
self.assertEqual(data.can_be_parallelized(), False)
def test_classes_correct_usage(self):
data = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
self.assertIsInstance(data.classes("Modality"), list)
def test_classes_wrong_label_type(self):
data = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
with self.assertRaises(TypeError):
data.classes(["Modality"])
def test_summarize_correct_usage(self):
data = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
self.assertIsInstance(data.summarize("Modality", print_summary=False), test.gem.LabelCounter)
def test_summarize_wrong_label_type(self):
data = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
with self.assertRaises(TypeError):
data.summarize(["Modality"], print_summary=False)
def test_summarize_wrong_constraints_type(self):
data = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
with self.assertRaises(TypeError):
data.summarize("Modality", [], print_summary=False)
def test_summarize_test_CT_constraint(self):
data = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
summary_1 = data.summarize("Modality", print_summary=False)
summary_2 = data.summarize("BodyPartExamined", print_summary=False)
self.assertNotEqual(str(summary_1), str(summary_2))
def test_summarize_wrong_summary_type(self):
data = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
with self.assertRaises(TypeError):
data.summarize("Modality", print_summary=None)
class TestPickledDicomoDataFolder(unittest.TestCase):
def test_init_correct_usage(self):
dataset = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
self.assertIsInstance(dataset, test.PickledDicomoDataFolder)
def test_init_wrong_label_counter_type(self):
with self.assertRaises(TypeError):
test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={},
label_counter_type=TestPickledDicomoDataSet)
def test_init_invalid_directory_path(self):
with self.assertRaises(NotADirectoryError):
test.PickledDicomoDataFolder(os.path.join(dicom_directory, "asd"), ["CT"], constraints={})
def test_init_file_has_wrong_type(self):
dataset = test.PickledDicomoDataFolder(raw_dicom_directory, ["CT"], constraints={})
with self.assertRaises(test.gem.pickle.UnpicklingError):
next(iter(dataset))
def test_init_wrong_labels_type(self):
with self.assertRaises(TypeError):
dataset = test.PickledDicomoDataFolder(dicom_directory, {"CT"}, constraints={})
def test_init_wrong_constraints_type(self):
with self.assertRaises(TypeError):
dataset = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints=[])
def test_iter(self):
dataset = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
dataset = iter(dataset)
self.assertIsInstance(dataset, test.PickledDicomoDataFolder)
def test_next(self):
dataset = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
data = next(iter(dataset))
self.assertIsInstance(data, list)
def test_len(self):
dataset = iter(test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={}))
self.assertEqual(len(dataset), 0)
next(dataset)
self.assertEqual(len(dataset), 1)
next(dataset)
next(dataset)
self.assertEqual(len(dataset), 3)
def test_subset_correct_usage(self):
data = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
subset = data.subset({"Modality": "asd"})
with self.assertRaises(StopIteration):
next(iter(subset))
def test_subset_wrong_constraint_type(self):
data = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
with self.assertRaises(TypeError):
subset = data.subset(("Modality", 1))
def test_iterate_over_all(self):
data = iter(test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={}))
with self.assertRaises(StopIteration):
while True:
next(data)
self.assertEqual(len(data), 49)
def test_can_be_parallelized(self):
data = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
self.assertEqual(data.can_be_parallelized(), False)
def test_classes_correct_usage(self):
data = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
self.assertIsInstance(data.classes("Modality"), list)
def test_classes_wrong_label_type(self):
data = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
with self.assertRaises(TypeError):
data.classes(["Modality"])
def test_summarize_correct_usage(self):
data = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
self.assertIsInstance(data.summarize("Modality", print_summary=False), test.gem.LabelCounter)
def test_summarize_wrong_label_type(self):
data = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
with self.assertRaises(TypeError):
data.summarize(["Modality"], print_summary=False)
def test_summarize_wrong_constraints_type(self):
data = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
with self.assertRaises(TypeError):
data.summarize("Modality", [], print_summary=False)
def test_summarize_test_CT_constraint(self):
data = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
summary_1 = data.summarize("Modality", print_summary=False)
summary_2 = data.summarize("BodyPartExamined", print_summary=False)
self.assertNotEqual(str(summary_1), str(summary_2))
def test_summarize_wrong_summary_type(self):
data = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
with self.assertRaises(TypeError):
data.summarize("Modality", print_summary=None)
class TestPickledDicomoFilePool(unittest.TestCase):
def test_init_correct_usage(self):
data = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
self.assertIsInstance(data, test.PickledDicomoFilePool)
def test_init_wrong_label_counter_type(self):
with self.assertRaises(TypeError):
test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={},
label_counter_type=TestPickledDicomoDataSet)
def test_init_invalid_file_pool_path(self):
with self.assertRaises(FileNotFoundError):
test.PickledDicomoFilePool([os.path.join(dicom_directory, "asd", "000001.gemset")], ["CT"], constraints={})
def test_init_file_has_wrong_type(self):
with self.assertRaises(test.gem.pickle.UnpicklingError):
next(iter(test.PickledDicomoFilePool([raw_dicom_file_path], ["CT"], constraints={})))
def test_init_wrong_labels_type(self):
with self.assertRaises(TypeError):
dataset = test.PickledDicomoFilePool([dicom_data_set], {"CT"}, constraints={})
def test_init_wrong_constraints_type(self):
with self.assertRaises(TypeError):
dataset = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints=[])
def test_iter(self):
dataset = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
dataset = iter(dataset)
self.assertIsInstance(dataset, test.PickledDicomoFilePool)
def test_next(self):
dataset = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
data = next(iter(dataset))
self.assertIsInstance(data, list)
def test_len(self):
dataset = iter(test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={}))
self.assertEqual(len(dataset), 0)
next(dataset)
self.assertEqual(len(dataset), 1)
next(dataset)
next(dataset)
self.assertEqual(len(dataset), 3)
def test_subset_correct_usage(self):
data = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
subset = data.subset({"Modality": "asd"})
with self.assertRaises(StopIteration):
next(iter(subset))
def test_subset_wrong_constraint_type(self):
data = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
with self.assertRaises(TypeError):
subset = data.subset(("Modality", 1))
def test_iterate_over_all(self):
data = iter(test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={}))
with self.assertRaises(StopIteration):
while True:
next(data)
self.assertNotEqual(len(data), 0)
def test_can_be_parallelized(self):
data = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
self.assertEqual(data.can_be_parallelized(), False)
def test_classes_correct_usage(self):
data = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
self.assertIsInstance(data.classes("Modality"), list)
def test_classes_wrong_label_type(self):
data = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
with self.assertRaises(TypeError):
data.classes(["Modality"])
def test_summarize_correct_usage(self):
data = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
self.assertIsInstance(data.summarize("Modality", print_summary=False), test.gem.LabelCounter)
def test_summarize_wrong_label_type(self):
data = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
with self.assertRaises(TypeError):
data.summarize(["Modality"], print_summary=False)
def test_summarize_wrong_constraints_type(self):
data = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
with self.assertRaises(TypeError):
data.summarize("Modality", [], print_summary=False)
def test_summarize_test_CT_constraint(self):
data = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
summary_1 = data.summarize("Modality", print_summary=False)
summary_2 = data.summarize("BodyPartExamined", print_summary=False)
self.assertNotEqual(str(summary_1), str(summary_2))
def test_summarize_wrong_summary_type(self):
data = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
with self.assertRaises(TypeError):
data.summarize("Modality", print_summary=None)
class TestConcurrentPickledDicomObjectTaskSplitter(unittest.TestCase):
def test_init_correct_usage(self):
data = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
self.assertIsInstance(data, test.ConcurrentPickledDicomObjectTaskSplitter)
def test_init_wrong_label_counter_type(self):
with self.assertRaises(TypeError):
test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={},
label_counter_type=TestPickledDicomoDataSet)
def test_init_invalid_directory_path(self):
test.ConcurrentPickledDicomObjectTaskSplitter(os.path.join(dicom_directory, "asd"), ["CT"], constraints={})
def test_init_wrong_labels_type(self):
with self.assertRaises(TypeError):
dataset = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, {"CT"}, constraints={})
def test_init_wrong_constraints_type(self):
with self.assertRaises(TypeError):
dataset = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints=[])
def test_iter(self):
dataset = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
dataset = iter(dataset)
self.assertIsInstance(dataset, test.PickledDicomoFilePool)
def test_next(self):
dataset = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
data = next(iter(dataset))
self.assertIsInstance(data, list)
def test_len(self):
dataset = iter(test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={}))
self.assertEqual(len(dataset), 0)
next(dataset)
self.assertEqual(len(dataset), 1)
next(dataset)
next(dataset)
self.assertEqual(len(dataset), 3)
def test_subset_correct_usage(self):
data = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
subset = data.subset({"Modality": "asd"})
with self.assertRaises(StopIteration):
next(iter(subset))
def test_subset_wrong_constraint_type(self):
data = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
with self.assertRaises(TypeError):
subset = data.subset(("Modality", 1))
def test_iterate_over_all(self):
data = iter(test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={}))
with self.assertRaises(StopIteration):
while True:
next(data)
self.assertNotEqual(len(data), 0)
def test_can_be_parallelized(self):
data = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
self.assertEqual(data.can_be_parallelized(), True)
def test_classes_correct_usage(self):
data = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
self.assertIsInstance(data.classes("Modality"), list)
def test_classes_wrong_label_type(self):
data = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
with self.assertRaises(TypeError):
data.classes(["Modality"])
def test_summarize_correct_usage(self):
data = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
self.assertIsInstance(data.summarize("Modality", print_summary=False), test.gem.LabelCounter)
def test_summarize_wrong_label_type(self):
data = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
with self.assertRaises(TypeError):
data.summarize(["Modality"], print_summary=False)
def test_summarize_wrong_constraints_type(self):
data = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
with self.assertRaises(TypeError):
data.summarize("Modality", [], print_summary=False)
def test_summarize_test_CT_constraint(self):
data = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
summary_1 = data.summarize("Modality", print_summary=False)
summary_2 = data.summarize("BodyPartExamined", print_summary=False)
self.assertNotEqual(str(summary_1), str(summary_2))
def test_summarize_wrong_summary_type(self):
data = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
with self.assertRaises(TypeError):
data.summarize("Modality", print_summary=None)
class TestDicomoDataset(unittest.TestCase):
    """Tests for the DicomoDataset factory helpers (from_file / from_directory)."""

    def test_from_file_correct_usage(self):
        """from_file() on an existing pickle yields a PickledDicomoDataSet."""
        ds = test.DicomoDataset.from_file(dicom_data_set, ["CT"])
        self.assertIsInstance(ds, test.PickledDicomoDataSet)

    def test_from_file_wrong_file_path(self):
        """A missing file path must raise FileNotFoundError."""
        with self.assertRaises(FileNotFoundError):
            test.DicomoDataset.from_file(wrong_dicom_file_path, ["CT"])

    def test_from_directory_correct_usage(self):
        """from_directory() on a real directory yields a task splitter."""
        ds = test.DicomoDataset.from_directory(dicom_directory, ["CT"])
        self.assertIsInstance(ds, test.ConcurrentPickledDicomObjectTaskSplitter)

    def test_from_directory_file_wrong_directory_path(self):
        """A non-existent directory must raise NotADirectoryError."""
        bogus_dir = os.path.join(dicom_directory, "asd")
        with self.assertRaises(NotADirectoryError):
            test.DicomoDataset.from_directory(bogus_dir, ["CT"])

    def test_get_dicomo_dataset_correct_usage_file(self):
        """get_dicomo_dataset() dispatches a file path to PickledDicomoDataSet."""
        ds = test.DicomoDataset.get_dicomo_dataset(dicom_data_set)
        self.assertIsInstance(ds, test.PickledDicomoDataSet)

    def test_get_dicomo_dataset_correct_usage_directory(self):
        """get_dicomo_dataset() dispatches a directory path to the task splitter."""
        ds = test.DicomoDataset.get_dicomo_dataset(dicom_directory)
        self.assertIsInstance(ds, test.ConcurrentPickledDicomObjectTaskSplitter)

    def test_get_dicomo_dataset_wrong_directory_path(self):
        """An invalid path must raise NotADirectoryError."""
        with self.assertRaises(NotADirectoryError):
            test.DicomoDataset.get_dicomo_dataset(wrong_dicom_file_path)
# Allow running this test module directly, outside a test runner.
if __name__ == '__main__':
    unittest.main()
| 45.049676
| 119
| 0.696232
| 2,131
| 20,858
| 6.564054
| 0.051619
| 0.043537
| 0.07006
| 0.038033
| 0.924149
| 0.915713
| 0.896912
| 0.848442
| 0.800329
| 0.761152
| 0
| 0.005593
| 0.185636
| 20,858
| 462
| 120
| 45.147186
| 0.817909
| 0
| 0
| 0.8
| 0
| 0
| 0.032026
| 0.001918
| 0
| 0
| 0
| 0
| 0.269444
| 1
| 0.241667
| false
| 0
| 0.011111
| 0
| 0.266667
| 0.066667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
dd926a2e9e4bb6d2a9e3279506838ad57504fc25
| 123
|
py
|
Python
|
src/crkeng/app/integration_tests/test_test_db.py
|
sarahrmoeller/morphodict
|
74db04b58076b7be5d9b0742d20e8b2014fb4aaa
|
[
"Apache-2.0"
] | 8
|
2019-06-06T14:24:40.000Z
|
2020-11-11T01:43:11.000Z
|
src/crkeng/app/integration_tests/test_test_db.py
|
sarahrmoeller/morphodict
|
74db04b58076b7be5d9b0742d20e8b2014fb4aaa
|
[
"Apache-2.0"
] | 800
|
2019-05-23T15:32:03.000Z
|
2021-09-24T15:45:16.000Z
|
src/crkeng/app/integration_tests/test_test_db.py
|
sarahrmoeller/morphodict
|
74db04b58076b7be5d9b0742d20e8b2014fb4aaa
|
[
"Apache-2.0"
] | 8
|
2020-12-17T21:57:00.000Z
|
2021-09-17T20:03:51.000Z
|
from morphodict.lexicon.test_db import get_test_words
def test_test_db_words():
    """The test database word list must contain the canonical entry 'wâpamêw'."""
    words = get_test_words()
    assert "wâpamêw" in words
| 20.5
| 53
| 0.796748
| 20
| 123
| 4.5
| 0.6
| 0.133333
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130081
| 123
| 5
| 54
| 24.6
| 0.841122
| 0
| 0
| 0
| 0
| 0
| 0.056911
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
06e5890b10a2646477f4e2eeb2c5f277a875fbae
| 12,773
|
py
|
Python
|
python_tools/test/test_create_inputs_from_title_file.py
|
mskcc/ACCESS-Pipeline
|
b0f226a9ac5152f3afe0d38c8cd54aa25b8b01cf
|
[
"MIT"
] | 4
|
2018-08-23T04:37:43.000Z
|
2021-07-03T12:49:51.000Z
|
python_tools/test/test_create_inputs_from_title_file.py
|
andurill/ACCESS-Pipeline
|
3441040dfaecba58150c13a95a6a93657b00778a
|
[
"MIT"
] | 32
|
2018-06-08T12:44:55.000Z
|
2022-01-13T01:29:35.000Z
|
python_tools/test/test_create_inputs_from_title_file.py
|
andurill/ACCESS-Pipeline
|
3441040dfaecba58150c13a95a6a93657b00778a
|
[
"MIT"
] | 4
|
2018-05-25T21:43:48.000Z
|
2022-01-07T18:51:43.000Z
|
import unittest
import pandas as pd
from python_tools.constants import *
from python_tools.pipeline_kickoff import create_inputs_from_title_file
def load_bad_title_file():
    """Read the intentionally malformed tab-separated title file used by validation tests."""
    return pd.read_csv('test_data/bad_title_file.txt', sep='\t')
def load_good_title_file_similar_sample_names():
    """Read the valid title file whose sample names are prefixes of each other."""
    return pd.read_csv('test_data/good_title_file_similar_sample_names.txt', sep='\t')
def load_good_title_file_with_difficult_sample_ids():
    """Read the valid title file containing hard-to-match sample IDs."""
    return pd.read_csv('test_data/good_title_file_difficult_sample_ids.txt', sep='\t')
def load_good_title_file():
    """Read the fully valid tab-separated title file."""
    return pd.read_csv('test_data/good_title_file.txt', sep='\t')
class CIFTTests(unittest.TestCase):
    """Tests for create_inputs_from_title_file: fastq sorting, title-file
    validation, and barcode index checks.

    Fixture paths point into the umi-T_N-PanCancer test data tree; the
    tests rely on the exact ordering of these lists.
    """

    def setUp(self):
        # Title-file fixtures (pandas DataFrames).
        self.bad_title_file = load_bad_title_file()
        self.good_title_file = load_good_title_file()
        # NOTE(review): attribute name says "difficult_sample_ids" but the
        # value comes from load_good_title_file_similar_sample_names();
        # load_good_title_file_with_difficult_sample_ids() is never called.
        # Confirm which fixture the barcode tests are meant to use.
        self.good_title_file_with_difficult_sample_ids = load_good_title_file_similar_sample_names()
        # R1 fastq CWL File objects, deliberately NOT in title-file order.
        self._fastq_objects = [
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_1_test_investigator_sample_1/test_patient_1_test_investigator_sample_1_R1_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_1_test_investigator_sample_1a/test_patient_1_test_investigator_sample_1a_R1_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_2_test_investigator_sample_4_T/test_patient_2_test_investigator_sample_4_T_R1_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_2_test_investigator_sample_3_N/test_patient_2_test_investigator_sample_3_N_R1_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_3_test_investigator_sample_6_T/test_patient_3_test_investigator_sample_6_T_R1_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_2_test_investigator_sample_5_N/test_patient_2_test_investigator_sample_5_N_R1_001.fastq.gz'}
        ]
        # Use absolute paths
        self._fastq_objects = [
            {'class': 'File', 'path': os.path.abspath(p['path'])} for p in self._fastq_objects
        ]
        # Matching R2 fastq objects (kept as relative paths).
        self._fastq2_objects = [
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_1_test_investigator_sample_1/test_patient_1_test_investigator_sample_1_R2_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_1_test_investigator_sample_1a/test_patient_1_test_investigator_sample_1a_R2_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_2_test_investigator_sample_4_T/test_patient_2_test_investigator_sample_4_T_R2_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_2_test_investigator_sample_3_N/test_patient_2_test_investigator_sample_3_N_R2_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_3_test_investigator_sample_6_T/test_patient_3_test_investigator_sample_6_T_R2_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_2_test_investigator_sample_5_N/test_patient_2_test_investigator_sample_5_N_R2_001.fastq.gz'}
        ]
        # Patient IDs aligned with the sample ordering above.
        self._patient_ids = ['test_patient_1', 'test_patient_1', 'test_patient_2', 'test_patient_2', 'test_patient_2', 'test_patient_3']
        # One SampleSheet.csv per sample directory.
        self._sample_sheets = [
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_1_test_investigator_sample_1/SampleSheet.csv'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_1_test_investigator_sample_1a/SampleSheet.csv'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_2_test_investigator_sample_4_T/SampleSheet.csv'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_2_test_investigator_sample_3_N/SampleSheet.csv'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_3_test_investigator_sample_6_T/SampleSheet.csv'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_2_test_investigator_sample_5_N/SampleSheet.csv'}
        ]
        # Use absolute paths
        self._sample_sheets = [
            {'class': 'File', 'path': os.path.abspath(p['path'])} for p in self._sample_sheets
        ]

    def test_get_fastq_positions(self):
        """sort_fastqs() must reorder the fastq objects to title-file order."""
        fastq1, fastq2, sample_sheet = create_inputs_from_title_file.sort_fastqs(
            self._fastq_objects,
            self._fastq2_objects,
            self._sample_sheets,
            self.bad_title_file
        )
        # Expected R1 ordering after sorting against the title file.
        expected = [
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_1_test_investigator_sample_1a/test_patient_1_test_investigator_sample_1a_R1_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_1_test_investigator_sample_1/test_patient_1_test_investigator_sample_1_R1_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_2_test_investigator_sample_4_T/test_patient_2_test_investigator_sample_4_T_R1_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_2_test_investigator_sample_3_N/test_patient_2_test_investigator_sample_3_N_R1_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_2_test_investigator_sample_5_N/test_patient_2_test_investigator_sample_5_N_R1_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_3_test_investigator_sample_6_T/test_patient_3_test_investigator_sample_6_T_R1_001.fastq.gz'},
        ]
        self.assertListEqual(fastq1, [{'class': 'File', 'path': os.path.abspath(f['path'])} for f in expected])

    def test_two_sample_ids_found_in_fastq(self):
        """Sorting must still work when one sample ID is a prefix of another
        (e.g. sample_1 vs sample_1a)."""
        fastq_objects = [
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_1_test_investigator_sample_1a/test_patient_1_test_investigator_sample_1a_R1_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_1_test_investigator_sample_1/test_patient_1_test_investigator_sample_1_R1_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_2_test_investigator_sample_4_T/test_patient_2_test_investigator_sample_4_T_R1_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_2_test_investigator_sample_3_N/test_patient_2_test_investigator_sample_3_N_R1_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_3_test_investigator_sample_6_T/test_patient_3_test_investigator_sample_6_T_R1_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_2_test_investigator_sample_5_N/test_patient_2_test_investigator_sample_5_N_R1_001.fastq.gz'}
        ]
        fastq2_objects = [
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_1_test_investigator_sample_1a/test_patient_1_test_investigator_sample_1a_R2_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_1_test_investigator_sample_1/test_patient_1_test_investigator_sample_1_R2_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_2_test_investigator_sample_4_T/test_patient_2_test_investigator_sample_4_T_R2_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_3_test_investigator_sample_6_T/test_patient_3_test_investigator_sample_6_T_R2_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_2_test_investigator_sample_3_N/test_patient_2_test_investigator_sample_3_N_R2_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_2_test_investigator_sample_5_N/test_patient_2_test_investigator_sample_5_N_R2_001.fastq.gz'}
        ]
        sample_sheets = [
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_1_test_investigator_sample_1a/SampleSheet.csv'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_2_test_investigator_sample_1/SampleSheet.csv'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_1_test_investigator_sample_3_N/SampleSheet.csv'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_2_test_investigator_sample_4_T/SampleSheet.csv'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_3_test_investigator_sample_6_T/SampleSheet.csv'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_2_test_investigator_sample_5_N/SampleSheet.csv'}
        ]
        title_file = load_good_title_file_similar_sample_names()
        fastq1, fastq2, sample_sheet = create_inputs_from_title_file.sort_fastqs(
            fastq_objects,
            fastq2_objects,
            sample_sheets,
            title_file
        )
        self.assertListEqual(fastq1, [
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_1_test_investigator_sample_1/test_patient_1_test_investigator_sample_1_R1_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_1_test_investigator_sample_1a/test_patient_1_test_investigator_sample_1a_R1_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_2_test_investigator_sample_4_T/test_patient_2_test_investigator_sample_4_T_R1_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_2_test_investigator_sample_3_N/test_patient_2_test_investigator_sample_3_N_R1_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_2_test_investigator_sample_5_N/test_patient_2_test_investigator_sample_5_N_R1_001.fastq.gz'},
            {'class': 'File', 'path': '../../test/test_data/umi-T_N-PanCancer/test_patient_3_test_investigator_sample_6_T/test_patient_3_test_investigator_sample_6_T_R1_001.fastq.gz'},
        ])

    def test_validate_title_file(self):
        """perform_validation() must keep raising until every defect in the
        bad title file is repaired, then pass.
        :return:
        """
        with self.assertRaises(Exception):
            create_inputs_from_title_file.perform_validation(self.bad_title_file)
        # Fix missing lane number
        self.bad_title_file.loc[self.bad_title_file.index[0], TITLE_FILE__LANE_COLUMN] = 1
        with self.assertRaises(Exception):
            create_inputs_from_title_file.perform_validation(self.bad_title_file)
        # Fix duplicate barcodes
        self.bad_title_file.loc[self.bad_title_file.index[-1], TITLE_FILE__BARCODE_ID_COLUMN] = 'bc411-bc411'
        with self.assertRaises(Exception):
            create_inputs_from_title_file.perform_validation(self.bad_title_file, 'test_project_title_file.txt', 'test_project')
        # Fix misspelled sample class
        self.bad_title_file[TITLE_FILE__SAMPLE_CLASS_COLUMN] = self.bad_title_file[TITLE_FILE__SAMPLE_CLASS_COLUMN].str.replace('Tumore', 'Tumor')
        with self.assertRaises(Exception) as context:
            create_inputs_from_title_file.perform_validation(self.bad_title_file)
        # Fix misspelled sample type
        self.bad_title_file[TITLE_FILE__SAMPLE_TYPE_COLUMN] = self.bad_title_file[TITLE_FILE__SAMPLE_TYPE_COLUMN].str.replace('Plasmaa', 'Plasma')
        # Now it should pass
        create_inputs_from_title_file.perform_validation(self.bad_title_file, 'test_project_title_file.txt', 'test_project')

    def test_barcodes_check(self):
        """
        Standalone test for barcodes validation
        :return:
        """
        # Both i7 and i5 index checks must fail for this fixture combination.
        with self.assertRaises(AssertionError):
            create_inputs_from_title_file.perform_barcode_index_checks_i7(
                self.good_title_file_with_difficult_sample_ids, self._sample_sheets)
        with self.assertRaises(AssertionError):
            create_inputs_from_title_file.perform_barcode_index_checks_i5(
                self.good_title_file_with_difficult_sample_ids, self._sample_sheets)
| 69.043243
| 184
| 0.726219
| 1,863
| 12,773
| 4.441224
| 0.062802
| 0.119652
| 0.22335
| 0.098622
| 0.909838
| 0.903553
| 0.875635
| 0.86234
| 0.848199
| 0.823906
| 0
| 0.030824
| 0.138965
| 12,773
| 184
| 185
| 69.418478
| 0.721495
| 0.017067
| 0
| 0.511628
| 0
| 0
| 0.583013
| 0.516891
| 0
| 0
| 0
| 0
| 0.062016
| 1
| 0.069767
| false
| 0
| 0.031008
| 0
| 0.139535
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b07dcfcdeebf3677b9b85cfc2056f2f1727519dc
| 92
|
py
|
Python
|
reinhardt/utils/__init__.py
|
cceit/reinhardt
|
80cacb3c7d81c7128d3567bb42b75c277f05a53f
|
[
"BSD-3-Clause"
] | null | null | null |
reinhardt/utils/__init__.py
|
cceit/reinhardt
|
80cacb3c7d81c7128d3567bb42b75c277f05a53f
|
[
"BSD-3-Clause"
] | null | null | null |
reinhardt/utils/__init__.py
|
cceit/reinhardt
|
80cacb3c7d81c7128d3567bb42b75c277f05a53f
|
[
"BSD-3-Clause"
] | null | null | null |
from .admin import * # NOQA
from .helpers import * # NOQA
from .reports import * # NOQA
| 18.4
| 30
| 0.663043
| 12
| 92
| 5.083333
| 0.5
| 0.491803
| 0.459016
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.23913
| 92
| 4
| 31
| 23
| 0.871429
| 0.152174
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b0b70e2d5fa15fa3099d217a14692042f26d0c28
| 8,058
|
py
|
Python
|
tests/test_monitor.py
|
jraygauthier/pytest-monitor
|
8480dac0b052d06669fbc717a19d3c816d3f06c4
|
[
"MIT"
] | null | null | null |
tests/test_monitor.py
|
jraygauthier/pytest-monitor
|
8480dac0b052d06669fbc717a19d3c816d3f06c4
|
[
"MIT"
] | null | null | null |
tests/test_monitor.py
|
jraygauthier/pytest-monitor
|
8480dac0b052d06669fbc717a19d3c816d3f06c4
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import pathlib
import pytest
import sqlite3
def test_monitor_basic_test(testdir):
    """pytest-monitor should record one metrics row without affecting outcomes."""
    # Temporary pytest module under test.
    testdir.makepyfile("""
        import time


        def test_ok():
            time.sleep(0.5)
            x = ['a' * i for i in range(100)]
            assert len(x) == 100
    """)
    res = testdir.runpytest('-v')
    # fnmatch_lines asserts internally.
    res.stdout.fnmatch_lines(['*::test_ok PASSED*'])
    monitor_db = pathlib.Path(str(testdir)) / '.pymon'
    assert monitor_db.exists()
    # The suite itself must have passed.
    res.assert_outcomes(passed=1)
    conn = sqlite3.connect(str(monitor_db))
    cur = conn.cursor()
    cur.execute('SELECT ITEM FROM TEST_METRICS;')
    assert 1 == len(cur.fetchall())  # current test
def test_monitor_pytest_skip_marker(testdir):
    """A pytest-skipped test must leave no rows in the metrics table."""
    # Temporary pytest module containing a single skipped test.
    testdir.makepyfile("""
        import pytest
        import time


        @pytest.mark.skip("Some reason")
        def test_skipped():
            assert True
    """)
    res = testdir.runpytest('-v')
    # fnmatch_lines asserts internally.
    res.stdout.fnmatch_lines(['*::test_skipped SKIPPED*'])
    monitor_db = pathlib.Path(str(testdir)) / '.pymon'
    assert monitor_db.exists()
    res.assert_outcomes(skipped=1)
    conn = sqlite3.connect(str(monitor_db))
    cur = conn.cursor()
    cur.execute('SELECT ITEM FROM TEST_METRICS;')
    assert not len(cur.fetchall())
def test_bad_markers(testdir):
    """An unknown monitor_* marker should produce a warning but still monitor."""
    # Temporary pytest module with a bogus monitor marker.
    testdir.makepyfile("""
        import pytest
        import time


        @pytest.mark.monitor_bad_marker
        def test_ok():
            time.sleep(0.1)
            x = ['a' * i for i in range(100)]
            assert len(x) == 100
    """)
    res = testdir.runpytest('-v')
    # fnmatch_lines asserts internally.
    res.stdout.fnmatch_lines(['*::test_ok PASSED*',
                              '*Nothing known about marker monitor_bad_marker*'])
    monitor_db = pathlib.Path(str(testdir)) / '.pymon'
    assert monitor_db.exists()
    res.assert_outcomes(passed=1)
    conn = sqlite3.connect(str(monitor_db))
    cur = conn.cursor()
    cur.execute('SELECT ITEM FROM TEST_METRICS;')
    assert 1 == len(cur.fetchall())  # current test
def test_monitor_skip_module(testdir):
    """A module-level monitor_skip_test mark disables monitoring for all tests."""
    # Temporary pytest module whose pytestmark opts out of monitoring.
    testdir.makepyfile("""
        import pytest
        import time


        pytestmark = pytest.mark.monitor_skip_test


        def test_ok_not_monitored():
            time.sleep(0.1)
            x = ['a' * i for i in range(100)]
            assert len(x) == 100


        def test_another_function_ok_not_monitored():
            assert True
    """)
    res = testdir.runpytest('-v')
    # fnmatch_lines asserts internally.
    res.stdout.fnmatch_lines(['*::test_ok_not_monitored PASSED*',
                              '*::test_another_function_ok_not_monitored PASSED*'])
    monitor_db = pathlib.Path(str(testdir)) / '.pymon'
    assert monitor_db.exists()
    res.assert_outcomes(passed=2)
    conn = sqlite3.connect(str(monitor_db))
    cur = conn.cursor()
    cur.execute('SELECT ITEM FROM TEST_METRICS;')
    assert not len(cur.fetchall())  # Nothing ran
def test_monitor_skip_test(testdir):
    """A per-test monitor_skip_test mark must keep that test out of the metrics."""
    # Temporary pytest module with one opted-out test.
    testdir.makepyfile("""
        import pytest
        import time


        @pytest.mark.monitor_skip_test
        def test_not_monitored():
            time.sleep(0.1)
            x = ['a' * i for i in range(100)]
            assert len(x) == 100
    """)
    res = testdir.runpytest('-v')
    # fnmatch_lines asserts internally.
    res.stdout.fnmatch_lines(['*::test_not_monitored PASSED*'])
    monitor_db = pathlib.Path(str(testdir)) / '.pymon'
    assert monitor_db.exists()
    res.assert_outcomes(passed=1)
    conn = sqlite3.connect(str(monitor_db))
    cur = conn.cursor()
    cur.execute('SELECT ITEM FROM TEST_METRICS;')
    assert not len(cur.fetchall())  # nothing monitored
def test_monitor_skip_test_if(testdir):
    """monitor_skip_test_if(True) disables monitoring; (False) keeps it enabled."""
    # Temporary pytest module: one opted-out test, one monitored test.
    testdir.makepyfile("""
        import pytest
        import time


        @pytest.mark.monitor_skip_test_if(True)
        def test_not_monitored():
            time.sleep(0.1)
            x = ['a' * i for i in range(100)]
            assert len(x) == 100


        @pytest.mark.monitor_skip_test_if(False)
        def test_monitored():
            time.sleep(0.1)
            x = ['a' *i for i in range(100)]
            assert len(x) == 100
    """)
    res = testdir.runpytest('-v')
    # fnmatch_lines asserts internally.
    res.stdout.fnmatch_lines(['*::test_not_monitored PASSED*',
                              '*::test_monitored PASSED*'])
    monitor_db = pathlib.Path(str(testdir)) / '.pymon'
    assert monitor_db.exists()
    res.assert_outcomes(passed=2)
    conn = sqlite3.connect(str(monitor_db))
    cur = conn.cursor()
    cur.execute('SELECT ITEM FROM TEST_METRICS;')
    assert 1 == len(cur.fetchall())
def test_monitor_no_db(testdir):
    """With --no-db, pytest-monitor must warn that monitoring is disabled and
    must not create the .pymon database.

    (Docstring fixed: it was copy-pasted from the monitor_skip_test_if test.)
    """
    # create a temporary pytest test module
    testdir.makepyfile("""
        import pytest
        import time


        def test_it():
            time.sleep(0.1)
            x = ['a' * i for i in range(100)]
            assert len(x) == 100


        def test_that():
            time.sleep(0.1)
            x = ['a' *i for i in range(100)]
            assert len(x) == 100
    """)
    wrn = 'pytest-monitor: No storage specified but monitoring is requested. Disabling monitoring.'
    with pytest.warns(UserWarning, match=wrn):
        # run pytest with the following cmd args
        result = testdir.runpytest('--no-db', '-v')
    # fnmatch_lines does an assertion internally
    result.stdout.fnmatch_lines(['*::test_it PASSED*',
                                 '*::test_that PASSED*'])
    pymon_path = pathlib.Path(str(testdir)) / '.pymon'
    # Monitoring is disabled, so no database file may appear.
    assert not pymon_path.exists()
    # make sure that we get a '0' exit code for the testsuite
    result.assert_outcomes(passed=2)
def test_monitor_basic_output(testdir):
    """Captured output must not be repeated by pytest-monitor (issue #26)."""
    # Temporary pytest module that prints once.
    testdir.makepyfile("""
        def test_it():
            print('Hello World')
    """)
    warning_text = 'pytest-monitor: No storage specified but monitoring is requested. Disabling monitoring.'
    with pytest.warns(UserWarning, match=warning_text):
        res = testdir.runpytest('--no-db', '-s', '-vv')
    # fnmatch_lines asserts internally.
    res.stdout.fnmatch_lines(['*::test_it Hello World*'])
    # The line after the first match must not be a second 'Hello World'.
    assert "Hello World" != res.stdout.get_lines_after('*Hello World')[0]
    res.assert_outcomes(passed=1)
| 28.985612
| 99
| 0.656615
| 1,079
| 8,058
| 4.774791
| 0.114921
| 0.034938
| 0.037267
| 0.029503
| 0.907221
| 0.896739
| 0.869759
| 0.855008
| 0.845497
| 0.836374
| 0
| 0.015176
| 0.231323
| 8,058
| 277
| 100
| 29.090253
| 0.816597
| 0.265078
| 0
| 0.741935
| 0
| 0
| 0.413516
| 0.069461
| 0
| 0
| 0
| 0
| 0.206452
| 1
| 0.051613
| false
| 0.103226
| 0.103226
| 0
| 0.154839
| 0.006452
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
b0da8fe0093bf8a1254eafdadc91b734a1a112f3
| 35
|
py
|
Python
|
info_modules/wms/__init__.py
|
HusseinKabbout/qwc-feature-info-service
|
3d7cdbc1a3dc4a3725ba0529204848d47c4ed87e
|
[
"MIT"
] | null | null | null |
info_modules/wms/__init__.py
|
HusseinKabbout/qwc-feature-info-service
|
3d7cdbc1a3dc4a3725ba0529204848d47c4ed87e
|
[
"MIT"
] | null | null | null |
info_modules/wms/__init__.py
|
HusseinKabbout/qwc-feature-info-service
|
3d7cdbc1a3dc4a3725ba0529204848d47c4ed87e
|
[
"MIT"
] | 2
|
2020-03-24T09:13:14.000Z
|
2021-09-29T10:43:31.000Z
|
from .layer_info import layer_info
| 17.5
| 34
| 0.857143
| 6
| 35
| 4.666667
| 0.666667
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 35
| 1
| 35
| 35
| 0.903226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9fe536897918bd1f04d7b48a4e64a1cdbf28e6f8
| 7,896
|
py
|
Python
|
cogs/Administration.py
|
LookAtYourSKill/discord-bot
|
0ce89c627c260fb6cfed3b74cd1d4bed53dea1d0
|
[
"MIT"
] | null | null | null |
cogs/Administration.py
|
LookAtYourSKill/discord-bot
|
0ce89c627c260fb6cfed3b74cd1d4bed53dea1d0
|
[
"MIT"
] | null | null | null |
cogs/Administration.py
|
LookAtYourSKill/discord-bot
|
0ce89c627c260fb6cfed3b74cd1d4bed53dea1d0
|
[
"MIT"
] | null | null | null |
import asyncio
import json
import disnake as discord
from disnake.ext import commands
class administration(commands.Cog):
    """
    `Admin commands`
    """

    def __init__(self, bot):
        # Bot reference; used for channel lookups and extension (un)loading.
        self.bot = bot

    @commands.command(name='lock', aliases=['lockdown'])
    @commands.has_permissions(administrator=True)
    async def lock(self, ctx):
        """
        Lock a channel, so nobody can write in it

        **Usage :** ``?lock``
        """
        # Per-guild feature toggle: the Administration extension may be disabled.
        with open('utils/json/active_check.json', 'r') as f:
            data = json.load(f)

        if data[str(ctx.guild.id)]["Administration"] == 'false':
            embed = discord.Embed(
                description=f'Diese **Extension (Administration) ist momentan deaktiviert!** Wende dich bitte an **den Owner vom Bot** (LookAtYourSkill#8691)',
                color=discord.Color.red())
            await ctx.send(embed=embed)
        else:
            with open('utils/json/on_guild.json', 'r') as f:
                guild_data = json.load(f)

            # Revoke send_messages from @everyone -> channel is locked.
            await ctx.channel.set_permissions(ctx.guild.default_role, send_messages=False)
            embed = discord.Embed(description=f'`{ctx.channel}` ist nun **im Lockdown**',
                                  color=0x4cd137)
            embed.add_field(name='**Information**',
                            value=f'Channel im Lockdown : `{ctx.channel}`\n'
                                  f'In Lockdown gesetzt von : `{ctx.author}`')
            # Confirmation is ephemeral (deleted after 5s); invoking message removed too.
            await ctx.send(embed=embed, delete_after=5)
            await asyncio.sleep(1)
            await ctx.message.delete()

            # Mirror the embed to the guild's moderation log channel if configured.
            # NOTE(review): get_channel normally takes the id positionally —
            # confirm the keyword form works with this disnake version.
            if guild_data[str(ctx.guild.id)]["moderation_log_channel"]:
                channel = self.bot.get_channel(id=guild_data[str(ctx.guild.id)]['moderation_log_channel'])
                await channel.send(embed=embed)
            else:
                return

    @commands.command(name='release', aliases=['unlock'])
    @commands.has_permissions(administrator=True)
    async def unlock(self, ctx):
        """
        Unlock the channel you locked before

        **Usage :** ``?unlock``
        """
        # Per-guild feature toggle check (same pattern as lock).
        with open('utils/json/active_check.json', 'r') as f:
            data = json.load(f)

        if data[str(ctx.guild.id)]["Administration"] == 'false':
            embed = discord.Embed(
                description=f'Diese **Extension (Administration) ist momentan deaktiviert!** Wende dich bitte an **den Owner vom Bot** (LookAtYourSkill#8691)',
                color=discord.Color.red())
            await ctx.send(embed=embed)
        else:
            with open('utils/json/on_guild.json', 'r') as f:
                guild_data = json.load(f)

            # Restore send_messages for @everyone -> channel is unlocked.
            await ctx.channel.set_permissions(ctx.guild.default_role, send_messages=True)
            embed = discord.Embed(description=f'`{ctx.channel}` ist nun **nicht mehr im Lockdown**',
                                  color=0x4cd137)
            embed.add_field(name='**Information**',
                            value=f'Unlocked Channel : `{ctx.channel}`\n'
                                  f'Aus dem Lockdown genommen von : `{ctx.author}`')
            await ctx.send(embed=embed, delete_after=5)
            await asyncio.sleep(1)
            await ctx.message.delete()

            # Mirror the embed to the guild's moderation log channel if configured.
            if guild_data[str(ctx.guild.id)]["moderation_log_channel"]:
                channel = self.bot.get_channel(id=guild_data[str(ctx.guild.id)]['moderation_log_channel'])
                await channel.send(embed=embed)
            else:
                return

    @commands.command(aliases=['announce'])
    @commands.has_permissions(administrator=True)
    async def say(self, ctx, *, text):
        """
        Give back your text with a `@everyone` at the beginning!

        **Usage :** ``?say [text]``
        """
        with open('utils/json/active_check.json', 'r') as f:
            data = json.load(f)

        if data[str(ctx.guild.id)]["Administration"] == 'false':
            embed = discord.Embed(
                description=f'Diese **Extension (Administration) ist momentan deaktiviert!** Wende dich bitte an **den Owner vom Bot** (LookAtYourSkill#8691)',
                color=discord.Color.red())
            await ctx.send(embed=embed)
        else:
            # default_role renders as @everyone when sent.
            role = ctx.guild.default_role
            await ctx.send(f'{role}, {text}')

    @commands.command(name='load')
    @commands.is_owner()
    async def load(self, ctx, cog):
        # Owner-only: load the extension cogs.<cog>.
        with open('utils/json/active_check.json', 'r') as f:
            data = json.load(f)

        if data[str(ctx.guild.id)]["Administration"] == 'false':
            embed = discord.Embed(
                description=f'Diese **Extension (Administration) ist momentan deaktiviert!** Wende dich bitte an **den Owner vom Bot** (LookAtYourSkill#8691)',
                color=discord.Color.red())
            await ctx.send(embed=embed)
        else:
            try:
                self.bot.load_extension(f'cogs.{cog}')
                embed = discord.Embed(description=f'Das `Modul {cog}` wurde **erfolgreich geladen!**',
                                      color=discord.Color.green())
                await ctx.send(embed=embed)
            # NOTE(review): load_extension failures typically raise
            # commands.ExtensionError, not discord.Forbidden — confirm intended.
            except discord.Forbidden:
                embed = discord.Embed(
                    description=f'Ein Fehler ist aufgetreten... Eventuell gibt es die Extension `{cog}` garnicht... Überprüfe bitte deine schreibweise!',
                    color=discord.Color.red())
                await ctx.send(embed=embed)

    @commands.command(name='unload')
    @commands.is_owner()
    async def unload(self, ctx, cog):
        # Owner-only: unload the extension cogs.<cog>.
        with open('utils/json/active_check.json', 'r') as f:
            data = json.load(f)

        if data[str(ctx.guild.id)]["Administration"] == 'false':
            embed = discord.Embed(
                description=f'Diese **Extension (Administration) ist momentan deaktiviert!** Wende dich bitte an **den Owner vom Bot** (LookAtYourSkill#8691)',
                color=discord.Color.red())
            await ctx.send(embed=embed)
        else:
            try:
                self.bot.unload_extension(f'cogs.{cog}')
                embed = discord.Embed(description=f'Das `Modul {cog}` wurde **erfolgreich entladen!**',
                                      color=discord.Color.green())
                await ctx.send(embed=embed)
            except discord.Forbidden:
                embed = discord.Embed(
                    description=f'Ein Fehler ist aufgetreten... Eventuell gibt es die Extension `{cog}` garnicht... Überprüfe bitte deine schreibweise!',
                    color=discord.Color.red())
                await ctx.send(embed=embed)

    @commands.command(name='reload')
    @commands.is_owner()
    async def reload(self, ctx, cog):
        # Owner-only: reload the extension cogs.<cog> (unload then load).
        with open('utils/json/active_check.json', 'r') as f:
            data = json.load(f)

        if data[str(ctx.guild.id)]["Administration"] == 'false':
            embed = discord.Embed(
                description=f'Diese **Extension (Administration) ist momentan deaktiviert!** Wende dich bitte an **den Owner vom Bot** (LookAtYourSkill#8691)',
                color=discord.Color.red())
            await ctx.send(embed=embed)
        else:
            try:
                self.bot.unload_extension(f'cogs.{cog}')
                self.bot.load_extension(f'cogs.{cog}')
                embed = discord.Embed(description=f'Das `Modul {cog}` wurde **erfolgreich neu geladen!**',
                                      color=discord.Color.green())
                await ctx.send(embed=embed)
            except discord.Forbidden:
                embed = discord.Embed(
                    description=f'Ein Fehler ist aufgetreten... Eventuell gibt es die Extension `{cog}` garnicht... Überprüfe bitte deine schreibweise!',
                    color=discord.Color.red())
                await ctx.send(embed=embed)
def setup(bot):
    """Extension entry point called by discord.py when this module is loaded."""
    cog_instance = administration(bot)
    bot.add_cog(cog_instance)
| 43.624309
| 159
| 0.564463
| 887
| 7,896
| 4.968433
| 0.1646
| 0.034491
| 0.050828
| 0.088949
| 0.853869
| 0.838212
| 0.837077
| 0.805083
| 0.805083
| 0.784661
| 0
| 0.006942
| 0.306738
| 7,896
| 180
| 160
| 43.866667
| 0.798137
| 0.002026
| 0
| 0.757143
| 0
| 0.042857
| 0.273906
| 0.057664
| 0
| 0
| 0.002116
| 0
| 0
| 1
| 0.014286
| false
| 0
| 0.028571
| 0
| 0.064286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b033dad21b692907de9ced613c03e3eb4bddc32b
| 145
|
py
|
Python
|
tests/integration/examples/test_examples.py
|
datacraft-dsc/starfish-py
|
95ff24410f056e8e2d313c3af97439fe003e294a
|
[
"Apache-2.0"
] | 4
|
2019-02-08T03:47:36.000Z
|
2019-10-17T21:45:23.000Z
|
tests/integration/examples/test_examples.py
|
datacraft-dsc/starfish-py
|
95ff24410f056e8e2d313c3af97439fe003e294a
|
[
"Apache-2.0"
] | 81
|
2019-02-09T01:01:51.000Z
|
2020-07-01T08:35:07.000Z
|
tests/integration/examples/test_examples.py
|
oceanprotocol/ocean-py
|
318ad0de2519e61d0a301c040a48d1839cd82425
|
[
"Apache-2.0"
] | 1
|
2021-01-28T12:14:03.000Z
|
2021-01-28T12:14:03.000Z
|
""""
Test examples
"""
def test_register_upload_asset_remote():
from examples.register_upload_asset_remote_agent import main
main()
| 12.083333
| 64
| 0.744828
| 18
| 145
| 5.555556
| 0.611111
| 0.28
| 0.38
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.165517
| 145
| 11
| 65
| 13.181818
| 0.826446
| 0.110345
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
c696d6b499c1c84761b2ed272058b978fca818f3
| 1,724
|
py
|
Python
|
Productions/Statements/Iterative/For.py
|
Mohsin-Ul-Islam/YAPL
|
163da2dfd879fc6dee9bc0e1b1c94972c75935cd
|
[
"MIT"
] | null | null | null |
Productions/Statements/Iterative/For.py
|
Mohsin-Ul-Islam/YAPL
|
163da2dfd879fc6dee9bc0e1b1c94972c75935cd
|
[
"MIT"
] | null | null | null |
Productions/Statements/Iterative/For.py
|
Mohsin-Ul-Islam/YAPL
|
163da2dfd879fc6dee9bc0e1b1c94972c75935cd
|
[
"MIT"
] | null | null | null |
import Nodes.Statements.Iterative.For
import Nodes.Statements.Iterative.LegacyFor
def p_for_01(p):
"iterative_statement : FOR LEFT_PAREN expression TO expression RIGHT_PAREN compound_statement"
p[0] = Nodes.Statements.Iterative.For.Node(p[3], p[5], p[7])
def p_for_02(p):
"iterative_statement : FOR LEFT_PAREN expression TO expression STEP expression RIGHT_PAREN compound_statement"
p[0] = Nodes.Statements.Iterative.For.Node(p[3], p[5], p[9], p[7])
def p_for_03(p):
"iterative_statement : FOR expression TO expression compound_statement"
p[0] = Nodes.Statements.Iterative.For.Node(p[2], p[4], p[5])
def p_for_04(p):
"iterative_statement : FOR expression TO expression STEP expression compound_statement"
p[0] = Nodes.Statements.Iterative.For.Node(p[2], p[4], p[7], p[6])
def p_for_05(p):
"iterative_statement : FOR LEFT_PAREN variable_declaration expression SEMICOLON assignment_statement RIGHT_PAREN compound_statement"
p[0] = Nodes.Statements.Iterative.LegacyFor.Node(p[3], p[4], p[6], p[8])
def p_for_06(p):
"iterative_statement : FOR LEFT_PAREN variable_declaration expression SEMICOLON expression RIGHT_PAREN compound_statement"
p[0] = Nodes.Statements.Iterative.LegacyFor.Node(p[3], p[4], p[6], p[8])
def p_for_07(p):
"iterative_statement : FOR LEFT_PAREN expression SEMICOLON expression SEMICOLON assignment_statement RIGHT_PAREN compound_statement"
p[0] = Nodes.Statements.Iterative.LegacyFor.Node(p[3], p[5], p[7], p[9])
def p_for_08(p):
"iterative_statement : FOR LEFT_PAREN expression SEMICOLON expression SEMICOLON expression RIGHT_PAREN compound_statement"
p[0] = Nodes.Statements.Iterative.LegacyFor.Node(p[3], p[5], p[7], p[9])
| 40.093023
| 136
| 0.74942
| 268
| 1,724
| 4.641791
| 0.141791
| 0.120579
| 0.192926
| 0.141479
| 0.902733
| 0.883441
| 0.883441
| 0.819936
| 0.819936
| 0.742765
| 0
| 0.036
| 0.12993
| 1,724
| 42
| 137
| 41.047619
| 0.793333
| 0.49942
| 0
| 0.153846
| 0
| 0
| 0.49536
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.307692
| false
| 0
| 0.076923
| 0
| 0.384615
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c6c0026cbb678e4885b1b4ee31f522ffb380eb3a
| 44,468
|
py
|
Python
|
rcnn_dff/symbol/symbol_flow.py
|
tonysy/mx-rcnn-flow
|
b78c3c964c802bb874d673170d7452e7a573a998
|
[
"Apache-2.0"
] | 2
|
2018-01-31T02:47:42.000Z
|
2019-07-05T03:48:54.000Z
|
rcnn_dff/symbol/symbol_flow.py
|
tonysy/mx-rcnn-flow
|
b78c3c964c802bb874d673170d7452e7a573a998
|
[
"Apache-2.0"
] | null | null | null |
rcnn_dff/symbol/symbol_flow.py
|
tonysy/mx-rcnn-flow
|
b78c3c964c802bb874d673170d7452e7a573a998
|
[
"Apache-2.0"
] | null | null | null |
"""
Optical flow for deep feature flow
"""
import sys
import mxnet as mx
import proposal
import proposal_target
from ..config import config
def conv_unit(sym, name, weights, bias):
    """Apply two conv+LeakyReLU stages to one image symbol.

    weights/bias are shared parameter symbols (indices 0 and 1 used here),
    so both siamese branches reuse the same filters. Returns the
    (stage-1, stage-2) activation symbols.
    """
    # Stage 1: 7x7 / stride 2 -> 64 channels.
    stage1 = mx.sym.Convolution(data=sym, pad=(3, 3), kernel=(7, 7), stride=(2, 2),
                                num_filter=64, weight=weights[0], bias=bias[0],
                                name='conv1' + name)
    stage1 = mx.sym.LeakyReLU(data=stage1, act_type='leaky', slope=0.1)  # WHY leakyrelu?
    # Stage 2: 5x5 / stride 2 -> 128 channels.
    stage2 = mx.sym.Convolution(data=stage1, pad=(2, 2), kernel=(5, 5), stride=(2, 2),
                                num_filter=128, weight=weights[1], bias=bias[1],
                                name='conv2' + name)
    stage2 = mx.sym.LeakyReLU(data=stage2, act_type='leaky', slope=0.1)
    return stage1, stage2
def conv_unit_share(sym, name, param_dic):
    """Same two-stage conv+LeakyReLU encoder as conv_unit, but the shared
    parameters come from the ``param_dic`` mapping ('share1_*'/'share2_*').
    Returns the (stage-1, stage-2) activation symbols.
    """
    # Stage 1: 7x7 / stride 2 -> 64 channels, shared 'share1' parameters.
    stage1 = mx.sym.Convolution(data=sym, pad=(3, 3), kernel=(7, 7), stride=(2, 2),
                                num_filter=64, weight=param_dic['share1_weight'],
                                bias=param_dic['share1_bias'], name='conv1' + name)
    stage1 = mx.sym.LeakyReLU(data=stage1, act_type='leaky', slope=0.1)  # WHY leakyrelu?
    # Stage 2: 5x5 / stride 2 -> 128 channels, shared 'share2' parameters.
    stage2 = mx.sym.Convolution(data=stage1, pad=(2, 2), kernel=(5, 5), stride=(2, 2),
                                num_filter=128, weight=param_dic['share2_weight'],
                                bias=param_dic['share2_bias'], name='conv2' + name)
    stage2 = mx.sym.LeakyReLU(data=stage2, act_type='leaky', slope=0.1)
    return stage1, stage2
def stereo_scale_net(data, data2, net_type='flow', is_sparse = False):
    """Build a FlowNet-style correlation network over two images.

    Predicts a dense field (1 channel for 'stereo', 2 for flow) plus a
    512-channel 'stereo_scale' map, returned as mx.sym.Group([pr1, stereo_scale]).
    NOTE(review): is_sparse and the downsample* variables are unused here;
    for net_type='stereo' the conv3_img1/conv3_img2 BlockGrad lines near the
    end would hit undefined names — only the 'flow' path appears exercised.
    """
    if net_type == 'stereo':
        output_dim = 1
    else:
        output_dim = 2
    downsample1 = mx.sym.Variable(net_type + '_downsample1')
    downsample2 = mx.sym.Variable(net_type + '_downsample2')
    downsample3 = mx.sym.Variable(net_type + '_downsample3')
    downsample4 = mx.sym.Variable(net_type + '_downsample4')
    downsample5 = mx.sym.Variable(net_type + '_downsample5')
    downsample6 = mx.sym.Variable(net_type + '_downsample6')
    # Siamese encoder: both images share the conv1/conv2 (and conv3) weights.
    weights = [mx.sym.Variable('share{}_weight'.format(i)) for i in range(1,4)]
    bias = [mx.sym.Variable('share{}_bias'.format(i)) for i in range(1,4)]
    conv1_img1, conv2_img1 = conv_unit(data, 'img1', weights, bias)
    conv1_img2, conv2_img2 = conv_unit(data2, 'img2', weights, bias)
    # Correlation layer: 1-D for stereo, 2-D for flow; concatenated with a
    # 1x1 'redirect' convolution of the reference branch.
    if net_type =='stereo':
        corr = mx.sym.Correlation1D(data1=conv2_img1, data2=conv2_img2, \
                                    pad_size=40, kernel_size=1, \
                                    max_displacement=40, stride1=1, stride2=1)
        conv_redir = mx.sym.Convolution(data=conv2_img1, pad=(0, 0), \
                                        kernel=(1, 1), stride=(1, 1), \
                                        num_filter=64, name='conv_redir')
        conv_redir = mx.sym.LeakyReLU(data=conv_redir, act_type='leaky', \
                                      slope=0.1)
        concat = mx.sym.Concat(corr, conv_redir)
    else:
        conv3_img1 = mx.sym.Convolution(data=conv2_img1, pad=(2, 2), \
                                        kernel=(5, 5), stride=(2, 2), \
                                        num_filter=256, weight=weights[2], \
                                        bias=bias[2], name='conv3_img1')
        conv3_img1 = mx.sym.LeakyReLU(data=conv3_img1, act_type='leaky', \
                                      slope=0.1)
        conv3_img2 = mx.sym.Convolution(data=conv2_img2, pad=(2, 2), \
                                        kernel=(5, 5), stride=(2, 2), \
                                        num_filter=256, weight=weights[2], \
                                        bias=bias[2], name='conv3_img2')
        conv3_img2 = mx.sym.LeakyReLU(data=conv3_img2, act_type='leaky', \
                                      slope=0.1)
        corr = mx.sym.Correlation(data1=conv3_img1, data2=conv3_img2, \
                                  pad_size=20, kernel_size=1, \
                                  max_displacement=20, stride1=1, stride2=2)
        conv_redir = mx.sym.Convolution(data=conv3_img1, pad=(0, 0), \
                                        kernel=(1, 1), stride=(1, 1), \
                                        num_filter=64, name='conv_redir')
        conv_redir = mx.sym.LeakyReLU(data=conv_redir, act_type='leaky', \
                                      slope=0.1)
        concat = mx.sym.Concat(corr, conv_redir)
    if net_type =='stereo':
        stride = (2,2)
    else:
        stride = (1,1)
    # Contracting part: conv3a .. conv6b.
    conv3a = mx.sym.Convolution(concat, pad=(2, 2), kernel=(5, 5), \
                                stride=stride, num_filter=256, name='conv3a')
    conv3a = mx.sym.LeakyReLU(data=conv3a, act_type='leaky', slope=0.1)
    conv3b = mx.sym.Convolution(conv3a, pad=(1, 1), kernel=(3, 3), \
                                stride=(1, 1), num_filter=256, name='conv3b')
    conv3b = mx.sym.LeakyReLU(data=conv3b, act_type='leaky', slope=0.1)
    conv4a = mx.sym.Convolution(conv3b, pad=(1, 1), kernel=(3, 3), \
                                stride=(2, 2), num_filter=512, name='conv4a')
    conv4a = mx.sym.LeakyReLU(data=conv4a, act_type='leaky', slope=0.1)
    conv4b = mx.sym.Convolution(conv4a, pad=(1, 1), kernel=(3, 3), \
                                stride=(1, 1), num_filter=512, name='conv4b')
    conv4b = mx.sym.LeakyReLU(data=conv4b, act_type='leaky', slope=0.1)
    conv5a = mx.sym.Convolution(conv4b, pad=(1, 1), kernel=(3, 3), \
                                stride=(2, 2), num_filter=512, name='conv5a')
    conv5a = mx.sym.LeakyReLU(data=conv5a, act_type='leaky', slope=0.1)
    conv5b = mx.sym.Convolution(conv5a, pad=(1, 1), kernel=(3, 3), \
                                stride=(1, 1), num_filter=512, name='conv5b')
    conv5b = mx.sym.LeakyReLU(data=conv5b, act_type='leaky', slope=0.1)
    conv6a = mx.sym.Convolution(conv5b, pad=(1, 1), kernel=(3, 3), \
                                stride=(2, 2), num_filter=1024, name='conv6a')
    conv6a = mx.sym.LeakyReLU(data=conv6a, act_type='leaky', slope=0.1)
    conv6b = mx.sym.Convolution(conv6a, pad=(1, 1), kernel=(3, 3), \
                                stride=(1, 1), num_filter=1024, name='conv6b')
    conv6b = mx.sym.LeakyReLU(data=conv6b, act_type='leaky', slope=0.1, )
    # Expanding part: predict at each scale (pr6 .. pr1), upsample the
    # prediction and deconv features, concat with the skip connection, refine.
    pr6 = mx.sym.Convolution(conv6b,pad= (1,1),kernel=(3,3),stride=(1,1),\
                             num_filter=output_dim,name='pr6')
    upsample_pr6to5 = mx.sym.Deconvolution(pr6, pad=(1,1), kernel=(4,4), \
                                           stride=(2,2), num_filter=1, \
                                           name='upsample_pr6to5',no_bias=True)
    upconv5 = mx.sym.Deconvolution(conv6b,pad=(1,1),kernel=(4,4), stride=(2,2),\
                                   num_filter=512,name='upconv5',no_bias=True)
    upconv5 = mx.sym.LeakyReLU(data = upconv5,act_type = 'leaky',slope = 0.1)
    concat_tmp = mx.sym.Concat(conv5b,upconv5,upsample_pr6to5,dim=1)
    iconv5 = mx.sym.Convolution(concat_tmp,pad = (1,1),kernel=(3,3),\
                                stride=(1,1),num_filter = 512,name='iconv5')
    pr5 = mx.sym.Convolution(iconv5, pad = (1,1),kernel=(3,3),stride=(1,1),\
                             num_filter = output_dim,name='pr5')
    upconv4 = mx.sym.Deconvolution(iconv5,pad = (1,1),kernel= (4,4),\
                                   stride = (2,2),num_filter=256,\
                                   name='upconv4',no_bias=True)
    upconv4 = mx.sym.LeakyReLU(data = upconv4,act_type = 'leaky',slope = 0.1 )
    upsample_pr5to4 = mx.sym.Deconvolution(pr5,pad = (1,1),kernel= (4,4), \
                                           stride=(2,2),num_filter=1,\
                                           name='upsample_pr5to4',no_bias=True)
    concat_tmp2 = mx.sym.Concat(conv4b,upconv4,upsample_pr5to4)
    iconv4 = mx.sym.Convolution(concat_tmp2,pad = (1,1),kernel = (3,3),\
                                stride=(1,1),num_filter=256,name='iconv4')
    pr4 = mx.sym.Convolution(iconv4,pad=(1,1),kernel=(3,3),stride=(1,1),\
                             num_filter=output_dim,name='pr4')
    upconv3 = mx.sym.Deconvolution(iconv4,pad=(1,1),kernel=(4,4),stride=(2,2),\
                                   num_filter=128,name='upconv3',no_bias=True)
    upconv3 = mx.sym.LeakyReLU(data = upconv3,act_type = 'leaky',slope = 0.1 )
    upsample_pr4to3 = mx.sym.Deconvolution(pr4,pad = (1,1),kernel= (4,4), \
                                           stride=(2,2), num_filter=1, \
                                           name='upsample_pr4to3',no_bias=True)
    concat_tmp3 = mx.sym.Concat(conv3b,upconv3,upsample_pr4to3)
    iconv3 = mx.sym.Convolution(concat_tmp3,pad=(1,1),kernel=(3,3), \
                                stride=(1,1),num_filter = 128,name='iconv3')
    pr3 = mx.sym.Convolution(iconv3,pad = (1,1), kernel = (3,3), \
                             stride = (1,1),num_filter = output_dim,name='pr3')
    upconv2 = mx.sym.Deconvolution(iconv3,pad=(1,1),kernel=(4,4),stride=(2,2),\
                                   num_filter=64,name='upconv2',no_bias=True)
    upconv2 = mx.sym.LeakyReLU(data = upconv2,act_type = 'leaky',slope = 0.1)
    upsample_pr3to2 = mx.sym.Deconvolution(pr3,pad = (1,1),kernel= (4,4), \
                                           stride=(2,2),num_filter=1, \
                                           name='upsample_pr3to2',no_bias=True)
    concat_tmp4 = mx.sym.Concat(conv2_img1,upconv2,upsample_pr3to2)
    iconv2 = mx.sym.Convolution(concat_tmp4,pad = (1,1),kernel = (3,3),\
                                stride= (1,1),num_filter = 64,name='iconv2')
    pr2 = mx.sym.Convolution(iconv2,pad = (1,1),kernel=(3,3),stride = (1,1),\
                             num_filter = output_dim,name='pr2')
    upconv1 = mx.sym.Deconvolution(iconv2,pad=(1,1),kernel=(4,4),stride=(2,2),
                                   num_filter = 32,name='upconv1',no_bias=True)
    upconv1 = mx.sym.LeakyReLU(data = upconv1,act_type = 'leaky',slope = 0.1 )
    upsample_pr2to1 = mx.sym.Deconvolution(pr2,pad = (1,1),kernel= (4,4),stride=(2,2),num_filter=1,name='upsample_pr2to1',no_bias=True)
    concat_tmp5 = mx.sym.Concat(conv1_img1,upconv1,upsample_pr2to1)
    iconv1 = mx.sym.Convolution(concat_tmp5,pad=(1,1),kernel = (3,3),stride=(1,1),num_filter=32,name='iconv1')
    pr1 = mx.sym.Convolution(iconv1,pad=(1,1),kernel=(3,3),stride=(1,1), \
                             num_filter=output_dim,name='pr1')
    # Extra head: 512-channel per-pixel scaling map used by feature propagation.
    stereo_scale = mx.sym.Convolution(iconv1,pad=(1,1),kernel=(3,3),stride=(1,1), \
                                      num_filter=512,name='stereo_scale')
    # Gradient-blocked aliases; only the Group below is returned.
    img1 = mx.sym.BlockGrad(data=data,name='img1_tmp')
    img2 = mx.sym.BlockGrad(data=data2,name='img2_tmp')
    corr = mx.sym.BlockGrad(data=corr,name='corr')
    conv3_img1 = mx.sym.BlockGrad(data=conv3_img1,name='conv2_img1_tmp')
    conv3_img2 = mx.sym.BlockGrad(data=conv3_img2,name='conv2_img2_tmp')
    net = mx.sym.Group([pr1, stereo_scale])
    return net
def stereo_scale_net_share(data, data2, param_dic, net_type='flow', is_sparse = False):
    """Variant of stereo_scale_net whose every layer takes its weight/bias
    symbols from ``param_dic`` so the whole network can be shared.

    Returns mx.sym.Group([pr1, stereo_scale]).
    FIX: the debug ``print param_dic['conv3a_weight']`` used Python-2-only
    statement syntax (a SyntaxError on Python 3); rewritten as a call, which
    behaves identically on Python 2.
    NOTE(review): is_sparse and the downsample* variables are unused here.
    """
    if net_type == 'stereo':
        output_dim = 1
    else:
        output_dim = 2
    downsample1 = mx.sym.Variable(net_type + '_downsample1')
    downsample2 = mx.sym.Variable(net_type + '_downsample2')
    downsample3 = mx.sym.Variable(net_type + '_downsample3')
    downsample4 = mx.sym.Variable(net_type + '_downsample4')
    downsample5 = mx.sym.Variable(net_type + '_downsample5')
    downsample6 = mx.sym.Variable(net_type + '_downsample6')
    # Siamese encoder with parameters taken from param_dic.
    conv1_img1, conv2_img1 = conv_unit_share(data, 'img1', param_dic)
    conv1_img2, conv2_img2 = conv_unit_share(data2, 'img2', param_dic)
    # Correlation layer: 1-D for stereo, 2-D for flow.
    if net_type =='stereo':
        corr = mx.sym.Correlation1D(data1=conv2_img1, data2=conv2_img2, \
                                    pad_size=40, kernel_size=1, \
                                    max_displacement=40, stride1=1, stride2=1)
        conv_redir = mx.sym.Convolution(data=conv2_img1, pad=(0, 0), \
                                        kernel=(1, 1), stride=(1, 1), \
                                        num_filter=64, name='conv_redir')
        conv_redir = mx.sym.LeakyReLU(data=conv_redir, act_type='leaky', \
                                      slope=0.1)
        concat = mx.sym.Concat(corr, conv_redir)
    else:
        conv3_img1 = mx.sym.Convolution(data=conv2_img1, pad=(2, 2), \
                                        kernel=(5, 5), stride=(2, 2), \
                                        num_filter=256, weight=param_dic['share3_weight'], \
                                        bias=param_dic['share3_bias'], name='conv3_img1')
        conv3_img1 = mx.sym.LeakyReLU(data=conv3_img1, act_type='leaky', \
                                      slope=0.1)
        conv3_img2 = mx.sym.Convolution(data=conv2_img2, pad=(2, 2), \
                                        kernel=(5, 5), stride=(2, 2), \
                                        num_filter=256, weight=param_dic['share3_weight'], \
                                        bias=param_dic['share3_bias'], name='conv3_img2')
        conv3_img2 = mx.sym.LeakyReLU(data=conv3_img2, act_type='leaky', \
                                      slope=0.1)
        corr = mx.sym.Correlation(data1=conv3_img1, data2=conv3_img2, \
                                  pad_size=20, kernel_size=1, \
                                  max_displacement=20, stride1=1, stride2=2)
        conv_redir = mx.sym.Convolution(data=conv3_img1, pad=(0, 0), \
                                        kernel=(1, 1), stride=(1, 1), \
                                        num_filter=64, name='conv_redir',
                                        weight=param_dic['conv_redir_weight'], \
                                        bias=param_dic['conv_redir_bias'])
        conv_redir = mx.sym.LeakyReLU(data=conv_redir, act_type='leaky', \
                                      slope=0.1)
        concat = mx.sym.Concat(corr, conv_redir)
    if net_type =='stereo':
        stride = (2,2)
    else:
        stride = (1,1)
    # FIX: Python-3-compatible print call (was a Py2 print statement).
    print(param_dic['conv3a_weight'])
    # Contracting part: conv3a .. conv6b, all parameters from param_dic.
    conv3a = mx.sym.Convolution(data=concat, pad=(2, 2), kernel=(5, 5), \
                                stride=stride, num_filter=256, name='conv3a',
                                weight=param_dic['conv3a_weight'],
                                bias=param_dic['conv3a_bias'])
    conv3a = mx.sym.LeakyReLU(data=conv3a, act_type='leaky', slope=0.1)
    conv3b = mx.sym.Convolution(data=conv3a, pad=(1, 1), kernel=(3, 3), \
                                stride=(1, 1), num_filter=256,
                                weight=param_dic['conv3b_weight'],
                                bias=param_dic['conv3b_bias'], name='conv3b')
    conv3b = mx.sym.LeakyReLU(data=conv3b, act_type='leaky', slope=0.1)
    conv4a = mx.sym.Convolution(data=conv3b, pad=(1, 1), kernel=(3, 3), \
                                stride=(2, 2), num_filter=512, name='conv4a',
                                weight=param_dic['conv4a_weight'], \
                                bias=param_dic['conv4a_bias'])
    conv4a = mx.sym.LeakyReLU(data=conv4a, act_type='leaky', slope=0.1)
    conv4b = mx.sym.Convolution(data=conv4a, pad=(1, 1), kernel=(3, 3), \
                                stride=(1, 1), num_filter=512, name='conv4b',
                                weight=param_dic['conv4b_weight'], \
                                bias=param_dic['conv4b_bias'])
    conv4b = mx.sym.LeakyReLU(data=conv4b, act_type='leaky', slope=0.1)
    conv5a = mx.sym.Convolution(data=conv4b, pad=(1, 1), kernel=(3, 3), \
                                stride=(2, 2), num_filter=512, name='conv5a',
                                weight=param_dic['conv5a_weight'], \
                                bias=param_dic['conv5a_bias'])
    conv5a = mx.sym.LeakyReLU(data=conv5a, act_type='leaky', slope=0.1)
    conv5b = mx.sym.Convolution(data=conv5a, pad=(1, 1), kernel=(3, 3), \
                                stride=(1, 1), num_filter=512, name='conv5b',
                                weight=param_dic['conv5b_weight'], \
                                bias=param_dic['conv5b_bias'])
    conv5b = mx.sym.LeakyReLU(data=conv5b, act_type='leaky', slope=0.1)
    conv6a = mx.sym.Convolution(data=conv5b, pad=(1, 1), kernel=(3, 3), \
                                stride=(2, 2), num_filter=1024, name='conv6a',
                                weight=param_dic['conv6a_weight'], \
                                bias=param_dic['conv6a_bias'])
    conv6a = mx.sym.LeakyReLU(data=conv6a, act_type='leaky', slope=0.1)
    conv6b = mx.sym.Convolution(data=conv6a, pad=(1, 1), kernel=(3, 3), \
                                stride=(1, 1), num_filter=1024, name='conv6b',
                                weight=param_dic['conv6b_weight'], \
                                bias=param_dic['conv6b_bias'])
    conv6b = mx.sym.LeakyReLU(data=conv6b, act_type='leaky', slope=0.1, )
    # Expanding part: predictions pr6 .. pr1 with skip connections.
    pr6 = mx.sym.Convolution(data=conv6b,pad= (1,1),kernel=(3,3),stride=(1,1),\
                             num_filter=output_dim,name='pr6',
                             weight=param_dic['pr6_weight'], \
                             bias=param_dic['pr6_bias'])
    upsample_pr6to5 = mx.sym.Deconvolution(data=pr6, pad=(1,1), kernel=(4,4), \
                                           stride=(2,2), num_filter=1, \
                                           name='upsample_pr6to5',no_bias=True,
                                           weight=param_dic['upsample_pr6to5_weight'])
    upconv5 = mx.sym.Deconvolution(data=conv6b,pad=(1,1),kernel=(4,4), stride=(2,2),\
                                   num_filter=512,name='upconv5',no_bias=True,
                                   weight = param_dic['upconv5_weight'])
    upconv5 = mx.sym.LeakyReLU(data = upconv5,act_type = 'leaky',slope = 0.1)
    concat_tmp = mx.sym.Concat(conv5b,upconv5,upsample_pr6to5,dim=1)
    iconv5 = mx.sym.Convolution(data=concat_tmp,pad = (1,1),kernel=(3,3),\
                                stride=(1,1),num_filter = 512,name='iconv5',
                                weight = param_dic['iconv5_weight'],
                                bias=param_dic['iconv5_bias'])
    pr5 = mx.sym.Convolution(data=iconv5, pad = (1,1),kernel=(3,3),stride=(1,1),\
                             num_filter = output_dim,name='pr5',
                             weight = param_dic['pr5_weight'],
                             bias=param_dic['pr5_bias'])
    upconv4 = mx.sym.Deconvolution(data=iconv5,pad = (1,1),kernel= (4,4),\
                                   stride = (2,2),num_filter=256,\
                                   name='upconv4',no_bias=True,
                                   weight = param_dic['upconv4_weight'])
    upconv4 = mx.sym.LeakyReLU(data = upconv4,act_type = 'leaky',slope = 0.1 )
    upsample_pr5to4 = mx.sym.Deconvolution(data=pr5,pad = (1,1),kernel= (4,4), \
                                           stride=(2,2),num_filter=1,\
                                           name='upsample_pr5to4',no_bias=True,
                                           weight = param_dic['upsample_pr5to4_weight'])
    concat_tmp2 = mx.sym.Concat(conv4b,upconv4,upsample_pr5to4)
    iconv4 = mx.sym.Convolution(data=concat_tmp2,pad = (1,1),kernel = (3,3),\
                                stride=(1,1),num_filter=256,name='iconv4',
                                weight = param_dic['iconv4_weight'],
                                bias=param_dic['iconv4_bias'])
    pr4 = mx.sym.Convolution(data=iconv4,pad=(1,1),kernel=(3,3),stride=(1,1),\
                             num_filter=output_dim,name='pr4',
                             weight = param_dic['pr4_weight'],
                             bias=param_dic['pr4_bias'])
    upconv3 = mx.sym.Deconvolution(data=iconv4,pad=(1,1),kernel=(4,4),stride=(2,2),\
                                   num_filter=128,name='upconv3',no_bias=True,
                                   weight = param_dic['upconv3_weight'])
    upconv3 = mx.sym.LeakyReLU(data = upconv3,act_type = 'leaky',slope = 0.1 )
    upsample_pr4to3 = mx.sym.Deconvolution(data=pr4,pad = (1,1),kernel= (4,4), \
                                           stride=(2,2), num_filter=1, \
                                           name='upsample_pr4to3',no_bias=True,
                                           weight = param_dic['upsample_pr4to3_weight'])
    concat_tmp3 = mx.sym.Concat(conv3b,upconv3,upsample_pr4to3)
    iconv3 = mx.sym.Convolution(data=concat_tmp3,pad=(1,1),kernel=(3,3), \
                                stride=(1,1),num_filter = 128,name='iconv3',
                                weight = param_dic['iconv3_weight'],
                                bias=param_dic['iconv3_bias'])
    pr3 = mx.sym.Convolution(data=iconv3,pad = (1,1), kernel = (3,3), \
                             stride = (1,1),num_filter = output_dim,name='pr3',
                             weight = param_dic['pr3_weight'],
                             bias=param_dic['pr3_bias'])
    upconv2 = mx.sym.Deconvolution(data=iconv3,pad=(1,1),kernel=(4,4),stride=(2,2),\
                                   num_filter=64,name='upconv2',no_bias=True,
                                   weight = param_dic['upconv2_weight'])
    upconv2 = mx.sym.LeakyReLU(data = upconv2,act_type = 'leaky',slope = 0.1)
    upsample_pr3to2 = mx.sym.Deconvolution(data=pr3,pad = (1,1),kernel= (4,4), \
                                           stride=(2,2),num_filter=1, \
                                           name='upsample_pr3to2',no_bias=True,
                                           weight = param_dic['upsample_pr3to2_weight'])
    concat_tmp4 = mx.sym.Concat(conv2_img1,upconv2,upsample_pr3to2)
    iconv2 = mx.sym.Convolution(data=concat_tmp4,pad = (1,1),kernel = (3,3),\
                                stride= (1,1),num_filter = 64,name='iconv2',
                                weight = param_dic['iconv2_weight'],
                                bias=param_dic['iconv2_bias'])
    pr2 = mx.sym.Convolution(data=iconv2,pad = (1,1),kernel=(3,3),stride = (1,1),\
                             num_filter = output_dim,name='pr2',
                             weight = param_dic['pr2_weight'],
                             bias=param_dic['pr2_bias'])
    upconv1 = mx.sym.Deconvolution(data=iconv2,pad=(1,1),kernel=(4,4),stride=(2,2),
                                   num_filter = 32,name='upconv1',no_bias=True,
                                   weight = param_dic['upconv1_weight'])
    upconv1 = mx.sym.LeakyReLU(data = upconv1,act_type = 'leaky',slope = 0.1 )
    upsample_pr2to1 = mx.sym.Deconvolution(data=pr2,pad = (1,1),kernel= (4,4),stride=(2,2),num_filter=1,name='upsample_pr2to1',no_bias=True,
                                           weight = param_dic['upsample_pr2to1_weight'])
    concat_tmp5 = mx.sym.Concat(conv1_img1,upconv1,upsample_pr2to1)
    iconv1 = mx.sym.Convolution(data=concat_tmp5,pad=(1,1),kernel = (3,3),stride=(1,1),num_filter=32,name='iconv1',
                                weight = param_dic['iconv1_weight'],
                                bias=param_dic['iconv1_bias'])
    pr1 = mx.sym.Convolution(data=iconv1,pad=(1,1),kernel=(3,3),stride=(1,1), \
                             num_filter=output_dim,name='pr1',
                             weight = param_dic['pr1_weight'],
                             bias=param_dic['pr1_bias'])
    # Extra head: 512-channel scaling map used by feature propagation.
    stereo_scale = mx.sym.Convolution(data=iconv1,pad=(1,1),kernel=(3,3),stride=(1,1), \
                                      num_filter=512,name='stereo_scale',
                                      weight = param_dic['stereo_scale_weight'],
                                      bias=param_dic['stereo_scale_bias'])
    # Gradient-blocked aliases; only the Group below is returned.
    img1 = mx.sym.BlockGrad(data=data,name='img1_tmp')
    img2 = mx.sym.BlockGrad(data=data2,name='img2_tmp')
    corr = mx.sym.BlockGrad(data=corr,name='corr')
    conv3_img1 = mx.sym.BlockGrad(data=conv3_img1,name='conv2_img1_tmp')
    conv3_img2 = mx.sym.BlockGrad(data=conv3_img2,name='conv2_img2_tmp')
    net = mx.sym.Group([pr1, stereo_scale])
    return net
def flownets_half(img1, img2, loss_scale, net_type='stereo', is_sparse = False):
    """Half-width FlowNet-Simple: concat both images, encode, predict at six
    scales, and attach regression losses for the scales in ``loss_scale``.

    Returns mx.sym.Group([loss[0], stereo_scale]).
    FIXES: ``loss_scale.keys()`` followed by ``.sort()`` fails on Python 3
    (dict views have no sort) — replaced with ``sorted(...)``, identical on
    Python 2. Also raise a clear error for an unknown net_type instead of a
    confusing NameError on output_dim later.
    """
    name = 'flownets_half'
    # img1 = mx.sym.Variable('img1')
    # img2 = mx.sym.Variable('img2')
    if net_type == 'stereo':
        output_dim = 1
    elif net_type == 'flow':
        output_dim = 2
    else:
        raise ValueError("net_type must be 'stereo' or 'flow', got %r" % (net_type,))
    # six loss functions with different output sizes
    labels = {'loss{}'.format(i): mx.sym.Variable('loss{}_label'.format(i)) for i in range(1, 7)}
    prediction = {}
    loss = []
    data = mx.sym.Concat(img1, img2)
    # The structure below is similar to VGG
    conv1 = mx.sym.Convolution(data, pad=(3, 3), kernel=(7, 7), stride=(2, 2), num_filter=32, name=name+'conv1')
    conv1 = mx.sym.LeakyReLU(data=conv1, act_type='leaky', slope=0.1)
    conv2 = mx.sym.Convolution(conv1, pad=(2, 2), kernel=(5, 5), stride=(2, 2), num_filter=64, name=name+'conv2')
    conv2 = mx.sym.LeakyReLU(data=conv2, act_type='leaky', slope=0.1)
    conv3a = mx.sym.Convolution(conv2, pad=(2, 2), kernel=(5, 5), stride=(2, 2), num_filter=128, name=name+'conv3a')
    conv3a = mx.sym.LeakyReLU(data=conv3a, act_type='leaky', slope=0.1)
    conv3b = mx.sym.Convolution(conv3a, pad=(1, 1), kernel=(3, 3), stride=(1, 1), num_filter=128, name=name+'conv3b')
    conv3b = mx.sym.LeakyReLU(data=conv3b, act_type='leaky', slope=0.1)
    conv4a = mx.sym.Convolution(conv3b, pad=(1, 1), kernel=(3, 3), stride=(2, 2), num_filter=256, name=name+'conv4a')
    conv4a = mx.sym.LeakyReLU(data=conv4a, act_type='leaky', slope=0.1)
    conv4b = mx.sym.Convolution(conv4a, pad=(1, 1), kernel=(3, 3), stride=(1, 1), num_filter=256, name=name+'conv4b')
    conv4b = mx.sym.LeakyReLU(data=conv4b, act_type='leaky', slope=0.1)
    conv5a = mx.sym.Convolution(conv4b, pad=(1, 1), kernel=(3, 3), stride=(2, 2), num_filter=256, name=name+'conv5a')
    conv5a = mx.sym.LeakyReLU(data=conv5a, act_type='leaky', slope=0.1)
    conv5b = mx.sym.Convolution(conv5a, pad=(1, 1), kernel=(3, 3), stride=(1, 1), num_filter=256, name=name+'conv5b')
    conv5b = mx.sym.LeakyReLU(data=conv5b, act_type='leaky', slope=0.1)
    conv6a = mx.sym.Convolution(conv5b, pad=(1, 1), kernel=(3, 3), stride=(2, 2), num_filter=512, name=name+'conv6a')
    conv6a = mx.sym.LeakyReLU(data=conv6a, act_type='leaky', slope=0.1)
    conv6b = mx.sym.Convolution(conv6a, pad=(1, 1), kernel=(3, 3), stride=(1, 1), num_filter=512, name=name+'conv6b')
    conv6b = mx.sym.LeakyReLU(data=conv6b, act_type='leaky', slope=0.1, )
    # Expanding part with per-scale predictions.
    pr6 = mx.sym.Convolution(conv6b,pad= (1,1),kernel=(3,3),stride=(1,1),num_filter=output_dim,name=name+'pr6')
    prediction['loss6'] = pr6
    upsample_pr6to5 = mx.sym.Deconvolution(pr6, pad=(1,1), kernel=(4,4), stride=(2,2), num_filter=output_dim,
                                           name=name+'upsample_pr6to5',no_bias=True)
    upconv5 = mx.sym.Deconvolution(conv6b,pad=(1,1),kernel=(4,4),stride=(2,2),num_filter=512,name=name+'upconv5',no_bias=True)
    upconv5 = mx.sym.LeakyReLU(data = upconv5,act_type = 'leaky',slope = 0.1)
    concat_tmp = mx.sym.Concat(conv5b,upconv5,upsample_pr6to5,dim=1)
    iconv5 = mx.sym.Convolution(concat_tmp,pad = (1,1),kernel=(3,3),stride=(1,1),num_filter = 512,name=name+'iconv5')
    pr5 = mx.sym.Convolution(iconv5, pad = (1,1),kernel=(3,3),stride=(1,1),num_filter = output_dim,name=name+'pr5')
    prediction['loss5'] = pr5
    upconv4 = mx.sym.Deconvolution(iconv5,pad = (1,1),kernel= (4,4),stride = (2,2),num_filter=256,name=name+'upconv4',no_bias=True)
    upconv4 = mx.sym.LeakyReLU(data = upconv4,act_type = 'leaky',slope = 0.1 )
    upsample_pr5to4 = mx.sym.Deconvolution(pr5,pad = (1,1),kernel= (4,4),stride=(2,2),num_filter=output_dim,name=name+'upsample_pr5to4',no_bias=True)
    concat_tmp2 = mx.sym.Concat(conv4b,upconv4,upsample_pr5to4)
    iconv4 = mx.sym.Convolution(concat_tmp2,pad = (1,1),kernel = (3,3),stride=(1,1),num_filter=256,name=name+'iconv4')
    pr4 = mx.sym.Convolution(iconv4,pad=(1,1),kernel=(3,3),stride=(1,1),num_filter=output_dim,name=name+'pr4')
    prediction['loss4'] = pr4
    upconv3 = mx.sym.Deconvolution(iconv4,pad=(1,1),kernel=(4,4),stride=(2,2),num_filter=128,name=name+'upconv3',no_bias=True)
    upconv3 = mx.sym.LeakyReLU(data = upconv3,act_type = 'leaky',slope = 0.1 )
    upsample_pr4to3 = mx.sym.Deconvolution(pr4,pad = (1,1),kernel= (4,4),stride=(2,2),num_filter=output_dim,name=name+'upsample_pr4to3',no_bias=True)
    concat_tmp3 = mx.sym.Concat(conv3b,upconv3,upsample_pr4to3)
    iconv3 = mx.sym.Convolution(concat_tmp3,pad=(1,1),kernel=(3,3),stride=(1,1),num_filter = 128,name=name+'iconv3')
    pr3 = mx.sym.Convolution(iconv3,pad = (1,1), kernel = (3,3), stride = (1,1),num_filter = output_dim,name=name+'pr3')
    prediction['loss3'] = pr3
    upconv2 = mx.sym.Deconvolution(iconv3,pad=(1,1),kernel=(4,4),stride=(2,2),num_filter=64,name=name+'upconv2',no_bias=True)
    upconv2 = mx.sym.LeakyReLU(data = upconv2,act_type = 'leaky',slope = 0.1 )
    upsample_pr3to2 = mx.sym.Deconvolution(pr3,pad = (1,1),kernel= (4,4),stride=(2,2),num_filter=output_dim,name=name+'upsample_pr3to2',no_bias=True)
    concat_tmp4 = mx.sym.Concat(conv2, upconv2, upsample_pr3to2)
    iconv2 = mx.sym.Convolution(concat_tmp4,pad = (1,1),kernel = (3,3),stride= (1,1),num_filter = 64,name=name+'iconv2')
    pr2 = mx.sym.Convolution(iconv2,pad = (1,1),kernel=(3,3),stride = (1,1),num_filter = output_dim,name=name+'pr2')
    prediction['loss2'] = pr2
    upconv1 = mx.sym.Deconvolution(iconv2,pad=(1,1),kernel=(4,4),stride=(2,2),num_filter = 32,name=name+'upconv1',no_bias=True)
    upconv1 = mx.sym.LeakyReLU(data = upconv1,act_type = 'leaky',slope = 0.1 )
    upsample_pr2to1 = mx.sym.Deconvolution(pr2,pad = (1,1),kernel= (4,4),stride=(2,2),num_filter=output_dim,name=name+'upsample_pr2to1',no_bias=True)
    concat_tmp5 = mx.sym.Concat(conv1, upconv1, upsample_pr2to1)
    iconv1 = mx.sym.Convolution(concat_tmp5,pad=(1,1),kernel = (3,3),stride=(1,1),num_filter=32,name=name+'iconv1')
    pr1 = mx.sym.Convolution(iconv1, pad=(1, 1), kernel=(3, 3), stride=(1, 1), num_filter=output_dim,name=name+'pr1')
    prediction['loss1'] = pr1
    # scale layer
    stereo_scale = mx.sym.Convolution(iconv1, pad=(1,1), kernel=(3,3), \
                                      stride=(1,1), num_filter=512, \
                                      name='stereo_scale')
    # ignore the loss functions with loss scale of zero
    # FIX: sorted() works on both Python 2 and 3 (dict views have no .sort()).
    for key in sorted(loss_scale.keys()):
        loss.append(get_loss(prediction[key], labels[key], loss_scale[key], name=key+name,
                             get_input=False, is_sparse = is_sparse, type=net_type))
    # net = mx.sym.Group(loss)
    net = mx.sym.Group([loss[0], stereo_scale])
    return net
def get_loss(data, label, loss_scale, name, get_input=False, is_sparse = False, type='stereo'):
    """Attach a regression loss to ``data`` against ``label``.

    For 'stereo' the prediction is first clamped non-negative with ReLU.
    Uses a custom sparse L1 loss when is_sparse, otherwise MAE regression.
    Returns the loss symbol, or (loss, input) when get_input is set.
    """
    if type == 'stereo':
        # Disparities are non-negative, so pass the prediction through ReLU.
        data = mx.sym.Activation(data=data, act_type='relu', name=name + 'relu')
    if is_sparse:
        loss = mx.symbol.Custom(data=data, label=label, name=name,
                                loss_scale=loss_scale, is_l1=True,
                                op_type='SparseRegressionLoss')
    else:
        loss = mx.sym.MAERegressionOutput(data=data, label=label, name=name,
                                          grad_scale=loss_scale)
    if get_input:
        return (loss, data)
    return loss
def feature_propagate(relu5_3, data, data2):
    """Warp the relu5_3 feature map along the flow predicted between the two
    images, then modulate it with the predicted scale map.

    Returns (propagated features, full-resolution flow, pooled flow).
    """
    flow_net = stereo_scale_net(data * config.FLOW_SCALE_FACTOR,
                                data2 * config.FLOW_SCALE_FACTOR,
                                net_type='flow')
    flow_pred = flow_net[0]
    scale_pred = flow_net[1]
    # Average-pool both predictions 8x down (with 1/8 magnitude scaling)
    # to match the feature-map resolution.
    pooled_scale = mx.sym.Pooling(data=scale_pred * 0.125, pool_type='avg',
                                  kernel=(8, 8), stride=(8, 8), name="scale_avg")
    pooled_flow = mx.sym.Pooling(data=flow_pred * 0.125, pool_type='avg',
                                 kernel=(8, 8), stride=(8, 8), name="flow_avg")
    sampling_grid = mx.symbol.GridGenerator(data=pooled_flow, transform_type='warp',
                                            name='flow_grid')
    warped = mx.symbol.BilinearSampler(data=relu5_3, grid=sampling_grid,
                                       name='warp_res')
    propagated = warped * pooled_scale
    return propagated, flow_pred, pooled_flow
def feature_propagate_share(return_name, param_dic, relu5_3, data, data2):
    """Shared-parameter variant of feature_propagate: the flow network takes
    its weights from ``param_dic``.

    NOTE(review): ``return_name`` is accepted but unused — kept for interface
    compatibility. Returns (propagated features, flow, pooled flow).
    """
    flow_net = stereo_scale_net_share(data * config.FLOW_SCALE_FACTOR,
                                      data2 * config.FLOW_SCALE_FACTOR,
                                      param_dic=param_dic,
                                      net_type='flow')
    flow_pred = flow_net[0]
    scale_pred = flow_net[1]
    # 8x average pooling (with 1/8 magnitude scaling) down to feature resolution.
    pooled_scale = mx.sym.Pooling(data=scale_pred * 0.125, pool_type='avg',
                                  kernel=(8, 8), stride=(8, 8), name="scale_avg")
    pooled_flow = mx.sym.Pooling(data=flow_pred * 0.125, pool_type='avg',
                                 kernel=(8, 8), stride=(8, 8), name="flow_avg")
    sampling_grid = mx.symbol.GridGenerator(data=pooled_flow, transform_type='warp',
                                            name='flow_grid')
    warped = mx.symbol.BilinearSampler(data=relu5_3, grid=sampling_grid,
                                       name='warp_res')
    propagated = warped * pooled_scale
    # relu5_3_ = mx.symbol.broadcast_mul(lhs=warp_res, rhs=scale_avg)
    # print "+++++++++feature propagate:", relu5_3_.list_arguments()
    return propagated, flow_pred, pooled_flow
def feature_propagate_share_half(return_name, param_dic, relu5_3, data, data2):
    """Identical to feature_propagate_share (NOTE(review): verbatim duplicate —
    candidate for consolidation). Warps relu5_3 along the predicted flow and
    scales it; ``return_name`` is unused but kept for interface compatibility.

    Returns (propagated features, flow, pooled flow).
    """
    flow_net = stereo_scale_net_share(data * config.FLOW_SCALE_FACTOR,
                                      data2 * config.FLOW_SCALE_FACTOR,
                                      param_dic=param_dic,
                                      net_type='flow')
    flow_pred = flow_net[0]
    scale_pred = flow_net[1]
    # 8x average pooling (with 1/8 magnitude scaling) down to feature resolution.
    pooled_scale = mx.sym.Pooling(data=scale_pred * 0.125, pool_type='avg',
                                  kernel=(8, 8), stride=(8, 8), name="scale_avg")
    pooled_flow = mx.sym.Pooling(data=flow_pred * 0.125, pool_type='avg',
                                 kernel=(8, 8), stride=(8, 8), name="flow_avg")
    sampling_grid = mx.symbol.GridGenerator(data=pooled_flow, transform_type='warp',
                                            name='flow_grid')
    warped = mx.symbol.BilinearSampler(data=relu5_3, grid=sampling_grid,
                                       name='warp_res')
    propagated = warped * pooled_scale
    # relu5_3_ = mx.symbol.broadcast_mul(lhs=warp_res, rhs=scale_avg)
    # print "+++++++++feature propagate:", relu5_3_.list_arguments()
    return propagated, flow_pred, pooled_flow
def feature_warp(relu5_3, data, data2):
    """Warp relu5_3 features with flow from the flownet and rescale them.

    Returns (warped features, full-resolution flow, 8x-pooled flow).
    Fix: the scale pooling symbol was also named "flow_avg", colliding with
    the flow pooling's name; it is renamed "scale_avg" to match the sibling
    feature_propagate* functions.
    """
    # flownet; 0.00392156 ~= 1/255 normalizes 8-bit image input.
    flownet = stereo_scale_net(data * 0.00392156, data2 * 0.00392156, net_type='flow')
    flow = flownet[0]
    scale = flownet[1]
    scale_avg = mx.sym.Pooling(
        data=scale*0.125, pool_type="avg", kernel=(8, 8), stride=(8, 8), name="scale_avg")
    flow_avg = mx.sym.Pooling(
        data=flow*0.125, pool_type="avg", kernel=(8, 8), stride=(8, 8), name="flow_avg")
    # Warp expects NHWC, so transpose in and out of NCHW.
    flow_transpose = mx.sym.transpose(
        data=flow_avg, axes=(0, 2, 3, 1), name="flow_transpose")
    relu5_3_transpose = mx.sym.transpose(
        data=relu5_3, axes=(0, 2, 3, 1), name="relu5_3_transpose")
    warp_res = mx.sym.Warp(
        data=relu5_3_transpose, grid=flow_transpose, name="warp")
    warp_transpose = mx.sym.transpose(
        data=warp_res, axes=(0, 3, 1, 2), name="warp_transpose")
    relu5_3_ = warp_transpose * scale_avg
    return relu5_3_, flow, flow_avg
def stereo_net(data, data2, net_type='flow', is_sparse = False):
    """Build a FlowNet/DispNet-style encoder-decoder prediction symbol.

    data, data2: the two input image symbols (stereo pair or frame pair).
    net_type: 'stereo' uses 1D correlation and predicts 1 output channel;
        anything else uses 2D correlation and predicts 2 channels.
    Returns the finest-resolution prediction symbol pr1.
    NOTE(review): is_sparse is accepted but never used in this body.
    """
    if net_type == 'stereo':
        output_dim = 1
    else:
        output_dim = 2
    # NOTE(review): the six downsample variables are created but never
    # referenced below — confirm whether they are needed.
    downsample1 = mx.sym.Variable(net_type + '_downsample1')
    downsample2 = mx.sym.Variable(net_type + '_downsample2')
    downsample3 = mx.sym.Variable(net_type + '_downsample3')
    downsample4 = mx.sym.Variable(net_type + '_downsample4')
    downsample5 = mx.sym.Variable(net_type + '_downsample5')
    downsample6 = mx.sym.Variable(net_type + '_downsample6')
    # Shared (Siamese) weights/biases for the first convolutions of both images.
    weights = [mx.sym.Variable('share{}_weight'.format(i)) for i in range(1,4)]
    bias = [mx.sym.Variable('share{}_bias'.format(i)) for i in range(1,4)]
    conv1_img1, conv2_img1 = conv_unit(data, 'img1', weights, bias)
    conv1_img2, conv2_img2 = conv_unit(data2, 'img2', weights, bias)
    if net_type =='stereo':
        # Stereo: horizontal-only matching cost over +/-40 px.
        corr = mx.sym.Correlation1D(data1=conv2_img1, data2=conv2_img2,
                                    pad_size=40, kernel_size=1,
                                    max_displacement=40, stride1=1, stride2=1)
        conv_redir = mx.sym.Convolution(data=conv2_img1, pad=(0, 0),
                                        kernel=(1, 1), stride=(1, 1),
                                        num_filter=64, name='conv_redir')
        conv_redir = mx.sym.LeakyReLU(data=conv_redir,act_type='leaky',slope=0.1)
        concat = mx.sym.Concat(corr, conv_redir)
    else:
        # Flow: one more shared conv level, then 2D correlation.
        conv3_img1 = mx.sym.Convolution(data=conv2_img1, pad=(2, 2),
                                        kernel=(5, 5), stride=(2, 2),
                                        num_filter=256, weight=weights[2],
                                        bias=bias[2], name='conv3_img1')
        conv3_img1 = mx.sym.LeakyReLU(data=conv3_img1, act_type='leaky', slope=0.1)
        conv3_img2 = mx.sym.Convolution(data=conv2_img2, pad=(2, 2),
                                        kernel=(5, 5), stride=(2, 2),
                                        num_filter=256, weight=weights[2],
                                        bias=bias[2], name='conv3_img2')
        conv3_img2 = mx.sym.LeakyReLU(data=conv3_img2, act_type='leaky', slope=0.1)
        corr = mx.sym.Correlation(data1=conv3_img1, data2=conv3_img2,
                                  pad_size=20, kernel_size=1,
                                  max_displacement=20, stride1=1, stride2=2)
        conv_redir = mx.sym.Convolution(data=conv3_img1, pad=(0, 0),
                                        kernel=(1, 1), stride=(1, 1),
                                        num_filter=64, name='conv_redir')
        conv_redir = mx.sym.LeakyReLU(data=conv_redir, act_type='leaky', slope=0.1)
        concat = mx.sym.Concat(corr, conv_redir)
    # The flow branch already downsampled in conv3_img*, so conv3a keeps
    # stride 1 there; the stereo branch downsamples here instead.
    if net_type =='stereo':
        stride = (2,2)
    else:
        stride = (1,1)
    conv3a = mx.sym.Convolution(concat, pad=(2, 2), kernel=(5, 5),
                                stride=stride, num_filter=256, name='conv3a')
    conv3a = mx.sym.LeakyReLU(data=conv3a, act_type='leaky', slope=0.1)
    conv3b = mx.sym.Convolution(conv3a, pad=(1, 1), kernel=(3, 3),
                                stride=(1, 1), num_filter=256, name='conv3b')
    conv3b = mx.sym.LeakyReLU(data=conv3b, act_type='leaky', slope=0.1)
    conv4a = mx.sym.Convolution(conv3b, pad=(1, 1), kernel=(3, 3),
                                stride=(2, 2), num_filter=512, name='conv4a')
    conv4a = mx.sym.LeakyReLU(data=conv4a, act_type='leaky', slope=0.1)
    conv4b = mx.sym.Convolution(conv4a, pad=(1, 1), kernel=(3, 3),
                                stride=(1, 1), num_filter=512, name='conv4b')
    conv4b = mx.sym.LeakyReLU(data=conv4b, act_type='leaky', slope=0.1)
    conv5a = mx.sym.Convolution(conv4b, pad=(1, 1), kernel=(3, 3),
                                stride=(2, 2), num_filter=512, name='conv5a')
    conv5a = mx.sym.LeakyReLU(data=conv5a, act_type='leaky', slope=0.1)
    conv5b = mx.sym.Convolution(conv5a, pad=(1, 1), kernel=(3, 3),
                                stride=(1, 1), num_filter=512, name='conv5b')
    conv5b = mx.sym.LeakyReLU(data=conv5b, act_type='leaky', slope=0.1)
    conv6a = mx.sym.Convolution(conv5b, pad=(1, 1), kernel=(3, 3),
                                stride=(2, 2), num_filter=1024, name='conv6a')
    conv6a = mx.sym.LeakyReLU(data=conv6a, act_type='leaky', slope=0.1)
    conv6b = mx.sym.Convolution(conv6a, pad=(1, 1), kernel=(3, 3),
                                stride=(1, 1), num_filter=1024, name='conv6b')
    conv6b = mx.sym.LeakyReLU(data=conv6b, act_type='leaky', slope=0.1, )
    # Decoder: predict at each scale (pr6..pr1), upsample both the
    # prediction and the features, concat with the encoder skip, refine.
    # NOTE(review): the upsample_pr*to* deconvolutions use num_filter=1
    # even when output_dim == 2 — verify this is intended for the flow path.
    pr6 = mx.sym.Convolution(conv6b,pad= (1,1),kernel=(3,3),stride=(1,1),
                             num_filter=output_dim,name='pr6')
    upsample_pr6to5 = mx.sym.Deconvolution(pr6, pad=(1,1), kernel=(4,4),
                                           stride=(2,2), num_filter=1,
                                           name='upsample_pr6to5',no_bias=True)
    upconv5 = mx.sym.Deconvolution(conv6b,pad=(1,1),kernel=(4,4),stride=(2,2),
                                   num_filter=512,name='upconv5',no_bias=True)
    upconv5 = mx.sym.LeakyReLU(data = upconv5,act_type = 'leaky',slope = 0.1)
    concat_tmp = mx.sym.Concat(conv5b,upconv5,upsample_pr6to5,dim=1)
    iconv5 = mx.sym.Convolution(concat_tmp,pad = (1,1),kernel=(3,3),
                                stride=(1,1),num_filter = 512,name='iconv5')
    pr5 = mx.sym.Convolution(iconv5, pad = (1,1),kernel=(3,3),stride=(1,1),
                             num_filter = output_dim,name='pr5')
    upconv4 = mx.sym.Deconvolution(iconv5,pad = (1,1),kernel= (4,4),
                                   stride = (2,2),num_filter=256,
                                   name='upconv4',no_bias=True)
    upconv4 = mx.sym.LeakyReLU(data = upconv4,act_type = 'leaky',slope = 0.1 )
    upsample_pr5to4 = mx.sym.Deconvolution(pr5,pad = (1,1),kernel= (4,4),
                                           stride=(2,2),num_filter=1,
                                           name='upsample_pr5to4',no_bias=True)
    concat_tmp2 = mx.sym.Concat(conv4b,upconv4,upsample_pr5to4)
    iconv4 = mx.sym.Convolution(concat_tmp2,pad = (1,1),kernel = (3,3),
                                stride=(1,1),num_filter=256,name='iconv4')
    pr4 = mx.sym.Convolution(iconv4,pad=(1,1),kernel=(3,3),stride=(1,1),
                             num_filter=output_dim,name='pr4')
    upconv3 = mx.sym.Deconvolution(iconv4,pad=(1,1),kernel=(4,4),stride=(2,2),
                                   num_filter=128,name='upconv3',no_bias=True)
    upconv3 = mx.sym.LeakyReLU(data = upconv3,act_type = 'leaky',slope = 0.1 )
    upsample_pr4to3 = mx.sym.Deconvolution(pr4,pad = (1,1),kernel= (4,4),stride=(2,2),num_filter=1,name='upsample_pr4to3',no_bias=True)
    concat_tmp3 = mx.sym.Concat(conv3b,upconv3,upsample_pr4to3)
    iconv3 = mx.sym.Convolution(concat_tmp3,pad=(1,1),kernel=(3,3),
                                stride=(1,1),num_filter = 128,name='iconv3')
    pr3 = mx.sym.Convolution(iconv3,pad = (1,1), kernel = (3,3),
                             stride = (1,1),num_filter = output_dim,name='pr3')
    upconv2 = mx.sym.Deconvolution(iconv3,pad=(1,1),kernel=(4,4),stride=(2,2),
                                   num_filter=64,name='upconv2',no_bias=True)
    upconv2 = mx.sym.LeakyReLU(data = upconv2,act_type = 'leaky',slope = 0.1 )
    upsample_pr3to2 = mx.sym.Deconvolution(pr3,pad = (1,1),kernel= (4,4),stride=(2,2),num_filter=1,name='upsample_pr3to2',no_bias=True)
    concat_tmp4 = mx.sym.Concat(conv2_img1,upconv2,upsample_pr3to2)
    iconv2 = mx.sym.Convolution(concat_tmp4,pad = (1,1),kernel = (3,3),
                                stride= (1,1),num_filter = 64,name='iconv2')
    pr2 = mx.sym.Convolution(iconv2,pad = (1,1),kernel=(3,3),stride = (1,1),
                             num_filter = output_dim,name='pr2')
    upconv1 = mx.sym.Deconvolution(iconv2,pad=(1,1),kernel=(4,4),stride=(2,2),
                                   num_filter = 32,name='upconv1',no_bias=True)
    upconv1 = mx.sym.LeakyReLU(data = upconv1,act_type = 'leaky',slope = 0.1 )
    upsample_pr2to1 = mx.sym.Deconvolution(pr2,pad = (1,1),kernel= (4,4),stride=(2,2),num_filter=1,name='upsample_pr2to1',no_bias=True)
    concat_tmp5 = mx.sym.Concat(conv1_img1,upconv1,upsample_pr2to1)
    iconv1 = mx.sym.Convolution(concat_tmp5,pad=(1,1),kernel = (3,3),
                                stride=(1,1),num_filter=32,name='iconv1')
    pr1 = mx.sym.Convolution(iconv1,pad=(1,1),kernel=(3,3),stride=(1,1),
                             num_filter=output_dim,name='pr1')
    # Debug/inspection symbols (gradients blocked).
    # NOTE(review): conv3_img1/conv3_img2 are only assigned in the flow
    # branch, so the two BlockGrad lines below raise NameError when
    # net_type == 'stereo' — confirm only the flow path is exercised.
    img1 = mx.sym.BlockGrad(data=data,name='img1_tmp')
    img2 = mx.sym.BlockGrad(data=data2,name='img2_tmp')
    corr = mx.sym.BlockGrad(data=corr,name='corr')
    conv3_img1 = mx.sym.BlockGrad(data=conv3_img1,name='conv2_img1_tmp')
    conv3_img2 = mx.sym.BlockGrad(data=conv3_img2,name='conv2_img2_tmp')
    # NOTE(review): the grouped symbol `net` is built but unused; only the
    # finest prediction pr1 is returned.
    net = mx.sym.Group([pr1,conv3_img1,conv3_img2,corr,img1,img2])
    return pr1
| 54.031592
| 149
| 0.559751
| 5,914
| 44,468
| 4.051742
| 0.034325
| 0.062808
| 0.023996
| 0.052792
| 0.881103
| 0.866664
| 0.855146
| 0.848719
| 0.846215
| 0.844796
| 0
| 0.076792
| 0.285756
| 44,468
| 822
| 150
| 54.097324
| 0.677655
| 0.015292
| 0
| 0.637795
| 0
| 0
| 0.068034
| 0.002516
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.007874
| null | null | 0.001575
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c6d1a442200bc0e9347f9d476a265d48a19edcfe
| 83
|
py
|
Python
|
fafnir/__init__.py
|
Kupoman/Fafnir
|
4f6c5f76270ce16fd92e67672db5a7e599b83143
|
[
"Apache-2.0"
] | 3
|
2018-04-03T02:50:07.000Z
|
2019-02-28T13:43:49.000Z
|
fafnir/__init__.py
|
Kupoman/fafnir
|
4f6c5f76270ce16fd92e67672db5a7e599b83143
|
[
"Apache-2.0"
] | null | null | null |
fafnir/__init__.py
|
Kupoman/fafnir
|
4f6c5f76270ce16fd92e67672db5a7e599b83143
|
[
"Apache-2.0"
] | null | null | null |
from .renderer import Renderer
def init(scene_np):
    """Create and return a Renderer bound to the given scene node."""
    renderer = Renderer(scene_np)
    return renderer
| 13.833333
| 30
| 0.759036
| 12
| 83
| 5.083333
| 0.666667
| 0.229508
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168675
| 83
| 5
| 31
| 16.6
| 0.884058
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
05ba95a88cf3830e0b2406dbc118ae6601f4441e
| 2,785
|
py
|
Python
|
test/tests/test_tag.py
|
denmark/html2markdown
|
d50646eeabddc563c12c2d01fc5543c6d232329c
|
[
"MIT"
] | 4
|
2015-09-04T16:17:49.000Z
|
2017-08-31T02:50:27.000Z
|
test/tests/test_tag.py
|
denmark/html2markdown
|
d50646eeabddc563c12c2d01fc5543c6d232329c
|
[
"MIT"
] | null | null | null |
test/tests/test_tag.py
|
denmark/html2markdown
|
d50646eeabddc563c12c2d01fc5543c6d232329c
|
[
"MIT"
] | null | null | null |
import os
import random
import unittest
import traceback
from html2markdown import Html2MarkdownParser
data_dir = 'data'
def get_input_buf():
    """Return the HTML fixture for the calling test.

    The fixture name is derived from the *caller's* function name: a test
    named test_<tag> maps to data/<tag>.html, so this must be called
    directly from the test method (stack index -2 is the caller's frame).
    Fix: the file handle is now closed via a context manager instead of
    being leaked.
    """
    s = traceback.extract_stack()[-2][2]
    with open(data_dir + '/' + s.split('_')[1] + '.html') as f:
        return f.read()
def get_output_buf():
    """Return the expected-markdown fixture for the calling test.

    Mirrors get_input_buf: a caller named test_<tag> maps to
    data/<tag>.markdown via stack-frame introspection.
    Fix: the file handle is now closed via a context manager instead of
    being leaked.
    """
    s = traceback.extract_stack()[-2][2]
    with open(data_dir + '/' + s.split('_')[1] + '.markdown') as f:
        return f.read()
class TestTag(unittest.TestCase):
    """One regression test per supported HTML tag.

    Each test_<tag> method feeds data/<tag>.html to the parser and compares
    the result against data/<tag>.markdown. The fixture file name is
    derived by get_input_buf()/get_output_buf() from the *caller's*
    function name via traceback introspection, so the feed/assert pair
    cannot be factored into a shared helper: an extra stack frame would
    change which name those helpers pick up.
    """

    def setUp(self):
        # Fresh parser per test; torn down in tearDown.
        self._parser = Html2MarkdownParser()

    def tearDown(self):
        self._parser.close()

    def test_a(self):
        self._parser.feed(get_input_buf())
        self.assertEqual(self._parser.get_markdown(), get_output_buf())

    def test_b(self):
        self._parser.feed(get_input_buf())
        self.assertEqual(self._parser.get_markdown(), get_output_buf())

    def test_blockquote(self):
        self._parser.feed(get_input_buf())
        self.assertEqual(self._parser.get_markdown(), get_output_buf())

    def test_em(self):
        self._parser.feed(get_input_buf())
        self.assertEqual(self._parser.get_markdown(), get_output_buf())

    def test_h1(self):
        self._parser.feed(get_input_buf())
        self.assertEqual(self._parser.get_markdown(), get_output_buf())

    def test_h2(self):
        self._parser.feed(get_input_buf())
        self.assertEqual(self._parser.get_markdown(), get_output_buf())

    def test_h3(self):
        self._parser.feed(get_input_buf())
        self.assertEqual(self._parser.get_markdown(), get_output_buf())

    def test_h4(self):
        self._parser.feed(get_input_buf())
        self.assertEqual(self._parser.get_markdown(), get_output_buf())

    def test_h5(self):
        self._parser.feed(get_input_buf())
        self.assertEqual(self._parser.get_markdown(), get_output_buf())

    def test_h6(self):
        self._parser.feed(get_input_buf())
        self.assertEqual(self._parser.get_markdown(), get_output_buf())

    def test_hr(self):
        self._parser.feed(get_input_buf())
        self.assertEqual(self._parser.get_markdown(), get_output_buf())

    def test_ol(self):
        self._parser.feed(get_input_buf())
        self.assertEqual(self._parser.get_markdown(), get_output_buf())

    def test_p(self):
        self._parser.feed(get_input_buf())
        self.assertEqual(self._parser.get_markdown(), get_output_buf())

    def test_pre(self):
        self._parser.feed(get_input_buf())
        self.assertEqual(self._parser.get_markdown(), get_output_buf())

    def test_strong(self):
        self._parser.feed(get_input_buf())
        self.assertEqual(self._parser.get_markdown(), get_output_buf())

    def test_ul(self):
        self._parser.feed(get_input_buf())
        self.assertEqual(self._parser.get_markdown(), get_output_buf())
| 31.647727
| 71
| 0.672531
| 373
| 2,785
| 4.643432
| 0.142091
| 0.196305
| 0.145497
| 0.166282
| 0.830831
| 0.830831
| 0.830831
| 0.830831
| 0.830831
| 0.830831
| 0
| 0.006625
| 0.187074
| 2,785
| 87
| 72
| 32.011494
| 0.758392
| 0
| 0
| 0.523077
| 0
| 0
| 0.007899
| 0
| 0
| 0
| 0
| 0
| 0.246154
| 1
| 0.307692
| false
| 0
| 0.076923
| 0
| 0.430769
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
05c56a92cb88c1c614aa4792435f9b185082bd89
| 6,696
|
py
|
Python
|
ReplicatedBBDIndex/Plotting Indices.py
|
zhoudanxie/Regulatory-Uncertainty
|
5a8e2619c77f3b80f613d306a9649e78e02e3f64
|
[
"MIT"
] | 1
|
2020-07-08T22:02:22.000Z
|
2020-07-08T22:02:22.000Z
|
ReplicatedBBDIndex/Plotting Indices.py
|
zhoudanxie/Regulatory-Uncertainty
|
5a8e2619c77f3b80f613d306a9649e78e02e3f64
|
[
"MIT"
] | null | null | null |
ReplicatedBBDIndex/Plotting Indices.py
|
zhoudanxie/Regulatory-Uncertainty
|
5a8e2619c77f3b80f613d306a9649e78e02e3f64
|
[
"MIT"
] | null | null | null |
# Author: Zhoudan Xie
# Date: May 12, 2020
# Import packages
import pandas as pd
import os
import re
import numpy as np
import datetime
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import matplotlib.cbook as cbook
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
from mpl_toolkits.axes_grid1.inset_locator import zoomed_inset_axes
from mpl_toolkits.axes_grid1.inset_locator import mark_inset
from matplotlib import rcParams
rcParams['font.family'] = "Times New Roman"
#----------------------------------------------Plot the Indices--------------------------------------------------------
# Script body: draws two figures from the replicated uncertainty indices.
# Reads 'Replicated Uncertainty Indices.csv' (columns used: 'year-month',
# 'EPU', 'PU', 'RPU') and writes 'EPU&PU.png' and 'RPU&PU.png'.
# Data
index=pd.read_csv('Replicated Uncertainty Indices.csv')
index['date']=index['year-month'].astype('datetime64[ns]').dt.date
#-----------------------------------------------------------------------------------------------------------------------
# Plot EPU & PU
x=index['date']
y1=index['EPU']
y2=index['PU']
years = mdates.YearLocator(2)   # every two years
months = mdates.MonthLocator()  # every month
years_fmt = mdates.DateFormatter('%Y')
fig, ax = plt.subplots(1, figsize=(13,9))
fig.subplots_adjust(top=0.92,bottom=0.02,left=0.08,right=0.98)
ax.plot(x,y1,color='#033C5A',label="Economic Policy Uncertainty",linewidth=1.5)
ax.plot(x,y2,color='#AA9868',label="Policy Uncertainty",linewidth=1.5)
# format the ticks
ax.xaxis.set_major_locator(years)
ax.xaxis.set_major_formatter(years_fmt)
ax.xaxis.set_minor_locator(months)
# round to nearest years.
datemin = np.datetime64(index['date'].iloc[0], 'Y')
datemax = np.datetime64(index['date'].iloc[-1], 'Y') + np.timedelta64(1, 'Y')
ax.set_xlim(datemin, datemax)
# format the coords message box
ax.format_xdata = mdates.DateFormatter('%Y-%m-%d')
ax.format_ydata = lambda x: '$%1.2f' % x
ax.grid(False)
# rotates and right aligns the x labels, and moves the bottom of the
# axes up to make room for them
fig.autofmt_xdate()
# Set tick and label format
ax.tick_params(axis='both',which='major',labelsize=14)
ax.set_ylabel('Uncertainty Index',fontsize=24)
ax.set_yticks(np.arange(0,max(max(y1),max(y2))+100,100))
ax.grid(color='gray', which='major', axis='y', linestyle='dashed')
# Legend and title
fig.legend(loc=9,bbox_to_anchor=(.35, .41, .6, .5),fontsize=14)
ax.set_title('(Based on four major U.S. newspapers)',fontsize=18)
fig.suptitle('Figure 1: U.S. Monthly Policy Uncertainty Index, 1985-2020',
             y=0.98,fontsize=22)
# Inset plot: zoom on the most recent six months.
xins=x.iloc[-6:]
y1ins=y1.iloc[-6:]
y2ins=y2.iloc[-6:]
axins=inset_axes(ax, width=5, height=3, bbox_to_anchor=(.04, .48, .6, .5),
                 bbox_transform=ax.transAxes,loc=2)
axins.plot(xins,y1ins,color='#033C5A',linewidth=2,marker='D',markersize=8)
axins.plot(xins,y2ins,color='#AA9868',linewidth=2,marker='D',markersize=8)
axins.format_xdata = mdates.DateFormatter('%Y-%m')
axins.set_yticks(np.arange(150,max(max(y1ins),max(y2ins))+100,100))
axins.grid(color='gray', which='major', axis='y', linestyle='dotted')
axins.tick_params(axis='both',which='major',labelsize=14)
axins.set_facecolor('#d3d3d3')
axins.set_alpha(0.5)
axins.set_title('Uncertainty over the Past Six Months',fontsize=16,position=(0.5,0.9))
#mark_inset(ax, axins, loc1=1, loc2=4, fc="none", ec="0.5")
# Notes
fig.text(0.065,0.07,"Notes: Indices are calculated and plotted by the author applying the Baker, Bloom, and Davis (2016) "
                    "method to four U.S. newspapers\nincluding Chicago Tribune, Los Angeles Times, New York Times, "
                    "and The Washington Post.\nEach index is normalized seperately to mean 100 from January 1985 through December 2009.",
         fontsize=16,style='italic')
plt.savefig('EPU&PU.png')
plt.show()
#-----------------------------------------------------------------------------------------------------------------------
# Plot PU & RPU (same layout as above, with RPU replacing EPU)
x=index['date']
y1=index['RPU']
y2=index['PU']
years = mdates.YearLocator(2)   # every two years
months = mdates.MonthLocator()  # every month
years_fmt = mdates.DateFormatter('%Y')
fig, ax = plt.subplots(1, figsize=(13,9))
fig.subplots_adjust(top=0.92,bottom=0.02,left=0.08,right=0.98)
ax.plot(x,y1,color='#033C5A',label="Regulatory Policy Uncertainty",linewidth=1.5)
ax.plot(x,y2,color='#AA9868',label="Policy Uncertainty",linewidth=1.5)
# format the ticks
ax.xaxis.set_major_locator(years)
ax.xaxis.set_major_formatter(years_fmt)
ax.xaxis.set_minor_locator(months)
# round to nearest years.
datemin = np.datetime64(index['date'].iloc[0], 'Y')
datemax = np.datetime64(index['date'].iloc[-1], 'Y') + np.timedelta64(1, 'Y')
ax.set_xlim(datemin, datemax)
# format the coords message box
ax.format_xdata = mdates.DateFormatter('%Y-%m-%d')
ax.format_ydata = lambda x: '$%1.2f' % x
ax.grid(False)
# NOTE(review): this title is overwritten by the set_title call further
# below ('(Based on four major U.S. newspapers)'), so it never appears.
ax.set_title('Figure 2: U.S. Monthly Policy Uncertainty Index, 1985-2020',fontsize=22)
# rotates and right aligns the x labels, and moves the bottom of the
# axes up to make room for them
fig.autofmt_xdate()
# Set tick and label format
ax.tick_params(axis='both',which='major',labelsize=14)
ax.set_ylabel('Uncertainty Index',fontsize=24)
ax.set_yticks(np.arange(0,max(max(y1),max(y2))+100,100))
ax.grid(color='gray', which='major', axis='y', linestyle='dashed')
# Legend
fig.legend(loc=9,bbox_to_anchor=(.35, .41, .6, .5),fontsize=14)
fig.suptitle('Figure 2: U.S. Monthly Regulatory Policy Uncertainty Index, 1985-2020',
             y=0.98,fontsize=22)
ax.set_title('(Based on four major U.S. newspapers)',fontsize=18)
# Inset plot: zoom on the most recent six months.
xins=x.iloc[-6:]
y1ins=y1.iloc[-6:]
y2ins=y2.iloc[-6:]
axins=inset_axes(ax, width=5, height=3, bbox_to_anchor=(.04, .48, .6, .5),
                 bbox_transform=ax.transAxes,loc=2)
axins.plot(xins,y1ins,color='#033C5A',linewidth=2,marker='D',markersize=8)
axins.plot(xins,y2ins,color='#AA9868',linewidth=2,marker='D',markersize=8)
axins.format_xdata = mdates.DateFormatter('%Y-%m')
axins.set_yticks(np.arange(50,max(max(y1ins),max(y2ins))+100,100))
axins.grid(color='gray', which='major', axis='y', linestyle='dotted')
axins.tick_params(axis='both',which='major',labelsize=14)
axins.set_facecolor('#d3d3d3')
axins.set_alpha(0.5)
axins.set_title('Uncertainty over the Past Six Months',fontsize=16,position=(0.5,0.9))
#mark_inset(ax, axins, loc1=1, loc2=4, fc="none", ec="0.5")
# Notes
fig.text(0.065,0.07,"Notes: Indices are calculated and plotted by the author applying the Baker, Bloom, and Davis (2016) "
                    "method to four U.S. newspapers\nincluding Chicago Tribune, Los Angeles Times, New York Times, "
                    "and The Washington Post.\nEach index is normalized seperately to mean 100 from January 1985 through December 2009.",
         fontsize=16,style='italic')
plt.savefig('RPU&PU.png')
plt.show()
| 38.705202
| 135
| 0.683393
| 1,049
| 6,696
| 4.290753
| 0.226883
| 0.009998
| 0.026661
| 0.023995
| 0.885359
| 0.872695
| 0.872695
| 0.872695
| 0.852699
| 0.852699
| 0
| 0.059198
| 0.109468
| 6,696
| 173
| 136
| 38.705202
| 0.695623
| 0.158453
| 0
| 0.767857
| 0
| 0.035714
| 0.258658
| 0.007497
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.107143
| 0
| 0.107143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
af5c33fe8eab46c65821fef245fb89cf9d31ba52
| 207
|
py
|
Python
|
backend/pokemons/exceptions.py
|
jaenia/pokebattle
|
760181384ea1c067d40705f120152641156724d8
|
[
"MIT"
] | null | null | null |
backend/pokemons/exceptions.py
|
jaenia/pokebattle
|
760181384ea1c067d40705f120152641156724d8
|
[
"MIT"
] | 4
|
2021-01-05T18:51:30.000Z
|
2021-04-07T17:03:08.000Z
|
backend/pokemons/exceptions.py
|
jaenia/pokebattle
|
760181384ea1c067d40705f120152641156724d8
|
[
"MIT"
] | null | null | null |
class PokemonNotFound(Exception):
    """Raised when a requested pokemon cannot be found in PokeAPI."""

    def __init__(self, value):
        super().__init__()
        # Keep the looked-up identifier for the error message.
        self.value = value

    def __str__(self):
        message = "{} was not found in PokeAPI".format(self.value)
        return message
| 25.875
| 63
| 0.63285
| 24
| 207
| 4.958333
| 0.666667
| 0.226891
| 0.218487
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.246377
| 207
| 7
| 64
| 29.571429
| 0.762821
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.166667
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
bbab0938f95a09a032bbc8dbb0f73546d90fbacc
| 16,128
|
py
|
Python
|
stats.py
|
Kayal314/Habitat-Simulation
|
0335874dc869ab50ddd846c15f8abcfe5c94f000
|
[
"MIT"
] | 1
|
2021-06-09T06:22:13.000Z
|
2021-06-09T06:22:13.000Z
|
stats.py
|
Kayal314/Habitat-Simulation
|
0335874dc869ab50ddd846c15f8abcfe5c94f000
|
[
"MIT"
] | null | null | null |
stats.py
|
Kayal314/Habitat-Simulation
|
0335874dc869ab50ddd846c15f8abcfe5c94f000
|
[
"MIT"
] | null | null | null |
import math
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from scipy import stats
def write_population(rabbit: list, fox: list, grassland: list, forest: list, pond: list, lake: list):
    """Write one space-separated line per time step to 'population_graph'.

    Column order: rabbit, fox, grassland, forest, pond, lake.
    Fixes vs. original: values are str()-converted (the original broke on
    non-string entries), lake is indexed per step (it was appended as a
    whole), each row ends with a newline, and the file handle is closed.
    """
    with open('population_graph', 'w') as f:
        for i in range(len(rabbit)):
            row = (rabbit[i], fox[i], grassland[i], forest[i], pond[i], lake[i])
            f.write(" ".join(str(value) for value in row) + "\n")
def plot_animal_population(rabbit: list, fox: list):
    """Plot rabbit and fox population counts against the time step."""
    steps = range(1, len(rabbit) + 1)
    series = (('rabbit population', '#3daeff', rabbit),
              ('fox population', '#ff3679', fox))
    for label, color, values in series:
        plt.plot(steps, values, color=color, label=label)
    plt.legend(['rabbit', 'fox'])
    plt.show()
def plot_resource_changes(grassland: list, forest: list, pond: list, lake: list):
    """Plot the four habitat resource levels against the time step."""
    steps = range(1, len(grassland) + 1)
    series = (('grassland', '#21d9c3', grassland),
              ('forest', '#460991', forest),
              ('pond', '#088538', pond),
              ('lake', '#d6d30b', lake))
    for label, color, values in series:
        plt.plot(steps, values, color=color, label=label)
    plt.legend(['grassland', 'forest', 'pond', 'lake'])
    plt.show()
def plot_all(grassland: list, forest: list, pond: list, lake: list, rabbit: list, fox: list):
    """Overlay animal populations (dashed) and resource levels (solid)."""
    steps = range(1, len(rabbit) + 1)
    dashed = (('rabbit population', '#3daeff', rabbit),
              ('fox population', '#ff3679', fox))
    solid = (('grassland', '#21d9c3', grassland),
             ('forest', '#460991', forest),
             ('pond', '#088538', pond),
             ('lake', '#d6d30b', lake))
    for label, color, values in dashed:
        plt.plot(steps, values, linestyle='dashed', color=color, label=label)
    for label, color, values in solid:
        plt.plot(steps, values, color=color, label=label)
    plt.legend(['rabbit', 'fox', 'grassland', 'forest', 'pond', 'lake'])
    plt.show()
def write(grassland: list, forest: list, pond: list, lake: list, rabbit: list, fox: list):
f = open('population_graph', 'w')
size = len(rabbit)
for i in range(0, size):
f.write(str(rabbit[i]) + "," + str(fox[i]) + "," + str(grassland[i]) + "," + str(forest[i]) +
"," + str(lake[i]) + "," + str(pond[i]) + "\n")
f.close()
def find_genetic_variation_rabbit_std(rabbits: list) -> list:
    """Return the population standard deviation of each rabbit gene.

    Gene order: mating_requirement, step_size, thirst_resistance,
    hunger_resistance, predator_fear, vision.
    The six copy-pasted collect-and-append loops are collapsed into a
    single loop over the trait names; results are identical.
    """
    traits = ('mating_requirement', 'step_size', 'thirst_resistance',
              'hunger_resistance', 'predator_fear', 'vision')
    return [np.std([getattr(rabbit.genetics, trait) for rabbit in rabbits])
            for trait in traits]
def find_genetic_variation_fox_std(foxes: list) -> list:
    """Return the population standard deviation of each fox gene.

    Gene order: mating_requirement, step_size, thirst_resistance,
    hunger_resistance, hunting_skill, vision.
    The six copy-pasted collect-and-append loops are collapsed into a
    single loop over the trait names; results are identical.
    """
    traits = ('mating_requirement', 'step_size', 'thirst_resistance',
              'hunger_resistance', 'hunting_skill', 'vision')
    return [np.std([getattr(fox.genetics, trait) for fox in foxes])
            for trait in traits]
def find_genetic_variation_rabbit_mean_ad(rabbits: list) -> list:
    """Return the mean absolute deviation (about the mean) of each rabbit gene.

    Gene order: mating_requirement, step_size, thirst_resistance,
    hunger_resistance, predator_fear, vision.
    Fix: the original used pandas.Series.mad(), which was deprecated in
    pandas 1.5 and removed in 2.0; the same statistic is computed with
    NumPy here. The six copy-pasted loops are also collapsed into one.
    """
    def _mean_ad(values):
        arr = np.asarray(values, dtype=float)
        if arr.size == 0:
            return float('nan')  # Series.mad() of an empty series was NaN
        return float(np.mean(np.abs(arr - arr.mean())))

    traits = ('mating_requirement', 'step_size', 'thirst_resistance',
              'hunger_resistance', 'predator_fear', 'vision')
    return [_mean_ad([getattr(rabbit.genetics, trait) for rabbit in rabbits])
            for trait in traits]
def find_genetic_variation_fox_mean_ad(foxes: list) -> list:
    """Return the mean absolute deviation (about the mean) of each fox gene.

    Gene order: mating_requirement, step_size, thirst_resistance,
    hunger_resistance, hunting_skill, vision.
    Fix: the original used pandas.Series.mad(), which was deprecated in
    pandas 1.5 and removed in 2.0; the same statistic is computed with
    NumPy here. The six copy-pasted loops are also collapsed into one.
    """
    def _mean_ad(values):
        arr = np.asarray(values, dtype=float)
        if arr.size == 0:
            return float('nan')  # Series.mad() of an empty series was NaN
        return float(np.mean(np.abs(arr - arr.mean())))

    traits = ('mating_requirement', 'step_size', 'thirst_resistance',
              'hunger_resistance', 'hunting_skill', 'vision')
    return [_mean_ad([getattr(fox.genetics, trait) for fox in foxes])
            for trait in traits]
def find_genetic_variation_rabbit_mad(rabbits: list) -> list:
    """Return the median absolute deviation of each rabbit gene.

    Gene order: mating_requirement, step_size, thirst_resistance,
    hunger_resistance, predator_fear, vision.
    The six copy-pasted collect-and-append loops are collapsed into a
    single loop over the trait names; results are identical.
    """
    traits = ('mating_requirement', 'step_size', 'thirst_resistance',
              'hunger_resistance', 'predator_fear', 'vision')
    return [stats.median_abs_deviation(
                [getattr(rabbit.genetics, trait) for rabbit in rabbits])
            for trait in traits]
def find_genetic_variation_fox_mad(foxes: list) -> list:
    """Return the median absolute deviation of each fox gene.

    Gene order: mating_requirement, step_size, thirst_resistance,
    hunger_resistance, hunting_skill, vision.
    The six copy-pasted collect-and-append loops are collapsed into a
    single loop over the trait names; results are identical.
    """
    traits = ('mating_requirement', 'step_size', 'thirst_resistance',
              'hunger_resistance', 'hunting_skill', 'vision')
    return [stats.median_abs_deviation(
                [getattr(fox.genetics, trait) for fox in foxes])
            for trait in traits]
def find_genetic_variation_rabbit_coeff_var(rabbits: list) -> list:
    """Return the coefficient of variation (std/mean) of each rabbit gene.

    Gene order: mating_requirement, step_size, thirst_resistance,
    hunger_resistance, predator_fear, vision.
    The six copy-pasted collect-and-append loops are collapsed into a
    single loop over the trait names; results are identical.
    """
    traits = ('mating_requirement', 'step_size', 'thirst_resistance',
              'hunger_resistance', 'predator_fear', 'vision')
    return [stats.variation([getattr(rabbit.genetics, trait) for rabbit in rabbits])
            for trait in traits]
def find_genetic_variation_fox_coeff_var(foxes: list) -> list:
    """Return the coefficient of variation (std/mean) of each fox gene.

    Gene order: mating_requirement, step_size, thirst_resistance,
    hunger_resistance, hunting_skill, vision.
    The six copy-pasted collect-and-append loops are collapsed into a
    single loop over the trait names; results are identical.
    """
    traits = ('mating_requirement', 'step_size', 'thirst_resistance',
              'hunger_resistance', 'hunting_skill', 'vision')
    return [stats.variation([getattr(fox.genetics, trait) for fox in foxes])
            for trait in traits]
def find_genetic_variation_rabbit_entropy(rabbits: list) -> list:
    """Return a Shannon-entropy diversity score for each rabbit gene.

    For each gene, its expected range is split into 19 half-open bins and
    the entropy of the population proportion per occupied bin is summed.
    Gene order: mating_requirement, step_size, thirst_resistance,
    hunger_resistance, predator_fear, vision.
    The six copy-pasted bin loops are collapsed into a nested helper;
    bin edges, iteration order and float accumulation are unchanged.
    """
    population = len(rabbits)

    def binned_entropy(values, lo, hi):
        # 20 edges -> 19 bins [edges[j], edges[j+1]); values >= hi fall
        # outside every bin, exactly as in the original loops.
        edges = np.linspace(lo, hi, 20)
        entropy = 0
        for j in range(19):
            count = sum(1 for v in values if edges[j] <= v < edges[j + 1])
            if count > 0:
                entropy += -(count / population) * math.log2(count / population)
        return entropy

    # (lo, hi) per gene; the original reused the thirst_resistance edges
    # (0, 1) for hunger_resistance, preserved here.
    spans = (('mating_requirement', 0, 4),
             ('step_size', 15, 40),
             ('thirst_resistance', 0, 1),
             ('hunger_resistance', 0, 1),
             ('predator_fear', 0, 4),
             ('vision', 80, 200))
    return [binned_entropy([getattr(r.genetics, name) for r in rabbits], lo, hi)
            for name, lo, hi in spans]
def find_genetic_variation_fox_entropy(foxes: list) -> list:
    """Return the Shannon entropy of each heritable fox trait.

    For every trait the observed values are histogrammed into 19 equal-width,
    half-open bins over that trait's expected range, and the entropy
    ``sum(-p * log2(p))`` of the non-empty bin proportions is computed.

    :param foxes: objects exposing a ``genetics`` attribute carrying the six
        trait fields read below.
    :return: six entropies, in the order mating_requirement, step_size,
        thirst_resistance, hunger_resistance, hunting_skill, vision.
    """
    population = len(foxes)

    def _bin_entropy(values, edges):
        # Shannon entropy of *values* distributed over the half-open bins
        # [edges[j], edges[j + 1]); empty bins contribute nothing.
        entropy = 0
        for j in range(len(edges) - 1):
            count = sum(1 for value in values if edges[j] <= value < edges[j + 1])
            if count > 0:
                entropy += -(count / population) * math.log2(count / population)
        return entropy

    # (trait getter, bin edges) pairs.  The original code silently reused the
    # thirst_resistance bins for hunger_resistance; that reuse is explicit here.
    traits = [
        (lambda g: g.mating_requirement, np.linspace(0, 5, 20)),
        (lambda g: g.step_size, np.linspace(20, 45, 20)),
        (lambda g: g.thirst_resistance, np.linspace(0, 0.5, 20)),
        (lambda g: g.hunger_resistance, np.linspace(0, 0.5, 20)),
        (lambda g: g.hunting_skill, np.linspace(130, 280, 20)),
        (lambda g: g.vision, np.linspace(100, 280, 20)),
    ]
    return [_bin_entropy([get(fox.genetics) for fox in foxes], edges)
            for get, edges in traits]
def plot_variance_rabbit(variance, title):
    """Draw a 2x3 panel of per-trait rabbit variation curves over time.

    :param variance: sequence of 6-element rows, one row per time step,
        in the order mating, step size, thirst, hunger, fear, vision.
    :param title: figure-level title shown above the six panels.
    """
    series = np.array(variance).transpose()
    steps = range(1, len(series[0]) + 1)
    fig, axes = plt.subplots(2, 3)
    panels = [
        ('Mating Requirement', '#0bd6cc'),
        ('Step Size', '#a70bd6'),
        ('Thirst Resistance', '#d60b63'),
        ('Hunger Resistance', '#22d42b'),
        ('Predator Fear', '#f56c22'),
        ('Vision Radius', '#17453f'),
    ]
    for index, (label, colour) in enumerate(panels):
        axis = axes[index // 3][index % 3]
        axis.plot(steps, series[index], color=colour)
        axis.set_title(label)
    plt.suptitle(title)
    plt.show()
def plot_variance_fox(variance, title):
    """Draw a 2x3 panel of per-trait fox variation curves over time.

    :param variance: sequence of 6-element rows, one row per time step,
        in the order mating, step size, thirst, hunger, hunting, vision.
    :param title: figure-level title shown above the six panels.
    """
    series = np.array(variance).transpose()
    steps = range(1, len(series[0]) + 1)
    fig, axes = plt.subplots(2, 3)
    panels = [
        ('Mating Requirement', '#0bd6cc'),
        ('Step Size', '#a70bd6'),
        ('Thirst Resistance', '#d60b63'),
        ('Hunger Resistance', '#22d42b'),
        ('Hunting Skill', '#f56c22'),
        ('Vision Radius', '#17453f'),
    ]
    for index, (label, colour) in enumerate(panels):
        axis = axes[index // 3][index % 3]
        axis.plot(steps, series[index], color=colour)
        axis.set_title(label)
    plt.suptitle(title)
    plt.show()
| 35.524229
| 110
| 0.611793
| 2,013
| 16,128
| 4.816692
| 0.0616
| 0.086634
| 0.034035
| 0.055693
| 0.948948
| 0.936056
| 0.925639
| 0.915532
| 0.905116
| 0.905116
| 0
| 0.026889
| 0.257502
| 16,128
| 453
| 111
| 35.602649
| 0.782797
| 0
| 0
| 0.861575
| 0
| 0
| 0.036491
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040573
| false
| 0
| 0.011933
| 0
| 0.076372
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bbb5138a9de59f94e89526eb3539d536ad11119f
| 8,721
|
py
|
Python
|
tests/test_tempest_zigzag.py
|
rcbops/tempest-zigzag
|
12f4bc528c9b88263f04394e5f31755519aa02e8
|
[
"Apache-2.0"
] | null | null | null |
tests/test_tempest_zigzag.py
|
rcbops/tempest-zigzag
|
12f4bc528c9b88263f04394e5f31755519aa02e8
|
[
"Apache-2.0"
] | 2
|
2019-01-14T22:48:35.000Z
|
2019-02-25T20:06:06.000Z
|
tests/test_tempest_zigzag.py
|
rcbops/tempest-zigzag
|
12f4bc528c9b88263f04394e5f31755519aa02e8
|
[
"Apache-2.0"
] | 2
|
2019-01-08T20:19:16.000Z
|
2019-01-10T22:12:30.000Z
|
from tempest_zigzag.tempest_zigzag import TempestZigZag
from lxml import etree
class TestTempestZigZag(object):
    """Tests for TempestZigZag.process_xml.

    Each test feeds a fixture junit-style XML file, a tempest test list, and a
    tempest config through process_xml, then inspects the rewritten XML string
    it returns.  All file arguments come from pytest fixtures defined elsewhere.
    """

    def test_nothing_to_do(self, file_test_xml_all_pass, file_test_list, tempest_config_file):
        """Tests that when there is nothing to do it will return a string
        that is functionally identical to the xml passed in"""
        result = TempestZigZag.process_xml(file_test_xml_all_pass, file_test_list, tempest_config_file)
        expected = etree.parse(file_test_xml_all_pass).getroot()
        observed = etree.XML(result)
        assert observed.tag == expected.tag
        assert observed.attrib['failures'] == expected.attrib['failures']
        assert observed.attrib['errors'] == expected.attrib['errors']
        assert observed.attrib['tests'] == expected.attrib['tests']
        assert observed.attrib['name'] == 'xml suite created by tempest-zigzag'
        assert len(observed) == len(expected) + 1  # the actual number of child elements (testcase) + properties element
        for observed_case in observed:
            # test that the observed cases have the same names as the expected cases
            try:
                # there should be exactly one testcase with a matching name
                assert len([case for case in expected if case.attrib['name'] == observed_case.attrib['name']]) == 1
            except KeyError:  # <properties> do not have a name attribute
                pass

    def test_setupclass_failure(self, file_test_xml_setup_failure, file_test_list_with_whitespace, tempest_config_file):
        """Tests that the correct testcase elements will be created
        when a setUpClass failure is found"""
        result = TempestZigZag.process_xml(file_test_xml_setup_failure,
                                           file_test_list_with_whitespace,
                                           tempest_config_file)
        observed = etree.XML(result)
        new_case_count = 0
        assert len(observed) == 11  # 10 test cases and 1 properties element
        assert '5' == observed.attrib['errors']
        assert '0' == observed.attrib['failures']
        for testcase in observed.findall('testcase'):
            # only the class that failed setUpClass should carry error tags
            if testcase.attrib['classname'] == 'tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON':
                error_tag = testcase.find('error')
                assert error_tag is not None
                assert 'An unexpected error prevented the server from fulfilling your request.' in error_tag.text
                assert not testcase.find('failure')  # there should not be any failures
                new_case_count += 1
        assert new_case_count == 5

    def test_teardownclass_failure(self, file_test_xml_teardown_failure, file_test_list, tempest_config_file):
        """Tests that the correct testcase elements will be altered
        when a teardown failure is found"""
        result = TempestZigZag.process_xml(file_test_xml_teardown_failure, file_test_list, tempest_config_file)
        observed = etree.XML(result)
        assert len(observed) == 11  # 10 test cases and 1 properties element
        assert '5' == observed.attrib['errors']
        assert '0' == observed.attrib['failures']
        for testcase in observed.findall('testcase'):
            # only the class that failed tearDownClass should carry error tags
            if testcase.attrib['classname'] == 'tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON':
                error_tag = testcase.find('error')
                assert error_tag is not None
                assert 'An unexpected error prevented the server from fulfilling your request.' in error_tag.text
                assert not testcase.find('failure')  # there should not be any failures

    def test_teardownclass_multiple_failures(self,
                                             file_test_xml_teardown_multiple_failures,
                                             file_test_list_with_whitespace,
                                             tempest_config_file):
        """Tests that the correct testcase elements will be altered
        when multiple teardown failures are found"""
        result = TempestZigZag.process_xml(file_test_xml_teardown_multiple_failures,
                                           file_test_list_with_whitespace,
                                           tempest_config_file)
        observed = etree.XML(result)
        assert len(observed) == 11  # 10 test cases and 1 properties element
        assert '10' == observed.attrib['errors']
        assert '0' == observed.attrib['failures']
        # all test cases should be set to error in this case
        for testcase in observed.findall('testcase'):
            error_tag = testcase.find('error')
            assert error_tag is not None
            assert 'An unexpected error prevented the server from fulfilling your request.' in error_tag.text
            assert not testcase.find('failure')  # there should not be any failures

    def test_setupclass_multiple_failures(self,
                                          file_test_xml_setup_multiple_failures,
                                          file_test_list,
                                          tempest_config_file):
        """Tests that the correct testcase elements will be altered
        when multiple setUpClass failures are found"""
        result = TempestZigZag.process_xml(file_test_xml_setup_multiple_failures, file_test_list, tempest_config_file)
        observed = etree.XML(result)
        assert len(observed) == 11  # 10 test cases and 1 properties element
        assert '10' == observed.attrib['errors']
        assert '0' == observed.attrib['failures']
        # all test cases should be set to error in this case
        for testcase in observed.findall('testcase'):
            error_tag = testcase.find('error')
            assert error_tag is not None
            assert 'An unexpected error prevented the server from fulfilling your request.' in error_tag.text
            assert not testcase.find('failure')  # there should not be any failures

    def test_teardownclass_failure_tests_not_found(self,
                                                   file_test_xml_teardown_class_not_in_list,
                                                   file_test_list,
                                                   tempest_config_file):
        """Tests when a tearDownClass failure exists in the provided xml
        but there are no corresponding tests in the test list
        """
        result = TempestZigZag.process_xml(file_test_xml_teardown_class_not_in_list,
                                           file_test_list,
                                           tempest_config_file)
        expected = etree.parse(file_test_xml_teardown_class_not_in_list).getroot()
        observed = etree.XML(result)
        assert observed.tag == expected.tag
        assert observed.attrib['failures'] == expected.attrib['failures']
        assert observed.attrib['errors'] == expected.attrib['errors']
        assert observed.attrib['tests'] == expected.attrib['tests']
        assert observed.attrib['name'] == 'xml suite created by tempest-zigzag'
        assert len(observed) == len(expected) + 1  # the actual number of child elements (testcase) + properties element
        # the broken testcase should be the last case in the list
        assert 'tearDownClass (tempest.oops.this.class.is.not.in.the.TestList)' == observed[-1].attrib['name']

    def test_setupclass_failure_tests_not_found(self,
                                                file_test_xml_setup_class_not_in_list,
                                                file_test_list,
                                                tempest_config_file):
        """Tests when a setUpClass failure exists in the provided xml
        but there are no corresponding tests in the test list
        """
        result = TempestZigZag.process_xml(file_test_xml_setup_class_not_in_list, file_test_list, tempest_config_file)
        expected = etree.parse(file_test_xml_setup_class_not_in_list).getroot()
        observed = etree.XML(result)
        assert observed.tag == expected.tag
        assert observed.attrib['failures'] == expected.attrib['failures']
        assert observed.attrib['errors'] == expected.attrib['errors']
        assert observed.attrib['tests'] == expected.attrib['tests']
        assert observed.attrib['name'] == 'xml suite created by tempest-zigzag'
        assert len(observed) == len(expected) + 1  # the actual number of child elements (testcase) + properties element
        # the broken testcase should be the last case in the list
        assert 'setUpClass (tempest.oops.this.class.is.not.in.the.TestList)' == observed[-1].attrib['name']
| 56.62987
| 120
| 0.633872
| 1,020
| 8,721
| 5.215686
| 0.126471
| 0.046617
| 0.03515
| 0.035714
| 0.879323
| 0.873872
| 0.866353
| 0.860902
| 0.847744
| 0.847744
| 0
| 0.006312
| 0.29148
| 8,721
| 153
| 121
| 57
| 0.854669
| 0.18358
| 0
| 0.682243
| 0
| 0
| 0.134706
| 0.032318
| 0
| 0
| 0
| 0
| 0.429907
| 1
| 0.065421
| false
| 0.037383
| 0.018692
| 0
| 0.093458
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bbd02536309812dbd4e3fb83fc758b386478b273
| 53
|
py
|
Python
|
src/test/pythonFiles/formatting/fileToFormatOnEnter.py
|
wjt/vscode-python
|
ece0479102e910e6d75197bf84d9aa1e28eb3f09
|
[
"MIT"
] | null | null | null |
src/test/pythonFiles/formatting/fileToFormatOnEnter.py
|
wjt/vscode-python
|
ece0479102e910e6d75197bf84d9aa1e28eb3f09
|
[
"MIT"
] | null | null | null |
src/test/pythonFiles/formatting/fileToFormatOnEnter.py
|
wjt/vscode-python
|
ece0479102e910e6d75197bf84d9aa1e28eb3f09
|
[
"MIT"
] | null | null | null |
x=1
"""x=1
"""
# comment
# x=1
x+1 #
@x
x.y
x+"""
| 5.3
| 11
| 0.358491
| 13
| 53
| 1.461538
| 0.307692
| 0.421053
| 0.473684
| 0.421053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 0.283019
| 53
| 9
| 12
| 5.888889
| 0.394737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a539621100d371a9dd5b302280f1cad348d70b62
| 141
|
py
|
Python
|
mapps/admin.py
|
fossabot/mendelmd
|
5cafebf5336b8854fe71414b3559428e3e401617
|
[
"BSD-3-Clause"
] | 33
|
2016-07-22T21:39:09.000Z
|
2021-06-24T02:57:02.000Z
|
mapps/admin.py
|
fossabot/mendelmd
|
5cafebf5336b8854fe71414b3559428e3e401617
|
[
"BSD-3-Clause"
] | 41
|
2017-06-20T03:10:33.000Z
|
2021-12-24T23:54:41.000Z
|
mapps/admin.py
|
fossabot/mendelmd
|
5cafebf5336b8854fe71414b3559428e3e401617
|
[
"BSD-3-Clause"
] | 8
|
2017-06-14T21:07:47.000Z
|
2021-01-12T17:59:49.000Z
|
from django.contrib import admin

from .models import App

# Make the App model manageable through the Django admin site.
admin.site.register(App)
| 20.142857
| 32
| 0.787234
| 21
| 141
| 5.285714
| 0.52381
| 0.216216
| 0.324324
| 0.396396
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141844
| 141
| 6
| 33
| 23.5
| 0.917355
| 0.375887
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
a539b8edf374676d211c0ebd8c029a7b79bfad5a
| 39
|
py
|
Python
|
ETPM-SDG/f1aula3_1.py
|
ayresmajor/Curso-python
|
006229cec38ea365bf43b19e3ce93fbd32e1dca6
|
[
"MIT"
] | null | null | null |
ETPM-SDG/f1aula3_1.py
|
ayresmajor/Curso-python
|
006229cec38ea365bf43b19e3ce93fbd32e1dca6
|
[
"MIT"
] | null | null | null |
ETPM-SDG/f1aula3_1.py
|
ayresmajor/Curso-python
|
006229cec38ea365bf43b19e3ce93fbd32e1dca6
|
[
"MIT"
] | null | null | null |
# ANSI escape sequence: bold (1), red text (31) on a white background (47);
# the trailing '\033[m' resets terminal styling.
print('\033[1;31;47m Viva Turma\033[m')
| 39
| 39
| 0.692308
| 9
| 39
| 3
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.297297
| 0.051282
| 39
| 1
| 39
| 39
| 0.432432
| 0
| 0
| 0
| 0
| 0
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
a53f8d24f7563ede90b131176818e72f7944c131
| 96,147
|
py
|
Python
|
app.py
|
geofragkos/RBAC_Decentralized
|
7cfcb668d1dead1fab63c713918e5a401da4ec82
|
[
"Apache-2.0"
] | null | null | null |
app.py
|
geofragkos/RBAC_Decentralized
|
7cfcb668d1dead1fab63c713918e5a401da4ec82
|
[
"Apache-2.0"
] | null | null | null |
app.py
|
geofragkos/RBAC_Decentralized
|
7cfcb668d1dead1fab63c713918e5a401da4ec82
|
[
"Apache-2.0"
] | null | null | null |
from flask import Flask, jsonify, request, abort, send_file
from flask_cors import CORS
from web3 import Web3, HTTPProvider
import json
from web3.auto import w3
import subprocess
import rbac_generation  # duplicate second import of this module removed
import time
import uuid
import sys
import rbac_der_demo_getterv2 as getter

# Redirect all print() output to ./log.txt for the lifetime of the process.
# NOTE(review): the handle is intentionally never closed; it backs sys.stdout.
log_file = open("./log.txt","a")
sys.stdout = log_file

# Configuration
DEBUG = False

# Instantiate the application
app = Flask(__name__)
app.config.from_object(__name__)

# Enable CORS for every route and every origin
CORS(app, resources={r'/*': {'origins': '*'}})
def find_Address(username):
    """Return the on-chain account address registered for *username*."""
    return blockchain.contract.functions.profileAddress(username).call()
def add_owner(username, role, association):
    """Create a profile for *username*, assign *role*, and link the profile
    to its parent *association* on the blockchain.

    Also mirrors the new member into the matching in-memory roster list
    (UTILITY*/SP*/SECAUDITORS, defaulting to SECADMINS).
    """
    user_account = blockchain.web3.eth.account.create().address
    association_account = find_Address(association)

    def _mine(tx_hash):
        # Wait for the transaction to be mined; the contract() call mirrors
        # the original per-transaction bookkeeping.
        receipt = blockchain.web3.eth.waitForTransactionReceipt(tx_hash)
        blockchain.web3.eth.contract(address=receipt.contractAddress, abi=blockchain.contract_abi)

    tx = blockchain.contract.functions.createProfile(username, user_account).transact()
    _mine(tx)
    print('New Profile with Username: "'+str(username)+ '" and with hash ' + str(str(Web3.toHex(tx))) + ' is successfully created!')

    tx = blockchain.contract.functions.addRoleToUser(username, user_account, role).transact()
    _mine(tx)
    # BUG FIX: original printed the undefined name `entity` (NameError).
    print('New User to Role Assignment: '+ str(username)+ ' -> ' + str(role) +' with hash ' + str(Web3.toHex(tx)) + ' is successfully created')

    tx = blockchain.contract.functions.addChild(username, association, user_account, association_account).transact()
    _mine(tx)
    # BUG FIX: `entity` was undefined here too; print the two linked names.
    print('New Association: '+ str(username)+' & '+str(association) +' with hash ' + str(Web3.toHex(tx)) + ' is successfully created')

    item = {
        'name': username,
        'address': user_account,
        'role': role
    }
    rosters = {
        'Utility 1': UTILITY1,
        'Utility 2': UTILITY2,
        'Utility 3': UTILITY3,
        'Utility 4': UTILITY4,
        'Utility 5': UTILITY5,
        'Service Provider 1': SP1,
        'Service Provider 2': SP2,
        'Service Provider 3': SP3,
        'Service Provider 4': SP4,
        'Service Provider 5': SP5,
        'Security Auditor': SECAUDITORS,
    }
    rosters.get(association, SECADMINS).append(item)
def add_owner_device(username, role, device):
    """Create linked blockchain profiles for a DER owner and their device.

    Registers two profiles (owner and device), assigns *role* to the owner,
    makes the device a child of the owner, and records the pair in DEROWNERS.
    """
    owner_address = blockchain.web3.eth.account.create().address
    device_address = blockchain.web3.eth.account.create().address

    def _mine(tx_hash):
        # Wait until the transaction is mined; the contract() call mirrors
        # the original per-transaction bookkeeping.
        receipt = blockchain.web3.eth.waitForTransactionReceipt(tx_hash)
        blockchain.web3.eth.contract(address=receipt.contractAddress, abi=blockchain.contract_abi)

    tx = blockchain.contract.functions.createProfile(username, owner_address).transact()
    _mine(tx)
    print('New Profile with Username: "'+str(username)+ '" and with hash ' + str(str(Web3.toHex(tx))) + ' is successfully created!')

    tx = blockchain.contract.functions.createProfile(device, device_address).transact()
    _mine(tx)
    print('New Profile with Username: "'+str(device)+ '" and with hash ' + str(str(Web3.toHex(tx))) + ' is successfully created!')

    tx = blockchain.contract.functions.addRoleToUser(username, owner_address, role).transact()
    _mine(tx)
    print('New User to Role Assignment: '+ str(username)+ ' -> ' + str(role) +' with hash ' + str(Web3.toHex(tx)) + ' is successfully created')

    tx = blockchain.contract.functions.addChild(device, username, device_address, owner_address).transact()
    _mine(tx)
    print('New Association: '+ str(device)+' & '+str(username) +' with hash ' + str(Web3.toHex(tx)) + ' is successfully created')

    DEROWNERS.append({
        'name': username,
        'address': owner_address,
        'role': device
    })
@app.route('/showperm2', methods=['PUT'])
def show_perm2():
    """Return the DERCapacity permission string of the caller's parent group.

    Accepts a JSON body with 'name' and/or 'address'; sets flag='False' on a
    name/address mismatch, for DER device names, or for roleless profiles.
    """
    start = time.time()
    response_object = {'status': 'success'}
    response_object['flag'] = 'True'
    if request.method == 'PUT':
        post_data = request.get_json()
        username = post_data.get('name')
        address = post_data.get('address')
        # Check Username and Address -- Matching
        if username == '':
            username = blockchain.contract.functions.profileUsername(address).call()
        elif address == '':
            address = find_Address(username)
        elif username != '' and address != '':
            check_username = blockchain.contract.functions.profileUsername(address).call()
            check_address = find_Address(username)
            if check_username != username or check_address != address:
                response_object['flag'] = 'False'
        if 'DER' in username:
            # DER device names have no role/parent of their own.
            response_object['flag'] = 'False'
        else:
            role = blockchain.contract.functions.getRolesfromNames(username).call()
            parent = blockchain.contract.functions.returnParent(username).call()
            if role == 'DER Owner':
                parent = 'DER Owners'
            elif role == '':
                response_object['flag'] = 'False'
            # BUG FIX: this query used to run unconditionally, raising a
            # NameError for DER usernames because `parent` was never bound.
            model = 'DERCapacity'
            perms_string = blockchain.contract.functions.queryPermissions(parent, model).call()
            for junk in ('{', '}', '"'):
                perms_string = perms_string.replace(junk, '')
            response_object[model] = perms_string
    end = time.time()
    print('Show Permissions Query Time: ' + str(end-start))
    return jsonify(response_object)
@app.route('/show_permissions', methods=['PUT'])
def show_permissions():
    """Return the permission list for a user if they may operate on a DER.

    The JSON body supplies 'username'/'address' (one may be blank) and 'der'.
    Permission is granted when the user's parent chain overlaps the DER's
    parent chain, or the DER is a direct child of the user.
    """
    start = time.time()
    response_object = {'status': 'success'}
    response_object['flag'] = 'True'
    response_object['NoPerm'] = 'False'
    response_object['permissions'] = ''
    if request.method == 'PUT':
        post_data = request.get_json()
        username = post_data.get('username')
        address = post_data.get('address')
        device = post_data.get('der')
        # Check Username and Address -- Matching
        if username == '':
            username = blockchain.contract.functions.profileUsername(address).call()
        elif address == '':
            address = find_Address(username)
        elif username != '' and address != '':
            check_username = blockchain.contract.functions.profileUsername(address).call()
            check_address = find_Address(username)
            if check_username != username or check_address != address:
                response_object['flag'] = 'False'
        # Check Username and DER - Can Username do an operation to this DER
        parent_list = blockchain.contract.functions.getParentsNames(username).call()
        username_parent_string = ''
        for parent in parent_list:
            parent_string = Web3.toText(parent)
            parent_string = parent_string.split("\x00")
            username_parent_string += parent_string[0] + ','
        username_parent_string = username_parent_string[0:-1]  # Subtract the comma
        parent_list = blockchain.contract.functions.getParentsNames(device).call()
        der_parent_string = ''
        for parent in parent_list:
            parent_string = Web3.toText(parent)
            parent_string = parent_string.split("\x00")
            der_parent_string += parent_string[0] + ','
        # BUG FIX: originally isChild(der, username) referenced the undefined
        # name `der` and never invoked .call() on the contract function.
        ischild = blockchain.contract.functions.isChild(device, username).call()
        if (username_parent_string in der_parent_string) or ischild:
            # Ask for permissions
            # BUG FIX: `.call` was never invoked, so perm_list was a bound
            # method rather than the contract's return value.
            perm_list = blockchain.contract.functions.queryPermissions(username).call()
            permissions_string = ''
            for perm in perm_list:
                perm_string = Web3.toText(perm)
                perm_string = perm_string.split("\x00")
                permissions_string += perm_string[0] + ','
            response_object['permissions'] = permissions_string
        else:
            response_object['NoPerm'] = 'True'
    end = time.time()
    print('Permissions Query Time: '+ str(end-start))
    return jsonify(response_object)
@app.route('/download', methods=['POST'])
def download():
    """Serve the service log file as a downloadable attachment (test.txt)."""
    log_path = '/home/george/Desktop/Sandia_RBAC/Decentralized_Ethereum/log.txt'
    return send_file(log_path, attachment_filename='test.txt', as_attachment=True)
@app.route('/verify_utr', methods=['PUT'])
def verify_utr():
    """Check whether a user holds a role, echoing the resolved request data.

    The JSON body supplies 'username'/'address' (one may be blank) and 'role';
    a name/address mismatch sets flag='False' but the lookup still proceeds.
    """
    start = time.time()
    response_object = {'status': 'success'}
    response_object['flag'] = 'True'
    if request.method == 'PUT':
        payload = request.get_json()
        username = payload.get('username')
        address = payload.get('address')
        role = payload.get('role')
        # Fill in whichever identifier is missing; if both are present,
        # confirm they refer to the same profile.
        if username == '':
            username = blockchain.contract.functions.profileUsername(address).call()
        elif address == '':
            address = find_Address(username)
        elif username != '' and address != '':
            known_name = blockchain.contract.functions.profileUsername(address).call()
            known_addr = find_Address(username)
            if known_name != username or known_addr != address:
                response_object['flag'] = 'False'
        raw_parents = blockchain.contract.functions.getParentsNames(username).call()
        parent_csv = ''.join(Web3.toText(raw).split("\x00")[0] + ',' for raw in raw_parents)
        response_object['sent_parent'] = parent_csv
        verification = blockchain.contract.functions.hasRole(username, role).call()
        response_object['sent_username'] = username
        response_object['sent_address'] = address
        response_object['sent_role'] = role
        response_object['sent_verification'] = verification
    end = time.time()
    print('Verify User Query Time: '+ str(end-start))
    return jsonify(response_object)
@app.route('/add_user', methods=['PUT'])
def add_user():
    """Register a new user — and, for DER Owners, their DER device too.

    Rejects registration when the username (or device name) already resolves
    to a non-zero on-chain address.
    """
    start = time.time()
    response_object = {'status': 'success'}
    NULL_ADDRESS = '0x0000000000000000000000000000000000000000'
    if request.method == 'PUT':
        payload = request.get_json()
        role = payload.get('role')
        association = payload.get('association')
        device = payload.get('device')
        username = payload.get('firstName') + ' ' + payload.get('lastName')
        if role == 'DER Owner':
            if find_Address(username) != NULL_ADDRESS:
                response_object['sent_response'] = 'Existing User'
            if find_Address(device) != NULL_ADDRESS:
                response_object['sent_response'] = 'Existing DER Device'
            if find_Address(username) == NULL_ADDRESS and find_Address(device) == NULL_ADDRESS:
                add_owner_device(username, role, device)
        else:
            if find_Address(username) != NULL_ADDRESS:
                response_object['sent_response'] = 'Existing User'
            else:
                add_owner(username, role, association)
    end = time.time()
    print('Add User Query Time: '+str(end-start))
    return jsonify(response_object)
def delete_fun(username, address):
    """Delete a profile and all of its on-chain relationships.

    Two branches: regular users (no 'DER' in the name) and DER devices.
    For users: delete the profile, remove the name/address associations with
    each parent, drop the member from the in-memory roster list, then revoke
    the role.  For devices: delete the profile and its associations, then
    remove the owning user's 'DER Owner' role and the DEROWNERS entry.
    """
    if (not 'DER' in username):
        # --- Regular user branch ---
        # Delete the old Profile
        created_profile_hash = blockchain.contract.functions.deleteProfile(username, address).transact()
        tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
        mined_receipt = blockchain.web3.eth.contract(address = tx_receipt.contractAddress, abi = blockchain.contract_abi)
        print(tx_receipt)
        role = blockchain.contract.functions.getRolesfromNames(username).call()
        parent_list = blockchain.contract.functions.getParentsNames(username).call()
        final_parent_string = ''
        parentt = ''
        for parent in parent_list:
            parent_string = Web3.toText(parent)
            parent_string = parent_string.split("\x00")
            final_parent_string += parent_string[0] + ','
            # NOTE(review): `parentt` retains only the LAST parent seen —
            # confirm a user is expected to have exactly one parent.
            parentt = parent_string[0]
            # Delete the Name Association with Utility
            created_profile_hash = blockchain.contract.functions.deleteAssoc(username, parent_string[0]).transact()
            tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
            mined_receipt = blockchain.web3.eth.contract(address = tx_receipt.contractAddress, abi = blockchain.contract_abi)
            print(tx_receipt)
            # Delete the Name Association with Utility
            parent_address = find_Address(parent_string[0])
            created_profile_hash = blockchain.contract.functions.deleteAssocAddress(address, parent_address).transact()
            tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
            mined_receipt = blockchain.web3.eth.contract(address = tx_receipt.contractAddress, abi = blockchain.contract_abi)
            print(tx_receipt)
        # Drop the member from the matching in-memory roster list.
        item = {
            'name': username,
            'address': address,
            'role': role
        }
        if(parentt == 'Utility 1'):
            UTILITY1.remove(item)
        elif(parentt == 'Utility 2'):
            UTILITY2.remove(item)
        elif(parentt == 'Utility 3'):
            UTILITY3.remove(item)
        elif(parentt == 'Utility 4'):
            UTILITY4.remove(item)
        elif(parentt == 'Utility 5'):
            UTILITY5.remove(item)
        elif(parentt == 'Service Provider 1'):
            SP1.remove(item)
        elif(parentt == 'Service Provider 2'):
            SP2.remove(item)
        elif(parentt == 'Service Provider 3'):
            SP3.remove(item)
        elif(parentt == 'Service Provider 4'):
            SP4.remove(item)
        elif(parentt == 'Service Provider 5'):
            SP5.remove(item)
        elif(parentt == 'Security Auditor'):
            SECAUDITORS.remove(item)
        else:
            SECADMINS.remove(item)
        # Delete the Role Association
        created_profile_hash = blockchain.contract.functions.revokeRole(username, address, role).transact()
        tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
        mined_receipt = blockchain.web3.eth.contract(address = tx_receipt.contractAddress, abi = blockchain.contract_abi)
        print(tx_receipt)
    else:
        # --- DER device branch ---
        # Delete the old Profile
        created_profile_hash = blockchain.contract.functions.deleteProfile(username, address).transact()
        tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
        mined_receipt = blockchain.web3.eth.contract(address = tx_receipt.contractAddress, abi = blockchain.contract_abi)
        print(tx_receipt)
        parent_list = blockchain.contract.functions.getParentsNames(username).call()
        final_parent_string = ''
        for parent in parent_list:
            parent_string = Web3.toText(parent)
            parent_string = parent_string.split("\x00")
            final_parent_string += parent_string[0] + ','
            # Delete the Name Association with Utility
            created_profile_hash = blockchain.contract.functions.deleteAssoc(username, parent_string[0]).transact()
            tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
            mined_receipt = blockchain.web3.eth.contract(address = tx_receipt.contractAddress, abi = blockchain.contract_abi)
            print(tx_receipt)
            # Delete the Name Association with Utility
            parent_address = find_Address(parent_string[0])
            created_profile_hash = blockchain.contract.functions.deleteAssocAddress(address, parent_address).transact()
            tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
            mined_receipt = blockchain.web3.eth.contract(address = tx_receipt.contractAddress, abi = blockchain.contract_abi)
            print(tx_receipt)
        # NOTE(review): `parent_string` / `parent_address` below hold the
        # values of the LAST loop iteration — confirm a DER always has
        # exactly one parent.
        item = {
            'name': username,
            'address': address,
            'role': ''
        }
        if(parent_string[0] == 'Utility 1'):
            UTILITY1.remove(item)
        elif(parent_string[0] == 'Utility 2'):
            UTILITY2.remove(item)
        elif(parent_string[0] == 'Utility 3'):
            UTILITY3.remove(item)
        elif(parent_string[0] == 'Utility 4'):
            UTILITY4.remove(item)
        elif(parent_string[0] == 'Utility 5'):
            UTILITY5.remove(item)
        elif(parent_string[0] == 'Service Provider 1'):
            SP1.remove(item)
        elif(parent_string[0] == 'Service Provider 2'):
            SP2.remove(item)
        elif(parent_string[0] == 'Service Provider 3'):
            SP3.remove(item)
        elif(parent_string[0] == 'Service Provider 4'):
            SP4.remove(item)
        elif(parent_string[0] == 'Service Provider 5'):
            SP5.remove(item)
        else:  # Case that a user has this DER
            item = {
                'name': parent_string[0],
                'address': parent_address,
                'role': username
            }
            # Delete the Role Association
            created_profile_hash = blockchain.contract.functions.revokeRole(parent_string[0], parent_address, 'DER Owner').transact()
            tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
            mined_receipt = blockchain.web3.eth.contract(address = tx_receipt.contractAddress, abi = blockchain.contract_abi)
            print(tx_receipt)
            DEROWNERS.remove(item)
@app.route('/delete_user', methods=['PUT'])
def delete_user():
    """Delete a user profile identified by username and/or address.

    Either identifier may be blank: the missing one is resolved on-chain.
    When both are supplied they must refer to the same profile, otherwise
    (or when the user does not exist) the response carries flag == 'False'.
    """
    start = time.time()
    response_object = {'status': 'success'}
    response_object['flag'] = 'True'
    if request.method == 'PUT':
        payload = request.get_json()
        username = payload.get('username')
        address = payload.get('address')
        # Resolve whichever identifier is missing; cross-check when both given.
        if username == '':
            username = blockchain.contract.functions.profileUsername(address).call()
        elif address == '':
            address = find_Address(username)
        elif username != '' and address != '':
            check_username = blockchain.contract.functions.profileUsername(address).call()
            check_address = find_Address(username)
            if check_username != username or check_address != address:
                response_object['flag'] = 'False'
                return jsonify(response_object)
        # The zero address means no such profile exists on-chain.
        if find_Address(username) == '0x0000000000000000000000000000000000000000':
            response_object['flag'] = 'False'
            return jsonify(response_object)
        else:
            delete_fun(username, address)
    end = time.time()
    print('Delete User Query Time: ' + str(end - start))
    return jsonify(response_object)
@app.route('/find_transaction', methods=['PUT'])
def find_transaction():
    """Look up a transaction by hash and report block, sender, recipient, gas."""
    start = time.time()
    response_object = {'status': 'success'}
    if request.method == 'PUT':
        payload = request.get_json()
        tx_hash = payload.get('transaction')
        info = blockchain.web3.eth.getTransaction(tx_hash)
        # Copy the interesting transaction fields into the response.
        for out_key, in_key in (('sent_block', 'blockNumber'),
                                ('sent_from', 'from'),
                                ('sent_to', 'to'),
                                ('sent_gas', 'gas')):
            response_object[out_key] = info[in_key]
    end = time.time()
    print('Find Trx Query Time: ' + str(end - start))
    return jsonify(response_object)
@app.route('/find_block', methods=['PUT'])
def find_block():
    """Report hash, size and first transaction of a block by number.

    Responds with flag == 'False' (and no sent_* fields) when the requested
    block number is negative or beyond the latest block.
    """
    response_object = {'status': 'success'}
    response_object['flag'] = 'True'
    if request.method == 'PUT':
        post_data = request.get_json()
        block = int(post_data.get('block'))
        latest_block = blockchain.web3.eth.get_block('latest')
        if block > latest_block['number'] or block < 0:
            # Bug fix: previously we set the flag but still fetched the
            # out-of-range block, which raised and returned a 500 instead
            # of the intended flag == 'False' response.
            response_object['flag'] = 'False'
            return jsonify(response_object)
        info = blockchain.web3.eth.get_block(block)
        response_object['sent_hash'] = info['hash'].hex()
        response_object['sent_size'] = info['size']
        # Robustness: a mined block may contain no transactions at all.
        transactions = info['transactions']
        response_object['sent_trx'] = transactions[0].hex() if transactions else ''
    return jsonify(response_object)
def der_create_profile(username):
    """Create an on-chain profile for DER device *username* under a freshly
    generated Ethereum account, and wait until the transaction is mined.
    """
    new_account = blockchain.web3.eth.account.create()
    der_account = new_account.address
    created_profile_hash = blockchain.contract.functions.createProfile(username, der_account).transact()
    # Wait for the transaction to be mined before announcing success.
    # Fix: dropped the unused `mined_receipt` contract construction
    # (tx_receipt.contractAddress is None for non-deployment transactions)
    # and the redundant double str() around Web3.toHex().
    blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
    print('New Profile with Username: "' + str(username) + '" and with hash ' + str(Web3.toHex(created_profile_hash)) + ' is successfully created!')
def add_der(username, association):
    """Attach DER device *username* as a child of *association* on-chain and
    mirror the membership in the matching in-memory group list.

    Any association name not matching a known Utility/Service Provider falls
    through to SP5, matching the original if/elif chain's final else.
    """
    der_account = find_Address(username)
    association_account = find_Address(association)
    created_assoc = blockchain.contract.functions.addChild(username, association, der_account, association_account).transact()
    # Wait for the transaction to be mined in the blockchain.
    blockchain.web3.eth.waitForTransactionReceipt(created_assoc)
    # Bug fix: the log line referenced the undefined name `der`, which raised
    # NameError at runtime; it clearly meant the association's name.
    # Also dropped the unused `mined_receipt` construction.
    print('New Association: ' + str(association) + ' & ' + str(username) + ' with hash ' + str(Web3.toHex(created_assoc)) + ' is successfully created')
    item = {
        'name': username,
        'address': der_account,
        'role': ''
    }
    # Dispatch table replaces the long if/elif chain; SP5 is the fallback,
    # exactly as the original final `else` behaved.
    groups = {
        'Utility 1': UTILITY1,
        'Utility 2': UTILITY2,
        'Utility 3': UTILITY3,
        'Utility 4': UTILITY4,
        'Utility 5': UTILITY5,
        'Service Provider 1': SP1,
        'Service Provider 2': SP2,
        'Service Provider 3': SP3,
        'Service Provider 4': SP4,
    }
    groups.get(association, SP5).append(item)
@app.route('/showperm', methods=['PUT'])
def show_perm():
    """Answer whether *operation* is permitted for (organization, model),
    based on the permissions JSON stored in the contract."""
    start = time.time()
    response_object = {
        'status': 'success',
        'flag': 'True',
        'answer': 'No',
        'answer_op': '',
    }
    if request.method == 'PUT':
        payload = request.get_json()
        first_name = payload.get('firstName')
        last_name = payload.get('lastName')
        organization = payload.get('parent')
        model = payload.get('model')
        print(model)
        operation = payload.get('operation')
        print(organization)
        if 'DER' in first_name:
            # DER devices themselves carry no queryable permissions.
            response_object['flag'] = 'False'
        else:
            perms = json.loads(blockchain.contract.functions.queryPermissions(organization, model).call())
            if operation in perms:
                response_object['answer'] = 'Yes'
                response_object['answer_op'] = perms[operation]
    end = time.time()
    print('Show Permissions Query Time: ' + str(end - start))
    return jsonify(response_object)
@app.route('/add_der_device', methods=['PUT'])
def add_der_device():
    """Register a new DER device and attach it to each requested association.

    Rejects the request with 'Existing DER' when the device name is already
    bound to a non-zero on-chain address.
    """
    start = time.time()
    response_object = {'status': 'success'}
    if request.method == 'PUT':
        payload = request.get_json()
        device_name = payload.get('device')
        associations = payload.get('association')
        if find_Address(device_name) != '0x0000000000000000000000000000000000000000':
            # A non-zero address means the device name is already taken.
            response_object['sent_response'] = 'Existing DER'
        else:
            der_create_profile(device_name)
            for assoc in associations:
                add_der(device_name, assoc)
    end = time.time()
    print('Add DER Query Time: ' + str(end - start))
    return jsonify(response_object)
def _parents_csv(username):
    """Return the comma-joined (trailing comma) parent names of *username*.

    Parent names come back from the contract as null-padded bytes32 values;
    each one is decoded to text and truncated at the first NUL byte.
    """
    parent_list = blockchain.contract.functions.getParentsNames(username).call()
    return ''.join(Web3.toText(parent).split('\x00')[0] + ',' for parent in parent_list)

@app.route('/get_der_info', methods=['PUT'])
def get_der_info():
    """Resolve a DER device by username or address and report its parents.

    Either identifier may be blank (the other is resolved on-chain); when
    both are supplied they must refer to the same profile, otherwise the
    request aborts with HTTP 500.
    """
    start = time.time()
    response_object = {'status': 'success'}
    response_object['flag'] = 'True'
    response_object['sent_owner'] = ''  # JSON field in case we have a DER Owner
    if request.method == 'PUT':
        post_data = request.get_json()
        username = post_data.get('username')
        address = post_data.get('address')
        if (username != '' and address == ''):
            address = find_Address(username)
        elif (username == '' and address != ''):
            username = blockchain.contract.functions.profileUsername(address).call()
        else:
            check_username = blockchain.contract.functions.profileUsername(address).call()
            check_address = find_Address(username)
            if (check_username != username) or (check_address != address):
                abort(500)
        # Fix: the three branches above each duplicated the parent-list
        # decoding loop, and the verified branch joined with ', ' while the
        # others used ',' — one helper with a single ',' separator now
        # serves every path.
        response_object['sent_parent'] = _parents_csv(username)
        response_object['message'] = 'Entity is updated!'
        response_object['sent_name'] = username
        response_object['sent_address'] = address
        print(response_object['sent_address'])
    end = time.time()
    print('Search DER Info Query Time: ' + str(end - start))
    return jsonify(response_object)
def _role_parent(role, username):
    """Map the special roles to their fixed parent group name; for any other
    role, ask the contract for the user's actual parent."""
    fixed = {
        'Security Administrator': 'Security Administrators',
        'Security Auditor': 'Security Auditors',
        'DER Owner': 'DER Owners',
    }
    if role in fixed:
        return fixed[role]
    return blockchain.contract.functions.returnParent(username).call()

def _first_child_name(username):
    """Decode the first child name (null-padded bytes32) of *username*."""
    children_list = blockchain.contract.functions.getChildrenNames(username).call()
    return Web3.toText(children_list[0]).split('\x00')[0]

@app.route('/get_entity_info/<entity_id>', methods=['PUT'])
def get_info(entity_id):
    """Resolve an entity by username or address; report its role, parent and
    (for DER Owners) the owned device name.

    Either identifier may be blank; when both are supplied they must refer
    to the same profile or the request aborts with HTTP 500.
    """
    start = time.time()
    response_object = {'status': 'success'}
    response_object['flag'] = 'True'
    response_object['sent_owner'] = ''  # JSON field in case we have a DER Owner
    if request.method == 'PUT':
        post_data = request.get_json()
        username = post_data.get('username')
        address = post_data.get('address')
        if (username != '' and address == ''):
            address = find_Address(username)
            role = blockchain.contract.functions.getRolesfromNames(username).call()
        elif (username == '' and address != ''):
            username = blockchain.contract.functions.profileUsername(address).call()
            role = blockchain.contract.functions.getRolesfromAddress(address).call()
        else:
            check_username = blockchain.contract.functions.profileUsername(address).call()
            check_address = find_Address(username)
            if (check_username != username) or (check_address != address):
                abort(500)
            # Fix: the original looked the role up twice here (by name, then
            # by address, discarding the first result); the address lookup
            # alone was the effective one and is kept.
            role = blockchain.contract.functions.getRolesfromAddress(address).call()
        parent = _role_parent(role, username)
        # DER Owners additionally report their (single) owned device.
        if role == 'DER Owner':
            response_object['sent_owner'] = _first_child_name(username)
        response_object['message'] = 'Entity is updated!'
        response_object['sent_name'] = username
        response_object['sent_address'] = address
        print(response_object['sent_address'])
        response_object['sent_role'] = role
        response_object['sent_parent'] = parent
    end = time.time()
    print('Search User Query Time: ' + str(end - start))
    return jsonify(response_object)
@app.route('/auth', methods=['POST'])
def auth():
    """Grant a token for blank credentials and select the default Ethereum
    account that matches the requested login case."""
    response_object = {'status': 'success'}
    post_data = request.get_json()
    if post_data['user'] == '' and post_data['password'] == '':
        response_object['token'] = 'Token Granted'
    # Login Cases: map each case to the index of its ganache account.
    account_index = {
        'utilities_admin': -1,
        'admin': 0,
        'sp_admin': -2,
        'der_admin': -3,
    }
    case = post_data['case']
    if case in account_index:
        blockchain.web3.eth.defaultAccount = blockchain.web3.eth.accounts[account_index[case]]
    else:
        # Unknown case: withdraw any token that was granted above.
        response_object['token'] = ''
    return jsonify(response_object)
def update_profile_util1(new_username, address, new_role, old_username, old_role):
    """Rename and/or re-role a 'Utility 1' member on-chain.

    No-op when neither the username nor the role changed; otherwise the old
    profile, its name- and address-level associations with 'Utility 1' and
    its role binding are removed, then recreated under *new_username* with
    *new_role*.
    """
    if new_username == old_username and new_role == old_role:
        return
    # Fix: dropped the unused `mined_receipt` constructions — for these
    # non-deployment transactions tx_receipt.contractAddress is None, and
    # none of the sibling update_profile_* helpers built them either.
    # Delete the old profile.
    tx_hash = blockchain.contract.functions.deleteProfile(old_username, address).transact()
    print(blockchain.web3.eth.waitForTransactionReceipt(tx_hash))
    # Delete the name association with the utility.
    tx_hash = blockchain.contract.functions.deleteAssoc(old_username, 'Utility 1').transact()
    print(blockchain.web3.eth.waitForTransactionReceipt(tx_hash))
    # Delete the address association with the utility.
    parent_address = find_Address('Utility 1')
    tx_hash = blockchain.contract.functions.deleteAssocAddress(address, parent_address).transact()
    print(blockchain.web3.eth.waitForTransactionReceipt(tx_hash))
    # Delete the role association.
    tx_hash = blockchain.contract.functions.revokeRole(old_username, address, old_role).transact()
    print(blockchain.web3.eth.waitForTransactionReceipt(tx_hash))
    # Create the replacement profile.
    tx_hash = blockchain.contract.functions.createProfile(new_username, address).transact()
    print(blockchain.web3.eth.waitForTransactionReceipt(tx_hash))
    # Re-attach it to the utility.
    util_address = find_Address('Utility 1')
    tx_hash = blockchain.contract.functions.addChild(new_username, 'Utility 1', address, util_address).transact()
    print(blockchain.web3.eth.waitForTransactionReceipt(tx_hash))
    # Grant the new role.
    tx_hash = blockchain.contract.functions.addRoleToUser(new_username, address, new_role).transact()
    print(blockchain.web3.eth.waitForTransactionReceipt(tx_hash))
def revoke_role_util(address, new_role, old_username, old_role):
    """Swap *old_role* for *new_role* on user *old_username* on-chain."""
    funcs = blockchain.contract.functions
    # Drop the existing role binding, then attach the replacement.
    for bound_call in (funcs.revokeRole(old_username, address, old_role),
                       funcs.addRoleToUser(old_username, address, new_role)):
        receipt = blockchain.web3.eth.waitForTransactionReceipt(bound_call.transact())
        print(receipt)
def revoke_role_derowners(address, new_role, old_username, device):
    """Strip the 'DER Owner' role from *old_username* and detach *device*.

    *new_role* is accepted for signature parity with the other
    revoke_role_* helpers but is not used here.
    """
    funcs = blockchain.contract.functions
    # Remove the role binding.
    receipt = blockchain.web3.eth.waitForTransactionReceipt(
        funcs.revokeRole(old_username, address, 'DER Owner').transact())
    print(receipt)
    # Remove the name-level owner/device association.
    receipt = blockchain.web3.eth.waitForTransactionReceipt(
        funcs.deleteAssoc(device, old_username).transact())
    print(receipt)
    # Remove the address-level association as well.
    device_address = find_Address(device)
    receipt = blockchain.web3.eth.waitForTransactionReceipt(
        funcs.deleteAssocAddress(device_address, address).transact())
    print(receipt)
def revoke_role_secadmins(address, new_role, old_username, old_role):
    """Revoke the 'Security Administrator' role from *old_username* on-chain.

    *new_role* and *old_role* are accepted for signature parity with the
    other revoke_role_* helpers but are not used here.
    """
    tx_hash = blockchain.contract.functions.revokeRole(
        old_username, address, 'Security Administrator').transact()
    print(blockchain.web3.eth.waitForTransactionReceipt(tx_hash))
def revoke_role_secauditors(address, new_role, old_username, old_role):
    """Revoke the 'Security Auditor' role from *old_username* on-chain.

    *new_role* and *old_role* are accepted for signature parity with the
    other revoke_role_* helpers but are not used here.
    """
    tx_hash = blockchain.contract.functions.revokeRole(
        old_username, address, 'Security Auditor').transact()
    print(blockchain.web3.eth.waitForTransactionReceipt(tx_hash))
def update_profile_util2(new_username, address, new_role, old_username, old_role):
    """Rename and/or re-role a 'Utility 2' member on-chain.

    No-op when neither field changed; otherwise the old profile, its name-
    and address-level associations with 'Utility 2' and its role binding
    are removed, then recreated under *new_username* with *new_role*.
    """
    if new_username == old_username and new_role == old_role:
        return
    funcs = blockchain.contract.functions

    def _send(bound_call):
        # Submit the transaction and wait for it to be mined.
        receipt = blockchain.web3.eth.waitForTransactionReceipt(bound_call.transact())
        print(receipt)

    _send(funcs.deleteProfile(old_username, address))
    _send(funcs.deleteAssoc(old_username, 'Utility 2'))
    parent_address = find_Address('Utility 2')
    _send(funcs.deleteAssocAddress(address, parent_address))
    _send(funcs.revokeRole(old_username, address, old_role))
    _send(funcs.createProfile(new_username, address))
    util_address = find_Address('Utility 2')
    _send(funcs.addChild(new_username, 'Utility 2', address, util_address))
    _send(funcs.addRoleToUser(new_username, address, new_role))
def update_profile_util3(new_username, address, new_role, old_username, old_role):
    """Rename and/or re-role a 'Utility 3' member on-chain.

    No-op when neither field changed; otherwise the old profile, its name-
    and address-level associations with 'Utility 3' and its role binding
    are removed, then recreated under *new_username* with *new_role*.
    """
    if new_username == old_username and new_role == old_role:
        return
    funcs = blockchain.contract.functions

    def _send(bound_call):
        # Submit the transaction and wait for it to be mined.
        receipt = blockchain.web3.eth.waitForTransactionReceipt(bound_call.transact())
        print(receipt)

    _send(funcs.deleteProfile(old_username, address))
    _send(funcs.deleteAssoc(old_username, 'Utility 3'))
    parent_address = find_Address('Utility 3')
    _send(funcs.deleteAssocAddress(address, parent_address))
    _send(funcs.revokeRole(old_username, address, old_role))
    _send(funcs.createProfile(new_username, address))
    util_address = find_Address('Utility 3')
    _send(funcs.addChild(new_username, 'Utility 3', address, util_address))
    _send(funcs.addRoleToUser(new_username, address, new_role))
def update_profile_util4(new_username, address, new_role, old_username, old_role):
    """Rename and/or re-role a 'Utility 4' member on-chain.

    No-op when neither field changed; otherwise the old profile, its name-
    and address-level associations with 'Utility 4' and its role binding
    are removed, then recreated under *new_username* with *new_role*.
    """
    if new_username == old_username and new_role == old_role:
        return
    funcs = blockchain.contract.functions

    def _send(bound_call):
        # Submit the transaction and wait for it to be mined.
        receipt = blockchain.web3.eth.waitForTransactionReceipt(bound_call.transact())
        print(receipt)

    _send(funcs.deleteProfile(old_username, address))
    _send(funcs.deleteAssoc(old_username, 'Utility 4'))
    parent_address = find_Address('Utility 4')
    _send(funcs.deleteAssocAddress(address, parent_address))
    _send(funcs.revokeRole(old_username, address, old_role))
    _send(funcs.createProfile(new_username, address))
    util_address = find_Address('Utility 4')
    _send(funcs.addChild(new_username, 'Utility 4', address, util_address))
    _send(funcs.addRoleToUser(new_username, address, new_role))
def update_profile_util5(new_username, address, new_role, old_username, old_role):
    """Rename and/or re-role a 'Utility 5' member on-chain.

    No-op when neither field changed; otherwise the old profile, its name-
    and address-level associations with 'Utility 5' and its role binding
    are removed, then recreated under *new_username* with *new_role*.
    NOTE(review): here the role is revoked before the address association
    is deleted — the other update_profile_util* helpers do the reverse;
    order preserved as found.
    """
    if new_username == old_username and new_role == old_role:
        return
    funcs = blockchain.contract.functions

    def _send(bound_call):
        # Submit the transaction and wait for it to be mined.
        receipt = blockchain.web3.eth.waitForTransactionReceipt(bound_call.transact())
        print(receipt)

    _send(funcs.deleteProfile(old_username, address))
    _send(funcs.deleteAssoc(old_username, 'Utility 5'))
    _send(funcs.revokeRole(old_username, address, old_role))
    parent_address = find_Address('Utility 5')
    _send(funcs.deleteAssocAddress(address, parent_address))
    _send(funcs.createProfile(new_username, address))
    util_address = find_Address('Utility 5')
    _send(funcs.addChild(new_username, 'Utility 5', address, util_address))
    _send(funcs.addRoleToUser(new_username, address, new_role))
def update_profile_sp1(new_username, address, new_role, old_username, old_role):
    """Rename and/or re-role a 'Service Provider 1' member on-chain.

    No-op when neither field changed; otherwise the old profile, its name
    association and role binding are removed, then recreated under
    *new_username* with *new_role*.
    NOTE(review): unlike the update_profile_util* variants, no
    deleteAssocAddress call is made here — confirm that is intentional.
    """
    if new_username == old_username and new_role == old_role:
        return
    funcs = blockchain.contract.functions

    def _send(bound_call):
        # Submit the transaction and wait for it to be mined.
        receipt = blockchain.web3.eth.waitForTransactionReceipt(bound_call.transact())
        print(receipt)

    _send(funcs.deleteProfile(old_username, address))
    _send(funcs.deleteAssoc(old_username, 'Service Provider 1'))
    _send(funcs.revokeRole(old_username, address, old_role))
    _send(funcs.createProfile(new_username, address))
    util_address = find_Address('Service Provider 1')
    _send(funcs.addChild(new_username, 'Service Provider 1', address, util_address))
    _send(funcs.addRoleToUser(new_username, address, new_role))
def update_profile_sp2(new_username, address, new_role, old_username, old_role):
    """Rename and/or re-role a 'Service Provider 2' member on-chain.

    No-op when neither field changed; otherwise the old profile, its name
    association and role binding are removed, then recreated under
    *new_username* with *new_role*.
    NOTE(review): unlike the update_profile_util* variants, no
    deleteAssocAddress call is made here — confirm that is intentional.
    """
    if new_username == old_username and new_role == old_role:
        return
    funcs = blockchain.contract.functions

    def _send(bound_call):
        # Submit the transaction and wait for it to be mined.
        receipt = blockchain.web3.eth.waitForTransactionReceipt(bound_call.transact())
        print(receipt)

    _send(funcs.deleteProfile(old_username, address))
    _send(funcs.deleteAssoc(old_username, 'Service Provider 2'))
    _send(funcs.revokeRole(old_username, address, old_role))
    _send(funcs.createProfile(new_username, address))
    util_address = find_Address('Service Provider 2')
    _send(funcs.addChild(new_username, 'Service Provider 2', address, util_address))
    _send(funcs.addRoleToUser(new_username, address, new_role))
def update_profile_sp3(new_username, address, new_role, old_username, old_role):
    """Rename and/or re-role a 'Service Provider 3' member on-chain.

    No-op when neither field changed; otherwise the old profile, its name
    association and role binding are removed, then recreated under
    *new_username* with *new_role*.
    NOTE(review): unlike the update_profile_util* variants, no
    deleteAssocAddress call is made here — confirm that is intentional.
    """
    if new_username == old_username and new_role == old_role:
        return
    funcs = blockchain.contract.functions

    def _send(bound_call):
        # Submit the transaction and wait for it to be mined.
        receipt = blockchain.web3.eth.waitForTransactionReceipt(bound_call.transact())
        print(receipt)

    _send(funcs.deleteProfile(old_username, address))
    _send(funcs.deleteAssoc(old_username, 'Service Provider 3'))
    _send(funcs.revokeRole(old_username, address, old_role))
    _send(funcs.createProfile(new_username, address))
    util_address = find_Address('Service Provider 3')
    _send(funcs.addChild(new_username, 'Service Provider 3', address, util_address))
    _send(funcs.addRoleToUser(new_username, address, new_role))
def update_profile_sp4(new_username, address, new_role, old_username, old_role):
    """Rename and/or re-role a 'Service Provider 4' member on-chain.

    No-op when neither field changed; otherwise the old profile, its name
    association and role binding are removed, then recreated under
    *new_username* with *new_role*.
    NOTE(review): unlike the update_profile_util* variants, no
    deleteAssocAddress call is made here — confirm that is intentional.
    """
    if new_username == old_username and new_role == old_role:
        return
    funcs = blockchain.contract.functions

    def _send(bound_call):
        # Submit the transaction and wait for it to be mined.
        receipt = blockchain.web3.eth.waitForTransactionReceipt(bound_call.transact())
        print(receipt)

    _send(funcs.deleteProfile(old_username, address))
    _send(funcs.deleteAssoc(old_username, 'Service Provider 4'))
    _send(funcs.revokeRole(old_username, address, old_role))
    _send(funcs.createProfile(new_username, address))
    util_address = find_Address('Service Provider 4')
    _send(funcs.addChild(new_username, 'Service Provider 4', address, util_address))
    _send(funcs.addRoleToUser(new_username, address, new_role))
def update_profile_sp5(new_username, address, new_role, old_username, old_role):
    """Rename and/or re-role a 'Service Provider 5' member on-chain.

    No-op when neither field changed; otherwise the old profile, its name
    association and role binding are removed, then recreated under
    *new_username* with *new_role*.
    NOTE(review): unlike the update_profile_util* variants, no
    deleteAssocAddress call is made here — confirm that is intentional.
    """
    if new_username == old_username and new_role == old_role:
        return
    funcs = blockchain.contract.functions

    def _send(bound_call):
        # Submit the transaction and wait for it to be mined.
        receipt = blockchain.web3.eth.waitForTransactionReceipt(bound_call.transact())
        print(receipt)

    _send(funcs.deleteProfile(old_username, address))
    _send(funcs.deleteAssoc(old_username, 'Service Provider 5'))
    _send(funcs.revokeRole(old_username, address, old_role))
    _send(funcs.createProfile(new_username, address))
    util_address = find_Address('Service Provider 5')
    _send(funcs.addChild(new_username, 'Service Provider 5', address, util_address))
    _send(funcs.addRoleToUser(new_username, address, new_role))
def update_profile_sp6(new_username,address, new_role, old_username, old_role):
if(new_username == old_username and new_role == old_role):
return
else:
# Delete the old Profile
created_profile_hash = blockchain.contract.functions.deleteProfile(old_username, address).transact()
tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
print(tx_receipt)
# Delete the Name Association with Utility
created_profile_hash = blockchain.contract.functions.deleteAssoc(old_username, 'Service Provider 6').transact()
tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
print(tx_receipt)
# Delete the Role Association
created_profile_hash = blockchain.contract.functions.revokeRole(old_username, address, old_role).transact()
tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
print(tx_receipt)
# Create a new profile
created_profile_hash = blockchain.contract.functions.createProfile(new_username, address).transact()
tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
print(tx_receipt)
# Create new association
util_address = find_Address('Service Provider 6')
created_profile_hash = blockchain.contract.functions.addChild(new_username, 'Service Provider 6', address, util_address).transact()
tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
print(tx_receipt)
# Create role
created_profile_hash = blockchain.contract.functions.addRoleToUser(new_username, address, new_role).transact()
tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
print(tx_receipt)
def update_profile_derowners(new_username,address, new_role, old_username, old_role):
if(new_username == old_username and new_role == old_role):
return
else:
# Delete the old Profile
created_profile_hash = blockchain.contract.functions.deleteProfile(old_username, address).transact()
tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
print(tx_receipt)
# Delete the Name Association with Utility
created_profile_hash = blockchain.contract.functions.deleteAssoc(old_role, old_username).transact()
tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
print(tx_receipt)
# Delete the Role Association
created_profile_hash = blockchain.contract.functions.revokeRole(old_username, address, 'DER Owner').transact()
tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
print(tx_receipt)
# Create a new profile
created_profile_hash = blockchain.contract.functions.createProfile(new_username, address).transact()
tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
print(tx_receipt)
# Create new association
util_address = find_Address(old_role)
created_profile_hash = blockchain.contract.functions.addChild(new_username, old_role, address, util_address).transact()
tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
print(tx_receipt)
# Create role
created_profile_hash = blockchain.contract.functions.addRoleToUser(new_username, address, 'DER Owner').transact()
tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
print(tx_receipt)
def update_profile_secadmins(new_username,address, new_role, old_username, old_role):
if(new_username == old_username and new_role == old_role):
return
else:
# Delete the old Profile
created_profile_hash = blockchain.contract.functions.deleteProfile(old_username, address).transact()
tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
print(tx_receipt)
# Delete the Role Association
created_profile_hash = blockchain.contract.functions.revokeRole(old_username, address, 'Security Administrator').transact()
tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
print(tx_receipt)
# Create a new profile
created_profile_hash = blockchain.contract.functions.createProfile(new_username, address).transact()
tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
print(tx_receipt)
# Create role
created_profile_hash = blockchain.contract.functions.addRoleToUser(new_username, address, 'Security Administrator').transact()
tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
print(tx_receipt)
def update_profile_secauditors(new_username,address, new_role, old_username, old_role):
if(new_username == old_username and new_role == old_role):
return
else:
# Delete the old Profile
created_profile_hash = blockchain.contract.functions.deleteProfile(old_username, address).transact()
tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
print(tx_receipt)
# Delete the Role Association
created_profile_hash = blockchain.contract.functions.revokeRole(old_username, address, 'Security Auditor').transact()
tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
print(tx_receipt)
# Create a new profile
created_profile_hash = blockchain.contract.functions.createProfile(new_username, address).transact()
tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
print(tx_receipt)
# Create role
created_profile_hash = blockchain.contract.functions.addRoleToUser(new_username, address, 'Security Auditor').transact()
tx_receipt = blockchain.web3.eth.waitForTransactionReceipt(created_profile_hash)
print(tx_receipt)
def remove_entity_util1(entity_id):
    """Remove the first UTILITY1 entry whose address matches; True when removed."""
    hit = next((e for e in UTILITY1 if e['address'] == entity_id), None)
    if hit is None:
        return False
    UTILITY1.remove(hit)
    return True

def remove_entity_util2(entity_id):
    """Remove the first UTILITY2 entry whose address matches; True when removed."""
    hit = next((e for e in UTILITY2 if e['address'] == entity_id), None)
    if hit is None:
        return False
    UTILITY2.remove(hit)
    return True

def remove_entity_util3(entity_id):
    """Remove the first UTILITY3 entry whose address matches; True when removed."""
    hit = next((e for e in UTILITY3 if e['address'] == entity_id), None)
    if hit is None:
        return False
    UTILITY3.remove(hit)
    return True

def remove_entity_util4(entity_id):
    """Remove the first UTILITY4 entry whose address matches; True when removed."""
    hit = next((e for e in UTILITY4 if e['address'] == entity_id), None)
    if hit is None:
        return False
    UTILITY4.remove(hit)
    return True

def remove_entity_util5(entity_id):
    """Remove the first UTILITY5 entry whose address matches; True when removed."""
    hit = next((e for e in UTILITY5 if e['address'] == entity_id), None)
    if hit is None:
        return False
    UTILITY5.remove(hit)
    return True

def remove_entity_sp1(entity_id):
    """Remove the first SP1 entry whose address matches; True when removed."""
    hit = next((e for e in SP1 if e['address'] == entity_id), None)
    if hit is None:
        return False
    SP1.remove(hit)
    return True

def remove_entity_sp2(entity_id):
    """Remove the first SP2 entry whose address matches; True when removed."""
    hit = next((e for e in SP2 if e['address'] == entity_id), None)
    if hit is None:
        return False
    SP2.remove(hit)
    return True

def remove_entity_sp3(entity_id):
    """Remove the first SP3 entry whose address matches; True when removed."""
    hit = next((e for e in SP3 if e['address'] == entity_id), None)
    if hit is None:
        return False
    SP3.remove(hit)
    return True

def remove_entity_sp4(entity_id):
    """Remove the first SP4 entry whose address matches; True when removed."""
    hit = next((e for e in SP4 if e['address'] == entity_id), None)
    if hit is None:
        return False
    SP4.remove(hit)
    return True

def remove_entity_sp5(entity_id):
    """Remove the first SP5 entry whose address matches; True when removed."""
    hit = next((e for e in SP5 if e['address'] == entity_id), None)
    if hit is None:
        return False
    SP5.remove(hit)
    return True

def remove_entity_derowners(entity_id):
    """Remove the first DEROWNERS entry whose address matches; True when removed."""
    hit = next((e for e in DEROWNERS if e['address'] == entity_id), None)
    if hit is None:
        return False
    DEROWNERS.remove(hit)
    return True

def remove_entity_secadmins(entity_id):
    """Remove the first SECADMINS entry whose address matches; True when removed."""
    hit = next((e for e in SECADMINS if e['address'] == entity_id), None)
    if hit is None:
        return False
    SECADMINS.remove(hit)
    return True

def remove_entity_secauditors(entity_id):
    """Remove the first SECAUDITORS entry whose address matches; True when removed."""
    hit = next((e for e in SECAUDITORS if e['address'] == entity_id), None)
    if hit is None:
        return False
    SECAUDITORS.remove(hit)
    return True
@app.route('/utility1', methods=['GET'])
def entities_utility1():
    """List Utility 1 entities."""
    return jsonify({'status': 'success', 'entities': UTILITY1})

@app.route('/utility2', methods=['GET'])
def entities_utility2():
    """List Utility 2 entities."""
    return jsonify({'status': 'success', 'entities': UTILITY2})

@app.route('/utility3', methods=['GET'])
def entities_utility3():
    """List Utility 3 entities."""
    return jsonify({'status': 'success', 'entities': UTILITY3})

@app.route('/utility4', methods=['GET'])
def entities_utility4():
    """List Utility 4 entities."""
    return jsonify({'status': 'success', 'entities': UTILITY4})

@app.route('/utility5', methods=['GET'])
def entities_utility5():
    """List Utility 5 entities."""
    return jsonify({'status': 'success', 'entities': UTILITY5})

@app.route('/sp1', methods=['GET'])
def entities_sp1():
    """List Service Provider 1 entities."""
    return jsonify({'status': 'success', 'entities': SP1})

@app.route('/sp2', methods=['GET'])
def entities_sp2():
    """List Service Provider 2 entities."""
    return jsonify({'status': 'success', 'entities': SP2})

@app.route('/sp3', methods=['GET'])
def entities_sp3():
    """List Service Provider 3 entities."""
    return jsonify({'status': 'success', 'entities': SP3})

@app.route('/sp4', methods=['GET'])
def entities_sp4():
    """List Service Provider 4 entities."""
    return jsonify({'status': 'success', 'entities': SP4})

@app.route('/sp5', methods=['GET'])
def entities_sp5():
    """List Service Provider 5 entities."""
    return jsonify({'status': 'success', 'entities': SP5})

@app.route('/derowners', methods=['GET'])
def entities_derowners():
    """List DER owner entities."""
    return jsonify({'status': 'success', 'entities': DEROWNERS})

@app.route('/secadmins', methods=['GET'])
def entities_secadmins():
    """List Security Administrator entities."""
    return jsonify({'status': 'success', 'entities': SECADMINS})

@app.route('/secauditors', methods=['GET'])
def entities_secauditors():
    """List Security Auditor entities."""
    return jsonify({'status': 'success', 'entities': SECAUDITORS})
@app.route('/utilities/<entity_id>', methods=['PUT'])
def single_entity(entity_id):
    """Update a Utility 1 entity on the blockchain and in the local cache.

    Reads oldName/oldRole and name/role from the JSON body, replaces the
    on-chain profile, and re-inserts the refreshed entry into UTILITY1.
    Prints the wall-clock duration of the whole update.
    """
    start = time.time()
    payload = request.get_json()
    new_name = payload.get('name')
    new_role = payload.get('role')
    remove_entity_util1(entity_id)
    update_profile_util1(new_name, entity_id, new_role,
                         payload.get('oldName'), payload.get('oldRole'))
    UTILITY1.append({'name': new_name, 'address': entity_id, 'role': new_role})
    end = time.time()
    print('The Utility Update Time is:' + str(end - start))
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})
@app.route('/utilitiesrevoke/<entity_id>', methods=['PUT'])
def single_entity_revoke(entity_id):
    """Revoke a Utility 1 entity's role (it becomes the empty string).

    The route only accepts PUT, so no method guard is needed. Prints the
    wall-clock duration of the revocation.
    """
    start = time.time()
    payload = request.get_json()
    name = payload.get('name')
    remove_entity_util1(entity_id)
    revoke_role_util(entity_id, '', name, payload.get('oldRole'))
    UTILITY1.append({'name': name, 'address': entity_id, 'role': ''})
    end = time.time()
    print('The revoke time is:' + str(end - start))
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})

@app.route('/utilities2revoke/<entity_id>', methods=['PUT'])
def single_entity2_revoke(entity_id):
    """Revoke a Utility 2 entity's role."""
    payload = request.get_json()
    name = payload.get('name')
    remove_entity_util2(entity_id)
    revoke_role_util(entity_id, '', name, payload.get('oldRole'))
    UTILITY2.append({'name': name, 'address': entity_id, 'role': ''})
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})

@app.route('/utilities3revoke/<entity_id>', methods=['PUT'])
def single_entity3_revoke(entity_id):
    """Revoke a Utility 3 entity's role."""
    payload = request.get_json()
    name = payload.get('name')
    remove_entity_util3(entity_id)
    revoke_role_util(entity_id, '', name, payload.get('oldRole'))
    UTILITY3.append({'name': name, 'address': entity_id, 'role': ''})
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})

@app.route('/utilities4revoke/<entity_id>', methods=['PUT'])
def single_entity4_revoke(entity_id):
    """Revoke a Utility 4 entity's role."""
    payload = request.get_json()
    name = payload.get('name')
    remove_entity_util4(entity_id)
    revoke_role_util(entity_id, '', name, payload.get('oldRole'))
    UTILITY4.append({'name': name, 'address': entity_id, 'role': ''})
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})

@app.route('/utilities5revoke/<entity_id>', methods=['PUT'])
def single_entity5_revoke(entity_id):
    """Revoke a Utility 5 entity's role."""
    payload = request.get_json()
    name = payload.get('name')
    remove_entity_util5(entity_id)
    revoke_role_util(entity_id, '', name, payload.get('oldRole'))
    UTILITY5.append({'name': name, 'address': entity_id, 'role': ''})
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})

@app.route('/sp1revoke/<entity_id>', methods=['PUT'])
def single_sp_revoke(entity_id):
    """Revoke a Service Provider 1 entity's role."""
    payload = request.get_json()
    name = payload.get('name')
    remove_entity_sp1(entity_id)
    revoke_role_util(entity_id, '', name, payload.get('oldRole'))
    SP1.append({'name': name, 'address': entity_id, 'role': ''})
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})

@app.route('/sp2revoke/<entity_id>', methods=['PUT'])
def single_sp2_revoke(entity_id):
    """Revoke a Service Provider 2 entity's role."""
    payload = request.get_json()
    name = payload.get('name')
    remove_entity_sp2(entity_id)
    revoke_role_util(entity_id, '', name, payload.get('oldRole'))
    SP2.append({'name': name, 'address': entity_id, 'role': ''})
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})

@app.route('/sp3revoke/<entity_id>', methods=['PUT'])
def single_sp3_revoke(entity_id):
    """Revoke a Service Provider 3 entity's role."""
    payload = request.get_json()
    name = payload.get('name')
    remove_entity_sp3(entity_id)
    revoke_role_util(entity_id, '', name, payload.get('oldRole'))
    SP3.append({'name': name, 'address': entity_id, 'role': ''})
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})

@app.route('/sp4revoke/<entity_id>', methods=['PUT'])
def single_sp4_revoke(entity_id):
    """Revoke a Service Provider 4 entity's role."""
    payload = request.get_json()
    name = payload.get('name')
    remove_entity_sp4(entity_id)
    revoke_role_util(entity_id, '', name, payload.get('oldRole'))
    SP4.append({'name': name, 'address': entity_id, 'role': ''})
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})

@app.route('/sp5revoke/<entity_id>', methods=['PUT'])
def single_sp5_revoke(entity_id):
    """Revoke a Service Provider 5 entity's role."""
    payload = request.get_json()
    name = payload.get('name')
    remove_entity_sp5(entity_id)
    revoke_role_util(entity_id, '', name, payload.get('oldRole'))
    SP5.append({'name': name, 'address': entity_id, 'role': ''})
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})

@app.route('/derownersrevoke/<entity_id>', methods=['PUT'])
def single_derownersrevoke(entity_id):
    """Revoke a DER owner's role; the cache entry is not re-inserted."""
    payload = request.get_json()
    name = payload.get('name')
    remove_entity_derowners(entity_id)
    # 'oldRole' carries the DER device identifier for owner revocations.
    revoke_role_derowners(entity_id, '', name, payload.get('oldRole'))
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})

@app.route('/secadminsrevoke/<entity_id>', methods=['PUT'])
def single_secadminsrevoke(entity_id):
    """Revoke a Security Administrator's role; the cache entry is not re-inserted."""
    payload = request.get_json()
    name = payload.get('name')
    remove_entity_secadmins(entity_id)
    revoke_role_secadmins(entity_id, '', name, payload.get('oldRole'))
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})

@app.route('/secauditorsrevoke/<entity_id>', methods=['PUT'])
def single_secauditorsrevoke(entity_id):
    """Revoke a Security Auditor's role; the cache entry is not re-inserted."""
    payload = request.get_json()
    name = payload.get('name')
    remove_entity_secauditors(entity_id)
    revoke_role_secauditors(entity_id, '', name, payload.get('oldRole'))
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})
@app.route('/utilities2/<entity_id>', methods=['PUT'])
def single_entity2(entity_id):
    """Update a Utility 2 entity on-chain and refresh the cached entry."""
    payload = request.get_json()
    new_name = payload.get('name')
    new_role = payload.get('role')
    remove_entity_util2(entity_id)
    update_profile_util2(new_name, entity_id, new_role,
                         payload.get('oldName'), payload.get('oldRole'))
    UTILITY2.append({'name': new_name, 'address': entity_id, 'role': new_role})
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})

@app.route('/utilities3/<entity_id>', methods=['PUT'])
def single_entity3(entity_id):
    """Update a Utility 3 entity on-chain and refresh the cached entry."""
    payload = request.get_json()
    new_name = payload.get('name')
    new_role = payload.get('role')
    remove_entity_util3(entity_id)
    update_profile_util3(new_name, entity_id, new_role,
                         payload.get('oldName'), payload.get('oldRole'))
    UTILITY3.append({'name': new_name, 'address': entity_id, 'role': new_role})
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})

@app.route('/utilities4/<entity_id>', methods=['PUT'])
def single_entity4(entity_id):
    """Update a Utility 4 entity on-chain and refresh the cached entry."""
    payload = request.get_json()
    new_name = payload.get('name')
    new_role = payload.get('role')
    remove_entity_util4(entity_id)
    update_profile_util4(new_name, entity_id, new_role,
                         payload.get('oldName'), payload.get('oldRole'))
    UTILITY4.append({'name': new_name, 'address': entity_id, 'role': new_role})
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})

@app.route('/utilities5/<entity_id>', methods=['PUT'])
def single_entity5(entity_id):
    """Update a Utility 5 entity on-chain and refresh the cached entry."""
    payload = request.get_json()
    new_name = payload.get('name')
    new_role = payload.get('role')
    remove_entity_util5(entity_id)
    update_profile_util5(new_name, entity_id, new_role,
                         payload.get('oldName'), payload.get('oldRole'))
    UTILITY5.append({'name': new_name, 'address': entity_id, 'role': new_role})
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})

@app.route('/sp1/<entity_id>', methods=['PUT'])
def single_sp1(entity_id):
    """Update a Service Provider 1 entity on-chain and refresh the cached entry."""
    payload = request.get_json()
    new_name = payload.get('name')
    new_role = payload.get('role')
    remove_entity_sp1(entity_id)
    update_profile_sp1(new_name, entity_id, new_role,
                       payload.get('oldName'), payload.get('oldRole'))
    SP1.append({'name': new_name, 'address': entity_id, 'role': new_role})
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})

@app.route('/sp2/<entity_id>', methods=['PUT'])
def single_sp2(entity_id):
    """Update a Service Provider 2 entity on-chain and refresh the cached entry."""
    payload = request.get_json()
    new_name = payload.get('name')
    new_role = payload.get('role')
    remove_entity_sp2(entity_id)
    update_profile_sp2(new_name, entity_id, new_role,
                       payload.get('oldName'), payload.get('oldRole'))
    SP2.append({'name': new_name, 'address': entity_id, 'role': new_role})
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})

@app.route('/sp3/<entity_id>', methods=['PUT'])
def single_sp3(entity_id):
    """Update a Service Provider 3 entity on-chain and refresh the cached entry."""
    payload = request.get_json()
    new_name = payload.get('name')
    new_role = payload.get('role')
    remove_entity_sp3(entity_id)
    update_profile_sp3(new_name, entity_id, new_role,
                       payload.get('oldName'), payload.get('oldRole'))
    SP3.append({'name': new_name, 'address': entity_id, 'role': new_role})
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})

@app.route('/sp4/<entity_id>', methods=['PUT'])
def single_sp4(entity_id):
    """Update a Service Provider 4 entity on-chain and refresh the cached entry."""
    payload = request.get_json()
    new_name = payload.get('name')
    new_role = payload.get('role')
    remove_entity_sp4(entity_id)
    update_profile_sp4(new_name, entity_id, new_role,
                       payload.get('oldName'), payload.get('oldRole'))
    SP4.append({'name': new_name, 'address': entity_id, 'role': new_role})
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})

@app.route('/sp5/<entity_id>', methods=['PUT'])
def single_sp5(entity_id):
    """Update a Service Provider 5 entity on-chain and refresh the cached entry."""
    payload = request.get_json()
    new_name = payload.get('name')
    new_role = payload.get('role')
    remove_entity_sp5(entity_id)
    update_profile_sp5(new_name, entity_id, new_role,
                       payload.get('oldName'), payload.get('oldRole'))
    SP5.append({'name': new_name, 'address': entity_id, 'role': new_role})
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})

@app.route('/derowners/<entity_id>', methods=['PUT'])
def single_derowners(entity_id):
    """Update a DER owner entity on-chain and refresh the cached entry."""
    payload = request.get_json()
    new_name = payload.get('name')
    new_role = payload.get('role')
    remove_entity_derowners(entity_id)
    update_profile_derowners(new_name, entity_id, new_role,
                             payload.get('oldName'), payload.get('oldRole'))
    DEROWNERS.append({'name': new_name, 'address': entity_id, 'role': new_role})
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})

@app.route('/secadmins/<entity_id>', methods=['PUT'])
def single_secadmins(entity_id):
    """Update a Security Administrator entity on-chain and refresh the cached entry."""
    payload = request.get_json()
    new_name = payload.get('name')
    new_role = payload.get('role')
    remove_entity_secadmins(entity_id)
    update_profile_secadmins(new_name, entity_id, new_role,
                             payload.get('oldName'), payload.get('oldRole'))
    SECADMINS.append({'name': new_name, 'address': entity_id, 'role': new_role})
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})

@app.route('/secauditors/<entity_id>', methods=['PUT'])
def single_secauditors(entity_id):
    """Update a Security Auditor entity on-chain and refresh the cached entry."""
    payload = request.get_json()
    new_name = payload.get('name')
    new_role = payload.get('role')
    remove_entity_secauditors(entity_id)
    update_profile_secauditors(new_name, entity_id, new_role,
                               payload.get('oldName'), payload.get('oldRole'))
    SECAUDITORS.append({'name': new_name, 'address': entity_id, 'role': new_role})
    return jsonify({'status': 'success', 'message': 'Entity is updated!'})
@app.route('/get_admin_address', methods=['GET'])
def get_admin_address():
    """Return the RBAC administrator (default) account address."""
    return jsonify(blockchain.web3.eth.defaultAccount)

@app.route('/get_admin_balance', methods=['GET'])
def get_admin_balance():
    """Return the administrator's balance converted to ether, as a string."""
    admin = blockchain.web3.eth.defaultAccount
    wei_balance = blockchain.web3.eth.getBalance(admin)
    return jsonify(str(blockchain.web3.fromWei(wei_balance, 'ether')))

@app.route('/get_sc_address', methods=['GET'])
def get_sc_address():
    """Return the deployed RBAC smart-contract address."""
    return jsonify(blockchain.contract_address)

@app.route('/get_blocks', methods=['GET'])
def get_blocks():
    """Return the chain length (latest block number + 1) as a string."""
    latest = blockchain.web3.eth.get_block('latest')
    return jsonify(str(latest['number'] + 1))

@app.route('/get_entities', methods=['GET'])
def get_entities():
    """Return the number of blockchain accounts managed by the connector."""
    return jsonify(str(blockchain.blockchain_accounts))
class Blockchain_Connector:
    """Boots a private Ethereum node and binds to the deployed RBAC contract.

    Spawns the account-creation shell script, connects web3 over HTTP, and
    loads the contract ABI/address from the Truffle build artifact.
    """
    def __init__(self, net_id, net_ip, port):
        self.blockchain_network_id = net_id
        self.net_ip = net_ip
        self.port = port
        # One extra account beyond the generated entities (the admin account).
        self.blockchain_accounts = rbac_generation.total_accounts + 1
        # NOTE(review): shell=True with %-interpolated arguments — safe only
        # while net_id/port come from trusted configuration; consider a
        # list-form Popen with shell=False.
        self.pro = subprocess.Popen(['./accounts_creator.sh %s %s %s %s' %(self.blockchain_network_id, self.blockchain_accounts, self.port, '.ganache/data'+str(net_id))], shell = True)
        # Fixed grace period for the node/accounts script to come up.
        t = 60
        time.sleep(t)
        self.blockchain_address = 'http://'+ self.net_ip + ':' + self.port
        self.web3 = Web3(HTTPProvider(self.blockchain_address))
        print('Connection with Private Ethereum Blockchain:' + str(self.web3.isConnected()))
        # First account acts as the RBAC administrator and signs all txs.
        self.web3.eth.defaultAccount = self.web3.eth.accounts[0]
        print('RBAC Administrator Address:'+str(self.web3.eth.defaultAccount))
        # subprocess.Popen(['./RBAC_sc_migrator.sh'], shell=True)
        # t = 270 # Sleep for 30 seconds until the subprocess has began
        # time.sleep(t)
        # Truffle build artifact of the migrated RBAC contract (machine-local path).
        self.compiled_contract_path = '/home/george/Desktop/Sandia_RBAC/Decentralized_Ethereum/build/contracts/RBAC.json'
        # Contract Address - Get that from Json File after Smart Contract Migration [Automatic]
        with open(self.compiled_contract_path) as file:
            self.contract_json = json.load(file) # Load Contract Information as JSON file
        self.contract_abi = self.contract_json['abi'] # Fetch RBAC smart contract's abi - Call its functions
        self.contract_address = self.contract_json['networks'][str(self.blockchain_network_id)]['address']
        # Fetch deployed RBAC smart contract instance
        self.contract = self.web3.eth.contract(address = self.contract_address, abi = self.contract_abi)
        print('Deployed RBAC Contract Address:' + str(self.contract_address))
        print('************************ Connection with Blockchain is done ******************************')
if __name__ == '__main__':
    # Give the RBAC model generator time to finish before reading its output.
    starting = time.time()
    time.sleep(30)
    print('Starting app.py ...')
    net_id = sys.argv[1]
    #net_id = 1200
    blockchain = Blockchain_Connector(net_id, '127.0.0.1', '9545')
    time.sleep(5)
    print('Connection is done with Contract Address:'+str(blockchain.contract_address))

    def build_group(group_name, expand_der=True):
        """Build the JSON-serialisable entity list for one RBAC group.

        Each entry is {'name', 'address', 'role'}. When expand_der is True,
        the special 'DER' key is expanded into one role-less entry per DER
        device (matching the original per-group construction loops).
        """
        entries = []
        for entity, value in rbac_generation.rbac[group_name].items():
            if expand_der and entity == 'DER':
                for der in value:
                    entries.append({
                        'name': der,
                        'address': find_Address(der),
                        'role': ''
                    })
            else:
                entries.append({
                    'name': entity,
                    'address': find_Address(entity),
                    'role': value
                })
        return entries

    # ---------------- Utility / SP group caches ------------------ #
    UTILITY1 = build_group('Utility 1')
    UTILITY2 = build_group('Utility 2')
    UTILITY3 = build_group('Utility 3')
    UTILITY4 = build_group('Utility 4')
    UTILITY5 = build_group('Utility 5')
    SP1 = build_group('Service Provider 1')
    SP2 = build_group('Service Provider 2')
    SP3 = build_group('Service Provider 3')
    SP4 = build_group('Service Provider 4')
    SP5 = build_group('Service Provider 5')
    # ---------------- DER owners / security staff caches --------- #
    # These groups have no 'DER' sub-key to expand.
    DEROWNERS = build_group('DER Device', expand_der=False)
    SECADMINS = build_group('Security Administrator', expand_der=False)
    SECAUDITORS = build_group('Security Auditor', expand_der=False)

    ending = time.time()
    print('Server Initiation Time is: '+ str(ending-starting))
    app.run(threaded=True)
| 41.496331
| 184
| 0.654165
| 10,540
| 96,147
| 5.733776
| 0.033966
| 0.034401
| 0.063143
| 0.052024
| 0.877271
| 0.856472
| 0.836317
| 0.795248
| 0.779495
| 0.759109
| 0
| 0.012166
| 0.234037
| 96,147
| 2,316
| 185
| 41.514249
| 0.808446
| 0.059409
| 0
| 0.716867
| 0
| 0
| 0.094474
| 0.011779
| 0
| 0
| 0.003258
| 0
| 0
| 1
| 0.052026
| false
| 0.000548
| 0.006572
| 0.002191
| 0.11391
| 0.072289
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a5aa33b19e20a2278dbe35c771b96c6dfa4d2875
| 20,996
|
py
|
Python
|
cone_fitting.py
|
dartmouthrobotics/escalibr
|
10eb5755a1b971374d33c585b616680809e61476
|
[
"MIT"
] | 2
|
2021-03-31T17:41:30.000Z
|
2021-08-08T23:33:53.000Z
|
cone_fitting.py
|
dartmouthrobotics/escalibr
|
10eb5755a1b971374d33c585b616680809e61476
|
[
"MIT"
] | null | null | null |
cone_fitting.py
|
dartmouthrobotics/escalibr
|
10eb5755a1b971374d33c585b616680809e61476
|
[
"MIT"
] | 1
|
2019-08-31T03:59:32.000Z
|
2019-08-31T03:59:32.000Z
|
#!/usr/bin/env python
import xml.etree.ElementTree as ET
import numpy as np
from matplotlib import pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from skimage.measure import LineModelND, ransac
import math
# Confidence thresholds: points at or above confidence_level are used to fit
# the cone; points strictly below low_confidence_level are kept separately to
# shrink the cone later.  Points in between are discarded.
confidence_level = 100
low_confidence_level = 30

xs = []        # x coordinates (strings, converted to float below)
ys = []        # y coordinates
zs = []        # z coordinates
xyz = []       # high-confidence [x, y, z] triples
low_perc = []  # low-confidence [x, y, z] triples
dist = []      # sonar-reported depth for each high-confidence point

# All five input files share the same schema, so parse them in one loop
# instead of five copy-pasted blocks.
for index in range(6, 11):
    tree = ET.parse('output_%d_all_points.xml' % index)
    root = tree.getroot()
    for child in root:
        confidence = float(child.get('confidence'))
        point = [float(child.get('x')), float(child.get('y')),
                 float(child.get('z'))]
        if confidence >= confidence_level:
            xs.append(child.get('x'))
            ys.append(child.get('y'))
            zs.append(child.get('z'))
            xyz.append(point)
            dist.append(child.get('depth'))
        elif confidence < low_confidence_level:
            low_perc.append(point)

xs = np.array(xs).astype(float)
ys = np.array(ys).astype(float)
zs = np.array(zs).astype(float)
xyz = np.array(xyz)
low_perc = np.array(low_perc)
dist = np.array(dist).astype(float)
print(xyz)
# model_robust, inliers = ransac(xyz, LineModelND, min_samples=1000,
# residual_threshold=0.01, max_trials=5000)
# outliers = inliers == False
#
# print(model_robust.params)
#
# x_ransac = np.array([model_robust.params[0][0], model_robust.params[1][0]])
# y_ransac = np.array([model_robust.params[0][1], model_robust.params[1][1]])
# z_ransac = np.array([model_robust.params[0][2], model_robust.params[1][2]])
# Estimate the cone vertex (echo-sounder position): starting from an initial
# guess, take two relaxation steps that move the vertex so the distances to
# the high-confidence points better match the measured ranges dist[i].
vertex = np.array([-0.14, 0.06, 0.05])
for _ in range(2):
    move_to = np.array([0.0, 0.0, 0.0])
    for i in range(xs.size):
        data_vect = xyz[i] - vertex
        cur_dist = np.linalg.norm(data_vect)
        unit_data_vect = data_vect / cur_dist
        # Signed correction: points away from the data point when the vertex
        # is too close, toward it when too far.  (The original's two branches
        # both reduce to this single expression.)
        move_to = move_to + (cur_dist - dist[i]) * unit_data_vect
    print("move to")
    print(move_to)
    move_to = move_to / xs.size  # average correction over all points
    print("move to")
    print(move_to)
    dist_to_new = np.linalg.norm(move_to)
    print(dist_to_new)
    vertex = vertex + move_to

print("ECHO SOUNDER POSITION:")
print(vertex)

# Initial cone axis: mean of the unit vectors from the vertex to every
# high-confidence point (note: mean, not re-normalized here).
axis = np.array([0.0, 0.0, 0.0])
for i in range(xs.size):
    unit_data_vect = (xyz[i] - vertex) / np.linalg.norm(xyz[i] - vertex)
    axis = axis + unit_data_vect
axis = axis / xs.size
print("current axis:")
print(axis)
# ---------------------------------------------------------------------------
# Cone fitting.
# The sonar beam is modeled as a cone with apex at `vertex`.  The half-angle
# is grown until every high-confidence point lies inside the cone, then
# shrunk until no low-confidence point does.  Several perturbed axes are then
# tried in three rounds.  The original's ~10 copy-pasted trial loops (plus
# large blocks of commented-out duplicates) are collapsed into the helpers
# below; the printed output and trial order are preserved.
# ---------------------------------------------------------------------------

START_ANGLE = 0.261799  # initial half-angle guess (~15 degrees, radians)
ANGLE_STEP = 0.01       # half-angle adjustment per violating point


def _orthogonal_distance(point, vertex, axis):
    """Return the distance from *point* to the axis line through *vertex*."""
    projection = np.dot(point - vertex, axis) / np.dot(axis, axis)
    offset = projection * axis + vertex - point
    return np.linalg.norm(offset)


def widen_to_enclose(points, vertex, axis, angle=START_ANGLE):
    """Grow the cone half-angle until every point in *points* lies inside.

    Mirrors the original loop exactly: the angle is bumped as soon as a
    violating point is found, the pass continues with the new angle, and
    passes repeat until one completes with no violations.
    """
    points_outside = True
    while points_outside:
        points_outside = False
        for point in points:
            cone_dist = np.dot(point - vertex, axis)
            cone_radius = cone_dist * math.tan(angle)
            if _orthogonal_distance(point, vertex, axis) > cone_radius:
                angle = angle + ANGLE_STEP
                points_outside = True
    return angle


def shrink_to_exclude(points, vertex, axis, angle):
    """Shrink the cone half-angle until no point in *points* lies inside."""
    points_inside = True
    while points_inside:
        points_inside = False
        for point in points:
            cone_dist = np.dot(point - vertex, axis)
            cone_radius = cone_dist * math.tan(angle)
            if _orthogonal_distance(point, vertex, axis) < cone_radius:
                angle = angle - ANGLE_STEP
                points_inside = True
    return angle


def perturbed_axis(base, dx, dy):
    """Return the normalized candidate axis base + (base shifted by dx, dy).

    Reproduces the original expression `base + [base_x + dx, base_y + dy,
    base_z]` (i.e. 2*base plus the offset) followed by normalization.
    """
    candidate = base + np.array([base[0] + dx, base[1] + dy, base[2]])
    return candidate / np.linalg.norm(candidate)


# Fit the cone around the mean axis.
angle = widen_to_enclose(xyz, vertex, axis)
print("angle")
print(angle)

angle = shrink_to_exclude(low_perc, vertex, axis, angle)
print(low_perc)
print("angle")
print(angle)

# Axis-perturbation search, reproducing the original trial sequence:
# round 1 tries x+/-0.1 and keeps the second candidate ("KEEPING ANGLE 2"),
# round 2 adds y+0.1 and keeps the third, round 3 tries all four offsets and
# keeps the last.  Each trial restarts the widening from START_ANGLE.
rounds = [
    ([(0.1, 0.0), (-0.1, 0.0)], "KEEPING ANGLE 2"),
    ([(0.1, 0.0), (-0.1, 0.0), (0.0, 0.1)], "KEEPING ANGLE 3"),
    ([(0.1, 0.0), (-0.1, 0.0), (0.0, 0.1), (0.0, -0.1)], None),
]
for offsets, keep_message in rounds:
    old_axis = axis
    for trial, (dx, dy) in enumerate(offsets, start=1):
        axis = perturbed_axis(old_axis, dx, dy)
        print("new axis %d" % trial)
        print(axis)
        angle = widen_to_enclose(xyz, vertex, axis)
        print("angle %d" % trial)
        print(angle)
    if keep_message is not None:
        print(keep_message)

print("final axis")
print(axis)

# Final shrink against the low-confidence points with the kept axis/angle.
angle = shrink_to_exclude(low_perc, vertex, axis, angle)

# Axis segment for visualization (6 units long, starting at the vertex).
axis_line_x = [vertex[0], vertex[0] + 6 * axis[0]]
axis_line_y = [vertex[1], vertex[1] + 6 * axis[1]]
axis_line_z = [vertex[2], vertex[2] + 6 * axis[2]]
print("low conf angle")
print(angle)
print(xs.size)

fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(xs, ys, zs, c='b', marker='o')
ax.scatter(vertex[0], vertex[1], vertex[2], c='r', marker='x')
ax.plot(axis_line_x, axis_line_y, axis_line_z, c="g")
plt.title('Echo Sounder Data')
ax.set_xlabel('X')
ax.set_ylabel('Y')
ax.set_zlabel('Z')
ax.legend(loc='lower left')
plt.show()
| 27.883134
| 94
| 0.59478
| 3,012
| 20,996
| 3.980412
| 0.049469
| 0.057052
| 0.043373
| 0.038035
| 0.877221
| 0.874218
| 0.868296
| 0.860539
| 0.856535
| 0.856202
| 0
| 0.024165
| 0.256954
| 20,996
| 752
| 95
| 27.920213
| 0.744311
| 0.366546
| 0
| 0.794721
| 0
| 0
| 0.045465
| 0.008954
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.017595
| 0
| 0.017595
| 0.170088
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3c1e0206f8756a4ca3e243fc76c329c9aacff075
| 14,321
|
py
|
Python
|
pmutt/tests/empirical/test_pmutt_empirical_Nasa.py
|
wittregr/pMuTT
|
1678fd3d3a10d8ef5389c02970a7ebaa92fc7344
|
[
"MIT"
] | 28
|
2018-10-29T17:44:30.000Z
|
2022-03-23T14:20:16.000Z
|
pmutt/tests/empirical/test_pmutt_empirical_Nasa.py
|
wittregr/pMuTT
|
1678fd3d3a10d8ef5389c02970a7ebaa92fc7344
|
[
"MIT"
] | 101
|
2018-10-18T19:49:30.000Z
|
2022-01-19T10:59:57.000Z
|
pmutt/tests/empirical/test_pmutt_empirical_Nasa.py
|
wittregr/pMuTT
|
1678fd3d3a10d8ef5389c02970a7ebaa92fc7344
|
[
"MIT"
] | 16
|
2018-12-15T17:01:21.000Z
|
2022-01-03T17:42:23.000Z
|
import unittest
import numpy as np
from ase.build import molecule
from pmutt import constants as c
from pmutt import get_molecular_weight
from pmutt.statmech import StatMech, trans, rot, vib, elec
from pmutt.empirical.nasa import Nasa
class TestNasa(unittest.TestCase):
    """Tests for pmutt.empirical.nasa.Nasa using gas-phase H2O reference data.

    The temperature grid and the expected dimensionless properties, which the
    original duplicated in seven test methods, are hoisted to class-level
    constants.
    """

    # Temperatures (K) shared by the thermodynamic property tests.
    T_test = np.array([
        500., 600., 700., 800., 900., 1000., 1100., 1200., 1300., 1400.,
        1500., 1600., 1700., 1800., 1900., 2000., 2100., 2200
    ])
    # Expected dimensionless heat capacities Cp/R at T_test.
    CpoR_expected = np.array([
        4.238636088, 4.363835667, 4.503924733, 4.654023202, 4.809813915,
        4.967542636, 5.124018051, 5.276611768, 5.423258319, 5.56245516,
        5.693262665, 5.815304137, 5.928750505, 6.034087273, 6.131819121,
        6.222433488, 6.306400563, 6.384173277
    ])
    # Expected dimensionless enthalpies H/RT at T_test.
    HoRT_expected = np.array([
        -56.49930957, -46.36612849, -39.10913137, -33.64819891,
        -29.38377578, -25.95653237, -23.13812007, -20.77654898,
        -18.76677584, -17.03389718, -15.52306522, -14.19318522,
        -13.01283758, -11.95756475, -11.00803153, -10.14874498,
        -9.367140366, -8.652916499
    ])
    # Expected dimensionless entropies S/R at T_test.
    SoR_expected = np.array([
        24.84583501, 25.62943045, 26.31248017, 26.92360771, 27.48073089,
        27.99565652, 28.47647349, 28.92890014, 29.35709049, 29.76414079,
        30.15242096, 30.52379873, 30.87979567, 31.22169282, 31.55059054,
        31.86744523, 32.17309661, 32.46828858
    ])

    def setUp(self):
        """Build reference Nasa objects for each construction route."""
        unittest.TestCase.setUp(self)
        # Constructed directly from NASA polynomial coefficients.
        self.Nasa_direct = Nasa(
            name='H2O',
            elements={'H': 2, 'O': 1},
            phase='g',
            a_low=np.array([
                4.04618796E+00, -6.87238823E-04, 2.79722240E-06,
                -1.42318006E-09, 2.34551159E-13, -3.02826236E+04,
                -2.50036531E-01
            ]),
            a_high=np.array([
                2.41854323E+00, 3.35448922E-03, -9.66398101E-07,
                1.34441829E-10, -7.18940063E-15, -2.97582484E+04,
                8.37839787E+00
            ]),
            T_low=100.,
            T_mid=1610.97,
            T_high=5000.)
        # Expected serialized form of Nasa_direct (see test_to_dict).
        self.Nasa_direct_dict = {
            'class': "<class 'pmutt.empirical.nasa.Nasa'>",
            'name': 'H2O',
            'elements': {'H': 2, 'O': 1},
            'phase': 'g',
            'a_low': [
                4.04618796E+00, -6.87238823E-04, 2.79722240E-06,
                -1.42318006E-09, 2.34551159E-13, -3.02826236E+04,
                -2.50036531E-01
            ],
            'a_high': [
                2.41854323E+00, 3.35448922E-03, -9.66398101E-07,
                1.34441829E-10, -7.18940063E-15, -2.97582484E+04,
                8.37839787E+00
            ],
            'T_low': 100.,
            'T_mid': 1610.97,
            'T_high': 5000.,
            'notes': None,
            'model': None,
            'misc_models': [{
                'class': "<class 'pmutt.empirical.GasPressureAdj'>"
            }],
            'cat_site': None,
            'n_sites': None,
            'smiles': None,
            'type': 'nasa'
        }
        # Fitted from heat-capacity data plus one reference H and S point.
        self.Nasa_data = Nasa.from_data(
            name='H2O',
            elements={'H': 2, 'O': 1},
            phase='g',
            T=self.T_test.copy(),
            CpoR=self.CpoR_expected.copy(),
            T_ref=500.,
            HoRT_ref=-56.49930957,
            SoR_ref=24.84583501)
        # Derived from a statistical-mechanics model of gas-phase H2O.
        self.Nasa_statmech = Nasa.from_model(
            name='H2O',
            elements={'H': 2, 'O': 1},
            phase='g',
            model=StatMech,
            trans_model=trans.FreeTrans,
            n_degrees=3,
            vib_model=vib.HarmonicVib,
            elec_model=elec.GroundStateElec,
            rot_model=rot.RigidRotor,
            potentialenergy=-14.2209,
            atoms=molecule('H2O'),
            symmetrynumber=2,
            spin=0,
            vib_wavenumbers=np.array([0.47462, 0.46033, 0.19633]),
            T_low=100.,
            T_mid=1610.97,
            T_high=5000.)
        self.mw = get_molecular_weight({'H': 2, 'O': 1})  # g/mol

    def test_get_a(self):
        """Coefficient selection below/above T_mid and warnings out of range."""
        np.testing.assert_array_equal(self.Nasa_direct.get_a(T=300.),
                                      self.Nasa_direct.a_low)
        np.testing.assert_array_equal(self.Nasa_direct.get_a(T=2000.),
                                      self.Nasa_direct.a_high)
        # Outside [T_low, T_high] the nearest coefficient set is returned
        # with a RuntimeWarning.
        with self.assertWarns(RuntimeWarning):
            np.testing.assert_array_equal(self.Nasa_direct.get_a(T=6000.),
                                          self.Nasa_direct.a_high)
        with self.assertWarns(RuntimeWarning):
            np.testing.assert_array_equal(self.Nasa_direct.get_a(T=50.),
                                          self.Nasa_direct.a_low)

    def test_get_CpoR(self):
        """Dimensionless heat capacity, scalar and vectorized."""
        np.testing.assert_almost_equal(
            self.Nasa_direct.get_CpoR(T=self.T_test[0]),
            self.CpoR_expected[0])
        np.testing.assert_array_almost_equal(
            self.Nasa_direct.get_CpoR(T=self.T_test), self.CpoR_expected)

    def test_get_Cp(self):
        """Heat capacity in molar and mass units."""
        Cp_expected = c.R('J/mol/K') * self.CpoR_expected
        np.testing.assert_almost_equal(
            self.Nasa_direct.get_Cp(T=self.T_test[0], units='J/mol/K'),
            Cp_expected[0])
        np.testing.assert_array_almost_equal(
            self.Nasa_direct.get_Cp(T=self.T_test, units='J/mol/K'),
            Cp_expected)
        # Mass basis: molar values divided by the molecular weight.
        np.testing.assert_almost_equal(
            self.Nasa_direct.get_Cp(T=self.T_test[0], units='J/g/K'),
            Cp_expected[0] / self.mw)
        np.testing.assert_array_almost_equal(
            self.Nasa_direct.get_Cp(T=self.T_test, units='J/g/K'),
            Cp_expected / self.mw)

    def test_get_HoRT(self):
        """Dimensionless enthalpy, scalar and vectorized."""
        np.testing.assert_almost_equal(
            self.Nasa_direct.get_HoRT(T=self.T_test[0]),
            self.HoRT_expected[0])
        np.testing.assert_array_almost_equal(
            self.Nasa_direct.get_HoRT(T=self.T_test), self.HoRT_expected)

    def test_get_H(self):
        """Enthalpy in molar and mass units (H = R*T*(H/RT))."""
        H_expected = c.R('J/mol/K') * self.T_test * self.HoRT_expected
        np.testing.assert_almost_equal(
            self.Nasa_direct.get_H(T=self.T_test[0], units='J/mol'),
            H_expected[0],
            decimal=4)
        np.testing.assert_array_almost_equal(
            self.Nasa_direct.get_H(T=self.T_test, units='J/mol'),
            H_expected,
            decimal=4)
        np.testing.assert_almost_equal(
            self.Nasa_direct.get_H(T=self.T_test[0], units='J/g'),
            H_expected[0] / self.mw,
            decimal=4)
        np.testing.assert_array_almost_equal(
            self.Nasa_direct.get_H(T=self.T_test, units='J/g'),
            H_expected / self.mw,
            decimal=4)

    def test_get_SoR(self):
        """Dimensionless entropy, scalar and vectorized."""
        np.testing.assert_almost_equal(
            self.Nasa_direct.get_SoR(T=self.T_test[0]), self.SoR_expected[0])
        np.testing.assert_array_almost_equal(
            self.Nasa_direct.get_SoR(T=self.T_test), self.SoR_expected)

    def test_get_S(self):
        """Entropy in molar and mass units."""
        S_expected = c.R('J/mol/K') * self.SoR_expected
        np.testing.assert_almost_equal(
            self.Nasa_direct.get_S(T=self.T_test[0], units='J/mol/K'),
            S_expected[0])
        np.testing.assert_array_almost_equal(
            self.Nasa_direct.get_S(T=self.T_test, units='J/mol/K'),
            S_expected)
        np.testing.assert_almost_equal(
            self.Nasa_direct.get_S(T=self.T_test[0], units='J/g/K'),
            S_expected[0] / self.mw)
        np.testing.assert_array_almost_equal(
            self.Nasa_direct.get_S(T=self.T_test, units='J/g/K'),
            S_expected / self.mw)

    def test_get_GoRT(self):
        """Dimensionless Gibbs energy: G/RT = H/RT - S/R."""
        GoRT_expected = self.HoRT_expected - self.SoR_expected
        np.testing.assert_almost_equal(
            self.Nasa_direct.get_GoRT(T=self.T_test[0]), GoRT_expected[0])
        np.testing.assert_array_almost_equal(
            self.Nasa_direct.get_GoRT(T=self.T_test), GoRT_expected)

    def test_get_G(self):
        """Gibbs energy in molar units.

        Fixed: the original test was a copy of test_get_GoRT and never
        called get_G.  G = R*T*(H/RT - S/R), following the same pattern as
        test_get_H.
        """
        GoRT_expected = self.HoRT_expected - self.SoR_expected
        G_expected = c.R('J/mol/K') * self.T_test * GoRT_expected
        np.testing.assert_almost_equal(
            self.Nasa_direct.get_G(T=self.T_test[0], units='J/mol'),
            G_expected[0],
            decimal=4)
        np.testing.assert_array_almost_equal(
            self.Nasa_direct.get_G(T=self.T_test, units='J/mol'),
            G_expected,
            decimal=4)

    def test_to_dict(self):
        """Serialization round-trip: object -> dict."""
        self.maxDiff = None
        self.assertEqual(self.Nasa_direct.to_dict(), self.Nasa_direct_dict)

    def test_from_dict(self):
        """Serialization round-trip: dict -> object."""
        self.assertEqual(Nasa.from_dict(self.Nasa_direct_dict),
                         self.Nasa_direct)


if __name__ == '__main__':
    unittest.main()
| 45.753994
| 79
| 0.46568
| 1,544
| 14,321
| 4.167098
| 0.148964
| 0.047249
| 0.078334
| 0.07678
| 0.831054
| 0.811159
| 0.797016
| 0.795151
| 0.790488
| 0.777432
| 0
| 0.358613
| 0.409818
| 14,321
| 312
| 80
| 45.900641
| 0.402627
| 0.000349
| 0
| 0.486395
| 0
| 0
| 0.021028
| 0.004262
| 0
| 0
| 0
| 0
| 0.102041
| 1
| 0.040816
| false
| 0
| 0.02381
| 0
| 0.068027
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3c507a7f3338ed0569b4f655be8b9f30c1bd7264
| 28
|
py
|
Python
|
python/testData/refactoring/introduceVariable/invalidElementAccessAfterPostReformatOfUsageSite.after.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/refactoring/introduceVariable/invalidElementAccessAfterPostReformatOfUsageSite.after.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/refactoring/introduceVariable/invalidElementAccessAfterPostReformatOfUsageSite.after.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
# NOTE(review): appears to be IDE test data for an introduce-variable
# refactoring (expected "after" state) — the exact expression shapes,
# including the parentheses, are likely significant; do not reformat.
a = (1 + 1)
foo = 1 + a + 1
| 9.333333
| 15
| 0.321429
| 7
| 28
| 1.285714
| 0.428571
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0.428571
| 28
| 2
| 16
| 14
| 0.3125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3c8c6998b355d0f84615c1e6b1d6415f04f80a73
| 11,130
|
py
|
Python
|
rucio_jupyterlab/tests/test_config.py
|
didithilmy/jupyterlab-extension
|
3268dccda0a282d96b4411d267a851535a900eca
|
[
"Apache-2.0"
] | null | null | null |
rucio_jupyterlab/tests/test_config.py
|
didithilmy/jupyterlab-extension
|
3268dccda0a282d96b4411d267a851535a900eca
|
[
"Apache-2.0"
] | null | null | null |
rucio_jupyterlab/tests/test_config.py
|
didithilmy/jupyterlab-extension
|
3268dccda0a282d96b4411d267a851535a900eca
|
[
"Apache-2.0"
] | null | null | null |
# Copyright European Organization for Nuclear Research (CERN)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Authors:
# - Muhammad Aditya Hilmy, <mhilmy@hey.com>, 2020
import pytest
from jsonschema.exceptions import ValidationError
from rucio_jupyterlab.config import Config
from .mocks.mock_db import Struct
def test_config_init__local_config__replica_mode__schema_valid():
    """A complete replica-mode instance definition passes schema validation."""
    instances = [{
        "name": "atlas",
        "display_name": "ATLAS",
        "rucio_base_url": "https://rucio",
        "mode": "replica",
        "destination_rse": "SWAN-EOS",
        "rse_mount_path": "/eos/user/rucio",
        "path_begins_at": 4
    }]
    config = Config(Struct(instances=instances))
    assert config.get_instance_config('atlas') == instances[0], "Invalid instances"
def test_config_init__local_config__replica_mode__schema_invalid():
    """A replica-mode instance missing required keys raises ValidationError."""
    instances = [{
        "display_name": "ATLAS",
        "rucio_base_url": "https://rucio",
        "mode": "replica",
        "rse_mount_path": "/eos/user/rucio",
        "path_begins_at": 4
    }]
    with pytest.raises(ValidationError):
        Config(Struct(instances=instances))
def test_config_init__local_config__download_mode__schema_valid():
    """A fully-specified local download-mode instance passes schema validation."""
    instance = {
        "name": "atlas",
        "display_name": "ATLAS",
        "rucio_base_url": "https://rucio",
        "mode": "download",
        "rucio_ca_cert": '/opt/rucio.pem'
    }
    config = Config(Struct(instances=[instance]))
    assert config.get_instance_config('atlas') == instance, "Invalid instances"
def test_config_init__local_config__download_mode__schema_invalid():
    """A download-mode instance missing required keys is rejected by the schema."""
    incomplete_instance = {
        "display_name": "ATLAS",
        "rucio_base_url": "https://rucio",
        "mode": "download"
    }
    with pytest.raises(ValidationError):
        Config(Struct(instances=[incomplete_instance]))
def test_config_init__remote_config__schema_valid__no_overlapping_item(requests_mock):
    """Remote config fetched from $url is merged into the local instance entry."""
    local_instance = {
        "name": "cms",
        "display_name": "CMS",
        "mode": "replica",
        "$url": "http://localhost/rucio.json"
    }
    remote_config = {
        "rucio_base_url": "https://rucio",
        "destination_rse": "SWAN-EOS",
        "rse_mount_path": "/eos/user/rucio",
        "path_begins_at": 1
    }
    requests_mock.get("http://localhost/rucio.json", json=remote_config)
    config = Config(Struct(instances=[local_instance]))
    # No keys overlap, so the effective config is simply the union of both dicts.
    expected = {**local_instance, **remote_config}
    assert config.get_instance_config('cms') == expected, "Invalid remote config format"
def test_config_init__remote_config__schema_valid__overlapping_item(requests_mock):
    """When local and remote define the same key, the local value wins."""
    local_instance = {
        "name": "cms",
        "display_name": "CMS-Local",
        "mode": "replica",
        "$url": "http://localhost/rucio.json"
    }
    remote_config = {
        "display_name": "CMS-Remote",
        "rucio_base_url": "https://rucio",
        "destination_rse": "SWAN-EOS",
        "rse_mount_path": "/eos/user/rucio",
        "path_begins_at": 1
    }
    requests_mock.get("http://localhost/rucio.json", json=remote_config)
    config = Config(Struct(instances=[local_instance]))
    # Local entries override remote ones, so display_name stays "CMS-Local".
    expected = {**remote_config, **local_instance}
    assert config.get_instance_config('cms') == expected, "Invalid remote config format"
def test_config_init__remote_config__schema_invalid(requests_mock):
    """A remote config lacking required keys fails schema validation."""
    local_instance = {
        "name": "cms",
        "display_name": "CMS",
        "mode": "replica",
        "$url": "http://localhost/rucio.json"
    }
    incomplete_remote_config = {
        "rse_mount_path": "/eos/user/rucio",
        "path_begins_at": 1
    }
    requests_mock.get("http://localhost/rucio.json", json=incomplete_remote_config)
    with pytest.raises(ValidationError):
        Config(Struct(instances=[local_instance]))
def test_list_instances__oidc_disabled():
    """list_instances reports oidc_enabled=False when no OIDC auth is configured."""
    common = {
        "mode": "replica",
        "destination_rse": "SWAN-EOS",
        "rse_mount_path": "/eos/user/rucio",
        "path_begins_at": 4
    }
    instances = [
        dict(common, name="atlas", display_name="ATLAS", rucio_base_url="https://rucio"),
        dict(common, name="cms", display_name="CMS", rucio_base_url="https://rucio-cms")
    ]
    config = Config(Struct(instances=instances))
    expected = [
        {'display_name': 'ATLAS', 'name': 'atlas', 'oidc_enabled': False},
        {'display_name': 'CMS', 'name': 'cms', 'oidc_enabled': False}
    ]
    assert config.list_instances() == expected, "Invalid instances"
def test_list_instances__oidc_file__file_config_exists__file_exists__oidc_should_be_enabled(mocker):
    """oidc_auth=file with a token available from the file => oidc_enabled is True."""
    instances = [{
        "name": "escape",
        "display_name": "ESCAPE",
        "mode": "replica",
        "rucio_base_url": "https://rucio-cms",
        "destination_rse": "SWAN-EOS",
        "rse_mount_path": "/eos/user/rucio",
        "path_begins_at": 4,
        "oidc_auth": "file",
        "oidc_file_name": "/tmp/oauth2"
    }]
    # Simulate a token being readable from the configured file.
    mocker.patch('rucio_jupyterlab.config.config.get_oidc_token', return_value='oauth2:token:resource')
    config = Config(Struct(instances=instances))
    expected = [
        {'display_name': 'ESCAPE', 'name': 'escape', 'oidc_enabled': True},
    ]
    assert config.list_instances() == expected, "Invalid instances"
def test_list_instances__oidc_file__file_config_exists__file_does_not_exist__oidc_should_be_disabled(mocker):
    """oidc_auth=file but no token can be read from the file => oidc_enabled is False."""
    instances = [{
        "name": "escape",
        "display_name": "ESCAPE",
        "mode": "replica",
        "rucio_base_url": "https://rucio-cms",
        "destination_rse": "SWAN-EOS",
        "rse_mount_path": "/eos/user/rucio",
        "path_begins_at": 4,
        "oidc_auth": "file",
        "oidc_file_name": "/tmp/oauth2"
    }]
    # Simulate a missing or unreadable token file.
    mocker.patch('rucio_jupyterlab.config.config.get_oidc_token', return_value=None)
    config = Config(Struct(instances=instances))
    expected = [
        {'display_name': 'ESCAPE', 'name': 'escape', 'oidc_enabled': False},
    ]
    assert config.list_instances() == expected, "Invalid instances"
def test_list_instances__oidc_file__file_config_does_not_exist__oidc_should_be_disabled(mocker):
    """oidc_auth=file without oidc_file_name configured => oidc_enabled is False."""
    instances = [{
        "name": "escape",
        "display_name": "ESCAPE",
        "mode": "replica",
        "rucio_base_url": "https://rucio-cms",
        "destination_rse": "SWAN-EOS",
        "rse_mount_path": "/eos/user/rucio",
        "path_begins_at": 4,
        "oidc_auth": "file",
    }]
    mocker.patch('rucio_jupyterlab.config.config.get_oidc_token', return_value=None)
    config = Config(Struct(instances=instances))
    expected = [
        {'display_name': 'ESCAPE', 'name': 'escape', 'oidc_enabled': False},
    ]
    assert config.list_instances() == expected, "Invalid instances"
def test_list_instances__oidc_env__env_config_exists__env_exists__oidc_should_be_enabled(mocker):
    """oidc_auth=env with a token available from the environment => oidc_enabled is True."""
    instances = [{
        "name": "escape",
        "display_name": "ESCAPE",
        "mode": "replica",
        "rucio_base_url": "https://rucio-cms",
        "destination_rse": "SWAN-EOS",
        "rse_mount_path": "/eos/user/rucio",
        "path_begins_at": 4,
        "oidc_auth": "env",
        "oidc_env_name": "/tmp/oauth2"
    }]
    # Simulate a token being present in the configured environment variable.
    mocker.patch('rucio_jupyterlab.config.config.get_oidc_token', return_value='oauth2:token:resource')
    config = Config(Struct(instances=instances))
    expected = [
        {'display_name': 'ESCAPE', 'name': 'escape', 'oidc_enabled': True},
    ]
    assert config.list_instances() == expected, "Invalid instances"
def test_list_instances__oidc_env__env_config_exists__env_does_not_exist__oidc_should_be_disabled(mocker):
    """oidc_auth=env but no token in the environment => oidc_enabled is False."""
    instances = [{
        "name": "escape",
        "display_name": "ESCAPE",
        "mode": "replica",
        "rucio_base_url": "https://rucio-cms",
        "destination_rse": "SWAN-EOS",
        "rse_mount_path": "/eos/user/rucio",
        "path_begins_at": 4,
        "oidc_auth": "env",
        "oidc_env_name": "/tmp/oauth2"
    }]
    # Simulate the environment variable being unset.
    mocker.patch('rucio_jupyterlab.config.config.get_oidc_token', return_value=None)
    config = Config(Struct(instances=instances))
    expected = [
        {'display_name': 'ESCAPE', 'name': 'escape', 'oidc_enabled': False},
    ]
    assert config.list_instances() == expected, "Invalid instances"
def test_list_instances__oidc_env__env_config_does_not_exist__oidc_should_be_disabled(mocker):
    """oidc_auth=env without oidc_env_name configured => oidc_enabled is False."""
    instances = [{
        "name": "escape",
        "display_name": "ESCAPE",
        "mode": "replica",
        "rucio_base_url": "https://rucio-cms",
        "destination_rse": "SWAN-EOS",
        "rse_mount_path": "/eos/user/rucio",
        "path_begins_at": 4,
        "oidc_auth": "env",
    }]
    mocker.patch('rucio_jupyterlab.config.config.get_oidc_token', return_value=None)
    config = Config(Struct(instances=instances))
    expected = [
        {'display_name': 'ESCAPE', 'name': 'escape', 'oidc_enabled': False},
    ]
    assert config.list_instances() == expected, "Invalid instances"
| 30
| 109
| 0.604672
| 1,206
| 11,130
| 5.192371
| 0.10199
| 0.06228
| 0.030661
| 0.043437
| 0.92207
| 0.919355
| 0.919355
| 0.908176
| 0.903705
| 0.884063
| 0
| 0.003765
| 0.260288
| 11,130
| 370
| 110
| 30.081081
| 0.756832
| 0.02947
| 0
| 0.71831
| 0
| 0
| 0.307977
| 0.028908
| 0
| 0
| 0
| 0
| 0.038732
| 1
| 0.049296
| false
| 0
| 0.014085
| 0
| 0.06338
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3c9b9550518bb101bfb40fb1856d7f40d9c21d49
| 19,344
|
py
|
Python
|
JanggiGame/Engine/tests/unit/test_path_generation_strategy.py
|
MohamedAl-Hussein/janggi-game
|
0664a408563b824c0b665d755ff9b232b6f24644
|
[
"MIT"
] | null | null | null |
JanggiGame/Engine/tests/unit/test_path_generation_strategy.py
|
MohamedAl-Hussein/janggi-game
|
0664a408563b824c0b665d755ff9b232b6f24644
|
[
"MIT"
] | null | null | null |
JanggiGame/Engine/tests/unit/test_path_generation_strategy.py
|
MohamedAl-Hussein/janggi-game
|
0664a408563b824c0b665d755ff9b232b6f24644
|
[
"MIT"
] | null | null | null |
from __future__ import annotations
import unittest
from typing import List, TYPE_CHECKING
from helpers import BranchPathStrategy, LinearDiagonalPathStrategy, LinearPathStrategy
from utils import Point2D
if TYPE_CHECKING:
from helpers import IPathGenerationStrategy
class TestIPathGenerationStrategy(unittest.TestCase):
    # Intentionally empty placeholder: IPathGenerationStrategy is an interface
    # with no concrete behavior of its own; the concrete strategies are tested
    # by the classes below.
    pass
class TestLinearDiagonalPathStrategy(unittest.TestCase):
    """Tests for LinearDiagonalPathStrategy: straight rays plus limited diagonals."""

    @staticmethod
    def _ray(x_mag: int, y_mag: int, steps: int) -> List[Point2D]:
        """Path from the origin made of `steps` unit moves in direction (x_mag, y_mag)."""
        return [Point2D(x_mag * step, y_mag * step) for step in range(steps + 1)]

    def setUp(self) -> None:
        self.general_guard_strat: IPathGenerationStrategy = LinearDiagonalPathStrategy(
            step_range=(1, 2), x_magnitudes={-1, 0, 1}, y_magnitudes={-1, 0, 1}, diag_limit=1
        )
        self.blue_soldier_in_palace_strat: IPathGenerationStrategy = LinearDiagonalPathStrategy(
            step_range=(1, 2), x_magnitudes={-1, 0, 1}, y_magnitudes={0, 1}, diag_limit=1
        )
        self.red_soldier_in_palace_strat: IPathGenerationStrategy = LinearDiagonalPathStrategy(
            step_range=(1, 2), x_magnitudes={-1, 0, 1}, y_magnitudes={0, -1}, diag_limit=1
        )
        self.chariot_cannon_in_palace_strat: IPathGenerationStrategy = LinearDiagonalPathStrategy(
            step_range=(1, 10), x_magnitudes={-1, 0, 1}, y_magnitudes={-1, 0, 1}, diag_limit=2
        )
        # Cardinal rays of every length 1..9, plus diagonal rays of length 1..2
        # (diag_limit=2). assertCountEqual compares as multisets, so generating
        # this table is equivalent to spelling out each literal path.
        cardinal_rays = [
            self._ray(dx, dy, steps)
            for steps in range(1, 10)
            for dx, dy in ((0, 1), (0, -1), (1, 0), (-1, 0))
        ]
        diagonal_rays = [
            self._ray(dx, dy, steps)
            for steps in range(1, 3)
            for dx, dy in ((1, 1), (-1, -1), (1, -1), (-1, 1))
        ]
        self.chariot_cannon_expected_paths: List[List[Point2D]] = cardinal_rays + diagonal_rays

    def test_path_generator_returns_all_paths_for_general_or_guard(self) -> None:
        # Single-step paths in all eight directions from the origin; one source
        # position suffices because paths radiate in every direction.
        origin: Point2D = Point2D(0, 0)
        expected: List[List[Point2D]] = [
            self._ray(dx, dy, 1)
            for dx, dy in ((1, 0), (1, -1), (0, -1), (-1, -1),
                           (-1, 0), (-1, 1), (0, 1), (1, 1))
        ]
        actual: List[List[Point2D]] = list(self.general_guard_strat.path_generator(source=origin))
        self.assertCountEqual(expected, actual)

    def test_path_generator_returns_all_paths_for_blue_soldier_in_palace(self) -> None:
        # Blue soldiers move sideways or toward +y (including diagonals).
        origin: Point2D = Point2D(0, 0)
        expected: List[List[Point2D]] = [
            self._ray(dx, dy, 1)
            for dx, dy in ((1, 0), (-1, 0), (-1, 1), (0, 1), (1, 1))
        ]
        actual: List[List[Point2D]] = list(self.blue_soldier_in_palace_strat.path_generator(source=origin))
        self.assertCountEqual(expected, actual)

    def test_path_generator_returns_all_paths_for_red_soldier_in_palace(self) -> None:
        # Red soldiers mirror blue: sideways or toward -y (including diagonals).
        origin: Point2D = Point2D(0, 0)
        expected: List[List[Point2D]] = [
            self._ray(dx, dy, 1)
            for dx, dy in ((1, 0), (1, -1), (0, -1), (-1, -1), (-1, 0))
        ]
        actual: List[List[Point2D]] = list(self.red_soldier_in_palace_strat.path_generator(source=origin))
        self.assertCountEqual(expected, actual)

    def test_path_generator_returns_all_paths_for_chariot_or_cannon_in_palace(self) -> None:
        # Expected paths are precomputed in setUp (long rays plus short diagonals).
        origin: Point2D = Point2D(0, 0)
        actual: List[List[Point2D]] = list(self.chariot_cannon_in_palace_strat.path_generator(source=origin))
        self.assertCountEqual(self.chariot_cannon_expected_paths, actual)
class TestLinearPathStrategy(unittest.TestCase):
    """Tests for LinearPathStrategy: straight axial rays only."""

    @staticmethod
    def _ray(x_mag: int, y_mag: int, steps: int) -> List[Point2D]:
        """Path from the origin made of `steps` unit moves in direction (x_mag, y_mag)."""
        return [Point2D(x_mag * step, y_mag * step) for step in range(steps + 1)]

    def setUp(self) -> None:
        self.blue_soldier_strat: IPathGenerationStrategy = LinearPathStrategy(
            step_range=(1, 2), x_magnitudes={-1, 0, 1}, y_magnitudes={1}
        )
        self.red_soldier_strat: IPathGenerationStrategy = LinearPathStrategy(
            step_range=(1, 2), x_magnitudes={-1, 0, 1}, y_magnitudes={-1}
        )
        self.chariot_cannon_strat: IPathGenerationStrategy = LinearPathStrategy(
            step_range=(1, 10), x_magnitudes={0, 1}, y_magnitudes={-1, 1}
        )
        # Cardinal rays of every length 1..9 in the four axial directions.
        # assertCountEqual compares as multisets, so generating the table is
        # equivalent to spelling out each literal path.
        self.chariot_cannon_expected_paths: List[List[Point2D]] = [
            self._ray(dx, dy, steps)
            for steps in range(1, 10)
            for dx, dy in ((0, 1), (0, -1), (1, 0), (-1, 0))
        ]

    def test_path_generator_returns_all_paths_for_blue_soldier(self) -> None:
        # One source position suffices because paths radiate symmetrically.
        origin: Point2D = Point2D(0, 0)
        expected: List[List[Point2D]] = [
            self._ray(dx, dy, 1)
            for dx, dy in ((0, 1), (1, 0), (-1, 0))
        ]
        actual: List[List[Point2D]] = list(self.blue_soldier_strat.path_generator(source=origin))
        self.assertCountEqual(expected, actual)

    def test_path_generator_returns_all_paths_for_red_soldier(self) -> None:
        origin: Point2D = Point2D(0, 0)
        expected: List[List[Point2D]] = [
            self._ray(dx, dy, 1)
            for dx, dy in ((0, -1), (1, 0), (-1, 0))
        ]
        actual: List[List[Point2D]] = list(self.red_soldier_strat.path_generator(source=origin))
        self.assertCountEqual(expected, actual)

    def test_path_generator_returns_all_paths_for_chariot_or_cannon(self) -> None:
        # Expected paths are precomputed in setUp.
        origin: Point2D = Point2D(0, 0)
        actual: List[List[Point2D]] = list(self.chariot_cannon_strat.path_generator(source=origin))
        self.assertCountEqual(self.chariot_cannon_expected_paths, actual)
class TestBranchPathStrategy(unittest.TestCase):
    """Tests for BranchPathStrategy: bent (horse/elephant style) paths."""

    @staticmethod
    def _path(*coords) -> List[Point2D]:
        """Convert (x, y) tuples into a list of Point2D forming one path."""
        return [Point2D(x, y) for x, y in coords]

    def setUp(self) -> None:
        self.horse_strat: IPathGenerationStrategy = BranchPathStrategy(
            step_range=(1, 2), scalars={0, 1}, x_magnitudes={-1, 1}, y_magnitudes={-1, 1}
        )
        self.elephant_strat: IPathGenerationStrategy = BranchPathStrategy(
            step_range=(2, 3), scalars={0, 1}, x_magnitudes={-1, 1}, y_magnitudes={-1, 1}
        )

    def test_path_generator_returns_all_paths_for_horse(self) -> None:
        # One axial step followed by one diagonal step, branching both ways
        # from each of the four axial directions. Order is irrelevant
        # (assertCountEqual compares as multisets).
        origin: Point2D = Point2D(0, 0)
        expected: List[List[Point2D]] = [
            self._path((0, 0), (0, 1), (1, 2)),
            self._path((0, 0), (0, 1), (-1, 2)),
            self._path((0, 0), (0, -1), (1, -2)),
            self._path((0, 0), (0, -1), (-1, -2)),
            self._path((0, 0), (1, 0), (2, 1)),
            self._path((0, 0), (1, 0), (2, -1)),
            self._path((0, 0), (-1, 0), (-2, 1)),
            self._path((0, 0), (-1, 0), (-2, -1)),
        ]
        actual: List[List[Point2D]] = list(self.horse_strat.path_generator(source=origin))
        self.assertCountEqual(expected, actual)

    def test_path_generator_returns_all_paths_for_elephant(self) -> None:
        # Like the horse but with a second diagonal step in the same direction.
        origin: Point2D = Point2D(0, 0)
        expected: List[List[Point2D]] = [
            self._path((0, 0), (0, 1), (1, 2), (2, 3)),
            self._path((0, 0), (0, 1), (-1, 2), (-2, 3)),
            self._path((0, 0), (0, -1), (1, -2), (2, -3)),
            self._path((0, 0), (0, -1), (-1, -2), (-2, -3)),
            self._path((0, 0), (1, 0), (2, 1), (3, 2)),
            self._path((0, 0), (1, 0), (2, -1), (3, -2)),
            self._path((0, 0), (-1, 0), (-2, 1), (-3, 2)),
            self._path((0, 0), (-1, 0), (-2, -1), (-3, -2)),
        ]
        actual: List[List[Point2D]] = list(self.elephant_strat.path_generator(source=origin))
        self.assertCountEqual(expected, actual)
if __name__ == "__main__":
    # Allow running this test module directly as a script.
    unittest.main()
| 55.746398
| 116
| 0.519127
| 2,574
| 19,344
| 3.815462
| 0.034188
| 0.26311
| 0.118216
| 0.195499
| 0.956725
| 0.939517
| 0.92791
| 0.916913
| 0.901843
| 0.901843
| 0
| 0.123681
| 0.250569
| 19,344
| 346
| 117
| 55.907514
| 0.55377
| 0.134822
| 0
| 0.636
| 0
| 0
| 0.00048
| 0
| 0
| 0
| 0
| 0
| 0.036
| 1
| 0.048
| false
| 0.004
| 0.024
| 0
| 0.088
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
590335716cf9b7add8f963994b784d92ed754946
| 4,213
|
py
|
Python
|
baselines/baselines/common/vec_env/vec_normalize.py
|
amiranas/flow_rl
|
9e6e69991c22bb85a9aeb2d5399bbade1e3bb682
|
[
"Apache-2.0"
] | 26
|
2019-01-11T05:20:53.000Z
|
2022-03-28T11:25:33.000Z
|
baselines/baselines/common/vec_env/vec_normalize.py
|
amiranas/flow_rl
|
9e6e69991c22bb85a9aeb2d5399bbade1e3bb682
|
[
"Apache-2.0"
] | 1
|
2019-04-19T17:35:31.000Z
|
2019-04-30T09:03:23.000Z
|
baselines/baselines/common/vec_env/vec_normalize.py
|
amiranas/flow_rl
|
9e6e69991c22bb85a9aeb2d5399bbade1e3bb682
|
[
"Apache-2.0"
] | 10
|
2019-03-19T08:21:37.000Z
|
2022-03-11T09:05:35.000Z
|
from baselines.common.vec_env import VecEnvWrapper
from baselines.common.running_mean_std import RunningMeanStd
import numpy as np
from os.path import join as p_join
import pickle
class VecNormalize(VecEnvWrapper):
    """
    Vectorized environment wrapper that normalizes observations and rewards
    using running estimates of their mean and variance.
    """
    def __init__(self, venv, ob=True, ret=True, clipob=10., cliprew=10., gamma=0.99, epsilon=1e-8):
        """
        :param venv: vectorized environment to wrap
        :param ob: normalize observations if True
        :param ret: normalize rewards (by std of discounted returns) if True
        :param clipob: clip normalized observations to [-clipob, clipob]
        :param cliprew: clip normalized rewards to [-cliprew, cliprew]
        :param gamma: discount factor for the per-env return accumulator
        :param epsilon: numerical-stability constant inside the sqrt
        """
        VecEnvWrapper.__init__(self, venv)
        # Running statistics; None disables the corresponding normalization.
        self.ob_rms = RunningMeanStd(shape=self.observation_space.shape) if ob else None
        self.ret_rms = RunningMeanStd(shape=()) if ret else None
        self.clipob = clipob
        self.cliprew = cliprew
        # Per-env discounted return accumulator used to scale rewards.
        self.ret = np.zeros(self.num_envs)
        self.gamma = gamma
        self.epsilon = epsilon

    def step_wait(self):
        """
        Apply sequence of actions to sequence of environments
        actions -> (observations, rewards, news)
        where 'news' is a boolean vector indicating whether each element is new.
        """
        obs, rews, news, infos = self.venv.step_wait()
        self.ret = self.ret * self.gamma + rews
        obs = self._obfilt(obs)
        if self.ret_rms:
            self.ret_rms.update(self.ret)
            rews = np.clip(rews / np.sqrt(self.ret_rms.var + self.epsilon), -self.cliprew, self.cliprew)
        # Bug fix: zero the return accumulator for envs that just finished an
        # episode, matching upstream OpenAI Baselines' VecNormalize; otherwise
        # discounted returns leak across episode boundaries and skew ret_rms.
        self.ret[news] = 0.
        return obs, rews, news, infos

    def _obfilt(self, obs):
        """Update observation statistics and return the normalized, clipped obs."""
        if self.ob_rms is None:
            return obs
        self.ob_rms.update(obs)
        return np.clip((obs - self.ob_rms.mean) / np.sqrt(self.ob_rms.var + self.epsilon),
                       -self.clipob, self.clipob)

    def reset(self):
        """
        Reset all environments
        """
        # Returns restart from zero on a full reset (mirrors upstream baselines).
        self.ret = np.zeros(self.num_envs)
        obs = self.venv.reset()
        return self._obfilt(obs)
class ImVecNormalize(VecEnvWrapper):
    """
    VecNormalize variant for dict observations: when the observation is a dict,
    only its "vector" entry is normalized (other entries, e.g. images, are left
    untouched). Also supports pickling/unpickling the normalization state.
    """
    def __init__(self, venv, ob=True, ret=True, clipob=10., cliprew=10., gamma=0.99, epsilon=1e-8):
        """See VecNormalize; identical parameters."""
        VecEnvWrapper.__init__(self, venv)
        # NOTE(review): this tests for a plain ``dict`` observation space; if the
        # wrapped env exposes a gym ``spaces.Dict`` instead, this branch will not
        # trigger — confirm against the envs this wrapper is used with.
        if isinstance(self.observation_space, dict):
            self.ob_rms = RunningMeanStd(
                shape=self.observation_space["vector"].shape) if ob else None
        else:
            self.ob_rms = RunningMeanStd(
                shape=self.observation_space.shape) if ob else None
        self.ret_rms = RunningMeanStd(shape=()) if ret else None
        self.clipob = clipob
        self.cliprew = cliprew
        # Per-env discounted return accumulator used to scale rewards.
        self.ret = np.zeros(self.num_envs)
        self.gamma = gamma
        self.epsilon = epsilon

    def save_norm(self, path, id):
        """Pickle the normalization state to ``<path>/vec_norm_<id>``."""
        with open(p_join(path, "vec_norm_{}".format(id)), 'wb') as f:
            pickle.dump([self.ob_rms, self.ret_rms, self.clipob, self.cliprew, self.ret, self.gamma,
                         self.epsilon], f)

    def load_norm(self, path, id):
        """
        Restore normalization state from ``<path>/vec_norm_<id>``.

        Security note: uses pickle — only load files produced by ``save_norm``
        from a trusted source.
        """
        with open(p_join(path, "vec_norm_{}".format(id)), 'rb') as f:
            self.ob_rms, self.ret_rms, self.clipob, self.cliprew, self.ret, self.gamma, \
                self.epsilon = pickle.load(f)

    def step_wait(self):
        """
        Apply sequence of actions to sequence of environments
        actions -> (observations, rewards, news)
        where 'news' is a boolean vector indicating whether each element is new.
        """
        obs, rews, news, infos = self.venv.step_wait()
        self.ret = self.ret * self.gamma + rews
        if isinstance(obs, dict):
            # Only the flat "vector" component is normalized.
            obs["vector"] = self._obfilt(obs["vector"])
        else:
            obs = self._obfilt(obs)
        if self.ret_rms:
            self.ret_rms.update(self.ret)
            rews = np.clip(rews / np.sqrt(self.ret_rms.var + self.epsilon), -self.cliprew, self.cliprew)
        # Bug fix: zero the return accumulator for envs that just finished an
        # episode, matching upstream OpenAI Baselines' VecNormalize; otherwise
        # discounted returns leak across episode boundaries and skew ret_rms.
        self.ret[news] = 0.
        return obs, rews, news, infos

    def _obfilt(self, obs):
        """Update observation statistics and return the normalized, clipped obs."""
        if self.ob_rms is None:
            return obs
        self.ob_rms.update(obs)
        return np.clip((obs - self.ob_rms.mean) / np.sqrt(self.ob_rms.var + self.epsilon),
                       -self.clipob, self.clipob)

    def reset(self):
        """
        Reset all environments
        """
        # Returns restart from zero on a full reset (mirrors upstream baselines).
        self.ret = np.zeros(self.num_envs)
        obs = self.venv.reset()
        if isinstance(obs, dict):
            obs["vector"] = self._obfilt(obs["vector"])
        else:
            obs = self._obfilt(obs)
        return obs
| 36.008547
| 120
| 0.596962
| 548
| 4,213
| 4.463504
| 0.175182
| 0.057236
| 0.047833
| 0.026165
| 0.875715
| 0.868765
| 0.868765
| 0.868765
| 0.849141
| 0.849141
| 0
| 0.006002
| 0.288156
| 4,213
| 116
| 121
| 36.318966
| 0.809603
| 0.10705
| 0
| 0.753086
| 0
| 0
| 0.015525
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.123457
| false
| 0
| 0.061728
| 0
| 0.308642
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3cb4c635b7c5236209e5db2d7e3ba484d57baa22
| 20,368
|
py
|
Python
|
examples/handcoded/simmodel.py
|
aadeshnpn/swarm
|
873e5d90de4a3b3f69d4edc8de55eb9311226c2e
|
[
"MIT"
] | 9
|
2018-03-26T22:22:08.000Z
|
2021-08-30T20:45:27.000Z
|
examples/handcoded/simmodel.py
|
aadeshnpn/swarm
|
873e5d90de4a3b3f69d4edc8de55eb9311226c2e
|
[
"MIT"
] | 1
|
2021-05-06T12:45:11.000Z
|
2021-05-12T07:21:53.000Z
|
examples/handcoded/simmodel.py
|
aadeshnpn/swarm
|
873e5d90de4a3b3f69d4edc8de55eb9311226c2e
|
[
"MIT"
] | 1
|
2019-04-22T00:27:09.000Z
|
2019-04-22T00:27:09.000Z
|
"""Inherited model class."""
from swarms.lib.model import Model
from swarms.lib.time import SimultaneousActivation
# RandomActivation, StagedActivation
from swarms.lib.space import Grid
from swarms.utils.jsonhandler import JsonData
from swarms.utils.results import Experiment
from swarms.utils.db import Connect
from simagent import SimForgAgent, SimCTAgent, SimNMAgent
from swarms.lib.objects import Hub, Sites, Food, Debris, Obstacles
import os
import imp
import datetime
import numpy as np
from swarms.utils.ui import UI
# Path to the world-definition JSON bundled inside the installed ``swarms``
# package. Fix: the original called os.path.join with a single pre-concatenated
# argument (a no-op join using a hard-coded "/"); join the components properly
# so the separator is platform-correct.
# NOTE(review): ``imp`` is deprecated (removed in Python 3.12); migrating to
# ``importlib.util.find_spec("swarms")`` is recommended but left to a separate
# change since it touches the module's import block.
filename = os.path.join(imp.find_module("swarms")[1], "utils", "world.json")
class SimForgModel(Model):
    """An environment to model single-source foraging swarms."""

    def __init__(
            self, N, width, height, grid=10, iter=100000,
            xmlstrings=None, seed=None, viewer=False, pname=None):
        """Initialize the attributes.

        Args:
            N: number of agents to create.
            width, height: dimensions of the world grid.
            grid: grid cell granularity.
            iter: maximum number of simulation iterations (name kept for
                interface compatibility although it shadows the builtin).
            xmlstrings: list of XML behavior phenotypes; agents are split
                evenly among them. Must be a non-empty list.
            seed: RNG seed forwarded to the base ``Model``.
            viewer: attach a UI viewer when True.
            pname: parent directory for the result folder (defaults to
                the current working directory).
        """
        if seed is None:
            super(SimForgModel, self).__init__(seed=None)
        else:
            super(SimForgModel, self).__init__(seed)

        # Run id: epoch seconds plus a random 1-1000 suffix.
        self.runid = datetime.datetime.now().strftime(
            "%s") + str(self.random.randint(1, 1000, 1)[0])

        if pname is None:
            self.pname = os.getcwd() + '/' + self.runid + "SForagingSimulation"
        else:
            self.pname = pname + '/' + self.runid + "SForagingSimulation"

        self.width = width
        self.height = height
        self.stepcnt = 1
        self.iter = iter
        self.xmlstrings = xmlstrings
        self.viewer = viewer

        # Create db connection and register this experiment run.
        connect = Connect('swarm', 'swarm', 'swarm', 'localhost')
        self.connect = connect.tns_connect()
        self.experiment = Experiment(
            self.connect, self.runid, N, seed, 'Simuation Single Foraging',
            iter, width, height, grid, phenotype=xmlstrings[0])
        self.experiment.insert_experiment_simulation()
        self.sn = self.experiment.sn

        # Create a folder to store results.
        os.mkdir(self.pname)

        self.num_agents = N
        self.grid = Grid(width, height, grid)
        self.schedule = SimultaneousActivation(self)
        self.agents = []

        # Number of agents assigned to each phenotype.
        bound = np.ceil((self.num_agents * 1.0) / len(self.xmlstrings))
        j = 0
        for i in range(self.num_agents):
            a = SimForgAgent(i, self, xmlstring=self.xmlstrings[j])
            self.schedule.add(a)
            # All agents start at the origin.
            x = 0
            y = 0
            a.location = (x, y)
            self.grid.add_object_to_grid((x, y), a)
            a.operation_threshold = 2  # self.num_agents // 10
            self.agents.append(a)
            # Move to the next phenotype once its share is filled.
            if (i + 1) % bound == 0:
                j += 1

    def create_environment_object(self, jsondata, obj):
        """Instantiate every object of class *obj* described in *jsondata*.

        Objects are placed on the grid as they are created; ``q_value``
        is forwarded when present in the JSON record. Returns the list
        of created objects.
        """
        name = obj.__name__.lower()
        temp_list = []
        i = 0
        for json_object in jsondata[name]:
            location = (json_object["x"], json_object["y"])
            if "q_value" in json_object:
                temp_obj = obj(
                    i, location, json_object["radius"],
                    q_value=json_object["q_value"])
            else:
                temp_obj = obj(i, location, json_object["radius"])
            self.grid.add_object_to_grid(location, temp_obj)
            temp_list.append(temp_obj)
            i += 1
        return temp_list

    def build_environment_from_json(self):
        """Build hub, sites and food from the packaged world JSON file."""
        jsondata = JsonData.load_json_file(filename)
        # JsonData instance storing objects that are sent to the UI.
        self.render = JsonData()
        self.render.objects = {}
        for name in jsondata.keys():
            # SECURITY: eval() maps a JSON key to an imported class and
            # will execute arbitrary names from the JSON file. Safe only
            # while world.json remains trusted, package-local data.
            obj = eval(name.capitalize())
            self.render.objects[name] = self.create_environment_object(
                jsondata, obj)

        self.hub = self.render.objects['hub'][0]
        # Bug fix: initialize before the try-block so the attribute
        # exists even when the JSON defines no 'sites' (the viewer
        # branch below reads self.foods unconditionally).
        self.foods = []
        try:
            self.site = self.render.objects['sites'][0]
            for i in range(self.num_agents * 1):
                f = Food(
                    i, location=self.site.location, radius=self.site.radius)
                f.agent_name = None
                self.grid.add_object_to_grid(f.location, f)
                self.foods.append(f)
        except KeyError:
            pass

        if self.viewer:
            self.ui = UI(
                (self.width, self.height), [self.hub], self.agents,
                [self.site], food=self.foods)

    def step(self):
        """Advance the simulation by one scheduler step."""
        self.schedule.step()
        self.stepcnt += 1
        if self.viewer:
            self.ui.step()

    def find_higest_performer(self):
        """Return the agent with the highest fitness.

        Bug fix: the original never updated the running-best fitness, so
        it returned the *last* agent better than agents[0] rather than
        the overall best.
        """
        return max(self.agents, key=lambda agent: agent.individual[0].fitness)

    def find_higest_food_collector(self):
        """Return the agent that collected the most food.

        Bug fix: same stale-baseline comparison as
        ``find_higest_performer``; now a true argmax.
        """
        return max(self.agents, key=lambda agent: agent.food_collected)

    def detect_food_moved(self):
        """Return food objects within radius 10 of the site."""
        neighbours = self.grid.get_neighborhood(self.site.location, 10)
        return self.grid.get_objects_from_list_of_grid('Food', neighbours)

    def food_in_hub(self):
        """Return the amount of food within radius 10 of the hub."""
        neighbours = self.grid.get_neighborhood(self.hub.location, 10)
        food_objects = self.grid.get_objects_from_list_of_grid(
            'Food', neighbours)
        return len(food_objects)

    def food_in_loc(self, loc):
        """Return food objects within radius 10 of *loc*."""
        neighbours = self.grid.get_neighborhood(loc, 10)
        return self.grid.get_objects_from_list_of_grid('Food', neighbours)
class SimCTModel(Model):
    """An environment to model cooperative-transport swarms."""

    def __init__(
            self, N, width, height, grid=10, iter=100000,
            xmlstrings=None, seed=None, viewer=False, pname=None,
            expname='COTSimulation', agent='SimCTAgent'):
        """Initialize the attributes.

        Args:
            N: number of agents to create.
            width, height: dimensions of the world grid.
            grid: grid cell granularity.
            iter: maximum number of simulation iterations (name kept for
                interface compatibility although it shadows the builtin).
            xmlstrings: list of XML behavior phenotypes; agents are split
                evenly among them. Must be a non-empty list.
            seed: RNG seed forwarded to the base ``Model``.
            viewer: attach a UI viewer when True.
            pname: parent directory for the result folder.
            expname: experiment name used for the db record and folder.
            agent: name of the agent class to instantiate (resolved via
                eval; must be one of the imported agent classes).
        """
        if seed is None:
            super(SimCTModel, self).__init__(seed=None)
        else:
            super(SimCTModel, self).__init__(seed)

        # Run id: epoch seconds plus a random 1-1000 suffix.
        self.runid = datetime.datetime.now().strftime(
            "%s") + str(self.random.randint(1, 1000, 1)[0])

        if pname is None:
            self.pname = os.getcwd() + '/' + self.runid + expname
        else:
            self.pname = pname + '/' + self.runid + expname

        self.width = width
        self.height = height
        self.stepcnt = 1
        self.iter = iter
        self.xmlstrings = xmlstrings
        self.viewer = viewer

        # Create db connection and register this experiment run.
        connect = Connect('swarm', 'swarm', 'swarm', 'localhost')
        self.connect = connect.tns_connect()
        self.experiment = Experiment(
            self.connect, self.runid, N, seed, expname,
            iter, width, height, grid, phenotype=xmlstrings[0])
        self.experiment.insert_experiment_simulation()
        self.sn = self.experiment.sn

        # Create a folder to store results.
        os.mkdir(self.pname)

        self.num_agents = N
        self.grid = Grid(width, height, grid)
        self.schedule = SimultaneousActivation(self)
        self.agents = []

        # Number of agents assigned to each phenotype.
        bound = np.ceil((self.num_agents * 1.0) / len(self.xmlstrings))
        j = 0
        for i in range(self.num_agents):
            # SECURITY: eval() resolves the agent class by name; safe
            # only while callers pass one of the imported class names.
            a = eval(agent)(i, self, xmlstring=self.xmlstrings[j])
            self.schedule.add(a)
            # All agents start at the origin.
            x = 0
            y = 0
            a.location = (x, y)
            self.grid.add_object_to_grid((x, y), a)
            a.operation_threshold = 2  # self.num_agents // 10
            self.agents.append(a)
            # Move to the next phenotype once its share is filled.
            if (i + 1) % bound == 0:
                j += 1

    def create_environment_object(self, jsondata, obj):
        """Instantiate every object of class *obj* described in *jsondata*.

        Objects are placed on the grid as they are created; ``q_value``
        is forwarded when present in the JSON record. Returns the list
        of created objects.
        """
        name = obj.__name__.lower()
        temp_list = []
        i = 0
        for json_object in jsondata[name]:
            location = (json_object["x"], json_object["y"])
            if "q_value" in json_object:
                temp_obj = obj(
                    i, location, json_object["radius"],
                    q_value=json_object["q_value"])
            else:
                temp_obj = obj(i, location, json_object["radius"])
            self.grid.add_object_to_grid(location, temp_obj)
            temp_list.append(temp_obj)
            i += 1
        return temp_list

    def build_environment_from_json(self):
        """Build hub, sites and food from the packaged world JSON file."""
        jsondata = JsonData.load_json_file(filename)
        # JsonData instance storing objects that are sent to the UI.
        self.render = JsonData()
        self.render.objects = {}
        for name in jsondata.keys():
            # SECURITY: eval() maps a JSON key to an imported class; see
            # note in __init__.
            obj = eval(name.capitalize())
            self.render.objects[name] = self.create_environment_object(
                jsondata, obj)

        self.hub = self.render.objects['hub'][0]
        try:
            self.foods = []
            self.site = self.render.objects['sites'][0]
            # Food radius is randomized per run, unlike SimForgModel.
            food_radius = self.random.randint(20, 30)
            for i in range(self.num_agents):
                f = Food(
                    i, location=self.site.location,
                    radius=food_radius)
                f.agent_name = None
                self.grid.add_object_to_grid(f.location, f)
                self.foods.append(f)
        except KeyError:
            pass

        if self.viewer:
            self.ui = UI(
                (self.width, self.height), [self.hub], self.agents,
                [self.site], food=self.foods)

    def step(self):
        """Advance the simulation by one scheduler step."""
        self.schedule.step()
        self.stepcnt += 1
        if self.viewer:
            self.ui.step()

    def find_higest_performer(self):
        """Return the agent with the highest fitness.

        Bug fix: the original never updated the running-best fitness, so
        it returned the *last* agent better than agents[0] rather than
        the overall best.
        """
        return max(self.agents, key=lambda agent: agent.individual[0].fitness)

    def find_higest_food_collector(self):
        """Return the agent that collected the most food.

        Bug fix: same stale-baseline comparison as
        ``find_higest_performer``; now a true argmax.
        """
        return max(self.agents, key=lambda agent: agent.food_collected)

    def detect_food_moved(self):
        """Return food objects within radius 10 of the site."""
        neighbours = self.grid.get_neighborhood(self.site.location, 10)
        return self.grid.get_objects_from_list_of_grid('Food', neighbours)

    def food_in_hub(self):
        """Return the amount of food within radius 10 of the hub."""
        neighbours = self.grid.get_neighborhood(self.hub.location, 10)
        food_objects = self.grid.get_objects_from_list_of_grid(
            'Food', neighbours)
        return len(food_objects)

    def food_in_loc(self, loc):
        """Return food objects within radius 10 of *loc*."""
        neighbours = self.grid.get_neighborhood(loc, 10)
        return self.grid.get_objects_from_list_of_grid('Food', neighbours)
class SimNMModel(Model):
    """An environment to model nest-maintenance (debris-clearing) swarms."""

    def __init__(
            self, N, width, height, grid=10, iter=100000,
            xmlstrings=None, seed=None, viewer=False, pname=None,
            expname='NMSimulation', agent='SimNMAgent'):
        """Initialize the attributes.

        Args:
            N: number of agents to create.
            width, height: dimensions of the world grid.
            grid: grid cell granularity.
            iter: maximum number of simulation iterations (name kept for
                interface compatibility although it shadows the builtin).
            xmlstrings: list of XML behavior phenotypes; agents are split
                evenly among them. Must be a non-empty list.
            seed: RNG seed forwarded to the base ``Model``.
            viewer: attach a UI viewer when True.
            pname: parent directory for the result folder.
            expname: experiment name used for the db record and folder.
            agent: name of the agent class to instantiate (resolved via
                eval; must be one of the imported agent classes).
        """
        if seed is None:
            super(SimNMModel, self).__init__(seed=None)
        else:
            super(SimNMModel, self).__init__(seed)

        # Run id: full timestamp with the decimal point stripped (finer
        # grained than the strftime("%s") id used by the other models).
        self.runid = datetime.datetime.now().timestamp()
        self.runid = str(self.runid).replace('.', '')

        if pname is None:
            self.pname = os.getcwd() + '/' + self.runid + expname
        else:
            self.pname = pname + '/' + self.runid + expname

        self.width = width
        self.height = height
        self.stepcnt = 1
        self.iter = iter
        self.xmlstrings = xmlstrings
        self.viewer = viewer

        # Create db connection and register this experiment run.
        connect = Connect('swarm', 'swarm', 'swarm', 'localhost')
        self.connect = connect.tns_connect()
        self.experiment = Experiment(
            self.connect, self.runid, N, seed, expname,
            iter, width, height, grid, phenotype=xmlstrings[0])
        self.experiment.insert_experiment_simulation()
        self.sn = self.experiment.sn

        # Create a folder to store results.
        os.mkdir(self.pname)

        self.num_agents = N
        self.grid = Grid(width, height, grid)
        self.schedule = SimultaneousActivation(self)
        self.agents = []

        # Number of agents assigned to each phenotype.
        bound = np.ceil((self.num_agents * 1.0) / len(self.xmlstrings))
        j = 0
        for i in range(self.num_agents):
            # SECURITY: eval() resolves the agent class by name; safe
            # only while callers pass one of the imported class names.
            a = eval(agent)(i, self, xmlstring=self.xmlstrings[j])
            self.schedule.add(a)
            # All agents start at the origin.
            x = 0
            y = 0
            a.location = (x, y)
            self.grid.add_object_to_grid((x, y), a)
            a.operation_threshold = 2  # self.num_agents // 10
            self.agents.append(a)
            # Move to the next phenotype once its share is filled.
            if (i + 1) % bound == 0:
                j += 1

    def create_environment_object(self, jsondata, obj):
        """Instantiate every object of class *obj* described in *jsondata*.

        Objects are placed on the grid as they are created; ``q_value``
        is forwarded when present in the JSON record. Returns the list
        of created objects.
        """
        name = obj.__name__.lower()
        temp_list = []
        i = 0
        for json_object in jsondata[name]:
            location = (json_object["x"], json_object["y"])
            if "q_value" in json_object:
                temp_obj = obj(
                    i, location, json_object["radius"],
                    q_value=json_object["q_value"])
            else:
                temp_obj = obj(i, location, json_object["radius"])
            self.grid.add_object_to_grid(location, temp_obj)
            temp_list.append(temp_obj)
            i += 1
        return temp_list

    def build_environment_from_json(self):
        """Build hub, debris and drop-off obstacles from the world JSON."""
        jsondata = JsonData.load_json_file(filename)
        # JsonData instance storing objects that are sent to the UI.
        self.render = JsonData()
        self.render.objects = {}
        for name in jsondata.keys():
            # SECURITY: eval() maps a JSON key to an imported class; see
            # note in __init__.
            obj = eval(name.capitalize())
            self.render.objects[name] = self.create_environment_object(
                jsondata, obj)

        self.hub = self.render.objects['hub'][0]
        # Bug fix: initialize outside the try-block so the attribute
        # exists for the viewer branch regardless of what happens below.
        self.debris = []
        try:
            # NOTE(review): nothing in this body obviously raises
            # KeyError (the hub is already resolved above) — the guard
            # is kept for parity with the other models.
            for i in range(self.num_agents):
                # Scatter debris at random offsets near the hub.
                dx, dy = self.random.randint(1, 10, 2)
                dx = self.hub.location[0] + dx
                dy = self.hub.location[1] + dy
                d = Debris(
                    i, location=(dx, dy),
                    radius=5)
                d.agent_name = None
                self.grid.add_object_to_grid(d.location, d)
                self.debris.append(d)
        except KeyError:
            pass

        # Create a place for the agents to drop the derbis.
        try:
            self.obstacles = []
            for i in range(1):
                dx, dy = self.random.randint(5, 10, 2)
                dx = self.hub.location[0] + 25 + dx
                dy = self.hub.location[1] + 25 + dy
                o = Obstacles(id=i, location=(dx, dy), radius=10)
                self.grid.add_object_to_grid(o.location, o)
                self.obstacles.append(o)
        except AttributeError:
            pass

        if self.viewer:
            self.ui = UI(
                (self.width, self.height), [self.hub], self.agents,
                [], food=[], derbis=self.debris)

    def step(self):
        """Advance the simulation by one scheduler step."""
        self.schedule.step()
        self.stepcnt += 1
        if self.viewer:
            self.ui.step()

    def find_higest_performer(self):
        """Return the agent with the highest fitness.

        Bug fix: the original never updated the running-best fitness, so
        it returned the *last* agent better than agents[0] rather than
        the overall best.
        """
        return max(self.agents, key=lambda agent: agent.individual[0].fitness)

    def find_higest_debris_collector(self):
        """Return the agent that collected the most debris.

        Bug fixes: the original compared ``food_collected`` (a copy-paste
        from the foraging model) against a debris baseline, and never
        updated the running-best value. Now a true argmax on
        ``debris_collected``.
        """
        return max(self.agents, key=lambda agent: agent.debris_collected)

    def detect_debris_moved(self):
        """Return debris objects within radius 30 of the hub."""
        neighbours = self.grid.get_neighborhood(self.hub.location, 30)
        return self.grid.get_objects_from_list_of_grid('Debris', neighbours)

    def debris_around_hub(self):
        """Return the amount of debris within radius 20 of the hub."""
        neighbours = self.grid.get_neighborhood(self.hub.location, 20)
        debris_objects = self.grid.get_objects_from_list_of_grid(
            'Debris', neighbours)
        return len(debris_objects)

    def debris_in_loc(self, loc):
        """Return debris objects within radius 10 of *loc*."""
        neighbours = self.grid.get_neighborhood(loc, 10)
        return self.grid.get_objects_from_list_of_grid('Debris', neighbours)

    def debris_cleaned(self):
        """Return the distinct debris delivered inside any obstacle zone."""
        debris_objects = []
        for obstacle in self.obstacles:
            neighbours = self.grid.get_neighborhood(
                obstacle.location, obstacle.radius)
            debris_objects += self.grid.get_objects_from_list_of_grid(
                'Debris', neighbours)
        # De-duplicate debris found in overlapping neighborhoods.
        return list(set(debris_objects))
| 33.335516
| 79
| 0.562402
| 2,433
| 20,368
| 4.569667
| 0.085902
| 0.027343
| 0.017539
| 0.019878
| 0.872009
| 0.852312
| 0.835942
| 0.813276
| 0.788361
| 0.777658
| 0
| 0.013969
| 0.328702
| 20,368
| 611
| 80
| 33.335516
| 0.799166
| 0.146455
| 0
| 0.820639
| 0
| 0
| 0.02127
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068796
| false
| 0.009828
| 0.031941
| 0
| 0.154791
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
59898e413aa624e9d2c97dc8d3d3f7b6d1fd4c15
| 8,537
|
py
|
Python
|
mailchimp/services/app-server/tests/test_app.py
|
filibuster-testing/filibuster-corpus
|
225ee0017005801bee591137f82117fe37a0f899
|
[
"Apache-2.0"
] | 7
|
2021-11-01T21:09:47.000Z
|
2022-03-16T20:38:57.000Z
|
mailchimp/services/app-server/tests/test_app.py
|
filibuster-testing/filibuster-corpus
|
225ee0017005801bee591137f82117fe37a0f899
|
[
"Apache-2.0"
] | null | null | null |
mailchimp/services/app-server/tests/test_app.py
|
filibuster-testing/filibuster-corpus
|
225ee0017005801bee591137f82117fe37a0f899
|
[
"Apache-2.0"
] | null | null | null |
import requests
import sys
import os
import enum
import importlib

# Use tuple comparison: the old "[0] >= 3 and [1] >= 3" form mis-handles
# any release whose minor version is < 3 (e.g. a hypothetical 4.0).
if sys.version_info >= (3, 3):
    from unittest import mock
else:
    import mock

service_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(service_path)

# Bug fix: "from app-server.app import app" is a SyntaxError — a module
# name containing a hyphen cannot appear in an import statement. Load it
# dynamically by its string name instead.
app = importlib.import_module("app-server.app").app

parent_path = os.path.dirname(os.path.dirname(
    os.path.dirname(os.path.dirname(os.path.realpath(__file__)))))
sys.path.append(parent_path)

import helper

helper = helper.Helper("mailchimp")
class MockFailure(enum.Enum):
    """Failure scenarios that the mocked HTTP layer can inject."""
    # Every downstream service behaves normally.
    SUCCESS = 0
    # requestmapper raises ConnectionError / Timeout.
    REQUESTMAPPER_FAIL = 1
    REQUESTMAPPER_TIMEOUT = 2
    # db-primary raises ConnectionError / Timeout, or rejects writes (403).
    DB_PRIMARY_FAIL = 3
    DB_PRIMARY_TIMEOUT = 4
    DB_PRIMARY_READ_ONLY = 5
    # db-primary is down AND db-secondary also fails in the given way.
    DB_PRIMARY_FAIL_DB_SECONDARY_FAIL = 6
    DB_PRIMARY_FAIL_DB_SECONDARY_TIMEOUT = 7
    DB_PRIMARY_FAIL_DB_SECONDARY_READ_ONLY = 8
class MockResponse:
    """Minimal stand-in for ``requests.Response`` used by the mocks."""

    def __init__(self, json_data, status_code):
        # Canned payload and HTTP status returned by the fake call.
        self.json_data = json_data
        self.status_code = status_code

    def json(self):
        """Return the canned JSON payload, mirroring ``Response.json()``."""
        return self.json_data
def mock_requests_get_with_failure_setting(failure_setting):
    """Build a ``requests.get`` replacement wired to *failure_setting*.

    The returned callable matches the requested URL against the known
    service endpoints and, per the configured scenario, raises a
    connection error / timeout or returns a canned :class:`MockResponse`.
    Unrecognized URLs fall through and return ``None``.
    """

    def mock_requests_get(*args, **kwargs):
        url = "http://{}:{}/urls/prettyurl".format(
            helper.resolve_requests_host('requestmapper'),
            helper.get_port('requestmapper'))
        if args == (url,):
            if failure_setting == MockFailure.REQUESTMAPPER_FAIL:
                raise requests.exceptions.ConnectionError
            if failure_setting == MockFailure.REQUESTMAPPER_TIMEOUT:
                raise requests.exceptions.Timeout
            return MockResponse(RESPONSE, 200)

        url = "http://{}:{}/read".format(
            helper.resolve_requests_host('db-primary'),
            helper.get_port('db-primary'))
        if args == (url,):
            # Any scenario that starts with a primary failure kills the read.
            primary_down = (
                MockFailure.DB_PRIMARY_FAIL,
                MockFailure.DB_PRIMARY_FAIL_DB_SECONDARY_FAIL,
                MockFailure.DB_PRIMARY_FAIL_DB_SECONDARY_TIMEOUT,
                MockFailure.DB_PRIMARY_FAIL_DB_SECONDARY_READ_ONLY,
            )
            if failure_setting in primary_down:
                raise requests.exceptions.ConnectionError
            if failure_setting == MockFailure.DB_PRIMARY_TIMEOUT:
                raise requests.exceptions.Timeout
            return MockResponse({}, 200)

        url = "http://{}:{}/read".format(
            helper.resolve_requests_host('db-secondary'),
            helper.get_port('db-secondary'))
        if args == (url,):
            if failure_setting == MockFailure.DB_PRIMARY_FAIL_DB_SECONDARY_FAIL:
                raise requests.exceptions.ConnectionError
            if failure_setting == MockFailure.DB_PRIMARY_FAIL_DB_SECONDARY_TIMEOUT:
                raise requests.exceptions.Timeout
            return MockResponse({}, 200)

    return mock_requests_get
def mock_requests_post_with_failure_setting(failure_setting):
    """Build a ``requests.post`` replacement wired to *failure_setting*.

    Matches write URLs for db-primary and db-secondary; read-only
    scenarios return 403, failures raise, everything else returns 200.
    Unrecognized URLs fall through and return ``None``.
    """

    def mock_requests_post(*args, **kwargs):
        # NOTE(review): the primary write URL is hard-coded rather than
        # resolved through ``helper`` like the other endpoints — confirm
        # it matches the deployed db-primary address.
        url = "http://0.0.0.0:5003/write/urls/prettyurl"
        if args == (url,):
            primary_down = (
                MockFailure.DB_PRIMARY_FAIL,
                MockFailure.DB_PRIMARY_FAIL_DB_SECONDARY_FAIL,
                MockFailure.DB_PRIMARY_FAIL_DB_SECONDARY_TIMEOUT,
                MockFailure.DB_PRIMARY_FAIL_DB_SECONDARY_READ_ONLY,
            )
            if failure_setting in primary_down:
                raise requests.exceptions.ConnectionError
            if failure_setting == MockFailure.DB_PRIMARY_TIMEOUT:
                raise requests.exceptions.Timeout
            if failure_setting == MockFailure.DB_PRIMARY_READ_ONLY:
                return MockResponse({}, 403)
            return MockResponse({}, 200)

        url = "http://{}:{}/write/urls/prettyurl".format(
            helper.resolve_requests_host('db-secondary'),
            helper.get_port('db-secondary'))
        if args == (url,):
            if failure_setting == MockFailure.DB_PRIMARY_FAIL_DB_SECONDARY_FAIL:
                raise requests.exceptions.ConnectionError
            if failure_setting == MockFailure.DB_PRIMARY_FAIL_DB_SECONDARY_TIMEOUT:
                raise requests.exceptions.Timeout
            if failure_setting == MockFailure.DB_PRIMARY_FAIL_DB_SECONDARY_READ_ONLY:
                return MockResponse({}, 403)
            return MockResponse({}, 200)

    return mock_requests_post
@mock.patch('requests.get', side_effect=mock_requests_get_with_failure_setting(MockFailure.SUCCESS))
@mock.patch('requests.post', side_effect=mock_requests_post_with_failure_setting(MockFailure.SUCCESS))
def test_app_server_success(mock_get, mock_post):
    """Happy path: all services healthy -> 200 with the mapped URL."""
    client = app.test_client()
    reply = client.get("/urls/prettyurl")
    assert reply.status_code == 200
    assert reply.json == RESPONSE
@mock.patch('requests.get', side_effect=mock_requests_get_with_failure_setting(MockFailure.REQUESTMAPPER_FAIL))
@mock.patch('requests.post', side_effect=mock_requests_post_with_failure_setting(MockFailure.SUCCESS))
def test_app_server_requestmapper_fail(mock_get, mock_post):
    """requestmapper connection failure surfaces as a 500."""
    client = app.test_client()
    reply = client.get("/urls/prettyurl")
    assert reply.status_code == 500
@mock.patch('requests.get', side_effect=mock_requests_get_with_failure_setting(MockFailure.REQUESTMAPPER_TIMEOUT))
@mock.patch('requests.post', side_effect=mock_requests_post_with_failure_setting(MockFailure.SUCCESS))
def test_app_server_requestmapper_timeout(mock_get, mock_post):
    """requestmapper timeout surfaces as a 500."""
    client = app.test_client()
    reply = client.get("/urls/prettyurl")
    assert reply.status_code == 500
@mock.patch('requests.get', side_effect=mock_requests_get_with_failure_setting(MockFailure.DB_PRIMARY_FAIL))
@mock.patch('requests.post', side_effect=mock_requests_post_with_failure_setting(MockFailure.DB_PRIMARY_FAIL))
def test_app_server_db_primary_fail(mock_get, mock_post):
    """Primary DB down: the app falls back to the secondary -> still 200."""
    client = app.test_client()
    reply = client.get("/urls/prettyurl")
    assert reply.status_code == 200
    assert reply.json == RESPONSE
@mock.patch('requests.get', side_effect=mock_requests_get_with_failure_setting(MockFailure.DB_PRIMARY_TIMEOUT))
@mock.patch('requests.post', side_effect=mock_requests_post_with_failure_setting(MockFailure.DB_PRIMARY_TIMEOUT))
def test_app_server_db_primary_timeout(mock_get, mock_post):
    """Primary DB timeout: fallback keeps the request successful."""
    client = app.test_client()
    reply = client.get("/urls/prettyurl")
    assert reply.status_code == 200
    assert reply.json == RESPONSE
@mock.patch('requests.get', side_effect=mock_requests_get_with_failure_setting(MockFailure.DB_PRIMARY_READ_ONLY))
@mock.patch('requests.post', side_effect=mock_requests_post_with_failure_setting(MockFailure.DB_PRIMARY_READ_ONLY))
def test_app_server_db_primary_read_only(mock_get, mock_post):
    """Primary DB read-only (403 on write): 200 result plus an alert."""
    client = app.test_client()
    reply = client.get("/urls/prettyurl")
    assert reply.status_code == 200
    assert reply.json["result"] == RESPONSE["result"]
    assert reply.json["alert"] == ALERT
@mock.patch('requests.get', side_effect=mock_requests_get_with_failure_setting(MockFailure.DB_PRIMARY_FAIL_DB_SECONDARY_FAIL))
@mock.patch('requests.post', side_effect=mock_requests_post_with_failure_setting(MockFailure.DB_PRIMARY_FAIL_DB_SECONDARY_FAIL))
def test_app_server_db_primary_fail_db_secondary_fail(mock_get, mock_post):
    """Both DBs down -> 500."""
    client = app.test_client()
    reply = client.get("/urls/prettyurl")
    assert reply.status_code == 500
@mock.patch('requests.get', side_effect=mock_requests_get_with_failure_setting(MockFailure.DB_PRIMARY_FAIL_DB_SECONDARY_TIMEOUT))
@mock.patch('requests.post', side_effect=mock_requests_post_with_failure_setting(MockFailure.DB_PRIMARY_FAIL_DB_SECONDARY_TIMEOUT))
def test_app_server_db_primary_fail_db_secondary_timeout(mock_get, mock_post):
    """Primary down and secondary timing out -> 500."""
    client = app.test_client()
    reply = client.get("/urls/prettyurl")
    assert reply.status_code == 500
@mock.patch('requests.get', side_effect=mock_requests_get_with_failure_setting(MockFailure.DB_PRIMARY_FAIL_DB_SECONDARY_READ_ONLY))
@mock.patch('requests.post', side_effect=mock_requests_post_with_failure_setting(MockFailure.DB_PRIMARY_FAIL_DB_SECONDARY_READ_ONLY))
def test_app_server_db_primary_fail_db_secondary_read_only(mock_get, mock_post):
    """Primary down, secondary read-only -> 200 result plus an alert."""
    client = app.test_client()
    reply = client.get("/urls/prettyurl")
    assert reply.status_code == 200
    assert reply.json["result"] == RESPONSE["result"]
    assert reply.json["alert"] == ALERT
# Canned payload returned by the mocked requestmapper service.
RESPONSE = {
    "result": "internalurl"
}

# Alert text the app surfaces when DB writes are rejected.
ALERT = "cannot write to DB"
| 45.652406
| 213
| 0.746164
| 1,101
| 8,537
| 5.393279
| 0.082652
| 0.074267
| 0.063489
| 0.0581
| 0.880094
| 0.868811
| 0.853654
| 0.829404
| 0.790165
| 0.726339
| 0
| 0.009548
| 0.15345
| 8,537
| 186
| 214
| 45.897849
| 0.812094
| 0.016165
| 0
| 0.493243
| 0
| 0
| 0.079314
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 0
| null | null | 0
| 0.060811
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
59c9447ee634c5cd107fe735ed1ddae5cc02f9ed
| 35,243
|
py
|
Python
|
tests/test_backend_v2.py
|
liuyu81/datagator-contrib
|
813529e211f680732bd1dc9568f5b4f2bdcacdcc
|
[
"Apache-2.0"
] | 2
|
2015-02-20T02:50:07.000Z
|
2017-05-02T19:26:42.000Z
|
tests/test_backend_v2.py
|
liuyu81/datagator-contrib
|
813529e211f680732bd1dc9568f5b4f2bdcacdcc
|
[
"Apache-2.0"
] | null | null | null |
tests/test_backend_v2.py
|
liuyu81/datagator-contrib
|
813529e211f680732bd1dc9568f5b4f2bdcacdcc
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
tests.test_backend_v2
~~~~~~~~~~~~~~~~~~~~~
:copyright: 2015 by `University of Denver <http://pardee.du.edu/>`_
:license: Apache 2.0, see LICENSE for more details.
:author: `LIU Yu <liuyu@opencps.net>`_
:date: 2015/02/26
"""
from __future__ import unicode_literals
import json
import jsonschema
import logging
import os
import sys
import time
try:
from . import config
from .config import *
except (ValueError, ImportError):
import config
from config import *
from datagator.api.client import environ
from datagator.api.client._backend import DataGatorService
__all__ = ['TestRoot',
'TestRepo',
'TestDataSet',
'TestDataItem',
'TestRecipe',
'TestSearch',
'TestAccount',
'TestHttpCrossOrigin',
'TestHttpRateLimit', ]
__all__ = [to_native(n) for n in __all__]
_log = logging.getLogger("datagator.{0}".format(__name__))
def monitor_task(service, url, retry=180):
    """Poll *url* until the task reaches a terminal state.

    Fetches the task document at most *retry* times, sleeping 3 seconds
    between polls, and returns the last document seen (``None`` when
    *retry* is not positive). Terminal states are "SUC" and "ERR".
    """
    task = None
    for _ in range(retry):
        task = service.get(url).json()
        assert(task.get("kind") == "datagator#Task")
        if task.get("status") in ("SUC", "ERR"):
            break
        time.sleep(3.0)
    return task
class TestRoot(unittest.TestCase):
    """
    Endpoint:
        ``^/``
        ``^/schema``
    """

    @classmethod
    def setUpClass(cls):
        # Pin the backend API version before creating the shared client.
        environ.DATAGATOR_API_VERSION = "v2"
        cls.service = DataGatorService()
        pass  # void return

    @classmethod
    def tearDownClass(cls):
        # Drop the shared client so its connections can be released.
        del cls.service
        pass  # void return

    def test_ROOT_status(self):
        # The status document must validate against the service schema
        # and echo the negotiated API version.
        msg = self.service.status
        validator = jsonschema.Draft4Validator(self.service.schema)
        self.assertEqual(validator.validate(msg), None)
        self.assertEqual(msg.get("kind"), "datagator#Status")
        self.assertEqual(msg.get("code"), 200)
        self.assertEqual(msg.get("version"), environ.DATAGATOR_API_VERSION)
        pass  # void return

    pass
@unittest.skipIf(
    not os.environ.get('DATAGATOR_CREDENTIALS', None) and
    os.environ.get('TRAVIS', False),
    "credentials required for unsupervised testing")
class TestRepo(unittest.TestCase):
    """
    Endpoint:
        ``^/repo/{repo}``
    """

    @classmethod
    def setUpClass(cls):
        # Authenticated client; schema validator shared by all tests.
        environ.DATAGATOR_API_VERSION = "v2"
        cls.repo, cls.secret = get_credentials()
        cls.service = DataGatorService(auth=(cls.repo, cls.secret))
        cls.validator = jsonschema.Draft4Validator(cls.service.schema)
        pass  # void return

    @classmethod
    def tearDownClass(cls):
        del cls.service
        pass  # void return

    def test_Repo_base_GET(self):
        # Fetching our own repo returns a valid datagator#Repo document.
        uri = "repo/{0}".format(self.repo)
        response = self.service.get(uri)
        self.assertEqual(response.status_code, 200)
        repo = response.json()
        self.assertEqual(self.validator.validate(repo), None)
        self.assertEqual(repo.get("kind"), "datagator#Repo")
        self.assertEqual(repo.get("name"), self.repo)
        pass  # void return

    def test_Repo_base_GET_NonExistence(self):
        # Unknown repo names yield a schema-valid 404 error document.
        uri = "repo/NonExistence"
        response = self.service.get(uri)
        self.assertEqual(response.status_code, 404)
        msg = response.json()
        self.assertEqual(self.validator.validate(msg), None)
        self.assertEqual(msg.get("kind"), "datagator#Error")
        self.assertEqual(msg.get("code"), response.status_code)
        pass  # void return

    def test_Repo_content_GET(self):
        # The repo content listing is a paginated datagator#Page.
        uri = "repo/{0}/".format(self.repo)
        response = self.service.get(uri)
        self.assertEqual(response.status_code, 200)
        page = response.json()
        self.assertEqual(self.validator.validate(page), None)
        self.assertEqual(page.get("kind"), "datagator#Page")
        pass  # void return

    pass
@unittest.skipIf(
not os.environ.get('DATAGATOR_CREDENTIALS', None) and
os.environ.get('TRAVIS', False),
"credentials required for unsupervised testing")
class TestDataSet(unittest.TestCase):
"""
Endpoint (Base):
``^/repo/{repo}/{dataset}``
Endpoint (Contents):
``^/repo/{repo}/{dataset}/data``
"""
@classmethod
def setUpClass(cls):
environ.DATAGATOR_API_VERSION = "v2"
cls.repo, cls.secret = get_credentials()
cls.service = DataGatorService(auth=(cls.repo, cls.secret))
cls.validator = jsonschema.Draft4Validator(cls.service.schema)
pass # void return
@classmethod
def tearDownClass(cls):
del cls.service
pass # void return
def test_DataSet_base_PUT_IGO_Members(self):
uri = "repo/{0}/{1}".format(self.repo, "IGO_Members")
IGO_Members = {
"kind": "datagator#DataSet",
"name": "IGO_Members",
"repo": {
"kind": "datagator#Repo",
"name": self.repo
}
}
response = self.service.put(uri, IGO_Members)
self.assertTrue(response.status_code in [200, 201])
msg = response.json()
self.assertEqual(self.validator.validate(msg), None)
_log.debug(msg.get("message"))
self.assertEqual(msg.get("kind"), "datagator#Status")
self.assertEqual(msg.get("code"), response.status_code)
pass # void return
def test_DataSet_base_PUT_Bakery(self):
uri = "repo/{0}/{1}".format(self.repo, "Bakery")
IGO_Members = {
"kind": "datagator#DataSet",
"name": "Bakery",
"repo": {
"kind": "datagator#Repo",
"name": self.repo
}
}
response = self.service.put(uri, IGO_Members)
self.assertTrue(response.status_code in [200, 201])
msg = response.json()
self.assertEqual(self.validator.validate(msg), None)
_log.debug(msg.get("message"))
self.assertEqual(msg.get("kind"), "datagator#Status")
self.assertEqual(msg.get("code"), response.status_code)
pass # void return
def test_DataSet_base_PUT_IGO_Aims(self):
uri = "repo/{0}/{1}".format(self.repo, "IGO_Aims")
IGO_Members = {
"kind": "datagator#DataSet",
"name": "IGO_Aims",
"repo": {
"kind": "datagator#Repo",
"name": self.repo
}
}
response = self.service.put(uri, IGO_Members)
self.assertTrue(response.status_code in [200, 201])
msg = response.json()
self.assertEqual(self.validator.validate(msg), None)
_log.debug(msg.get("message"))
self.assertEqual(msg.get("kind"), "datagator#Status")
self.assertEqual(msg.get("code"), response.status_code)
pass # void return
def test_DataSet_base_PUT_InvalidName(self):
# triggers SchemaValidationError within backend service
uri = "repo/{0}/{1}".format(self.repo, "IGO_Members")
InvalidName = {
"kind": "datagator#DataSet",
"name": "IGO Members",
"repo": {
"kind": "datagator#Repo",
"name": self.repo
}
}
response = self.service.put(uri, InvalidName)
self.assertEqual(response.status_code, 400)
msg = response.json()
self.assertEqual(self.validator.validate(msg), None)
_log.debug(msg.get("message"))
self.assertEqual(msg.get("kind"), "datagator#Error")
self.assertEqual(msg.get("code"), response.status_code)
pass # void return
def test_DataSet_base_PUT_MissingKind(self):
# triggers SchemaValidationError within backend service
uri = "repo/{0}/{1}".format(self.repo, "IGO_Members")
MissingKind = {
"name": "IGO_Members",
"repo": {
"kind": "datagator#Repo",
"name": self.repo
}
}
response = self.service.put(uri, MissingKind)
self.assertEqual(response.status_code, 400)
msg = response.json()
self.assertEqual(self.validator.validate(msg), None)
_log.debug(msg.get("message"))
self.assertEqual(msg.get("kind"), "datagator#Error")
self.assertEqual(msg.get("code"), response.status_code)
pass # void return
def test_DataSet_base_PUT_InvalidKind(self):
# triggers AssertionError within backend service
uri = "repo/{0}/{1}".format(self.repo, "Whatever")
InvalidKind = {
"kind": "datagator#Repo",
"name": "Whatever"
}
response = self.service.put(uri, InvalidKind)
self.assertEqual(response.status_code, 400)
msg = response.json()
self.assertEqual(self.validator.validate(msg), None)
_log.debug(msg.get("message"))
self.assertEqual(msg.get("kind"), "datagator#Error")
self.assertEqual(msg.get("code"), response.status_code)
pass # void return
def test_DataSet_base_PUT_InconsistentRepo(self):
# triggers AssertionError within backend service
uri = "repo/{0}/{1}".format(self.repo, "Whatever")
InconsistentRepo = {
"kind": "datagator#DataSet",
"name": "Whatever",
"repo": {
"kind": "datagator#Repo",
"name": "NonExistentRepo"
}
}
response = self.service.put(uri, InconsistentRepo)
self.assertEqual(response.status_code, 400)
msg = response.json()
self.assertEqual(self.validator.validate(msg), None)
_log.debug(msg.get("message"))
self.assertEqual(msg.get("kind"), "datagator#Error")
self.assertEqual(msg.get("code"), response.status_code)
pass # void return
def test_DataSet_content_PATCH_IGO_Members(self):
uri = "repo/{0}/{1}/data/".format(self.repo, "IGO_Members")
revision = {
"kind": "datagator#DataSet",
"repo": {
"kind": "datagator#Repo",
"name": self.repo
},
"name": "IGO_Members",
"items": [
{
"kind": "datagator#Matrix",
"name": "UN",
"data": json.loads(to_unicode(load_data(os.path.join(
"json", "IGO_Members", "UN.json")))),
},
{
"kind": "datagator#Matrix",
"name": "WTO",
"data": json.loads(to_unicode(load_data(os.path.join(
"json", "IGO_Members", "WTO.json")))),
},
{
"kind": "datagator#Matrix",
"name": "IMF",
"data": json.loads(to_unicode(load_data(os.path.join(
"json", "IGO_Members", "IMF.json")))),
},
{
"kind": "datagator#Matrix",
"name": "OPEC",
"data": json.loads(to_unicode(load_data(os.path.join(
"json", "IGO_Members", "OPEC.json")))),
}
],
"itemsCount": 4
}
response = self.service.patch(uri, revision)
self.assertEqual(response.status_code, 202)
msg = response.json()
self.assertEqual(self.validator.validate(msg), None)
_log.debug(msg.get("message"))
self.assertEqual(msg.get("kind"), "datagator#Status")
self.assertEqual(msg.get("code"), response.status_code)
# monitor the task until the revision is committed or an error occurs
self.assertTrue("Location" in response.headers)
url = response.headers['Location']
_log.debug(url)
task = monitor_task(self.service, url)
self.assertEqual(self.validator.validate(task), None)
self.assertEqual(task.get("kind"), "datagator#Task")
self.assertEqual(task.get("status"), "SUC")
pass # void return
def test_DataSet_content_PATCH_Bakery(self):
uri = "repo/{0}/{1}/data/".format(self.repo, "Bakery")
revision = {
"kind": "datagator#DataSet",
"repo": {
"kind": "datagator#Repo",
"name": self.repo
},
"name": "Bakery",
"items": [
{
"kind": "datagator#Recipe",
"name": "US_Membership",
"data": json.loads(to_unicode(load_data(os.path.join(
"json", "Bakery", "US_Membership.json"))))
}
],
"itemsCount": 1
}
response = self.service.patch(uri, revision)
self.assertEqual(response.status_code, 202)
msg = response.json()
self.assertEqual(self.validator.validate(msg), None)
_log.debug(msg.get("message"))
self.assertEqual(msg.get("kind"), "datagator#Status")
self.assertEqual(msg.get("code"), response.status_code)
# monitor the task until the revision is committed or an error occurs
self.assertTrue("Location" in response.headers)
url = response.headers['Location']
_log.debug(url)
task = monitor_task(self.service, url)
self.assertEqual(self.validator.validate(task), None)
self.assertEqual(task.get("kind"), "datagator#Task")
self.assertEqual(task.get("status"), "SUC")
pass # void return
def test_DataSet_content_PATCH_IGO_Aims(self):
# AAAID.json contains unescaped unicode characters
uri = "repo/{0}/{1}/data/".format(self.repo, "IGO_Aims")
revision = {
"kind": "datagator#DataSet",
"repo": {
"kind": "datagator#Repo",
"name": self.repo
},
"name": "IGO_Aims",
"items": [
{
"kind": "datagator#Matrix",
"name": "AAAID",
"data": json.loads(to_unicode(load_data(os.path.join(
"json", "IGO_Aims", "AAAID.json"))))
}
],
"itemsCount": 1
}
response = self.service.patch(uri, revision)
self.assertEqual(response.status_code, 202)
msg = response.json()
self.assertEqual(self.validator.validate(msg), None)
_log.debug(msg.get("message"))
self.assertEqual(msg.get("kind"), "datagator#Status")
self.assertEqual(msg.get("code"), response.status_code)
# monitor the task until the revision is committed or an error occurs
self.assertTrue("Location" in response.headers)
url = response.headers['Location']
_log.debug(url)
task = monitor_task(self.service, url)
self.assertEqual(self.validator.validate(task), None)
self.assertEqual(task.get("kind"), "datagator#Task")
self.assertEqual(task.get("status"), "SUC")
pass # void return
def test_DataSet_content_PATCH_InvalidPayload(self):
# triggers AssertionError within backend service
uri = "repo/{0}/{1}/data/".format(self.repo, "IGO_Members")
InvalidPayload = ["array", "as", "payload"]
response = self.service.patch(uri, InvalidPayload)
self.assertEqual(response.status_code, 400)
msg = response.json()
self.assertEqual(self.validator.validate(msg), None)
_log.debug(msg.get("message"))
self.assertEqual(msg.get("kind"), "datagator#Error")
self.assertEqual(msg.get("code"), response.status_code)
pass # void return
def test_DataSet_content_PATCH_MissingKind(self):
# triggers SchemaValidationError within backend service
uri = "repo/{0}/{1}/data/".format(self.repo, "IGO_Members")
MissingKind = {
"kind": "datagator#DataSet",
"repo": {
"kind": "datagator#Repo",
"name": self.repo
},
"name": "IGO_Members",
"items": [
{
"name": "IGO_Members",
"repo": {
"kind": "datagator#Repo",
"name": self.repo
}
}
],
"itemsCount": 1
}
response = self.service.patch(uri, MissingKind)
self.assertEqual(response.status_code, 400)
msg = response.json()
self.assertEqual(self.validator.validate(msg), None)
_log.debug(msg.get("message"))
self.assertEqual(msg.get("kind"), "datagator#Error")
self.assertEqual(msg.get("code"), response.status_code)
pass # void return
def test_DataSet_content_PATCH_InvalidKey(self):
# triggers AssertionError within backend service
uri = "repo/{0}/{1}/data/".format(self.repo, "IGO_Members")
InvalidKey = {
"kind": "datagator#DataSet",
"repo": {
"kind": "datagator#Repo",
"name": self.repo
},
"name": "IGO_Members",
"items": [
{
"kind": "datagator#Matrix",
"name": "U#N",
"data": json.loads(to_unicode(load_data(os.path.join(
"json", "IGO_Members", "WTO.json"))))
}
],
"itemsCount": 1
}
response = self.service.patch(uri, InvalidKey)
self.assertEqual(response.status_code, 400)
msg = response.json()
self.assertEqual(self.validator.validate(msg), None)
_log.debug(msg.get("message"))
self.assertEqual(msg.get("kind"), "datagator#Error")
self.assertEqual(msg.get("code"), response.status_code)
pass # void return
def test_DataSet_content_PATCH_InvalidKind(self):
# triggers AssertionError within backend service
uri = "repo/{0}/{1}/data/".format(self.repo, "IGO_Members")
InvalidKind = {
"kind": "datagator#DataSet",
"repo": {
"kind": "datagator#Repo",
"name": self.repo
},
"name": "IGO_Members",
"items": [
{
"kind": "datagator#DataSet",
"name": "IGO_Members",
"repo": {
"kind": "datagator#Repo",
"name": self.repo
}
}
],
"itemsCount": 1
}
response = self.service.patch(uri, InvalidKind)
self.assertEqual(response.status_code, 400)
msg = response.json()
self.assertEqual(self.validator.validate(msg), None)
_log.debug(msg.get("message"))
self.assertEqual(msg.get("kind"), "datagator#Error")
self.assertEqual(msg.get("code"), response.status_code)
pass # void return
def test_DataSet_content_PATCH_InvalidShape(self):
# triggers AssertionError within backend service
uri = "repo/{0}/{1}/data/".format(self.repo, "IGO_Members")
InvalidShape = {
"kind": "datagator#DataSet",
"repo": {
"kind": "datagator#Repo",
"name": self.repo
},
"name": "IGO_Members",
"items": [
{
"kind": "datagator#Matrix",
"name": "UN",
"data": {
"kind": "datagator#Matrix",
"rows": [
[1, 2, 3], [4, 5], [6, 7, 8]], # ill-formed row(s)
"columnsCount": 3,
"rowsCount": 3,
"rowHeaders": 0,
"columnHeaders": 0
}
}
],
"itemsCount": 1
}
response = self.service.patch(uri, InvalidShape)
self.assertEqual(response.status_code, 400)
msg = response.json()
self.assertEqual(self.validator.validate(msg), None)
_log.debug(msg.get("message"))
self.assertEqual(msg.get("kind"), "datagator#Error")
self.assertEqual(msg.get("code"), response.status_code)
pass # void return
def test_DataSet_content_PATCH_InconsistentShape(self):
# triggers AssertionError within backend service
uri = "repo/{0}/{1}/data/".format(self.repo, "IGO_Members")
InconsistentShape = {
"kind": "datagator#DataSet",
"repo": {
"kind": "datagator#Repo",
"name": self.repo
},
"name": "IGO_Members",
"items": [
{
"kind": "datagator#Matrix",
"name": "UN",
"data": {
"kind": "datagator#Matrix",
"rows": [[1, 2, 3], [4, 5, 6], [6, 7, 8]],
"columnsCount": 4, # inconsistent columns count
"rowsCount": 3,
"rowHeaders": 0,
"columnHeaders": 0
}
}
],
"itemsCount": 1
}
response = self.service.patch(uri, InconsistentShape)
self.assertEqual(response.status_code, 400)
msg = response.json()
self.assertEqual(self.validator.validate(msg), None)
_log.debug(msg.get("message"))
self.assertEqual(msg.get("kind"), "datagator#Error")
self.assertEqual(msg.get("code"), response.status_code)
pass # void return
def test_DataSet_content_PATCH_RemoveNonExistent(self):
# NOTE: this does NOT trigger an error on the backend service
uri = "repo/{0}/{1}/data/".format(self.repo, "IGO_Members")
RemoveNonExistent = {
"kind": "datagator#DataSet",
"repo": {
"kind": "datagator#Repo",
"name": self.repo
},
"name": "IGO_Members",
"items": [
{
"kind": "datagator#Matrix",
"name": "NonExistent",
"data": None
}
],
"itemsCount": 1
}
response = self.service.patch(uri, RemoveNonExistent)
self.assertEqual(response.status_code, 202)
msg = response.json()
self.assertEqual(self.validator.validate(msg), None)
_log.debug(msg.get("message"))
self.assertEqual(msg.get("kind"), "datagator#Status")
self.assertEqual(msg.get("code"), response.status_code)
# monitor the task until the revision is committed or an error occurs
self.assertTrue("Location" in response.headers)
url = response.headers['Location']
_log.debug(url)
task = monitor_task(self.service, url)
self.assertEqual(self.validator.validate(task), None)
self.assertEqual(task.get("kind"), "datagator#Task")
self.assertEqual(task.get("status"), "SUC")
pass # void return
def test_DataSet_base_GET(self):
uri = "repo/{0}/{1}".format(self.repo, "IGO_Members")
response = self.service.get(uri)
self.assertEqual(response.status_code, 200)
ds = response.json()
self.assertEqual(self.validator.validate(ds), None)
self.assertEqual(ds.get("kind"), "datagator#DataSet")
self.assertEqual(ds.get("name"), "IGO_Members")
# check if ds/repo/name matches the requested one
repo = ds.get("repo")
self.assertEqual(repo.get("kind"), "datagator#Repo")
self.assertEqual(repo.get("name"), self.repo)
pass # void return
def test_DataSet_base_GET_NonExistence(self):
uri = "repo/Pardee/NonExistence"
response = self.service.get(uri)
self.assertEqual(response.status_code, 404)
msg = response.json()
self.assertEqual(self.validator.validate(msg), None)
self.assertEqual(msg.get("kind"), "datagator#Error")
self.assertEqual(msg.get("code"), response.status_code)
pass # void return
pass
@unittest.skipIf(
    not os.environ.get('DATAGATOR_CREDENTIALS', None) and
    os.environ.get('TRAVIS', False),
    "credentials required for unsupervised testing")
class TestDataItem(unittest.TestCase):
    """
    Endpoint:
        ``^/repo/<repo>/<dataset>/<key>``
    """
    @classmethod
    def setUpClass(cls):
        # authenticate against the v2 API with repo-level credentials
        environ.DATAGATOR_API_VERSION = "v2"
        cls.repo, cls.secret = get_credentials()
        cls.service = DataGatorService(auth=(cls.repo, cls.secret))
        cls.validator = jsonschema.Draft4Validator(cls.service.schema)
    @classmethod
    def tearDownClass(cls):
        del cls.service
    def test_DataItem_GET(self):
        """Full GET of a matrix item, then a conditional GET via ETag."""
        uri = "repo/{0}/{1}/data/{2}".format(
            self.repo, "IGO_Members", "UN")
        expected = json.loads(to_unicode(
            load_data(os.path.join("json", "IGO_Members", "UN.json"))))
        # full GET
        response = self.service.get(uri)
        self.assertEqual(response.status_code, 200)
        item = response.json()
        self.assertIsNone(self.validator.validate(item))
        self.assertEqual(item.get("kind"), "datagator#Matrix")
        for key in ("rowsCount", "columnsCount", "rowHeaders",
                    "columnHeaders"):
            self.assertEqual(item.get(key), expected.get(key))
        # conditional GET: a matching ETag must yield 304 Not Modified
        etag = response.headers.get("ETag")
        response = self.service.get(uri, {"If-None-Match": etag})
        self.assertEqual(response.status_code, 304)
        self.assertIn("ETag", response.headers)
        self.assertEqual(response.headers['ETag'], etag)
    def test_DataItem_POST_MatrixToXlsx(self):
        """Request xlsx conversion; handle both immediate and deferred results."""
        uri = "repo/{0}/{1}/data/{2}".format(
            self.repo, "IGO_Members", "UN")
        # submit conversion request
        response = self.service.post(uri, data={"format": "xlsx"})
        self.assertIn(response.status_code, [201, 202])
        self.assertIn("Location", response.headers)
        url = response.headers['Location']
        _log.debug(url)
        if response.status_code == 201:
            # 201: the converted file is ready for download
            download = self.service.get(url)
            self.assertEqual(download.status_code, 200)
            self.assertIn("Content-Type", download.headers)
            self.assertEqual(
                download.headers['Content-Type'], "application/octet-stream")
            self.assertIn("Content-Disposition", download.headers)
        elif response.status_code == 202:
            # 202: conversion is pending; poll until completion
            task = monitor_task(self.service, url)
            self.assertIsNone(self.validator.validate(task))
            self.assertEqual(task.get("kind"), "datagator#Task")
            self.assertEqual(task.get("status"), "SUC")
@unittest.skipIf(
    not os.environ.get('DATAGATOR_CREDENTIALS', None) and
    os.environ.get('TRAVIS', False),
    "credentials required for unsupervised testing")
class TestRecipe(unittest.TestCase):
    """
    Endpoint:
        ``^/repo/<repo>/<dataset>/<key>.recipe``
    """
    @classmethod
    def setUpClass(cls):
        # authenticate against the v2 API with repo-level credentials
        environ.DATAGATOR_API_VERSION = "v2"
        cls.repo, cls.secret = get_credentials()
        cls.service = DataGatorService(auth=(cls.repo, cls.secret))
        cls.validator = jsonschema.Draft4Validator(cls.service.schema)
    @classmethod
    def tearDownClass(cls):
        del cls.service
    def test_Recipe_GET(self):
        """GET a recipe as JSON and compare it against the local fixture."""
        uri = "repo/{0}/{1}/data/{2}".format(
            self.repo, "Bakery", "US_Membership.recipe")
        AST = json.loads(to_unicode(
            load_data(os.path.join("json", "Bakery", "US_Membership.json"))))
        DGML = to_unicode(
            load_data(os.path.join("raw", "Bakery", "US_Membership.dgml")))
        # GET json
        response = self.service.get(uri)
        self.assertEqual(response.status_code, 200)
        item = response.json()
        self.assertIsNone(self.validator.validate(item))
        self.assertEqual(item.get("kind"), "datagator#Recipe")
        self.assertEqual(len(item), len(AST))
        # GET dgml
        # response = self.service.get("{0}?format=dgml".format(uri))
        # self.assertEqual(response.status_code, 200)
        # code = response.text
        # for u, v in zip(filter(None, code.split()),
        #                 filter(None, DGML.split())):
        #     self.assertEqual(u, v)
    def test_Recipe_POST(self):
        """Trigger baking of a recipe, await the task, download the matrix."""
        uri = "repo/{0}/{1}/data/{2}".format(
            self.repo, "Bakery", "US_Membership.recipe")
        response = self.service.post(uri, data={"act": "bake"})
        self.assertEqual(response.status_code, 202)
        msg = response.json()
        self.assertIsNone(self.validator.validate(msg))
        _log.debug(msg.get("message"))
        self.assertEqual(msg.get("kind"), "datagator#Status")
        self.assertEqual(msg.get("code"), response.status_code)
        # poll the task until the baking is completed or an error occurs
        self.assertIn("Location", response.headers)
        url = response.headers['Location']
        _log.debug(url)
        task = monitor_task(self.service, url)
        self.assertIsNone(self.validator.validate(task))
        self.assertEqual(task.get("kind"), "datagator#Task")
        self.assertEqual(task.get("status"), "SUC")
        # download the baked matrix
        uri = "repo/{0}/{1}/data/{2}".format(
            self.repo, "Bakery", "US_Membership")
        download = self.service.get(uri)
        self.assertEqual(download.status_code, 200)
        self.assertIn("Content-Type", download.headers)
        self.assertEqual(download.headers['Content-Type'], "application/json")
        self.assertIn("Content-Disposition", download.headers)
class TestSearch(unittest.TestCase):
    """
    Endpoint:
        ``^/search``
    """
    # placeholder suite: only service setup/teardown for now
    @classmethod
    def setUpClass(cls):
        environ.DATAGATOR_API_VERSION = "v2"
        cls.repo, cls.secret = get_credentials()
        cls.service = DataGatorService(auth=(cls.repo, cls.secret))
        cls.validator = jsonschema.Draft4Validator(cls.service.schema)
    @classmethod
    def tearDownClass(cls):
        del cls.service
@unittest.skipIf(
    not os.environ.get('DATAGATOR_CREDENTIALS', None) and
    os.environ.get('TRAVIS', False),
    "credentials required for unsupervised testing")
class TestAccount(unittest.TestCase):
    """Tests for the account endpoints (currently all disabled)."""
    @classmethod
    def setUpClass(cls):
        environ.DATAGATOR_API_VERSION = "v2"
        cls.repo, cls.secret = get_credentials()
        cls.service = DataGatorService(auth=(cls.repo, cls.secret))
        cls.validator = jsonschema.Draft4Validator(cls.service.schema)
    @classmethod
    def tearDownClass(cls):
        del cls.service
    # def test_Account_Clip_PUT(self):
    #     uri = "account/clip/"
    #     response = self.service.put(uri, "")
    #     self.assertEqual(response.status_code, 501)
    #     msg = response.json()
    #     _log.debug(msg.get("message"))
    #     self.assertEqual(msg.get("kind"), "datagator#Error")
    #     self.assertEqual(msg.get("code"), response.status_code)
    pass
class TestHttpCrossOrigin(unittest.TestCase):
    """Verify CORS response headers on simple requests and preflights."""
    # headers expected on every cross-origin response
    EXPOSED = ("Access-Control-Allow-Credentials",
               "Access-Control-Allow-Origin",
               "Access-Control-Expose-Headers")
    # additional headers expected on a preflight (OPTIONS) response
    PREFLIGHT = ("Access-Control-Allow-Methods",
                 "Access-Control-Allow-Headers") + EXPOSED
    @classmethod
    def setUpClass(cls):
        # anonymous service handle is enough for CORS checks
        environ.DATAGATOR_API_VERSION = "v2"
        cls.service = DataGatorService()
        cls.validator = jsonschema.Draft4Validator(cls.service.schema)
    def test_HTTP_cors_headers(self):
        """CORS headers are present on GET, preflight, and 404 preflight."""
        r = self.service.get("/", headers={'Origin': "http://example.com"})
        for name in self.EXPOSED:
            self.assertIn(name, r.headers)
        # CORS preflight
        r = self.service.options("/", headers={
            'Origin': "http://example.com",
            'Access-Control-Request-Headers': "Accept, Authorization",
            'Access-Control-Request-Methods': "GET"})
        for name in self.PREFLIGHT:
            self.assertIn(name, r.headers)
        self.assertEqual(r.status_code, 200)
        # CORS preflight (non-existent endpoint)
        r = self.service.options("/NonExistentEndpoint", headers={
            'Origin': "http://example.com",
            'Access-Control-Request-Headers': "Accept, Authorization",
            'Access-Control-Request-Methods': "GET"})
        for name in self.PREFLIGHT:
            self.assertIn(name, r.headers)
        self.assertEqual(r.status_code, 404)
@unittest.skipIf(
    not os.environ.get('DATAGATOR_CREDENTIALS', None) and
    os.environ.get('TRAVIS', False),
    "credentials required for unsupervised testing")
class TestHttpRateLimit(unittest.TestCase):
    """
    Test rate limiting headers
    """
    @classmethod
    def setUpClass(cls):
        environ.DATAGATOR_API_VERSION = "v2"
        cls.repo, cls.secret = get_credentials()
        cls.service = DataGatorService(auth=(cls.repo, cls.secret))
        cls.validator = jsonschema.Draft4Validator(cls.service.schema)
    @classmethod
    def tearDownClass(cls):
        del cls.service
    def test_HTTP_ratelimit_headers(self):
        """X-RateLimit-* headers are present and carry sane values."""
        r = self.service.get("/")
        for name in ("X-RateLimit-Limit", "X-RateLimit-Remaining",
                     "X-RateLimit-Reset"):
            self.assertIn(name, r.headers)
        total = int(r.headers["X-RateLimit-Limit"])
        remain = int(r.headers["X-RateLimit-Remaining"])
        reset = int(r.headers["X-RateLimit-Reset"])
        # authenticated quota is 2000 requests/window; reset is a UTC epoch
        self.assertEqual(total, 2000)
        self.assertTrue(0 <= remain < total)
        self.assertTrue(0 <= reset - time.time() < 3600)
def test_suite():
return unittest.TestSuite([
unittest.TestLoader().loadTestsFromTestCase(eval(c)) for c in __all__])
if __name__ == '__main__':
unittest.main(defaultTest=to_native("test_suite"))
| 35.563068
| 79
| 0.57217
| 3,685
| 35,243
| 5.379104
| 0.083311
| 0.101402
| 0.04722
| 0.047674
| 0.837403
| 0.812481
| 0.795429
| 0.772172
| 0.757895
| 0.751791
| 0
| 0.010428
| 0.29254
| 35,243
| 990
| 80
| 35.59899
| 0.784583
| 0.084811
| 0
| 0.685751
| 0
| 0
| 0.162801
| 0.025539
| 0
| 0
| 0
| 0
| 0.207379
| 1
| 0.061069
| false
| 0.072519
| 0.017812
| 0.001272
| 0.092875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
ab750c9df59e0ef44a51ba0e90ed23e1e052dc0c
| 172
|
py
|
Python
|
identity_converter/__init__.py
|
johnrkriter/aad-pod-identity-converter
|
8a99e42caf0d4e4e2a743e168fa6558f058bd83f
|
[
"MIT"
] | 2
|
2020-10-02T08:55:21.000Z
|
2020-10-21T04:58:04.000Z
|
identity_converter/__init__.py
|
johnrkriter/aad-pod-identity-converter
|
8a99e42caf0d4e4e2a743e168fa6558f058bd83f
|
[
"MIT"
] | 3
|
2020-11-04T09:52:25.000Z
|
2020-11-11T07:10:45.000Z
|
identity_converter/__init__.py
|
johnrkriter/aad-pod-identity-converter
|
8a99e42caf0d4e4e2a743e168fa6558f058bd83f
|
[
"MIT"
] | null | null | null |
from .converter import read_input_csv
from .converter import write_azure_identity
from .converter import write_azure_identity_binding
from .converter import full_conversion
| 43
| 51
| 0.889535
| 24
| 172
| 6.041667
| 0.5
| 0.358621
| 0.524138
| 0.331034
| 0.510345
| 0.510345
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087209
| 172
| 4
| 52
| 43
| 0.923567
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
abaa3b6f303703fe13728063c2e3f13bc5f8cce7
| 4,965
|
py
|
Python
|
tests/test_functions.py
|
choiking10/mytorch
|
67140b608b14e2ec6ecca1638705af91d2d71b6b
|
[
"MIT"
] | null | null | null |
tests/test_functions.py
|
choiking10/mytorch
|
67140b608b14e2ec6ecca1638705af91d2d71b6b
|
[
"MIT"
] | null | null | null |
tests/test_functions.py
|
choiking10/mytorch
|
67140b608b14e2ec6ecca1638705af91d2d71b6b
|
[
"MIT"
] | null | null | null |
import unittest
import numpy as np
import mytorch.functions as F
from mytorch import as_variable
from tests.utils import FunctionTestMixin, ForwardAndBackwardCheckMixin
class ExpTest(unittest.TestCase, FunctionTestMixin):
    """Forward, backward, and numerical-gradient checks for ``F.exp``."""
    def get_function(self):
        return F.exp
    def get_forward_input_output(self):
        x = np.array(2.0)
        return x, np.array(np.exp(x))
    def get_backward_input_output(self):
        # d/dx exp(x) == exp(x)
        x = np.array(3.0)
        return x, np.array(np.exp(x))
    def test_numerical_check(self):
        self.numerical_gradient_check(1)
class SinTest(unittest.TestCase, FunctionTestMixin):
    """Forward, backward, and numerical-gradient checks for ``F.sin``."""
    def get_function(self):
        return F.sin
    def get_forward_input_output(self):
        x = np.array(2.0)
        return x, np.sin(x)
    def get_backward_input_output(self):
        # d/dx sin(x) == cos(x)
        x = np.array(3.0)
        return x, np.cos(x)
    def test_numerical_check(self):
        self.numerical_gradient_check(1)
class CosTest(unittest.TestCase, FunctionTestMixin):
    """Forward, backward, and numerical-gradient checks for ``F.cos``."""
    def get_function(self):
        return F.cos
    def get_forward_input_output(self):
        x = np.array(2.0)
        return x, np.cos(x)
    def get_backward_input_output(self):
        # d/dx cos(x) == -sin(x)
        x = np.array(3.0)
        return x, -np.sin(x)
    def test_numerical_check(self):
        self.numerical_gradient_check(1)
class TanhTest(unittest.TestCase, FunctionTestMixin):
    """Forward, backward, and numerical-gradient checks for ``F.tanh``."""
    def get_function(self):
        return F.tanh
    def get_forward_input_output(self):
        x = np.array(2.0)
        return x, np.tanh(x)
    def get_backward_input_output(self):
        # d/dx tanh(x) == 1 - tanh(x)^2
        x = np.array(3.0)
        t = np.tanh(x)
        return x, 1 - t * t
    def test_numerical_check(self):
        self.numerical_gradient_check(1)
class ReshapeTest(unittest.TestCase, FunctionTestMixin):
    """Forward/backward checks for ``F.reshape`` (2x3 -> flat)."""
    # inputs are pre-wrapped tuples; the mixin must not wrap them again
    do_as_variable = False
    def get_function(self):
        return F.reshape
    def get_forward_input_output(self):
        args = as_variable(np.array([[1, 2, 3], [4, 5, 6]])), (6,)
        return args, np.array([1, 2, 3, 4, 5, 6])
    def get_backward_input_output(self):
        args = as_variable(np.array([[1, 2, 3], [4, 5, 6]])), (6,)
        # gradient of reshape is all-ones in the input's shape
        return args, np.array([[1, 1, 1], [1, 1, 1]])
class TransposeTest(unittest.TestCase, FunctionTestMixin):
    """Forward/backward checks for ``F.transpose`` on a 2x3 matrix."""
    def get_function(self):
        return F.transpose
    def get_forward_input_output(self):
        src = np.array([[1, 2, 3], [4, 5, 6]])
        return src, np.array([[1, 4], [2, 5], [3, 6]])
    def get_backward_input_output(self):
        src = np.array([[1, 2, 3], [4, 5, 6]])
        # gradient of transpose is all-ones in the input's shape
        return src, np.array([[1, 1, 1], [1, 1, 1]])
class SumTest(unittest.TestCase, FunctionTestMixin):
    """Forward/backward checks for ``F.sum`` along axis 0 with keepdims."""
    # inputs are pre-wrapped tuples; the mixin must not wrap them again
    do_as_variable = False
    def get_function(self):
        return F.sum
    def get_forward_input_output(self):
        args = as_variable(np.array([[1, 2, 3], [4, 5, 6]])), 0, True
        return args, np.array([[5, 7, 9]])
    def get_backward_input_output(self):
        args = as_variable(np.array([[1, 2, 3], [4, 5, 6]])), 0, True
        # gradient of sum broadcasts ones back to the input's shape
        return args, np.array([[1, 1, 1], [1, 1, 1]])
    def test_numerical_check(self):
        self.numerical_gradient_check((4, 10))
class MatmulTest(unittest.TestCase, FunctionTestMixin):
    """Forward/backward checks for ``F.matmul`` on (2,3) @ (3,2)."""
    def get_function(self):
        return F.matmul
    def get_forward_input_output(self):
        lhs = np.array([[1, 2, 3], [4, 5, 6]])
        rhs = np.array([[1, 4], [2, 5], [3, 6]])
        return (lhs, rhs), np.array([[14, 32], [32, 77]])
    def get_backward_input_output(self):
        lhs = np.array([[1, 2, 3], [4, 5, 6]])
        rhs = np.array([[1, 4], [2, 5], [3, 6]])
        # grad_lhs = ones @ rhs.T, grad_rhs = lhs.T @ ones
        grad_lhs = np.array([[5, 7, 9], [5, 7, 9]])
        grad_rhs = np.array([[5, 5], [7, 7], [9, 9]])
        return (lhs, rhs), (grad_lhs, grad_rhs)
    def test_numerical_check(self):
        self.numerical_gradient_check((3, 4), (4, 2))
class MeanSquaredErrorTest(unittest.TestCase, ForwardAndBackwardCheckMixin):
    """Numerical-gradient check for ``F.mean_squared_error``."""
    def get_function(self):
        return F.mean_squared_error
    def test_numerical_check(self):
        self.numerical_gradient_check((100, 1), (100, 1))
class LinearTest(unittest.TestCase, ForwardAndBackwardCheckMixin):
    """Numerical-gradient checks for ``F.linear`` with and without bias."""
    def get_function(self):
        return F.linear
    def test_numerical_check(self):
        # x (3,4), W (4,2), b (3,2)
        self.numerical_gradient_check((3, 4), (4, 2), (3, 2))
    def test_numerical_check_none(self):
        # bias omitted
        self.numerical_gradient_check((3, 4), (4, 2))
class SigmoidTest(unittest.TestCase, ForwardAndBackwardCheckMixin):
    """Numerical-gradient check for ``F.sigmoid``."""
    def get_function(self):
        return F.sigmoid
    def test_numerical_check(self):
        self.numerical_gradient_check((3, 3))
class SoftmaxTest(unittest.TestCase, ForwardAndBackwardCheckMixin):
    """Numerical-gradient check for ``F.softmax``."""
    def get_function(self):
        return F.softmax
    def test_numerical_check(self):
        self.numerical_gradient_check((1, 10))
| 25.994764
| 76
| 0.607654
| 715
| 4,965
| 4.051748
| 0.11049
| 0.070072
| 0.082844
| 0.088367
| 0.832931
| 0.817052
| 0.817052
| 0.810148
| 0.801864
| 0.568864
| 0
| 0.045479
| 0.255992
| 4,965
| 190
| 77
| 26.131579
| 0.738766
| 0
| 0
| 0.62406
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.293233
| false
| 0
| 0.037594
| 0.090226
| 0.646617
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
abaf2abb8bd6a399991bc83be2b389c123ac5be7
| 122
|
py
|
Python
|
dqc/api/__init__.py
|
Jaikinator/dqc
|
47c964c7d1323a35f4f69521d40476c41843810e
|
[
"Apache-2.0"
] | 39
|
2021-05-31T17:01:23.000Z
|
2022-03-23T19:20:35.000Z
|
dqc/api/__init__.py
|
Jaikinator/dqc
|
47c964c7d1323a35f4f69521d40476c41843810e
|
[
"Apache-2.0"
] | 14
|
2021-09-01T13:39:11.000Z
|
2022-03-13T16:45:39.000Z
|
dqc/api/__init__.py
|
Jaikinator/dqc
|
47c964c7d1323a35f4f69521d40476c41843810e
|
[
"Apache-2.0"
] | 6
|
2021-07-16T09:08:29.000Z
|
2022-02-24T01:13:54.000Z
|
from dqc.api.loadbasis import *
from dqc.api.getxc import *
from dqc.api.properties import *
from dqc.api.parser import *
| 24.4
| 32
| 0.770492
| 20
| 122
| 4.7
| 0.4
| 0.297872
| 0.425532
| 0.510638
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131148
| 122
| 4
| 33
| 30.5
| 0.886792
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e6289f00f51da434ccff305414f458f7ea51cfe4
| 13,987
|
py
|
Python
|
pyDEA/core/models/envelopment_model.py
|
rbotter/pyDEA
|
2c8b4a70e8c071d580eff26a040efc22fc264045
|
[
"MIT"
] | 29
|
2017-10-22T03:03:20.000Z
|
2022-03-21T09:15:22.000Z
|
pyDEA/core/models/envelopment_model.py
|
rbotter/pyDEA
|
2c8b4a70e8c071d580eff26a040efc22fc264045
|
[
"MIT"
] | 6
|
2018-07-18T01:40:43.000Z
|
2021-04-11T00:38:30.000Z
|
pyDEA/core/models/envelopment_model.py
|
rbotter/pyDEA
|
2c8b4a70e8c071d580eff26a040efc22fc264045
|
[
"MIT"
] | 20
|
2018-01-23T05:50:29.000Z
|
2022-02-22T05:04:56.000Z
|
''' This module contains concrete implementations of input- and
output-oriented envelopment models as well as the model with
non-discretionary variables.
'''
import pulp
from pyDEA.core.models.input_output_model_bases import InputOrientedModel
from pyDEA.core.models.input_output_model_bases import OutputOrientedModel
class EnvelopmentModelInputOriented(InputOrientedModel):
    ''' This class defines methods specific to input-oriented envelopment
        model.
        Attributes:
            upper_bound_generator (function): function that
                generates an upper bound on efficiency scores for
                envelopment model, see :mod:`refactored.bound_generators`
        Args:
            upper_bound_generator (function): function that
                generates an upper bound on efficiency scores for
                envelopment
                model, see :mod:`refactored.bound_generators`.
    '''
    def __init__(self, upper_bound_generator):
        self.upper_bound_generator = upper_bound_generator
    def get_upper_bound_for_objective_variable(self):
        ''' Returns a proper upper bound on efficiency score which
            is minimized in the case of input-oriented envelopment model.
            Returns:
                double: upper bound on efficiency score.
        '''
        return self.upper_bound_generator()
    def get_lower_bound_for_objective_variable(self):
        ''' Returns 0 which is the lower bound on efficiency score which
            is minimized in the case of input-oriented envelopment model.
            Returns:
                double: zero.
        '''
        return 0
    def get_objective_type(self):
        ''' Returns pulp.LpMinimize - we minimize objective function in case
            of input-oriented envelopment model.
            Returns:
                pulp.LpMinimize: type of objective function.
        '''
        return pulp.LpMinimize
    def get_output_variable_coefficient(self, obj_variable, output_category):
        ''' Returns 1, since in input-oriented model we do not multiply
            current output by anything.
            Args:
                obj_variable (pulp.LpVariable): pulp variable that corresponds
                    to output category of the current DMU.
                output_category (str): output category for which current
                    constraint is being created.
            Returns:
                double: output variable coefficient.
        '''
        return 1
    def get_input_variable_coefficient(self, obj_variable, input_category):
        ''' Returns obj_variable, since in input-oriented model we multiply
            current input by the efficiency score.
            Args:
                obj_variable (pulp.LpVariable): pulp variable that corresponds
                    to input category of current DMU.
                input_category (str): input category for which current
                    constraint is being created.
            Returns:
                pulp.LpVariable: input variable coefficient.
        '''
        return obj_variable
    def update_output_category_coefficient(self, current_output, constraint,
                                           obj_var, output_category):
        ''' Updates coefficient of a given output category with a new
            value.
            Args:
                current_output (double): new value for the coefficient.
                constraint (pulp.LpConstraint): constraint whose coefficient
                    should be updated.
                obj_var (pulp.LpVariable): variable of the envelopment
                    model that is optimised in the objective function.
                output_category (str): output category name.
        '''
        constraint.changeRHS(current_output)
    def update_input_category_coefficient(self, current_input, constraint,
                                          obj_var, input_category):
        ''' Updates coefficient of a given input category with a new
            value.
            Args:
                current_input (double): new value for the coefficient.
                constraint (pulp.LpConstraint): constraint whose coefficient
                    should be updated.
                obj_var (pulp.LpVariable): variable of the envelopment
                    model that is optimised in the objective function.
                input_category (str): input category name.
        '''
        constraint[obj_var] = current_input
class EnvelopmentModelOutputOriented(OutputOrientedModel):
    ''' This class defines methods specific
        to output-oriented envelopment model.

        Attributes:
            lower_bound_generator (function): function that
                generates a lower bound on inverse of efficiency scores
                for envelopment model,
                see :mod:`refactored.bound_generators`.

        Args:
            lower_bound_generator (function): function that
                generates a lower bound on inverse of efficiency scores
                for envelopment model,
                see :mod:`refactored.bound_generators`.
    '''
    def __init__(self, lower_bound_generator):
        self.lower_bound_generator = lower_bound_generator

    def get_upper_bound_for_objective_variable(self):
        ''' Returns None, since variables of output-oriented envelopment
            model are not bounded from above.

            Returns:
                double: None.
        '''
        return None

    def get_lower_bound_for_objective_variable(self):
        ''' Returns a proper lower bound on the variables corresponding
            to output-oriented envelopment model.

            Returns:
                double: lower bound on variables.
        '''
        return self.lower_bound_generator()

    def get_objective_type(self):
        ''' Returns pulp.LpMaximize - we maximize objective function in
            case of output-oriented envelopment model.

            Returns:
                pulp.LpMaximize: type of objective function.
        '''
        return pulp.LpMaximize

    def get_output_variable_coefficient(self, obj_variable, output_category):
        ''' Returns obj_variable, since in output-oriented model we multiply
            current output by inverse of the efficiency score.

            Args:
                obj_variable (pulp.LpVariable): pulp variable that
                    corresponds to output category of current DMU.
                output_category (str): output category for which current
                    constraint is being created.

            Returns:
                pulp.LpVariable: output variable coefficient.
        '''
        return obj_variable

    def get_input_variable_coefficient(self, obj_variable, input_category):
        ''' Returns 1, since in output-oriented model we do not multiply
            current input by anything.

            Args:
                obj_variable (pulp.LpVariable): pulp variable that
                    corresponds to input category of current DMU.
                input_category (str): input category for which current
                    constraint is being created.

            Returns:
                double: input variable coefficient.
        '''
        return 1

    def update_output_category_coefficient(self, current_output, constraint,
                                           obj_var, output_category):
        ''' Updates coefficient of a given output category with a new
            value. Outputs appear on the left-hand side scaled by the
            objective variable, hence the negated coefficient.

            Args:
                current_output (double): new value for the coefficient.
                constraint (pulp.LpConstraint): constraint whose coefficient
                    should be updated.
                obj_var (pulp.LpVariable): variable of the envelopment
                    model that is optimised in the objective function.
                output_category (str): output category name.
        '''
        constraint[obj_var] = -current_output

    def update_input_category_coefficient(self, current_input, constraint,
                                          obj_var, input_category):
        ''' Updates coefficient of a given input category with a new
            value. Inputs sit (negated) on the right-hand side in the
            output-oriented model.

            Args:
                current_input (double): new value for the coefficient.
                constraint (pulp.LpConstraint): constraint whose coefficient
                    should be updated.
                obj_var (pulp.LpVariable): variable of the envelopment
                    model that is optimised in the objective function.
                input_category (str): input category name.
        '''
        constraint.changeRHS(-current_input)
class EnvelopmentModelInputOrientedWithNonDiscVars(
        EnvelopmentModelInputOriented):
    ''' This class redefines some methods of EnvelopmentModelInputOriented
        in order to take into account non-discretionary variables.

        Note:
            This class does not have a reference
            to InputData object. Hence, it cannot check if supplied
            non-discretionary categories are valid input categories.

        Attributes:
            non_disc_inputs (list of str): list of non-discretionary input
                categories.

        Args:
            non_disc_inputs (list of str): list of non-discretionary input
                categories; must be non-empty.
            upper_bound_generator (function): function that
                generates an upper bound on efficiency scores for
                envelopment model, see :mod:`refactored.bound_generators`.
    '''
    def __init__(self, non_disc_inputs, upper_bound_generator):
        super(EnvelopmentModelInputOrientedWithNonDiscVars, self).__init__(
            upper_bound_generator)
        assert non_disc_inputs
        self.non_disc_inputs = non_disc_inputs

    def get_input_variable_coefficient(self, obj_variable, input_category):
        ''' Returns a proper coefficient depending on whether the variable
            is discretionary or not.

            Args:
                obj_variable (pulp.LpVariable): pulp variable that
                    corresponds to input category of current DMU.
                input_category (str): input category for which current
                    constraint is being created.

            Returns:
                double or pulp.LpVariable: 1 for non-discretionary inputs
                    (they are not scaled by the efficiency score),
                    obj_variable otherwise.
        '''
        if input_category in self.non_disc_inputs:
            return 1
        return obj_variable

    def update_input_category_coefficient(self, current_input, constraint,
                                          obj_var, input_category):
        ''' Updates coefficient of a given input category with a new
            value.

            Args:
                current_input (double): new value for the coefficient.
                constraint (pulp.LpConstraint): constraint whose coefficient
                    should be updated.
                obj_var (pulp.LpVariable): variable of the envelopment
                    model that is optimised in the objective function.
                input_category (str): input category name.
        '''
        # Non-discretionary inputs go (negated) to the RHS; discretionary
        # ones become the coefficient of the objective variable.
        if input_category in self.non_disc_inputs:
            constraint.changeRHS(-current_input)
        else:
            constraint[obj_var] = current_input
class EnvelopmentModelOutputOrientedWithNonDiscVars(
        EnvelopmentModelOutputOriented):
    ''' This class redefines some methods of EnvelopmentModelOutputOriented
        in order to take into account non-discretionary variables.

        Note:
            This class does not have a reference
            to InputData object. Hence, it cannot check if supplied
            non-discretionary categories are valid output categories.

        Attributes:
            non_disc_outputs (list of str): list of non-discretionary output
                categories.

        Args:
            non_disc_outputs (list of str): list of non-discretionary output
                categories; must be non-empty.
            lower_bound_generator (function): function that
                generates a lower bound on inverse of efficiency scores for
                envelopment model, see :mod:`refactored.bound_generators`.
    '''
    def __init__(self, non_disc_outputs, lower_bound_generator):
        assert non_disc_outputs
        super(EnvelopmentModelOutputOrientedWithNonDiscVars, self).__init__(
            lower_bound_generator)
        self.non_disc_outputs = non_disc_outputs

    def get_output_variable_coefficient(self, obj_variable, output_category):
        ''' Returns a proper coefficient depending on whether the variable
            is discretionary or not.

            Args:
                obj_variable (pulp.LpVariable): pulp variable that
                    corresponds to output category of current DMU.
                output_category (str): output category for which current
                    constraint is being created.

            Returns:
                double or pulp.LpVariable: 1 for non-discretionary outputs
                    (they are not scaled by the inverse efficiency score),
                    obj_variable otherwise.
        '''
        if output_category in self.non_disc_outputs:
            return 1
        return obj_variable

    def update_output_category_coefficient(self, current_output, constraint,
                                           obj_var, output_category):
        ''' Updates coefficient of a given output category with a new
            value.

            Args:
                current_output (double): new value for the coefficient.
                constraint (pulp.LpConstraint): constraint whose coefficient
                    should be updated.
                obj_var (pulp.LpVariable): variable of the envelopment
                    model that is optimised in the objective function.
                output_category (str): output category name.
        '''
        # Non-discretionary outputs go to the RHS; discretionary ones are
        # scaled (negated) by the objective variable on the LHS.
        if output_category in self.non_disc_outputs:
            constraint.changeRHS(current_output)
        else:
            constraint[obj_var] = -current_output
| 40.077364
| 78
| 0.621792
| 1,438
| 13,987
| 5.876912
| 0.103616
| 0.051355
| 0.018933
| 0.021299
| 0.836351
| 0.781328
| 0.733286
| 0.707017
| 0.688794
| 0.655899
| 0
| 0.000852
| 0.328448
| 13,987
| 348
| 79
| 40.192529
| 0.898861
| 0.564381
| 0
| 0.613333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026667
| 1
| 0.293333
| false
| 0
| 0.04
| 0
| 0.573333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
e6439c865b23669003da1a00c2c6b9b6ce71a605
| 56,644
|
py
|
Python
|
analysis/thesis.py
|
Harshavardhan-BV/Cancer-compe-strat
|
e4decacd5779e85a68c81d0ce3bedf42dea2964f
|
[
"MIT"
] | 1
|
2020-10-18T15:54:26.000Z
|
2020-10-18T15:54:26.000Z
|
analysis/thesis.py
|
Harshavardhan-BV/Cancer-compe-strat
|
e4decacd5779e85a68c81d0ce3bedf42dea2964f
|
[
"MIT"
] | null | null | null |
analysis/thesis.py
|
Harshavardhan-BV/Cancer-compe-strat
|
e4decacd5779e85a68c81d0ce3bedf42dea2964f
|
[
"MIT"
] | null | null | null |
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D
import seaborn as sns
import common_fn as cf
# Global plotting configuration: deterministic SVG hashing and base font size.
plt.rcParams["svg.hashsalt"]=0
plt.rcParams["font.size"]=11
# f_i(res): illustrate the piecewise-linear resource-response function.
fig,axes=plt.subplots(figsize=(5,3))
x=np.linspace(0,1,1000)  # sampled resource levels in [0, 1]
lim=np.array([0.2,0.8])  # lower ('ll') and upper ('ul') response limits
def f(x,lim):
    """Piecewise-linear ramp: 0 below lim[0], 1 at or above lim[1],
    and linear interpolation in between."""
    lo, hi = lim[0], lim[1]
    if x >= hi:
        return 1
    if x < lo:
        return 0
    return (x - lo) / (hi - lo)
# Evaluate the ramp over the sampled resource levels and save the figure.
y=[f(i,lim) for i in x]
axes.plot(x,y)
axes.set_xticks(lim)
axes.set_xticklabels(['ll','ul'])  # symbolic tick labels for the two limits
axes.set_ylabel('$f_{i}(res)$')
axes.set_xlabel('res')
fig.tight_layout()
fig.savefig('../writing/MSThesis/figures/f_res.pdf')
fig.clf()
plt.close(fig)
# Tp - T- : pairwise competition between Tp and T- cell types.
## testosterone limits: timeseries for two Tp proportions x two upper limits
fig,axes=plt.subplots(2,2,sharex=True,sharey=True,figsize=(10,5))
props=['','0.9Tp-']       # filename prefixes: default vs 90% Tp seeding
rows=['0.5','0.9']
u_lims=['0.5','0.1']
cols=['testosterone limited','not testosterone limited']
path='../raw_output/EnvEq/pairwise/Tneg-Tpro/u_lim_testTpro/'
for i in range(2):
    prop=props[i]
    for j in range(2):
        u_lim=u_lims[j]
        df=pd.read_csv(path+prop+u_lim+'.csv')
        # t is in minutes; /24/60 converts to days
        axes[i,j].plot(df.t/24/60,df.Tpro,color="tab:blue",label='Tp')
        axes[i,j].plot(df.t/24/60,df.Tneg,color="tab:red",label='T-')
pad = 5 # in points
# Column headers on the top row, x-labels on the bottom row.
for ax, ax2, col in zip(axes[0], axes[1], cols):
    ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                xycoords='axes fraction', textcoords='offset points',
                size='large', ha='center', va='baseline')
    ax2.set_xlabel('Time (days)')
# Row headers and y-labels on the left column.
for ax, row in zip(axes[:,0], rows):
    ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                xycoords=ax.yaxis.label, textcoords='offset points',
                size='large', ha='right', va='center')
    ax.set_ylabel('No of Cells')
axes[0,-1].legend(bbox_to_anchor=(1,1), loc="upper left",title='Cell Type')
fig.tight_layout()
fig.savefig('../writing/MSThesis/figures/Tpro-Tneg_testlims.pdf')
fig.clf()
plt.close(fig)
## oxygen limits: timeseries for normal vs poor oxygen production
fig,axes=plt.subplots(1,2,sharex=True,sharey=True,figsize=(10,3))
p_o2_u_lim=['1.10e-01-5.00e-01','6.75e-02-5.00e-01']
cols=['normal production','poor production']
path='../raw_output/EnvEq/pairwise/Tneg-Tpro/p_o2-u_lim_testTpro/Tneg-o2limited_'
for i in range(2):
    po2=p_o2_u_lim[i]
    df=pd.read_csv(path+po2+'.csv')
    axes[i].plot(df.t/24/60,df.Tpro,color="tab:blue",label='Tp')
    axes[i].plot(df.t/24/60,df.Tneg,color="tab:red",label='T-')
    axes[i].set_xlabel('Time (days)')
pad = 5 # in points
for ax, col in zip(axes, cols):
    ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                xycoords='axes fraction', textcoords='offset points',
                size='large', ha='center', va='baseline')
axes[0].set_ylabel('No of Cells')
axes[1].legend(bbox_to_anchor=(1,1), loc="upper left",title='Cell Type')
fig.tight_layout()
fig.savefig('../writing/MSThesis/figures/Tpro-Tneg_o2lims.pdf')
fig.clf()
plt.close(fig)
## Cases: final Tp ratio vs initial Tp seeding for 9 parameter cases each
plt.rcParams["font.size"]=16
### normal o2 production
cases=np.array([[1,4,3],[10,11,12],[6,2,8]])  # case ids laid out as the 3x3 grid
path='../analysed_data/EnvEq/pairwise/Tneg-Tpro/Case-Tp_initratio-Totcell/'
cols=['no','moderate','severe']
rows=['low','high','severe']
fig,axes=plt.subplots(3,3,sharex=True,sharey=True,figsize=(15,8))
for i in range(3):
    for j in range(3):
        df=pd.read_csv(path+'Case{:.1f}-eq_values.csv'.format(cases[i,j]))
        # presumably computes per-type equilibrium ratio columns in place;
        # -0.1 looks like a sentinel for extinct runs — confirm in common_fn
        cf.allcell_eq_ratio(df,-0.1)
        df['Tpro_0']=df['Tp_initratio']*df['Totcell']  # initial Tp cell count
        sns.lineplot(data=df,x='Tpro_0',y='Tpro_ratio',color='tab:blue',style='Totcell',markers=True,ax=axes[i,j],legend=(i==0 and j==2))
        axes[i,j].set_ylim(-0.2,1.1)
pad = 5 # in points
for ax, ax2, col in zip(axes[0], axes[2], cols):
    ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                xycoords='axes fraction', textcoords='offset points',
                size='large', ha='center', va='baseline')
    ax2.set_xlabel('Initial Tp seeding')
for ax, row in zip(axes[:,0], rows):
    ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                xycoords=ax.yaxis.label, textcoords='offset points',
                size='large', ha='right', va='center')
    ax.set_ylabel('Final Tp ratio')
axes[0,-1].legend(bbox_to_anchor=(1,1), loc="upper left",title='Initial Total Seeding')
fig.tight_layout()
fig.savefig('../writing/MSThesis/figures/Tpro-Tneg_cases_normal.pdf')
fig.clf()
plt.close(fig)
### low o2 production: same layout, different case ids
cases=np.array([[13,14,15],[7,5,9],[16,17,18]])
path='../analysed_data/EnvEq/pairwise/Tneg-Tpro/Case-Tp_initratio-Totcell/'
cols=['no','moderate','severe']
rows=['low','high','severe']
fig,axes=plt.subplots(3,3,sharex=True,sharey=True,figsize=(15,8))
for i in range(3):
    for j in range(3):
        df=pd.read_csv(path+'Case{:.1f}-eq_values.csv'.format(cases[i,j]))
        cf.allcell_eq_ratio(df,-0.1)
        df['Tpro_0']=df['Tp_initratio']*df['Totcell']
        sns.lineplot(data=df,x='Tpro_0',y='Tpro_ratio',color='tab:blue',style='Totcell',markers=True,ax=axes[i,j],legend=(i==0 and j==2))
        axes[i,j].set_ylim(-0.2,1.1)
pad = 5 # in points
for ax, ax2, col in zip(axes[0], axes[2], cols):
    ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                xycoords='axes fraction', textcoords='offset points',
                size='large', ha='center', va='baseline')
    ax2.set_xlabel('Initial Tp seeding')
for ax, row in zip(axes[:,0], rows):
    ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                xycoords=ax.yaxis.label, textcoords='offset points',
                size='large', ha='right', va='center')
    ax.set_ylabel('Final Tp ratio')
axes[0,-1].legend(bbox_to_anchor=(1,1), loc="upper left",title='Initial Total Seeding')
fig.tight_layout()
fig.savefig('../writing/MSThesis/figures/Tpro-Tneg_cases_poor.pdf')
fig.clf()
plt.close(fig)
# T+ - Tp : pairwise competition between T+ and Tp cell types.
plt.rcParams["font.size"]=11
## When Tp test limited and T+ test limited
fig,axes=plt.subplots(1,2,sharey=True,figsize=(10,3))
path='../raw_output/EnvEq/pairwise/Tpos-Tpro/u_lim_testTpos-u_lim_testTpro/'
u_lims=['0.3-0.5','0.5-0.3']
cols=['Tp more testosterone limited','T+ more testosterone limited']
for i in range(2):
    u_lim=u_lims[i]
    df=pd.read_csv(path+u_lim+'.csv')
    # t is in minutes; /24/60 converts to days
    axes[i].plot(df.t/24/60,df.Tpos,color="tab:green",label='T+')
    axes[i].plot(df.t/24/60,df.Tpro,color="tab:blue",label='Tp')
    axes[i].set_xlabel('Time (days)')
axes[0].set_ylabel('No of Cells')
pad = 5 # in points (fix: previously relied on the stale `pad` left over from an earlier section)
for ax, col in zip(axes, cols):
    ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                xycoords='axes fraction', textcoords='offset points',
                size='large', ha='center', va='baseline')
axes[1].legend(bbox_to_anchor=(1,1), loc="upper left",title='Cell Type')
fig.tight_layout()
fig.savefig('../writing/MSThesis/figures/Tpos-Tpro_testlims.pdf')
fig.clf()
plt.close(fig)
## When both not o2 limited and T+ more o2 limited
fig,axes=plt.subplots(1,2,sharey=True,figsize=(10,3))
path='../raw_output/EnvEq/pairwise/Tpos-Tpro/l_lim_o2Tpos-l_lim_o2Tpro/'
l_lims=['0.0-0.0','0.6-0.4']
cols=['both not oxygen limited','only T+ severly oxygen limited']
for i in range(2):
    l_lim=l_lims[i]
    df=pd.read_csv(path+l_lim+'.csv')
    # t is in minutes; /24/60 converts to days
    axes[i].plot(df.t/24/60,df.Tpos,color="tab:green",label='T+')
    axes[i].plot(df.t/24/60,df.Tpro,color="tab:blue",label='Tp')
    axes[i].set_xlabel('Time (days)')
axes[0].set_ylabel('No of Cells')
pad = 5 # in points (fix: previously relied on the stale `pad` left over from an earlier section)
for ax, col in zip(axes, cols):
    ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                xycoords='axes fraction', textcoords='offset points',
                size='large', ha='center', va='baseline')
axes[1].legend(bbox_to_anchor=(1,1), loc="upper left",title='Cell Type')
fig.tight_layout()
fig.savefig('../writing/MSThesis/figures/Tpos-Tpro_o2lims.pdf')
fig.clf()
plt.close(fig)
## Cases: final Tp ratio vs initial Tp seeding for T+/Tp competition
plt.rcParams["font.size"]=16
### test limits
cases=np.array([[7,1,8],[2,9,10],[11,12,13]])  # case ids laid out as the 3x3 grid
path='../analysed_data/EnvEq/pairwise/Tpos-Tpro/Case-Tp_initratio-Totcell/'
cols=['no','moderate','severe']
rows=['no','moderate','severe']
fig,axes=plt.subplots(3,3,sharex=True,sharey=True,figsize=(15,8))
for i in range(3):
    for j in range(3):
        df=pd.read_csv(path+'Case{:.1f}-eq_values.csv'.format(cases[i,j]))
        # -0.1 looks like a sentinel value for extinct runs — confirm in common_fn
        cf.allcell_eq_ratio(df,-0.1)
        df['Tpro_0']=df['Tp_initratio']*df['Totcell']  # initial Tp cell count
        sns.lineplot(data=df,x='Tpro_0',y='Tpro_ratio',color='tab:blue',style='Totcell',markers=True,ax=axes[i,j],legend=(i==0 and j==2))
        axes[i,j].set_ylim(-0.2,1.1)
pad = 5 # in points
for ax, ax2, col in zip(axes[0], axes[2], cols):
    ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                xycoords='axes fraction', textcoords='offset points',
                size='large', ha='center', va='baseline')
    ax2.set_xlabel('Initial Tp seeding')
for ax, row in zip(axes[:,0], rows):
    ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                xycoords=ax.yaxis.label, textcoords='offset points',
                size='large', ha='right', va='center')
    ax.set_ylabel('Final Tp ratio')
axes[0,-1].legend(bbox_to_anchor=(1,1), loc="upper left",title='Initial Total Seeding')
fig.tight_layout()
fig.savefig('../writing/MSThesis/figures/Tpos-Tpro_cases_test.pdf')
fig.clf()
plt.close(fig)
### oxygen limits
cases=np.array([[7,14,15],[16,17,18],[19,20,21]])
path='../analysed_data/EnvEq/pairwise/Tpos-Tpro/Case-Tp_initratio-Totcell/'
cols=['low','moderate','severe']
rows=['low','moderate','severe']
fig,axes=plt.subplots(3,3,sharex=True,sharey=True,figsize=(15,8))
for i in range(3):
    for j in range(3):
        df=pd.read_csv(path+'Case{:.1f}-eq_values.csv'.format(cases[i,j]))
        cf.allcell_eq_ratio(df,-0.1)
        df['Tpro_0']=df['Tp_initratio']*df['Totcell']
        sns.lineplot(data=df,x='Tpro_0',y='Tpro_ratio',color='tab:blue',style='Totcell',markers=True,ax=axes[i,j],legend=(i==0 and j==2))
        axes[i,j].set_ylim(-0.2,1.1)
pad = 5 # in points
for ax, ax2, col in zip(axes[0], axes[2], cols):
    ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                xycoords='axes fraction', textcoords='offset points',
                size='large', ha='center', va='baseline')
    ax2.set_xlabel('Initial Tp seeding')
for ax, row in zip(axes[:,0], rows):
    ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                xycoords=ax.yaxis.label, textcoords='offset points',
                size='large', ha='right', va='center')
    ax.set_ylabel('Final Tp ratio')
axes[0,-1].legend(bbox_to_anchor=(1,1), loc="upper left",title='Initial Total Seeding')
fig.tight_layout()
fig.savefig('../writing/MSThesis/figures/Tpos-Tpro_cases_o2.pdf')
fig.clf()
plt.close(fig)
### combination: 4x4 grid of combined test/oxygen limitation severities
plt.rcParams["font.size"]=19
cases=np.array([[7,1,2,9],[14,22,23,24],[16,25,26,27],[17,28,29,30]])
path='../analysed_data/EnvEq/pairwise/Tpos-Tpro/Case-Tp_initratio-Totcell/'
cols=['no,no','no,moderate','moderate,no','moderate,moderate']
rows=['low,low','low,moderate','moderate,low','moderate,moderate']
fig,axes=plt.subplots(4,4,sharex=True,sharey=True,figsize=(20,11))
for i in range(4):
    for j in range(4):
        df=pd.read_csv(path+'Case{:.1f}-eq_values.csv'.format(cases[i,j]))
        cf.allcell_eq_ratio(df,-0.1)
        df['Tpro_0']=df['Tp_initratio']*df['Totcell']
        sns.lineplot(data=df,x='Tpro_0',y='Tpro_ratio',color='tab:blue',style='Totcell',markers=True,ax=axes[i,j],legend=(i==0 and j==3))
        axes[i,j].set_ylim(-0.2,1.1)
pad = 5 # in points
for ax, ax2, col in zip(axes[0], axes[3], cols):
    ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                xycoords='axes fraction', textcoords='offset points',
                size='large', ha='center', va='baseline')
    ax2.set_xlabel('Initial Tp seeding')
for ax, row in zip(axes[:,0], rows):
    ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                xycoords=ax.yaxis.label, textcoords='offset points',
                size='large', ha='right', va='center')
    ax.set_ylabel('Final Tp ratio')
axes[0,-1].legend(bbox_to_anchor=(1,1), loc="upper left",title='Initial Total Seeding')
fig.tight_layout()
fig.savefig('../writing/MSThesis/figures/Tpos-Tpro_cases.pdf')
fig.clf()
plt.close(fig)
# All3 : three-way competition (Tp, T+, T-), stacked final-ratio bar charts.
## Efficiency
plt.rcParams["font.size"]=16
### proportions 1:1:1 Tp:T+:T-
row_effs=['HE','LE']            # testosterone efficiency levels (rows)
rows=['no','moderate']
col_effs=['HE','Null','LE']     # oxygen efficiency levels (columns)
cols=['no','low','moderate']
path='../analysed_data/EnvEq/All3/efficiency/'
df1=pd.read_csv(path+'Case-eq_values.csv')
cf.allcell_eq_ratio(df1,-0.1)
df1['TotCell']=df1['TotCell'].astype(str)  # categorical x-axis for bar plot
colors=['tab:green','tab:blue','tab:red']
labels=['T+','Tp','T-']
fig,axes=plt.subplots(2,3,sharex=True,sharey=True,figsize=(15,8))
for i in range(2):
    testeff='test_'+row_effs[i]
    for j in range(3):
        o2eff='o2_'+col_effs[j]
        df=df1[(df1['Test_Eff']==testeff) & (df1['O2_Eff']==o2eff)]
        df.plot.bar(x='TotCell',y=['Tpos_ratio','Tpro_ratio','Tneg_ratio'],color=colors,stacked=True,ax=axes[i,j],legend=None)
pad = 5 # in points
for ax, ax2, col in zip(axes[0], axes[1], cols):
    ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                xycoords='axes fraction', textcoords='offset points',
                size='large', ha='center', va='baseline')
    ax2.set_xlabel('Initial Total seeding')
for ax, row in zip(axes[:,0], rows):
    ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                xycoords=ax.yaxis.label, textcoords='offset points',
                size='large', ha='right', va='center')
    ax.set_ylabel('Final ratio')
axes[0,-1].legend(labels,bbox_to_anchor=(1,1), loc="upper left",title='Cell Type')
fig.tight_layout()
fig.savefig('../writing/MSThesis/figures/All3_efficiency_1:1:1.pdf')
fig.clf()
plt.close(fig)
### proportions 8:1:1 Tp:T+:T-
row_effs=['HE','LE']
rows=['no','moderate']
col_effs=['HE','Null','LE']
cols=['no','low','moderate']
path='../analysed_data/EnvEq/All3/efficiency/'
df1=pd.read_csv(path+'0.8Tp-Case-eq_values.csv')
cf.allcell_eq_ratio(df1,-0.1)
# NOTE(review): unlike the 1:1:1 branch above, TotCell is NOT cast to str
# here — possibly an oversight; confirm whether the bar x-axis differs.
colors=['tab:green','tab:blue','tab:red']
labels=['T+','Tp','T-']
fig,axes=plt.subplots(2,3,sharex=True,sharey=True,figsize=(15,8))
for i in range(2):
    testeff='test_'+row_effs[i]
    for j in range(3):
        o2eff='o2_'+col_effs[j]
        df=df1[(df1['Test_Eff']==testeff) & (df1['O2_Eff']==o2eff)]
        df.plot.bar(x='TotCell',y=['Tpos_ratio','Tpro_ratio','Tneg_ratio'],color=colors,stacked=True,ax=axes[i,j],legend=None)
pad = 5 # in points
for ax, ax2, col in zip(axes[0], axes[1], cols):
    ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                xycoords='axes fraction', textcoords='offset points',
                size='large', ha='center', va='baseline')
    ax2.set_xlabel('Initial Total seeding')
for ax, row in zip(axes[:,0], rows):
    ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                xycoords=ax.yaxis.label, textcoords='offset points',
                size='large', ha='right', va='center')
    ax.set_ylabel('Final ratio')
axes[0,-1].legend(labels,bbox_to_anchor=(1,1), loc="upper left",title='Cell Type')
fig.tight_layout()
fig.savefig('../writing/MSThesis/figures/All3_efficiency_8:1:1.pdf')
fig.clf()
plt.close(fig)
### timeseries, proportions 1:1:1 Tp:T+:T-, totalcell=1000
# Each of the six nearly identical sections below plots cell-count
# timeseries (T+, Tp, T-, Total) over a 2x3 grid of testosterone (rows)
# and oxygen (columns) efficiency levels, varying only seeding proportion
# (1:1:1 vs 8:1:1 filename prefix '0.8Tp-') and total cell count.
row_effs=['HE','LE']
rows=['no','moderate']
col_effs=['HE','Null','LE']
cols=['no','low','moderate']
path='../raw_output/EnvEq/All3/efficiency/'
fig,axes=plt.subplots(2,3,sharex=True,sharey=True,figsize=(15,8))
for i in range(2):
    testeff='test_'+row_effs[i]
    for j in range(3):
        o2eff='o2_'+col_effs[j]
        df=pd.read_csv(path+'Case-'+o2eff+'-'+testeff+'-1000.csv')
        axes[i,j].plot(df.t/24/60,df.Tpos,color="tab:green",label='T+')
        axes[i,j].plot(df.t/24/60,df.Tpro,color="tab:blue",label='Tp')
        axes[i,j].plot(df.t/24/60,df.Tneg,color="tab:red",label='T-')
        axes[i,j].plot(df.t/24/60,df.Tpos+df.Tpro+df.Tneg,color="tab:grey",label='Total')
pad = 5 # in points
for ax, ax2, col in zip(axes[0], axes[1], cols):
    ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                xycoords='axes fraction', textcoords='offset points',
                size='large', ha='center', va='baseline')
    ax2.set_xlabel('Time (days)')
for ax, row in zip(axes[:,0], rows):
    ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                xycoords=ax.yaxis.label, textcoords='offset points',
                size='large', ha='right', va='center')
    ax.set_ylabel('No of Cells')
axes[0,-1].legend(bbox_to_anchor=(1,1), loc="upper left",title='Cell Type')
fig.tight_layout()
fig.savefig('../writing/MSThesis/figures/All3_efficiency_1:1:1-1000.pdf')
fig.clf()
plt.close(fig)
### timeseries, proportions 1:1:1 Tp:T+:T-, totalcell=2000
row_effs=['HE','LE']
rows=['no','moderate']
col_effs=['HE','Null','LE']
cols=['no','low','moderate']
path='../raw_output/EnvEq/All3/efficiency/'
fig,axes=plt.subplots(2,3,sharex=True,sharey=True,figsize=(15,8))
for i in range(2):
    testeff='test_'+row_effs[i]
    for j in range(3):
        o2eff='o2_'+col_effs[j]
        df=pd.read_csv(path+'Case-'+o2eff+'-'+testeff+'-2000.csv')
        axes[i,j].plot(df.t/24/60,df.Tpos,color="tab:green",label='T+')
        axes[i,j].plot(df.t/24/60,df.Tpro,color="tab:blue",label='Tp')
        axes[i,j].plot(df.t/24/60,df.Tneg,color="tab:red",label='T-')
        axes[i,j].plot(df.t/24/60,df.Tpos+df.Tpro+df.Tneg,color="tab:grey",label='Total')
pad = 5 # in points
for ax, ax2, col in zip(axes[0], axes[1], cols):
    ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                xycoords='axes fraction', textcoords='offset points',
                size='large', ha='center', va='baseline')
    ax2.set_xlabel('Time (days)')
for ax, row in zip(axes[:,0], rows):
    ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                xycoords=ax.yaxis.label, textcoords='offset points',
                size='large', ha='right', va='center')
    ax.set_ylabel('No of Cells')
axes[0,-1].legend(bbox_to_anchor=(1,1), loc="upper left",title='Cell Type')
fig.tight_layout()
fig.savefig('../writing/MSThesis/figures/All3_efficiency_1:1:1-2000.pdf')
fig.clf()
plt.close(fig)
### timeseries, proportions 1:1:1 Tp:T+:T-, totalcell=4000
row_effs=['HE','LE']
rows=['no','moderate']
col_effs=['HE','Null','LE']
cols=['no','low','moderate']
path='../raw_output/EnvEq/All3/efficiency/'
fig,axes=plt.subplots(2,3,sharex=True,sharey=True,figsize=(15,8))
for i in range(2):
    testeff='test_'+row_effs[i]
    for j in range(3):
        o2eff='o2_'+col_effs[j]
        df=pd.read_csv(path+'Case-'+o2eff+'-'+testeff+'-4000.csv')
        axes[i,j].plot(df.t/24/60,df.Tpos,color="tab:green",label='T+')
        axes[i,j].plot(df.t/24/60,df.Tpro,color="tab:blue",label='Tp')
        axes[i,j].plot(df.t/24/60,df.Tneg,color="tab:red",label='T-')
        axes[i,j].plot(df.t/24/60,df.Tpos+df.Tpro+df.Tneg,color="tab:grey",label='Total')
pad = 5 # in points
for ax, ax2, col in zip(axes[0], axes[1], cols):
    ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                xycoords='axes fraction', textcoords='offset points',
                size='large', ha='center', va='baseline')
    ax2.set_xlabel('Time (days)')
for ax, row in zip(axes[:,0], rows):
    ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                xycoords=ax.yaxis.label, textcoords='offset points',
                size='large', ha='right', va='center')
    ax.set_ylabel('No of Cells')
axes[0,-1].legend(bbox_to_anchor=(1,1), loc="upper left",title='Cell Type')
fig.tight_layout()
fig.savefig('../writing/MSThesis/figures/All3_efficiency_1:1:1-4000.pdf')
fig.clf()
plt.close(fig)
### timeseries, proportions 8:1:1 Tp:T+:T-, totalcell=1000
row_effs=['HE','LE']
rows=['no','moderate']
col_effs=['HE','Null','LE']
cols=['no','low','moderate']
path='../raw_output/EnvEq/All3/efficiency/'
fig,axes=plt.subplots(2,3,sharex=True,sharey=True,figsize=(15,8))
for i in range(2):
    testeff='test_'+row_effs[i]
    for j in range(3):
        o2eff='o2_'+col_effs[j]
        df=pd.read_csv(path+'0.8Tp-Case-'+o2eff+'-'+testeff+'-1000.csv')
        axes[i,j].plot(df.t/24/60,df.Tpos,color="tab:green",label='T+')
        axes[i,j].plot(df.t/24/60,df.Tpro,color="tab:blue",label='Tp')
        axes[i,j].plot(df.t/24/60,df.Tneg,color="tab:red",label='T-')
        axes[i,j].plot(df.t/24/60,df.Tpos+df.Tpro+df.Tneg,color="tab:grey",label='Total')
pad = 5 # in points
for ax, ax2, col in zip(axes[0], axes[1], cols):
    ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                xycoords='axes fraction', textcoords='offset points',
                size='large', ha='center', va='baseline')
    ax2.set_xlabel('Time (days)')
for ax, row in zip(axes[:,0], rows):
    ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                xycoords=ax.yaxis.label, textcoords='offset points',
                size='large', ha='right', va='center')
    ax.set_ylabel('No of Cells')
axes[0,-1].legend(bbox_to_anchor=(1,1), loc="upper left",title='Cell Type')
fig.tight_layout()
fig.savefig('../writing/MSThesis/figures/All3_efficiency_8:1:1-1000.pdf')
fig.clf()
plt.close(fig)
### timeseries, proportions 8:1:1 Tp:T+:T-, totalcell=2000
row_effs=['HE','LE']
rows=['no','moderate']
col_effs=['HE','Null','LE']
cols=['no','low','moderate']
path='../raw_output/EnvEq/All3/efficiency/'
fig,axes=plt.subplots(2,3,sharex=True,sharey=True,figsize=(15,8))
for i in range(2):
    testeff='test_'+row_effs[i]
    for j in range(3):
        o2eff='o2_'+col_effs[j]
        df=pd.read_csv(path+'0.8Tp-Case-'+o2eff+'-'+testeff+'-2000.csv')
        axes[i,j].plot(df.t/24/60,df.Tpos,color="tab:green",label='T+')
        axes[i,j].plot(df.t/24/60,df.Tpro,color="tab:blue",label='Tp')
        axes[i,j].plot(df.t/24/60,df.Tneg,color="tab:red",label='T-')
        axes[i,j].plot(df.t/24/60,df.Tpos+df.Tpro+df.Tneg,color="tab:grey",label='Total')
pad = 5 # in points
for ax, ax2, col in zip(axes[0], axes[1], cols):
    ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                xycoords='axes fraction', textcoords='offset points',
                size='large', ha='center', va='baseline')
    ax2.set_xlabel('Time (days)')
for ax, row in zip(axes[:,0], rows):
    ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                xycoords=ax.yaxis.label, textcoords='offset points',
                size='large', ha='right', va='center')
    ax.set_ylabel('No of Cells')
axes[0,-1].legend(bbox_to_anchor=(1,1), loc="upper left",title='Cell Type')
fig.tight_layout()
fig.savefig('../writing/MSThesis/figures/All3_efficiency_8:1:1-2000.pdf')
fig.clf()
plt.close(fig)
### timeseries, proportions 8:1:1 Tp:T+:T-, totalcell=4000
row_effs=['HE','LE']
rows=['no','moderate']
col_effs=['HE','Null','LE']
cols=['no','low','moderate']
path='../raw_output/EnvEq/All3/efficiency/'
fig,axes=plt.subplots(2,3,sharex=True,sharey=True,figsize=(15,8))
for i in range(2):
    testeff='test_'+row_effs[i]
    for j in range(3):
        o2eff='o2_'+col_effs[j]
        df=pd.read_csv(path+'0.8Tp-Case-'+o2eff+'-'+testeff+'-4000.csv')
        axes[i,j].plot(df.t/24/60,df.Tpos,color="tab:green",label='T+')
        axes[i,j].plot(df.t/24/60,df.Tpro,color="tab:blue",label='Tp')
        axes[i,j].plot(df.t/24/60,df.Tneg,color="tab:red",label='T-')
        axes[i,j].plot(df.t/24/60,df.Tpos+df.Tpro+df.Tneg,color="tab:grey",label='Total')
pad = 5 # in points
for ax, ax2, col in zip(axes[0], axes[1], cols):
    ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                xycoords='axes fraction', textcoords='offset points',
                size='large', ha='center', va='baseline')
    ax2.set_xlabel('Time (days)')
for ax, row in zip(axes[:,0], rows):
    ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                xycoords=ax.yaxis.label, textcoords='offset points',
                size='large', ha='right', va='center')
    ax.set_ylabel('No of Cells')
axes[0,-1].legend(bbox_to_anchor=(1,1), loc="upper left",title='Cell Type')
fig.tight_layout()
fig.savefig('../writing/MSThesis/figures/All3_efficiency_8:1:1-4000.pdf')
fig.clf()
plt.close(fig)
## Efficiency-mixed : per-cell-type oxygen efficiencies varied independently.
### proportions 1:1:1 Tp:T+:T-
Tpos_effs=['Null','Null','LE']   # T+ oxygen efficiency per grid row
Tpro_effs=['Null','LE','Null']   # Tp oxygen efficiency per grid row
rows=['low,low','low,moderate','moderate,low']
Tneg_effs=['HE','Null','LE']     # T- oxygen efficiency per grid column
cols=['no','low','moderate']
path='../analysed_data/EnvEq/All3/efficiency-mixed/'
df1=pd.read_csv(path+'Case-eq_values.csv')
cf.allcell_eq_ratio(df1,-0.1)
colors=['tab:green','tab:blue','tab:red']
labels=['T+','Tp','T-']
fig,axes=plt.subplots(3,3,sharex=True,sharey=True,figsize=(15,8))
for i in range(3):
    Tposo2eff='Tpos_o2_'+Tpos_effs[i]
    Tproo2eff='Tpro_o2_'+Tpro_effs[i]
    for j in range(3):
        Tnego2eff='Tneg_o2_'+Tneg_effs[j]
        df=df1[(df1['Tpos_o2_Eff']==Tposo2eff) & (df1['Tpro_o2_Eff']==Tproo2eff) & (df1['Tneg_o2_Eff']==Tnego2eff)]
        df.plot.bar(x='TotCell',y=['Tpos_ratio','Tpro_ratio','Tneg_ratio'],color=colors,stacked=True,ax=axes[i,j],legend=None)
pad = 5 # in points
for ax, ax2, col in zip(axes[0], axes[-1], cols):
    ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                xycoords='axes fraction', textcoords='offset points',
                size='large', ha='center', va='baseline')
    ax2.set_xlabel('Initial Total seeding')
for ax, row in zip(axes[:,0], rows):
    ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                xycoords=ax.yaxis.label, textcoords='offset points',
                size='large', ha='right', va='center')
    ax.set_ylabel('Final ratio')
axes[0,-1].legend(labels,bbox_to_anchor=(1,1), loc="upper left",title='Cell Type')
fig.tight_layout()
fig.savefig('../writing/MSThesis/figures/All3_efficiency-mixed_1:1:1.pdf')
fig.clf()
plt.close(fig)
### proportions 8:1:1 Tp:T+:T-
Tpos_effs=['Null','Null','LE']
Tpro_effs=['Null','LE','Null']
rows=['low,low','low,moderate','moderate,low']
Tneg_effs=['HE','Null','LE']
cols=['no','low','moderate']
path='../analysed_data/EnvEq/All3/efficiency-mixed/'
df1=pd.read_csv(path+'0.8Tp-Case-eq_values.csv')
cf.allcell_eq_ratio(df1,-0.1)
colors=['tab:green','tab:blue','tab:red']
labels=['T+','Tp','T-']
fig,axes=plt.subplots(3,3,sharex=True,sharey=True,figsize=(15,8))
for i in range(3):
    Tposo2eff='Tpos_o2_'+Tpos_effs[i]
    Tproo2eff='Tpro_o2_'+Tpro_effs[i]
    for j in range(3):
        Tnego2eff='Tneg_o2_'+Tneg_effs[j]
        df=df1[(df1['Tpos_o2_Eff']==Tposo2eff) & (df1['Tpro_o2_Eff']==Tproo2eff) & (df1['Tneg_o2_Eff']==Tnego2eff)]
        df.plot.bar(x='TotCell',y=['Tpos_ratio','Tpro_ratio','Tneg_ratio'],color=colors,stacked=True,ax=axes[i,j],legend=None)
pad = 5 # in points
for ax, ax2, col in zip(axes[0], axes[-1], cols):
    ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                xycoords='axes fraction', textcoords='offset points',
                size='large', ha='center', va='baseline')
    ax2.set_xlabel('Initial Total seeding')
for ax, row in zip(axes[:,0], rows):
    ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                xycoords=ax.yaxis.label, textcoords='offset points',
                size='large', ha='right', va='center')
    ax.set_ylabel('Final ratio')
axes[0,-1].legend(labels,bbox_to_anchor=(1,1), loc="upper left",title='Cell Type')
fig.tight_layout()
fig.savefig('../writing/MSThesis/figures/All3_efficiency-mixed_8:1:1.pdf')
fig.clf()
plt.close(fig)
## Therapy-standardization
plt.rcParams["font.size"]=19

def _therapy_std_grid(limits, csv_prefix, out_pdf):
    """Plot cell-count time courses for the therapy-standardization runs in a
    2x4 grid (rows: 1:1:1 vs 8:1:1 seeding; cols: one threshold window each)
    and save the figure as a PDF.

    limits:     threshold-window strings, one per column (also used as the
                column titles, matching the original script).
    csv_prefix: run-file prefix inside the threshold folder ('AT_nn-' for the
                Tp/T+-only threshold, 'AT-' for all three cell types).
    out_pdf:    destination path of the saved figure.
    """
    props=['','0.8Tp-']
    rows=['1:1:1','8:1:1']
    cols=limits  # column headers are the window strings themselves
    path='../raw_output/EnvEq/All3/therapy-abi-threshold/'
    fig,axes=plt.subplots(2,4,sharex=True,sharey=True,figsize=(20,8))
    for i in range(2):
        prop=props[i]
        for j in range(4):
            limit=limits[j]
            df=pd.read_csv(path+prop+csv_prefix+limit+'.csv')
            axes[i,j].plot(df.t/24/60,df.Tpos,color="tab:green",label='T+')
            axes[i,j].plot(df.t/24/60,df.Tpro,color="tab:blue",label='Tp')
            axes[i,j].plot(df.t/24/60,df.Tneg,color="tab:red",label='T-')
            axes[i,j].plot(df.t/24/60,df.Tpos+df.Tpro+df.Tneg,color="tab:grey",label='Total')
            # Mark therapy-on intervals as a thick flat bar at y=-200 (NaN
            # elsewhere, so only the active stretches are drawn).
            df['abi_itherapy']=np.where((df['abi_therapy'])|(df['abi_therapy'].shift(1)),-200,np.nan)
            axes[i,j].plot(df.t/24/60,df.abi_itherapy,color="tab:purple",linewidth=5,label='ABI')
    pad = 5  # annotation offset, in points
    for ax, ax2, col in zip(axes[0], axes[1], cols):
        ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                    xycoords='axes fraction', textcoords='offset points',
                    size='large', ha='center', va='baseline')
        ax2.set_xlabel('Time (days)')
    for ax, row in zip(axes[:,0], rows):
        ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                    xycoords=ax.yaxis.label, textcoords='offset points',
                    size='large', ha='right', va='center')
        ax.set_ylabel('No of Cells')
    # `ax` is the last left-column axes from the loop above; split its legend
    # entries between the cell-type and therapy legends.
    handles, labels = ax.get_legend_handles_labels()
    axes[0,-1].legend(handles=handles[0:4], bbox_to_anchor=(1,1), loc="upper left",title='Cell Type')
    axes[1,-1].legend(handles=handles[4:], bbox_to_anchor=(1,1), loc="upper left",title='Therapy')
    fig.tight_layout()
    fig.savefig(out_pdf)
    fig.clf()
    plt.close(fig)

### Only Tp and T+ considered for threshold
#### 2000 window
_therapy_std_grid(['2000-1000','4000-2000','6000-4000','8000-6000'], 'AT_nn-',
                  '../writing/MSThesis/figures/All3_therapy-standardization.pdf')
#### 500 window
_therapy_std_grid(['2000-1500','4000-3500','6000-5500','8000-7500'], 'AT_nn-',
                  '../writing/MSThesis/figures/All3_therapy-standardization-sw.pdf')
### All 3 celltypes considered for threshold
#### 2000 window
_therapy_std_grid(['2000-1000','4000-2000','6000-4000','8000-6000'], 'AT-',
                  '../writing/MSThesis/figures/All3_therapy-standardization-total.pdf')
#### 500 window
_therapy_std_grid(['2000-1500','4000-3500','6000-5500','8000-7500'], 'AT-',
                  '../writing/MSThesis/figures/All3_therapy-standardization-total-sw.pdf')
## Therapy-SOC
plt.rcParams["font.size"]=16

def _therapy_soc_grid(csv_name, out_pdf):
    """Render a 2x3 grid of stacked final-ratio bar charts for the SOC-therapy
    runs (rows: test-factor efficiency, cols: oxygen efficiency) and save it.

    csv_name: eq-values CSV inside each <o2eff>-<testeff>/ subfolder.
    out_pdf:  destination path of the saved figure.
    """
    row_effs=['HE','LE']
    rows=['no','moderate']
    col_effs=['HE','Null','LE']
    cols=['no','low','moderate']
    path='../analysed_data/EnvEq/All3/therapy-abi-SOC/'
    colors=['tab:green','tab:blue','tab:red']
    labels=['T+','Tp','T-']
    fig,axes=plt.subplots(2,3,sharex=True,sharey=True,figsize=(15,8))
    for i in range(2):
        testeff='test_'+row_effs[i]
        for j in range(3):
            o2eff='o2_'+col_effs[j]
            df=pd.read_csv(path+o2eff+'-'+testeff+'/'+csv_name)
            cf.allcell_eq_ratio(df,-0.1)  # adds the *_ratio columns plotted below
            # NOTE(review): 'Totcell' (lowercase c) differs from the 'TotCell'
            # casing used by the efficiency-mixed sections — presumably these
            # CSVs really do use this casing; confirm against the data files.
            df.plot.bar(x='Totcell',y=['Tpos_ratio','Tpro_ratio','Tneg_ratio'],color=colors,stacked=True,ax=axes[i,j],legend=None)
    pad = 5  # annotation offset, in points
    for ax, ax2, col in zip(axes[0], axes[1], cols):
        ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                    xycoords='axes fraction', textcoords='offset points',
                    size='large', ha='center', va='baseline')
        ax2.set_xlabel('Initial Total seeding')
    for ax, row in zip(axes[:,0], rows):
        ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                    xycoords=ax.yaxis.label, textcoords='offset points',
                    size='large', ha='right', va='center')
        ax.set_ylabel('Final ratio')
    axes[0,-1].legend(labels,bbox_to_anchor=(1,1), loc="upper left",title='Cell Type')
    fig.tight_layout()
    fig.savefig(out_pdf)
    fig.clf()
    plt.close(fig)

### proportions 1:1:1 Tp:T+:T-
_therapy_soc_grid('eq_values.csv',
                  '../writing/MSThesis/figures/All3_therapy-SOC_1:1:1.pdf')
### proportions 8:1:1 Tp:T+:T- - Total; 2000
_therapy_soc_grid('0.8Tp-eq_values.csv',
                  '../writing/MSThesis/figures/All3_therapy-SOC_8:1:1.pdf')
## Therapy-abi
def _therapy_abi_grid(csv_name, out_pdf):
    """Plot cell-count time courses under adaptive ABI therapy in a 2x3 grid
    (rows: test-factor efficiency, cols: oxygen efficiency) and save a PDF.

    csv_name: run file inside each <o2eff>-<testeff>/ subfolder of the
              therapy-abi-w-delay raw-output directory.
    out_pdf:  destination path of the saved figure.
    """
    row_effs=['HE','LE']
    rows=['no','moderate']
    col_effs=['HE','Null','LE']
    cols=['no','low','moderate']
    path='../raw_output/EnvEq/All3/therapy-abi-w-delay/'
    fig,axes=plt.subplots(2,3,sharex=True,sharey=True,figsize=(15,8))
    for i in range(2):
        testeff='test_'+row_effs[i]
        for j in range(3):
            o2eff='o2_'+col_effs[j]
            df=pd.read_csv(path+o2eff+'-'+testeff+'/'+csv_name)
            axes[i,j].plot(df.t/24/60,df.Tpos,color="tab:green",label='T+')
            axes[i,j].plot(df.t/24/60,df.Tpro,color="tab:blue",label='Tp')
            axes[i,j].plot(df.t/24/60,df.Tneg,color="tab:red",label='T-')
            axes[i,j].plot(df.t/24/60,df.Tpos+df.Tpro+df.Tneg,color="tab:grey",label='Total')
            # Mark therapy-on intervals as a thick flat bar at y=-200 (NaN
            # elsewhere, so only the active stretches are drawn).
            df['abi_itherapy']=np.where((df['abi_therapy'])|(df['abi_therapy'].shift(1)),-200,np.nan)
            axes[i,j].plot(df.t/24/60,df.abi_itherapy,color="tab:purple",linewidth=5,label='ABI')
    pad = 5  # annotation offset, in points
    for ax, ax2, col in zip(axes[0], axes[1], cols):
        ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                    xycoords='axes fraction', textcoords='offset points',
                    size='large', ha='center', va='baseline')
        ax2.set_xlabel('Time (days)')
    for ax, row in zip(axes[:,0], rows):
        ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                    xycoords=ax.yaxis.label, textcoords='offset points',
                    size='large', ha='right', va='center')
        ax.set_ylabel('No of Cells')
    # `ax` is the last left-column axes from the loop above; split its legend
    # entries between the cell-type and therapy legends.
    handles, labels = ax.get_legend_handles_labels()
    axes[0,-1].legend(handles=handles[0:4], bbox_to_anchor=(1,1), loc="upper left",title='Cell Type')
    axes[1,-1].legend(handles=handles[4:], bbox_to_anchor=(1,1), loc="upper left",title='Therapy')
    fig.tight_layout()
    fig.savefig(out_pdf)
    fig.clf()
    plt.close(fig)

### No delay
#### proportions 1:1:1 Tp:T+:T- - total cell 2000
_therapy_abi_grid('AT_nn-0-2000.csv',
                  '../writing/MSThesis/figures/All3_therapy_1:1:1-2000.pdf')
#### proportions 8:1:1 Tp:T+:T- - total cell 2000
_therapy_abi_grid('0.8Tp-AT_nn-0-2000.csv',
                  '../writing/MSThesis/figures/All3_therapy_8:1:1-2000.pdf')
#### proportions 1:1:1 Tp:T+:T- - total cell 1000
_therapy_abi_grid('AT_nn-0-1000.csv',
                  '../writing/MSThesis/figures/All3_therapy_1:1:1-1000.pdf')
#### proportions 8:1:1 Tp:T+:T- - total cell 1000
_therapy_abi_grid('0.8Tp-AT_nn-0-1000.csv',
                  '../writing/MSThesis/figures/All3_therapy_8:1:1-1000.pdf')
#### proportions 1:1:1 Tp:T+:T- - total cell 4000
_therapy_abi_grid('AT_nn-0-4000.csv',
                  '../writing/MSThesis/figures/All3_therapy_1:1:1-4000.pdf')
#### proportions 8:1:1 Tp:T+:T- - total cell 4000
_therapy_abi_grid('0.8Tp-AT_nn-0-4000.csv',
                  '../writing/MSThesis/figures/All3_therapy_8:1:1-4000.pdf')
### 200day delay
#### proportions 1:1:1 Tp:T+:T- - total cell 2000
_therapy_abi_grid('AT_nn-200-2000.csv',
                  '../writing/MSThesis/figures/All3_therapy_200day_1:1:1.pdf')
#### proportions 8:1:1 Tp:T+:T- - total cell 2000
_therapy_abi_grid('0.8Tp-AT_nn-200-2000.csv',
                  '../writing/MSThesis/figures/All3_therapy_200day_8:1:1.pdf')
## Therapy-combination
def _therapy_combi_grid(csv_name, out_pdf):
    """Plot cell-count time courses under combined ABI+DTX therapy in a 2x3
    grid (rows: test-factor efficiency, cols: oxygen efficiency) and save a PDF.

    csv_name: run file inside each <o2eff>-<testeff>/ subfolder of the
              therapy-w-delay raw-output directory.
    out_pdf:  destination path of the saved figure.
    """
    row_effs=['HE','LE']
    rows=['no','moderate']
    col_effs=['HE','Null','LE']
    cols=['no','low','moderate']
    path='../raw_output/EnvEq/All3/therapy-w-delay/'
    fig,axes=plt.subplots(2,3,sharex=True,sharey=True,figsize=(15,8))
    for i in range(2):
        testeff='test_'+row_effs[i]
        for j in range(3):
            o2eff='o2_'+col_effs[j]
            df=pd.read_csv(path+o2eff+'-'+testeff+'/'+csv_name)
            axes[i,j].plot(df.t/24/60,df.Tpos,color="tab:green",label='T+')
            axes[i,j].plot(df.t/24/60,df.Tpro,color="tab:blue",label='Tp')
            axes[i,j].plot(df.t/24/60,df.Tneg,color="tab:red",label='T-')
            axes[i,j].plot(df.t/24/60,df.Tpos+df.Tpro+df.Tneg,color="tab:grey",label='Total')
            # Mark each drug's therapy-on intervals as thick flat bars at
            # y=-200 (NaN elsewhere, so only the active stretches are drawn).
            df['abi_itherapy']=np.where((df['abi_therapy'])|(df['abi_therapy'].shift(1)),-200,np.nan)
            df['dtx_itherapy']=np.where((df['dtx_therapy'])|(df['dtx_therapy'].shift(1)),-200,np.nan)
            axes[i,j].plot(df.t/24/60,df.abi_itherapy,color="tab:purple",linewidth=5,label='ABI')
            axes[i,j].plot(df.t/24/60,df.dtx_itherapy,color="tab:orange",linewidth=5,label='DTX')
    pad = 5  # annotation offset, in points
    for ax, ax2, col in zip(axes[0], axes[1], cols):
        ax.annotate(col, xy=(0.5, 1), xytext=(0, pad),
                    xycoords='axes fraction', textcoords='offset points',
                    size='large', ha='center', va='baseline')
        ax2.set_xlabel('Time (days)')
    for ax, row in zip(axes[:,0], rows):
        ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - pad, 0),
                    xycoords=ax.yaxis.label, textcoords='offset points',
                    size='large', ha='right', va='center')
        ax.set_ylabel('No of Cells')
    # `ax` is the last left-column axes from the loop above; split its legend
    # entries between the cell-type and therapy legends.
    handles, labels = ax.get_legend_handles_labels()
    axes[0,-1].legend(handles=handles[0:4], bbox_to_anchor=(1,1), loc="upper left",title='Cell Type')
    axes[1,-1].legend(handles=handles[4:], bbox_to_anchor=(1,1), loc="upper left",title='Therapy')
    fig.tight_layout()
    fig.savefig(out_pdf)
    fig.clf()
    plt.close(fig)

#### proportions 1:1:1 Tp:T+:T- - total cell 2000
_therapy_combi_grid('AT_nn-0-AT-0-2000.csv',
                    '../writing/MSThesis/figures/All3_therapy-combi_1:1:1.pdf')
#### proportions 8:1:1 Tp:T+:T- - total cell 2000
_therapy_combi_grid('0.8Tp-AT_nn-0-AT-0-2000.csv',
                    '../writing/MSThesis/figures/All3_therapy-combi_8:1:1.pdf')
| 46.052033
| 137
| 0.632953
| 9,747
| 56,644
| 3.602852
| 0.028522
| 0.008315
| 0.019478
| 0.026654
| 0.967138
| 0.960475
| 0.954951
| 0.943816
| 0.940684
| 0.939944
| 0
| 0.051478
| 0.138179
| 56,644
| 1,229
| 138
| 46.089504
| 0.667882
| 0.031301
| 0
| 0.896491
| 0
| 0
| 0.239971
| 0.073103
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000877
| false
| 0
| 0.005263
| 0
| 0.008772
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
050bd73c0b46e7fbe118b3be2f4661b791d4e37d
| 134,911
|
py
|
Python
|
files/runs_small/cores_8/cholesky/power.py
|
ST4NSB/sniper-simulator-predictions
|
1f0fe2a10fda55fceea053464ea202bfe2effafc
|
[
"MIT"
] | 1
|
2021-03-08T03:39:23.000Z
|
2021-03-08T03:39:23.000Z
|
files/runs_small/cores_8/cholesky/power.py
|
ST4NSB/sniper-simulator-predictions
|
1f0fe2a10fda55fceea053464ea202bfe2effafc
|
[
"MIT"
] | null | null | null |
files/runs_small/cores_8/cholesky/power.py
|
ST4NSB/sniper-simulator-predictions
|
1f0fe2a10fda55fceea053464ea202bfe2effafc
|
[
"MIT"
] | null | null | null |
power = {'BUSES': {'Area': 3.70399,
'Bus/Area': 3.70399,
'Bus/Gate Leakage': 0.00993673,
'Bus/Peak Dynamic': 2.16944,
'Bus/Runtime Dynamic': 0.23296,
'Bus/Subthreshold Leakage': 0.103619,
'Bus/Subthreshold Leakage with power gating': 0.0388573,
'Gate Leakage': 0.00993673,
'Peak Dynamic': 2.16944,
'Runtime Dynamic': 0.23296,
'Subthreshold Leakage': 0.103619,
'Subthreshold Leakage with power gating': 0.0388573},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.348399,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.476336,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.88821,
'Execution Unit/Floating Point Units/Runtime Dynamic': 1.13469,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.867852,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 1.50281,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.902968,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 3.27363,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.568345,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.614298,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 9.40426,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.356724,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0314603,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.357658,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.232668,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.714382,
'Execution Unit/Register Files/Runtime Dynamic': 0.264129,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.959695,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 2.02558,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 7.78866,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00342769,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00342769,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.0029696,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00114087,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.0033423,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0131673,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0334332,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.22367,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.453602,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.759684,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 1.48356,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0547202,
'L2/Runtime Dynamic': 0.0149325,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 7.10744,
'Load Store Unit/Data Cache/Runtime Dynamic': 2.83657,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.189919,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.189919,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 8.00793,
'Load Store Unit/Runtime Dynamic': 3.9631,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.468308,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.936616,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.166204,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.167002,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0740133,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.845828,
'Memory Management Unit/Runtime Dynamic': 0.241016,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 31.8432,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 1.24453,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0593529,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.424265,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 1.72815,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 15.2194,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.368382,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.492032,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.99408,
'Execution Unit/Floating Point Units/Runtime Dynamic': 1.18126,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.855355,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 1.48117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.88559,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 3.22211,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.539761,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.581912,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 9.53775,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.376725,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0310073,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.361946,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.229318,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.738671,
'Execution Unit/Register Files/Runtime Dynamic': 0.260325,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.975532,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 2.01663,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 7.75427,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00343937,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00343937,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00297863,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00114375,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00329417,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0131515,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0335859,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.220449,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.435764,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.748744,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 1.45169,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0568373,
'L2/Runtime Dynamic': 0.0144799,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 6.89486,
'Load Store Unit/Data Cache/Runtime Dynamic': 2.73417,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.183041,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.183041,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 7.76274,
'Load Store Unit/Runtime Dynamic': 3.81991,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.451349,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.902698,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.160185,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.161016,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0711029,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.835423,
'Memory Management Unit/Runtime Dynamic': 0.232119,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 31.7232,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 1.31431,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0595536,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.414763,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 1.78862,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 15.0611,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.350202,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.477753,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.89478,
'Execution Unit/Floating Point Units/Runtime Dynamic': 1.13758,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.857552,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 1.48497,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.890667,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 3.23319,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.557155,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.602091,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 9.39517,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.357965,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0310869,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.355761,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.229907,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.713727,
'Execution Unit/Register Files/Runtime Dynamic': 0.260994,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.955607,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 2.00501,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 7.71662,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00340325,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00340325,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.002948,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00113235,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00330263,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0130571,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0332096,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.221015,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.443206,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.750667,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 1.46116,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0525063,
'L2/Runtime Dynamic': 0.0149291,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 6.97087,
'Load Store Unit/Data Cache/Runtime Dynamic': 2.77157,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.1855,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.1855,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 7.85041,
'Load Store Unit/Runtime Dynamic': 3.8719,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.457413,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.914825,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.162337,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.163104,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0723277,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.839143,
'Memory Management Unit/Runtime Dynamic': 0.235431,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 31.6677,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 1.24886,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0588784,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.418478,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 1.72622,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 15.0263,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.357459,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.483453,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.93612,
'Execution Unit/Floating Point Units/Runtime Dynamic': 1.15576,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.848693,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 1.46963,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.883926,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 3.20225,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.542063,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.596938,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 9.43746,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.365774,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0307658,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.356069,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.227532,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.721843,
'Execution Unit/Register Files/Runtime Dynamic': 0.258298,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.958339,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 2.01611,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 7.71281,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00304375,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00304375,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00263671,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00101284,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00326851,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0119927,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0296977,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.218732,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.453999,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.742913,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 1.45733,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0517836,
'L2/Runtime Dynamic': 0.0118731,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 6.95299,
'Load Store Unit/Data Cache/Runtime Dynamic': 2.75656,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.184922,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.184922,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 7.82979,
'Load Store Unit/Runtime Dynamic': 3.85346,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.455986,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.911972,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.161831,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.162584,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0740607,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.838268,
'Memory Management Unit/Runtime Dynamic': 0.236645,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 31.6877,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 1.2761,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0587532,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.413777,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 1.74863,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 15.0207,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.345071,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.473723,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.86804,
'Execution Unit/Floating Point Units/Runtime Dynamic': 1.12581,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.818752,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 1.41778,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.847176,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 3.08371,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.522906,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.564846,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 9.27648,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.352914,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0296804,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.34363,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.219505,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.696544,
'Execution Unit/Register Files/Runtime Dynamic': 0.249185,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.924888,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 1.9159,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 7.41318,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00338896,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00338896,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00293488,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.0011269,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00315321,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.012866,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.033097,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.211016,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.417977,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.716704,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 1.39166,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0577127,
'L2/Runtime Dynamic': 0.0143709,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 6.654,
'Load Store Unit/Data Cache/Runtime Dynamic': 2.6176,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.175249,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.175249,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 7.48493,
'Load Store Unit/Runtime Dynamic': 3.65712,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.432134,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.864267,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.153366,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.154213,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0682333,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.823633,
'Memory Management Unit/Runtime Dynamic': 0.222446,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 31.1732,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 1.23124,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0566823,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.397664,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 1.68558,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 14.3844,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.291437,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.431596,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.58359,
'Execution Unit/Floating Point Units/Runtime Dynamic': 1.00068,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.81455,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 1.41051,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.844715,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 3.06977,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.562362,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.593371,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 8.8417,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.299174,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0295281,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.322238,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.218378,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.621412,
'Execution Unit/Register Files/Runtime Dynamic': 0.247906,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.858502,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 1.82242,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 7.16575,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00392178,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00392178,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00339642,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00130417,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00313702,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.014377,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0382965,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.209933,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.394558,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.713026,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 1.37019,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0535903,
'L2/Runtime Dynamic': 0.0144107,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 6.75647,
'Load Store Unit/Data Cache/Runtime Dynamic': 2.66742,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.178564,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.178564,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 7.60312,
'Load Store Unit/Runtime Dynamic': 3.7266,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.440309,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.880618,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.156267,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.15705,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0643577,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.828649,
'Memory Management Unit/Runtime Dynamic': 0.221408,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 30.8575,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 1.04375,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0542113,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.400363,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 1.49832,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 13.9967,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.355947,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.482265,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.92486,
'Execution Unit/Floating Point Units/Runtime Dynamic': 1.15081,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.84164,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 1.45742,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.869233,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 3.16829,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.536817,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.571719,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 9.40837,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.363648,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0305101,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.35378,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.225641,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.717428,
'Execution Unit/Register Files/Runtime Dynamic': 0.256151,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.952392,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 1.95997,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 7.58921,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00367168,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00367168,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00317927,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00122049,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00324135,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.013764,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.035874,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.216914,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.417039,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.736739,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 1.42033,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0545166,
'L2/Runtime Dynamic': 0.0146708,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 6.74955,
'Load Store Unit/Data Cache/Runtime Dynamic': 2.66469,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.17834,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.17834,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 7.59514,
'Load Store Unit/Runtime Dynamic': 3.72254,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.439757,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.879513,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.156071,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.15687,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0680739,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.828311,
'Memory Management Unit/Runtime Dynamic': 0.224944,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 31.4168,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 1.26869,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0583033,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.408042,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 1.73503,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 14.7067,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.309972,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.446154,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.67935,
'Execution Unit/Floating Point Units/Runtime Dynamic': 1.04281,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.831259,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 1.43944,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.856579,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 3.12728,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.564196,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.579743,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 9.02053,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.317266,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0301338,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.333733,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.222858,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.650999,
'Execution Unit/Register Files/Runtime Dynamic': 0.252991,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.891357,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 1.84621,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 7.29518,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00444921,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00444921,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00385138,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00147787,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00320137,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0159512,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0435118,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.214239,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.378992,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.727651,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 1.38035,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0548678,
'L2/Runtime Dynamic': 0.014323,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 6.73533,
'Load Store Unit/Data Cache/Runtime Dynamic': 2.65757,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.17788,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.17788,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 7.57874,
'Load Store Unit/Runtime Dynamic': 3.71269,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.438623,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.877245,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.155669,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.156474,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0618591,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.827615,
'Memory Management Unit/Runtime Dynamic': 0.218333,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 31.0122,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 1.10687,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0558252,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.405559,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 1.56825,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 14.1891,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 4.002703500014146,
'Runtime Dynamic': 4.002703500014146,
'Subthreshold Leakage': 8.504,
'Subthreshold Leakage with power gating': 8.504},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.223964,
'Runtime Dynamic': 0.133717,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364},
{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.228549,
'Runtime Dynamic': 0.137157,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 388.384,
'Gate Leakage': 3.09074,
'Peak Dynamic': 254.003,
'Peak Power': 320.55,
'Runtime Dynamic': 118.108,
'Subthreshold Leakage': 63.4555,
'Subthreshold Leakage with power gating': 28.0572,
'Total Cores/Area': 260.865,
'Total Cores/Gate Leakage': 2.98397,
'Total Cores/Peak Dynamic': 251.381,
'Total Cores/Runtime Dynamic': 117.604,
'Total Cores/Subthreshold Leakage': 49.7502,
'Total Cores/Subthreshold Leakage with power gating': 20.6649,
'Total L3s/Area': 123.815,
'Total L3s/Gate Leakage': 0.0968273,
'Total L3s/Peak Dynamic': 0.452513,
'Total L3s/Runtime Dynamic': 0.270874,
'Total L3s/Subthreshold Leakage': 13.6017,
'Total L3s/Subthreshold Leakage with power gating': 6.64728,
'Total Leakage': 66.5462,
'Total NoCs/Area': 3.70399,
'Total NoCs/Gate Leakage': 0.00993673,
'Total NoCs/Peak Dynamic': 2.16944,
'Total NoCs/Runtime Dynamic': 0.23296,
'Total NoCs/Subthreshold Leakage': 0.103619,
'Total NoCs/Subthreshold Leakage with power gating': 0.0388573}}
| 75.622758
| 124
| 0.683843
| 15,928
| 134,911
| 5.786163
| 0.043571
| 0.122458
| 0.114581
| 0.09479
| 0.963759
| 0.962284
| 0.956815
| 0.93493
| 0.923645
| 0.917916
| 0
| 0.129561
| 0.222962
| 134,911
| 1,784
| 125
| 75.622758
| 0.749587
| 0
| 0
| 0.736547
| 0
| 0
| 0.661424
| 0.048921
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
05858328cd879a913771f88a47fdd8b847b6d50d
| 9,033
|
py
|
Python
|
tests/lib2to3/test_convert_parameter.py
|
AlainMaesNokia/openapi
|
ea3534efdaeaf82989521d0725773cdf5ed5ea1a
|
[
"BSD-2-Clause"
] | 64
|
2018-12-13T19:46:47.000Z
|
2022-03-25T05:25:56.000Z
|
tests/lib2to3/test_convert_parameter.py
|
AlainMaesNokia/openapi
|
ea3534efdaeaf82989521d0725773cdf5ed5ea1a
|
[
"BSD-2-Clause"
] | 71
|
2018-12-07T10:18:56.000Z
|
2022-01-18T14:31:08.000Z
|
tests/lib2to3/test_convert_parameter.py
|
AlainMaesNokia/openapi
|
ea3534efdaeaf82989521d0725773cdf5ed5ea1a
|
[
"BSD-2-Clause"
] | 40
|
2018-12-19T22:57:13.000Z
|
2022-03-27T14:09:14.000Z
|
""".convert_parameter() test suite."""
import pytest
import sphinxcontrib.openapi._lib2to3 as lib2to3
@pytest.fixture(scope="function")
def convert_parameter(oas_fragment):
    """Return a callable that converts one OAS 2 parameter to its OAS 3 form.

    The callable embeds the given parameter into a minimal Swagger 2.0
    document, runs the whole document through ``lib2to3.convert``, and
    extracts the converted parameter back out.
    """

    def _convert(parameter):
        # Smallest valid Swagger 2.0 document to host the parameter under test.
        spec_v2 = oas_fragment(
            """
            swagger: "2.0"
            info:
              title: An example spec
              version: "1.0"
            paths:
              /test:
                get:
                  responses:
                    '200':
                      description: a response description
            """
        )
        spec_v2["paths"]["/test"]["get"]["parameters"] = [parameter]
        spec_v3 = lib2to3.convert(spec_v2)
        return spec_v3["paths"]["/test"]["get"]["parameters"][0]

    return _convert
def test_in_header_complete(convert_parameter, oas_fragment):
    """A fully-specified header parameter moves its type info under `schema`."""
    expected = oas_fragment(
        """
        description: token to be passed as a header
        in: header
        name: token
        required: true
        schema:
          items:
            format: int64
            type: integer
          type: array
        """
    )
    actual = convert_parameter(
        oas_fragment(
            """
            description: token to be passed as a header
            in: header
            items:
              format: int64
              type: integer
            name: token
            required: true
            type: array
            """
        )
    )
    assert actual == expected
def test_in_path_complete(convert_parameter, oas_fragment):
    """A fully-specified path parameter moves its type info under `schema`."""
    expected = oas_fragment(
        """
        description: username to fetch
        in: path
        name: username
        required: true
        schema:
          type: string
        """
    )
    actual = convert_parameter(
        oas_fragment(
            """
            description: username to fetch
            in: path
            name: username
            required: true
            type: string
            """
        )
    )
    assert actual == expected
def test_in_query_complete(convert_parameter, oas_fragment):
    """A fully-specified query parameter moves its type info under `schema`."""
    expected = oas_fragment(
        """
        description: ID of the object to fetch
        in: query
        name: id
        required: false
        schema:
          items:
            type: string
          type: array
        """
    )
    actual = convert_parameter(
        oas_fragment(
            """
            description: ID of the object to fetch
            in: query
            items:
              type: string
            name: id
            required: false
            type: array
            """
        )
    )
    assert actual == expected
def test_in_header_minimal(convert_parameter, oas_fragment):
    """A minimal header parameter still gains a `schema` wrapper."""
    expected = oas_fragment(
        """
        in: header
        name: token
        schema:
          type: string
        """
    )
    actual = convert_parameter(
        oas_fragment(
            """
            in: header
            name: token
            type: string
            """
        )
    )
    assert actual == expected
def test_in_path_minimal(convert_parameter, oas_fragment):
    """A minimal path parameter still gains a `schema` wrapper."""
    expected = oas_fragment(
        """
        in: path
        name: username
        required: true
        schema:
          type: string
        """
    )
    actual = convert_parameter(
        oas_fragment(
            """
            in: path
            name: username
            required: true
            type: string
            """
        )
    )
    assert actual == expected
def test_in_query_minimal(convert_parameter, oas_fragment):
    """A minimal query parameter still gains a `schema` wrapper."""
    expected = oas_fragment(
        """
        in: query
        name: id
        schema:
          type: string
        """
    )
    actual = convert_parameter(
        oas_fragment(
            """
            in: query
            name: id
            type: string
            """
        )
    )
    assert actual == expected
def test_collectionFormat_is_csv_path(convert_parameter, oas_fragment):
    """`collectionFormat: csv` on a path parameter becomes `style: simple`."""
    expected = oas_fragment(
        """
        in: path
        name: username
        required: true
        schema:
          items:
            type: string
          type: array
        style: simple
        """
    )
    actual = convert_parameter(
        oas_fragment(
            """
            collectionFormat: csv
            in: path
            items:
              type: string
            name: username
            required: true
            type: array
            """
        )
    )
    assert actual == expected
def test_collectionFormat_is_csv_header(convert_parameter, oas_fragment):
    """`collectionFormat: csv` on a header parameter becomes `style: simple`."""
    expected = oas_fragment(
        """
        in: header
        name: username
        schema:
          items:
            type: string
          type: array
        style: simple
        """
    )
    actual = convert_parameter(
        oas_fragment(
            """
            collectionFormat: csv
            in: header
            items:
              type: string
            name: username
            type: array
            """
        )
    )
    assert actual == expected
def test_collectionFormat_is_csv(convert_parameter, oas_fragment):
    """`collectionFormat: csv` on a query parameter maps to form/no-explode."""
    expected = oas_fragment(
        """
        explode: false
        in: query
        name: id
        schema:
          items:
            type: string
          type: array
        style: form
        """
    )
    actual = convert_parameter(
        oas_fragment(
            """
            collectionFormat: csv
            in: query
            items:
              type: string
            name: id
            type: array
            """
        )
    )
    assert actual == expected
def test_collectionFormat_is_multi(convert_parameter, oas_fragment):
    """`collectionFormat: multi` maps to form style with explode enabled."""
    expected = oas_fragment(
        """
        explode: true
        in: query
        name: id
        schema:
          items:
            type: string
          type: array
        style: form
        """
    )
    actual = convert_parameter(
        oas_fragment(
            """
            collectionFormat: multi
            in: query
            items:
              type: string
            name: id
            type: array
            """
        )
    )
    assert actual == expected
def test_collectionFormat_is_ssv(convert_parameter, oas_fragment):
    """`collectionFormat: ssv` maps to `style: spaceDelimited`."""
    expected = oas_fragment(
        """
        in: query
        name: id
        schema:
          items:
            type: string
          type: array
        style: spaceDelimited
        """
    )
    actual = convert_parameter(
        oas_fragment(
            """
            collectionFormat: ssv
            in: query
            items:
              type: string
            name: id
            type: array
            """
        )
    )
    assert actual == expected
def test_collectionFormat_is_pipes(convert_parameter, oas_fragment):
    """`collectionFormat: pipes` maps to `style: pipeDelimited`."""
    expected = oas_fragment(
        """
        in: query
        name: id
        schema:
          items:
            type: string
          type: array
        style: pipeDelimited
        """
    )
    actual = convert_parameter(
        oas_fragment(
            """
            collectionFormat: pipes
            in: query
            items:
              type: string
            name: id
            type: array
            """
        )
    )
    assert actual == expected
def test_collectionFormat_is_tsv(convert_parameter, oas_fragment):
    """`collectionFormat: tsv` has no OAS 3 equivalent and is dropped."""
    expected = oas_fragment(
        """
        in: query
        name: id
        schema:
          items:
            type: string
          type: array
        """
    )
    actual = convert_parameter(
        oas_fragment(
            """
            collectionFormat: tsv
            in: query
            items:
              type: string
            name: id
            type: array
            """
        )
    )
    assert actual == expected
def test_in_header_vendor_extensions(convert_parameter, oas_fragment):
    """`x-` vendor extensions on a header parameter are preserved as-is."""
    expected = oas_fragment(
        """
        in: header
        name: token
        schema:
          type: string
        x-vendor-ext: vendor-ext
        """
    )
    actual = convert_parameter(
        oas_fragment(
            """
            in: header
            name: token
            type: string
            x-vendor-ext: vendor-ext
            """
        )
    )
    assert actual == expected
def test_in_path_vendor_extensions(convert_parameter, oas_fragment):
    """`x-` vendor extensions on a path parameter are preserved as-is."""
    expected = oas_fragment(
        """
        in: path
        name: username
        required: true
        schema:
          type: string
        x-vendor-ext: vendor-ext
        """
    )
    actual = convert_parameter(
        oas_fragment(
            """
            in: path
            name: username
            required: true
            type: string
            x-vendor-ext: vendor-ext
            """
        )
    )
    assert actual == expected
def test_in_query_vendor_extensions(convert_parameter, oas_fragment):
    """`x-` vendor extensions on a query parameter are preserved as-is."""
    expected = oas_fragment(
        """
        in: query
        name: id
        schema:
          type: string
        x-vendor-ext: vendor-ext
        """
    )
    actual = convert_parameter(
        oas_fragment(
            """
            in: query
            name: id
            type: string
            x-vendor-ext: vendor-ext
            """
        )
    )
    assert actual == expected
| 21.10514
| 73
| 0.467951
| 747
| 9,033
| 5.471218
| 0.113788
| 0.134573
| 0.153413
| 0.218008
| 0.850012
| 0.812332
| 0.81013
| 0.787619
| 0.769024
| 0.769024
| 0
| 0.004651
| 0.452563
| 9,033
| 427
| 74
| 21.154567
| 0.82184
| 0.003543
| 0
| 0.512195
| 0
| 0
| 0.014454
| 0
| 0
| 0
| 0
| 0
| 0.130081
| 1
| 0.146341
| false
| 0
| 0.01626
| 0
| 0.178862
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
55a8424eddf5f92c49e26eb49f16eaa4798734cf
| 110
|
py
|
Python
|
api/__init__.py
|
projekt-scope/scope-admin
|
f8fa536f8a620b8047a34d81c9c3004b4ed0b331
|
[
"MIT"
] | 2
|
2020-09-29T14:29:05.000Z
|
2021-01-28T12:57:15.000Z
|
api/__init__.py
|
projekt-scope/scope-admin
|
f8fa536f8a620b8047a34d81c9c3004b4ed0b331
|
[
"MIT"
] | null | null | null |
api/__init__.py
|
projekt-scope/scope-admin
|
f8fa536f8a620b8047a34d81c9c3004b4ed0b331
|
[
"MIT"
] | 1
|
2021-04-16T09:43:46.000Z
|
2021-04-16T09:43:46.000Z
|
from flask import Blueprint
# Blueprint grouping all API routes under the "/api" URL prefix.
api_bp = Blueprint("api_bp", __name__, url_prefix="/api")
# NOTE(review): imported after ``api_bp`` exists, presumably so the views
# module can attach its routes to the blueprint without a circular import —
# confirm before reordering.
from api import views
| 18.333333
| 57
| 0.763636
| 17
| 110
| 4.529412
| 0.588235
| 0.311688
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 110
| 5
| 58
| 22
| 0.810526
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
e9514cf858051c6998683e35fa251af3ff8353b7
| 109
|
py
|
Python
|
pyforchange/pfcf/__init__.py
|
PythonForChange/pyforchange
|
2cc5afef227ac68147e291e447c57924586a0b12
|
[
"MIT"
] | 1
|
2021-06-07T02:10:41.000Z
|
2021-06-07T02:10:41.000Z
|
pyforchange/pfcf/__init__.py
|
PythonForChange/pyforchange
|
2cc5afef227ac68147e291e447c57924586a0b12
|
[
"MIT"
] | null | null | null |
pyforchange/pfcf/__init__.py
|
PythonForChange/pyforchange
|
2cc5afef227ac68147e291e447c57924586a0b12
|
[
"MIT"
] | null | null | null |
from pyforchange.pfcf.files import *
from pyforchange.pfcf.read import *
from pyforchange.pfcf.utils import *
| 36.333333
| 36
| 0.816514
| 15
| 109
| 5.933333
| 0.466667
| 0.505618
| 0.640449
| 0.561798
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.100917
| 109
| 3
| 37
| 36.333333
| 0.908163
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
7575f8e81a60c879ee4a219bc690123c0083e6e7
| 102
|
py
|
Python
|
src/utils/__init__.py
|
VictorOnink/Wind-Mixing-Diffusion
|
3a7051efefb6a6f89035099ac4d50ab11f242881
|
[
"MIT"
] | null | null | null |
src/utils/__init__.py
|
VictorOnink/Wind-Mixing-Diffusion
|
3a7051efefb6a6f89035099ac4d50ab11f242881
|
[
"MIT"
] | null | null | null |
src/utils/__init__.py
|
VictorOnink/Wind-Mixing-Diffusion
|
3a7051efefb6a6f89035099ac4d50ab11f242881
|
[
"MIT"
] | null | null | null |
from utils.utils_filenames import *
from utils.utils_files import *
from utils.utils_physics import *
| 25.5
| 35
| 0.823529
| 15
| 102
| 5.4
| 0.4
| 0.333333
| 0.518519
| 0.493827
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 102
| 3
| 36
| 34
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
7588acdd6a9b7c21bde39cb2cf0a5cdad9309b80
| 5,289
|
py
|
Python
|
dJangoAdmin/catalog/numOfLeads.py
|
topseer/django
|
bb8e0841b4cbbd6cadf8cd3e29dbb52ba3109e88
|
[
"MIT"
] | null | null | null |
dJangoAdmin/catalog/numOfLeads.py
|
topseer/django
|
bb8e0841b4cbbd6cadf8cd3e29dbb52ba3109e88
|
[
"MIT"
] | null | null | null |
dJangoAdmin/catalog/numOfLeads.py
|
topseer/django
|
bb8e0841b4cbbd6cadf8cd3e29dbb52ba3109e88
|
[
"MIT"
] | null | null | null |
import pyodbc
import pandas.io.sql as sql
def _count_events(startdate, enddate, user_email, event_name):
    """Return the number of ``event_name`` events for a user in a date range.

    Opens a fresh connection to the reporting database, counts rows in
    ``AEPerformanceReport_1`` joined to ``topDownAELookupTable`` that match
    the user's email, the date window, and the given event name, then closes
    the connection.

    Parameters
    ----------
    startdate, enddate : str
        Inclusive date bounds, in whatever format ``a.Date`` compares against
        (assumed ISO-like strings — TODO confirm against the table schema).
    user_email : str
        Email looked up in ``topDownAELookupTable.Email``.
    event_name : str
        Value matched against ``EventName``.

    Returns
    -------
    The count from the first row of the result's ``Router`` column.
    """
    # NOTE(review): hard-coded credentials; these should move to a config
    # file or environment variables.
    server = '10.203.1.105\\alpha'
    database = 'test_yang'
    username = 'webuser'
    password = 'Changeme1'
    cnxn = pyodbc.connect(
        'DRIVER={ODBC Driver 13 for SQL Server};SERVER=' + server
        + ';DATABASE=' + database + ';UID=' + username + ';PWD=' + password
    )
    try:
        # Parameterized query (qmark style) instead of the previous
        # str.replace() templating, which was vulnerable to SQL injection
        # through startdate/enddate/user_email.
        query = """
        select Count(*) Router
        from AEPerformanceReport_1 as a
        inner join topDownAELookupTable as b
        on a.Date >= ?
        and a.Date <= ?
        and a.PersonID = b.PersonId
        and b.Email = ?
        and EventName = ?
        """
        result = sql.read_sql(
            query, cnxn, params=[startdate, enddate, user_email, event_name]
        )
        return result["Router"][0]
    finally:
        # The original code leaked the connection; always close it.
        cnxn.close()


def numOfRouterCalls(startdate, enddate, user_email):
    """Count 'Router Call' events for ``user_email`` in [startdate, enddate]."""
    return _count_events(startdate, enddate, user_email, 'Router Call')


def numOfWebLeads(startdate, enddate, user_email):
    """Count 'Web Lead' events for ``user_email`` in [startdate, enddate]."""
    return _count_events(startdate, enddate, user_email, 'Web Lead')


def numOfAOs(startdate, enddate, user_email):
    """Count 'Appraisal Order' events for ``user_email`` in [startdate, enddate]."""
    return _count_events(startdate, enddate, user_email, 'Appraisal Order')


def numOfIPs(startdate, enddate, user_email):
    """Count 'IP' events for ``user_email`` in [startdate, enddate]."""
    return _count_events(startdate, enddate, user_email, 'IP')


def numOfFunds(startdate, enddate, user_email):
    """Count 'Fund' events for ``user_email`` in [startdate, enddate]."""
    return _count_events(startdate, enddate, user_email, 'Fund')


def numOfPitch(startdate, enddate, user_email):
    """Count 'Pitch' events for ``user_email`` in [startdate, enddate]."""
    return _count_events(startdate, enddate, user_email, 'Pitch')
| 33.264151
| 137
| 0.654755
| 651
| 5,289
| 5.267281
| 0.110599
| 0.052493
| 0.089239
| 0.043745
| 0.874891
| 0.874891
| 0.874891
| 0.874891
| 0.874891
| 0.874891
| 0
| 0.019324
| 0.178106
| 5,289
| 159
| 138
| 33.264151
| 0.769496
| 0
| 0
| 0.814286
| 0
| 0
| 0.487146
| 0.023819
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042857
| false
| 0.085714
| 0.014286
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
759267886adaba53259e0ec5e3f82cd4518e458b
| 170
|
py
|
Python
|
src/ndn/security/validator/__init__.py
|
Zhiyi-Zhang/python-ndn
|
b05ed72dc7f0190e015edecf81d8949fb2eefe64
|
[
"Apache-2.0"
] | null | null | null |
src/ndn/security/validator/__init__.py
|
Zhiyi-Zhang/python-ndn
|
b05ed72dc7f0190e015edecf81d8949fb2eefe64
|
[
"Apache-2.0"
] | null | null | null |
src/ndn/security/validator/__init__.py
|
Zhiyi-Zhang/python-ndn
|
b05ed72dc7f0190e015edecf81d8949fb2eefe64
|
[
"Apache-2.0"
] | null | null | null |
from .digest_validator import sha256_digest_checker, params_sha256_checker, union_checker
__all__ = ['sha256_digest_checker', 'params_sha256_checker', 'union_checker']
| 34
| 89
| 0.841176
| 21
| 170
| 6.095238
| 0.428571
| 0.1875
| 0.296875
| 0.390625
| 0.78125
| 0.78125
| 0.78125
| 0.78125
| 0
| 0
| 0
| 0.076433
| 0.076471
| 170
| 4
| 90
| 42.5
| 0.738854
| 0
| 0
| 0
| 0
| 0
| 0.323529
| 0.247059
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
f9428e75182e917671339d0dae147b9ee5691d62
| 76
|
py
|
Python
|
src/tests/articles/__init__.py
|
samrika25/TRAVIS_HEROKU_GIT
|
bcae6d0422d9a0369810944a91dd03db7df0d058
|
[
"MIT"
] | null | null | null |
src/tests/articles/__init__.py
|
samrika25/TRAVIS_HEROKU_GIT
|
bcae6d0422d9a0369810944a91dd03db7df0d058
|
[
"MIT"
] | 4
|
2021-03-30T12:35:36.000Z
|
2021-06-10T18:11:24.000Z
|
src/tests/articles/__init__.py
|
samrika25/TRAVIS_HEROKU_GIT
|
bcae6d0422d9a0369810944a91dd03db7df0d058
|
[
"MIT"
] | 2
|
2021-02-07T16:16:36.000Z
|
2021-07-13T05:26:51.000Z
|
from .test_article import *
from .test_tag import *
from .test_vote import *
| 25.333333
| 27
| 0.776316
| 12
| 76
| 4.666667
| 0.5
| 0.428571
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144737
| 76
| 3
| 28
| 25.333333
| 0.861538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f987a82ed71db745e1aedc9cea40c42ebe558346
| 263,830
|
py
|
Python
|
metal/models/projects_api.py
|
displague/metal-python
|
96e64e9ac41025d85ff6f61693165e29e1c366db
|
[
"MIT"
] | null | null | null |
metal/models/projects_api.py
|
displague/metal-python
|
96e64e9ac41025d85ff6f61693165e29e1c366db
|
[
"MIT"
] | 3
|
2021-09-27T05:10:36.000Z
|
2021-09-27T06:10:57.000Z
|
metal/models/projects_api.py
|
displague/metal-python
|
96e64e9ac41025d85ff6f61693165e29e1c366db
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Metal API
This is the API for Equinix Metal. The API allows you to programmatically interact with all of your Equinix Metal resources, including devices, networks, addresses, organizations, projects, and your user account. The official API docs are hosted at <https://metal.equinix.com/developers/api>. # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@equinixmetal.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from metal.api_client import ApiClient
from metal.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class ProjectsApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
    """Bind the API client used for every request; build a default one if omitted."""
    self.api_client = api_client if api_client is not None else ApiClient()
def create_device(self, id, device, **kwargs):  # noqa: E501
    """Create a device  # noqa: E501

    Creates and provisions a new device in the specified location.
    Device type-specific options are accepted; e.g. `baremetal` devices
    accept `operating_system`, `hostname` and `plan` (default device type
    is `baremetal`).  # noqa: E501

    Synchronous by default; pass async_req=True for an async request:

    >>> thread = api.create_device(id, device, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param device: Device to create (required)
    :type device: DeviceCreateInput
    :param async_req: execute the request asynchronously
    :type async_req: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding the response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total timeout, or a (connection, read) pair
    :return: the created Device (or the request thread when async)
    :rtype: Device
    """
    # Delegate to the *_with_http_info variant, asking for the body only.
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.create_device_with_http_info(id, device, **call_kwargs)  # noqa: E501
def create_device_with_http_info(self, id, device, **kwargs):  # noqa: E501
    """Create a device  # noqa: E501

    Creates a new device and provisions it in the specified location. Device type-specific options are accepted. For example, `baremetal` devices accept `operating_system`, `hostname`, and `plan`. These parameters may not be accepted for other device types. The default device type is `baremetal`. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_device_with_http_info(id, device, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param device: Device to create (required)
    :type device: DeviceCreateInput
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(Device, status_code(int), headers(HTTPHeaderDict))
    """
    # NOTE: auto-generated by OpenAPI Generator — do not hand-edit logic;
    # regenerate instead (see class docstring).
    # locals() snapshots the named args plus the raw **kwargs dict.
    local_var_params = locals()

    all_params = [
        'id',
        'device'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject unknown keyword arguments, then flatten the accepted ones into
    # local_var_params so everything is looked up uniformly below.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_device" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                   local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `create_device`")  # noqa: E501
    # verify the required parameter 'device' is set
    if self.api_client.client_side_validation and ('device' not in local_var_params or  # noqa: E501
                                                   local_var_params['device'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `device` when calling `create_device`")  # noqa: E501

    collection_formats = {}

    # 'id' is substituted into the URL path template below.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The device payload is sent as the JSON request body.
    body_params = None
    if 'device' in local_var_params:
        body_params = local_var_params['device']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['x_auth_token']  # noqa: E501

    # Maps HTTP status codes to the model class used to deserialize the body.
    response_types_map = {
        201: "Device",
        401: "Error",
        403: "Error",
        404: "Error",
        422: "Error",
    }

    return self.api_client.call_api(
        '/projects/{id}/devices', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def create_license(self, id, license, **kwargs):  # noqa: E501
    """Create a License  # noqa: E501

    Creates a new license for the given project.  # noqa: E501

    Synchronous by default; pass async_req=True for an async request:

    >>> thread = api.create_license(id, license, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param license: License to create (required)
    :type license: LicenseCreateInput
    :param async_req: execute the request asynchronously
    :type async_req: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding the response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total timeout, or a (connection, read) pair
    :return: the created License (or the request thread when async)
    :rtype: License
    """
    # Delegate to the *_with_http_info variant, asking for the body only.
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.create_license_with_http_info(id, license, **call_kwargs)  # noqa: E501
def create_license_with_http_info(self, id, license, **kwargs):  # noqa: E501
    """Create a License  # noqa: E501

    Creates a new license for the given project # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_license_with_http_info(id, license, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param license: License to create (required)
    :type license: LicenseCreateInput
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(License, status_code(int), headers(HTTPHeaderDict))
    """
    # NOTE: auto-generated by OpenAPI Generator — do not hand-edit logic;
    # regenerate instead (see class docstring).
    # locals() snapshots the named args plus the raw **kwargs dict.
    local_var_params = locals()

    all_params = [
        'id',
        'license'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject unknown keyword arguments, then flatten the accepted ones into
    # local_var_params so everything is looked up uniformly below.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_license" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                   local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `create_license`")  # noqa: E501
    # verify the required parameter 'license' is set
    if self.api_client.client_side_validation and ('license' not in local_var_params or  # noqa: E501
                                                   local_var_params['license'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `license` when calling `create_license`")  # noqa: E501

    collection_formats = {}

    # 'id' is substituted into the URL path template below.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The license payload is sent as the JSON request body.
    body_params = None
    if 'license' in local_var_params:
        body_params = local_var_params['license']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['x_auth_token']  # noqa: E501

    # Maps HTTP status codes to the model class used to deserialize the body.
    response_types_map = {
        201: "License",
        401: "Error",
        403: "Error",
        404: "Error",
        422: "Error",
    }

    return self.api_client.call_api(
        '/projects/{id}/licenses', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def create_organization_project(self, id, project, **kwargs):  # noqa: E501
    """Create a project for the organization  # noqa: E501

    Creates a new project for the organization.  # noqa: E501

    Synchronous by default; pass async_req=True for an async request:

    >>> thread = api.create_organization_project(id, project, async_req=True)
    >>> result = thread.get()

    :param id: Organization UUID (required)
    :type id: str
    :param project: Project to create (required)
    :type project: ProjectCreateInput
    :param async_req: execute the request asynchronously
    :type async_req: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding the response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total timeout, or a (connection, read) pair
    :return: the created Project (or the request thread when async)
    :rtype: Project
    """
    # Delegate to the *_with_http_info variant, asking for the body only.
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.create_organization_project_with_http_info(id, project, **call_kwargs)  # noqa: E501
def create_organization_project_with_http_info(self, id, project, **kwargs):  # noqa: E501
    """Create a project for the organization  # noqa: E501

    Creates a new project for the organization # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_organization_project_with_http_info(id, project, async_req=True)
    >>> result = thread.get()

    :param id: Organization UUID (required)
    :type id: str
    :param project: Project to create (required)
    :type project: ProjectCreateInput
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(Project, status_code(int), headers(HTTPHeaderDict))
    """
    # NOTE: auto-generated by OpenAPI Generator — do not hand-edit logic;
    # regenerate instead (see class docstring).
    # locals() snapshots the named args plus the raw **kwargs dict.
    local_var_params = locals()

    all_params = [
        'id',
        'project'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject unknown keyword arguments, then flatten the accepted ones into
    # local_var_params so everything is looked up uniformly below.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_organization_project" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                   local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `create_organization_project`")  # noqa: E501
    # verify the required parameter 'project' is set
    if self.api_client.client_side_validation and ('project' not in local_var_params or  # noqa: E501
                                                   local_var_params['project'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `project` when calling `create_organization_project`")  # noqa: E501

    collection_formats = {}

    # 'id' is substituted into the URL path template below.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The project payload is sent as the JSON request body.
    body_params = None
    if 'project' in local_var_params:
        body_params = local_var_params['project']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['x_auth_token']  # noqa: E501

    # Maps HTTP status codes to the model class used to deserialize the body.
    response_types_map = {
        201: "Project",
        401: "Error",
        422: "Error",
    }

    return self.api_client.call_api(
        '/organizations/{id}/projects', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def create_project(self, project, **kwargs):  # noqa: E501
    """Create a project  # noqa: E501

    Creates a new project for the user default organization. If the user
    don't have an organization, a new one will be created.  # noqa: E501

    Synchronous by default; pass async_req=True for an async request:

    >>> thread = api.create_project(project, async_req=True)
    >>> result = thread.get()

    :param project: Project to create (required)
    :type project: ProjectCreateFromRootInput
    :param async_req: execute the request asynchronously
    :type async_req: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding the response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total timeout, or a (connection, read) pair
    :return: the created Project (or the request thread when async)
    :rtype: Project
    """
    # Delegate to the *_with_http_info variant, asking for the body only.
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.create_project_with_http_info(project, **call_kwargs)  # noqa: E501
def create_project_with_http_info(self, project, **kwargs):  # noqa: E501
    """Create a project  # noqa: E501

    Creates a new project for the user default organization. If the user don't have an organization, a new one will be created. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_project_with_http_info(project, async_req=True)
    >>> result = thread.get()

    :param project: Project to create (required)
    :type project: ProjectCreateFromRootInput
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(Project, status_code(int), headers(HTTPHeaderDict))
    """
    # NOTE: auto-generated by OpenAPI Generator — do not hand-edit logic;
    # regenerate instead (see class docstring).
    # locals() snapshots the named args plus the raw **kwargs dict.
    local_var_params = locals()

    all_params = [
        'project'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject unknown keyword arguments, then flatten the accepted ones into
    # local_var_params so everything is looked up uniformly below.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_project" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'project' is set
    if self.api_client.client_side_validation and ('project' not in local_var_params or  # noqa: E501
                                                   local_var_params['project'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `project` when calling `create_project`")  # noqa: E501

    collection_formats = {}

    # This endpoint has no path parameters.
    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The project payload is sent as the JSON request body.
    body_params = None
    if 'project' in local_var_params:
        body_params = local_var_params['project']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['x_auth_token']  # noqa: E501

    # Maps HTTP status codes to the model class used to deserialize the body.
    response_types_map = {
        201: "Project",
        401: "Error",
        422: "Error",
    }

    return self.api_client.call_api(
        '/projects', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def create_project_invitation(self, project_id, invitation, **kwargs):  # noqa: E501
    """Create an invitation for a project  # noqa: E501

    In order to add a user to a project, they must first be invited.  # noqa: E501

    Synchronous by default; pass async_req=True for an async request:

    >>> thread = api.create_project_invitation(project_id, invitation, async_req=True)
    >>> result = thread.get()

    :param project_id: Project UUID (required)
    :type project_id: str
    :param invitation: Invitation to create (required)
    :type invitation: InvitationInput
    :param async_req: execute the request asynchronously
    :type async_req: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding the response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total timeout, or a (connection, read) pair
    :return: the created Invitation (or the request thread when async)
    :rtype: Invitation
    """
    # Delegate to the *_with_http_info variant, asking for the body only.
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.create_project_invitation_with_http_info(project_id, invitation, **call_kwargs)  # noqa: E501
def create_project_invitation_with_http_info(self, project_id, invitation, **kwargs):  # noqa: E501
    """Create an invitation for a project  # noqa: E501

    In order to add a user to a project, they must first be invited. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_project_invitation_with_http_info(project_id, invitation, async_req=True)
    >>> result = thread.get()

    :param project_id: Project UUID (required)
    :type project_id: str
    :param invitation: Invitation to create (required)
    :type invitation: InvitationInput
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(Invitation, status_code(int), headers(HTTPHeaderDict))
    """
    # NOTE: auto-generated by OpenAPI Generator — do not hand-edit logic;
    # regenerate instead (see class docstring).
    # locals() snapshots the named args plus the raw **kwargs dict.
    local_var_params = locals()

    all_params = [
        'project_id',
        'invitation'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject unknown keyword arguments, then flatten the accepted ones into
    # local_var_params so everything is looked up uniformly below.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_project_invitation" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'project_id' is set
    if self.api_client.client_side_validation and ('project_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['project_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `project_id` when calling `create_project_invitation`")  # noqa: E501
    # verify the required parameter 'invitation' is set
    if self.api_client.client_side_validation and ('invitation' not in local_var_params or  # noqa: E501
                                                   local_var_params['invitation'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `invitation` when calling `create_project_invitation`")  # noqa: E501

    collection_formats = {}

    # 'project_id' is substituted into the URL path template below.
    path_params = {}
    if 'project_id' in local_var_params:
        path_params['project_id'] = local_var_params['project_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The invitation payload is sent as the JSON request body.
    body_params = None
    if 'invitation' in local_var_params:
        body_params = local_var_params['invitation']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['x_auth_token']  # noqa: E501

    # Maps HTTP status codes to the model class used to deserialize the body.
    response_types_map = {
        201: "Invitation",
        401: "Error",
        403: "Error",
        404: "Error",
        422: "Error",
    }

    return self.api_client.call_api(
        '/projects/{project_id}/invitations', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def create_project_ssh_key(self, id, ssh_key, **kwargs):  # noqa: E501
    """Create a ssh key for the given project  # noqa: E501

    Creates a ssh key.  # noqa: E501

    Synchronous by default; pass async_req=True for an async request:

    >>> thread = api.create_project_ssh_key(id, ssh_key, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param ssh_key: ssh key to create (required)
    :type ssh_key: SSHKeyCreateInput
    :param async_req: execute the request asynchronously
    :type async_req: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding the response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total timeout, or a (connection, read) pair
    :return: the created SSHKey (or the request thread when async)
    :rtype: SSHKey
    """
    # Delegate to the *_with_http_info variant, asking for the body only.
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.create_project_ssh_key_with_http_info(id, ssh_key, **call_kwargs)  # noqa: E501
def create_project_ssh_key_with_http_info(self, id, ssh_key, **kwargs):  # noqa: E501
    """Create a ssh key for the given project.

    Creates a ssh key.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_project_ssh_key_with_http_info(id, ssh_key, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param ssh_key: ssh key to create (required)
    :type ssh_key: SSHKeyCreateInput
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: return the response payload only,
        without the HTTP status code and headers.
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout for this request; a single number for
        a total timeout, or a (connection, read) tuple.
    :param _request_auth: overrides the spec's auth_settings for this
        single request.
    :type _request_auth: dict, optional
    :return: Returns the result object. If the method is called
        asynchronously, returns the request thread.
    :rtype: tuple(SSHKey, status_code(int), headers(HTTPHeaderDict))
    """
    # Snapshot the call arguments before any new locals are created.
    local_var_params = locals()

    # Every keyword the caller is allowed to supply.
    all_params = [
        'id',
        'ssh_key',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    ]

    # Reject unknown keywords, then fold the known ones into the map.
    extra_kwargs = local_var_params.pop('kwargs')
    for key, val in six.iteritems(extra_kwargs):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_project_ssh_key" % key
            )
        local_var_params[key] = val

    # Client-side validation of required parameters.
    if self.api_client.client_side_validation and local_var_params.get('id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `create_project_ssh_key`")  # noqa: E501
    if self.api_client.client_side_validation and local_var_params.get('ssh_key') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `ssh_key` when calling `create_project_ssh_key`")  # noqa: E501

    collection_formats = {}
    path_params = {'id': local_var_params['id']}
    query_params = []
    form_params = []
    local_var_files = {}

    # The ssh key payload travels as the request body.
    body_params = local_var_params['ssh_key']

    # Negotiate `Accept` and `Content-Type` headers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    # Authentication scheme declared for this endpoint.
    auth_settings = ['x_auth_token']

    # Status-code -> model mapping used for response deserialization.
    response_types_map = {
        201: "SSHKey",
        401: "Error",
        422: "Error",
    }

    return self.api_client.call_api(
        '/projects/{id}/ssh-keys', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def create_spot_market_request(self, id, spot_market_request, **kwargs):  # noqa: E501
    """Create a spot market request.

    Creates a new spot market request. Type-specific options (such as operating_system for baremetal devices) should be included in the main data structure alongside hostname and plan. The features attribute allows you to optionally specify what features your server should have. For example, if you require a server with a TPM chip, you may specify `{ \"features\": { \"tpm\": \"required\" } }` (or `{ \"features\": [\"tpm\"] }` in shorthand). The request will fail if there are no available servers matching your criteria. Alternatively, if you do not require a certain feature, but would prefer to be assigned a server with that feature if there are any available, you may specify that feature with a preferred value (see the example request below). The request will not fail if we have no servers with that feature in our inventory.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_spot_market_request(id, spot_market_request, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param spot_market_request: Spot Market Request to create (required)
    :type spot_market_request: SpotMarketRequestCreateInput
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout for this request; a single number for
        a total timeout, or a (connection, read) tuple.
    :return: Returns the result object. If the method is called
        asynchronously, returns the request thread.
    :rtype: SpotMarketRequest
    """
    # Delegate to the *_with_http_info variant, forcing it to strip the
    # (status, headers) envelope and hand back only the deserialized body.
    kwargs = dict(kwargs, _return_http_data_only=True)
    return self.create_spot_market_request_with_http_info(id, spot_market_request, **kwargs)  # noqa: E501
def create_spot_market_request_with_http_info(self, id, spot_market_request, **kwargs):  # noqa: E501
    """Create a spot market request.

    Creates a new spot market request. Type-specific options (such as operating_system for baremetal devices) should be included in the main data structure alongside hostname and plan. The features attribute allows you to optionally specify what features your server should have. For example, if you require a server with a TPM chip, you may specify `{ \"features\": { \"tpm\": \"required\" } }` (or `{ \"features\": [\"tpm\"] }` in shorthand). The request will fail if there are no available servers matching your criteria. Alternatively, if you do not require a certain feature, but would prefer to be assigned a server with that feature if there are any available, you may specify that feature with a preferred value (see the example request below). The request will not fail if we have no servers with that feature in our inventory.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_spot_market_request_with_http_info(id, spot_market_request, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param spot_market_request: Spot Market Request to create (required)
    :type spot_market_request: SpotMarketRequestCreateInput
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: return the response payload only,
        without the HTTP status code and headers.
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout for this request; a single number for
        a total timeout, or a (connection, read) tuple.
    :param _request_auth: overrides the spec's auth_settings for this
        single request.
    :type _request_auth: dict, optional
    :return: Returns the result object. If the method is called
        asynchronously, returns the request thread.
    :rtype: tuple(SpotMarketRequest, status_code(int), headers(HTTPHeaderDict))
    """
    # Snapshot the call arguments before any new locals are created.
    local_var_params = locals()

    # Every keyword the caller is allowed to supply.
    all_params = [
        'id',
        'spot_market_request',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    ]

    # Reject unknown keywords, then fold the known ones into the map.
    extra_kwargs = local_var_params.pop('kwargs')
    for key, val in six.iteritems(extra_kwargs):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_spot_market_request" % key
            )
        local_var_params[key] = val

    # Client-side validation of required parameters.
    if self.api_client.client_side_validation and local_var_params.get('id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `create_spot_market_request`")  # noqa: E501
    if self.api_client.client_side_validation and local_var_params.get('spot_market_request') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `spot_market_request` when calling `create_spot_market_request`")  # noqa: E501

    collection_formats = {}
    path_params = {'id': local_var_params['id']}
    query_params = []
    form_params = []
    local_var_files = {}

    # The spot market request payload travels as the request body.
    body_params = local_var_params['spot_market_request']

    # Negotiate `Accept` and `Content-Type` headers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    # Authentication scheme declared for this endpoint.
    auth_settings = ['x_auth_token']

    # Status-code -> model mapping used for response deserialization.
    response_types_map = {
        201: "SpotMarketRequest",
        401: "Error",
        404: "Error",
        422: "Error",
    }

    return self.api_client.call_api(
        '/projects/{id}/spot-market-requests', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def create_transfer_request(self, id, transfer_request, **kwargs):  # noqa: E501
    """Create a transfer request.

    Organization owners can transfer their projects to other organizations.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_transfer_request(id, transfer_request, async_req=True)
    >>> result = thread.get()

    :param id: UUID of the project to be transferred (required)
    :type id: str
    :param transfer_request: Transfer Request to create (required)
    :type transfer_request: TransferRequestInput
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout for this request; a single number for
        a total timeout, or a (connection, read) tuple.
    :return: Returns the result object. If the method is called
        asynchronously, returns the request thread.
    :rtype: TransferRequest
    """
    # Delegate to the *_with_http_info variant, forcing it to strip the
    # (status, headers) envelope and hand back only the deserialized body.
    kwargs = dict(kwargs, _return_http_data_only=True)
    return self.create_transfer_request_with_http_info(id, transfer_request, **kwargs)  # noqa: E501
def create_transfer_request_with_http_info(self, id, transfer_request, **kwargs):  # noqa: E501
    """Create a transfer request.

    Organization owners can transfer their projects to other organizations.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_transfer_request_with_http_info(id, transfer_request, async_req=True)
    >>> result = thread.get()

    :param id: UUID of the project to be transferred (required)
    :type id: str
    :param transfer_request: Transfer Request to create (required)
    :type transfer_request: TransferRequestInput
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: return the response payload only,
        without the HTTP status code and headers.
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout for this request; a single number for
        a total timeout, or a (connection, read) tuple.
    :param _request_auth: overrides the spec's auth_settings for this
        single request.
    :type _request_auth: dict, optional
    :return: Returns the result object. If the method is called
        asynchronously, returns the request thread.
    :rtype: tuple(TransferRequest, status_code(int), headers(HTTPHeaderDict))
    """
    # Snapshot the call arguments before any new locals are created.
    local_var_params = locals()

    # Every keyword the caller is allowed to supply.
    all_params = [
        'id',
        'transfer_request',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    ]

    # Reject unknown keywords, then fold the known ones into the map.
    extra_kwargs = local_var_params.pop('kwargs')
    for key, val in six.iteritems(extra_kwargs):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_transfer_request" % key
            )
        local_var_params[key] = val

    # Client-side validation of required parameters.
    if self.api_client.client_side_validation and local_var_params.get('id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `create_transfer_request`")  # noqa: E501
    if self.api_client.client_side_validation and local_var_params.get('transfer_request') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `transfer_request` when calling `create_transfer_request`")  # noqa: E501

    collection_formats = {}
    path_params = {'id': local_var_params['id']}
    query_params = []
    form_params = []
    local_var_files = {}

    # The transfer request payload travels as the request body.
    body_params = local_var_params['transfer_request']

    # Negotiate `Accept` and `Content-Type` headers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    # Authentication scheme declared for this endpoint.
    auth_settings = ['x_auth_token']

    # Status-code -> model mapping used for response deserialization.
    response_types_map = {
        201: "TransferRequest",
        401: "Error",
        403: "Error",
        404: "Error",
        422: "Error",
    }

    return self.api_client.call_api(
        '/projects/{id}/transfers', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def create_virtual_network(self, id, virtual_network, **kwargs):  # noqa: E501
    """Create a virtual network.

    Creates a virtual network.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_virtual_network(id, virtual_network, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param virtual_network: Virtual Network to create (required)
    :type virtual_network: VirtualNetworkCreateInput
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout for this request; a single number for
        a total timeout, or a (connection, read) tuple.
    :return: Returns the result object. If the method is called
        asynchronously, returns the request thread.
    :rtype: VirtualNetwork
    """
    # Delegate to the *_with_http_info variant, forcing it to strip the
    # (status, headers) envelope and hand back only the deserialized body.
    kwargs = dict(kwargs, _return_http_data_only=True)
    return self.create_virtual_network_with_http_info(id, virtual_network, **kwargs)  # noqa: E501
def create_virtual_network_with_http_info(self, id, virtual_network, **kwargs):  # noqa: E501
    """Create a virtual network.

    Creates a virtual network.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_virtual_network_with_http_info(id, virtual_network, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param virtual_network: Virtual Network to create (required)
    :type virtual_network: VirtualNetworkCreateInput
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: return the response payload only,
        without the HTTP status code and headers.
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout for this request; a single number for
        a total timeout, or a (connection, read) tuple.
    :param _request_auth: overrides the spec's auth_settings for this
        single request.
    :type _request_auth: dict, optional
    :return: Returns the result object. If the method is called
        asynchronously, returns the request thread.
    :rtype: tuple(VirtualNetwork, status_code(int), headers(HTTPHeaderDict))
    """
    # Snapshot the call arguments before any new locals are created.
    local_var_params = locals()

    # Every keyword the caller is allowed to supply.
    all_params = [
        'id',
        'virtual_network',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    ]

    # Reject unknown keywords, then fold the known ones into the map.
    extra_kwargs = local_var_params.pop('kwargs')
    for key, val in six.iteritems(extra_kwargs):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_virtual_network" % key
            )
        local_var_params[key] = val

    # Client-side validation of required parameters.
    if self.api_client.client_side_validation and local_var_params.get('id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `create_virtual_network`")  # noqa: E501
    if self.api_client.client_side_validation and local_var_params.get('virtual_network') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `virtual_network` when calling `create_virtual_network`")  # noqa: E501

    collection_formats = {}
    path_params = {'id': local_var_params['id']}
    query_params = []
    form_params = []
    local_var_files = {}

    # The virtual network payload travels as the request body.
    body_params = local_var_params['virtual_network']

    # Negotiate `Accept` and `Content-Type` headers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    # Authentication scheme declared for this endpoint.
    auth_settings = ['x_auth_token']

    # Status-code -> model mapping used for response deserialization.
    response_types_map = {
        201: "VirtualNetwork",
        401: "Error",
        403: "Error",
        404: "Error",
        422: "Error",
    }

    return self.api_client.call_api(
        '/projects/{id}/virtual-networks', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def delete_project(self, id, **kwargs):  # noqa: E501
    """Delete the project.

    Deletes the project.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.delete_project(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout for this request; a single number for
        a total timeout, or a (connection, read) tuple.
    :return: Returns the result object. If the method is called
        asynchronously, returns the request thread.
    :rtype: None
    """
    # Delegate to the *_with_http_info variant, forcing it to strip the
    # (status, headers) envelope and hand back only the deserialized body.
    kwargs = dict(kwargs, _return_http_data_only=True)
    return self.delete_project_with_http_info(id, **kwargs)  # noqa: E501
def delete_project_with_http_info(self, id, **kwargs):  # noqa: E501
    """Delete the project.

    Deletes the project.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.delete_project_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: return the response payload only,
        without the HTTP status code and headers.
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout for this request; a single number for
        a total timeout, or a (connection, read) tuple.
    :param _request_auth: overrides the spec's auth_settings for this
        single request.
    :type _request_auth: dict, optional
    :return: Returns the result object. If the method is called
        asynchronously, returns the request thread.
    :rtype: None
    """
    # Snapshot the call arguments before any new locals are created.
    local_var_params = locals()

    # Every keyword the caller is allowed to supply.
    all_params = [
        'id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    ]

    # Reject unknown keywords, then fold the known ones into the map.
    extra_kwargs = local_var_params.pop('kwargs')
    for key, val in six.iteritems(extra_kwargs):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_project" % key
            )
        local_var_params[key] = val

    # Client-side validation of the required parameter.
    if self.api_client.client_side_validation and local_var_params.get('id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `delete_project`")  # noqa: E501

    collection_formats = {}
    path_params = {'id': local_var_params['id']}
    query_params = []
    form_params = []
    local_var_files = {}

    # DELETE carries no request body.
    body_params = None

    # Only an `Accept` header is negotiated; there is no payload.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }

    # Authentication scheme declared for this endpoint.
    auth_settings = ['x_auth_token']

    # No response body is expected on success.
    response_types_map = {}

    return self.api_client.call_api(
        '/projects/{id}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def find_batches_by_project(self, id, **kwargs):  # noqa: E501
    """Retrieve all batches by project.

    Returns all batches for the given project.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.find_batches_by_project(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will
        return their full attributes. Attribute names can be dotted (up to
        3 levels) to included deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will
        return only the href attribute. Attribute names can be dotted (up
        to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout for this request; a single number for
        a total timeout, or a (connection, read) tuple.
    :return: Returns the result object. If the method is called
        asynchronously, returns the request thread.
    :rtype: BatchesList
    """
    # Delegate to the *_with_http_info variant, forcing it to strip the
    # (status, headers) envelope and hand back only the deserialized body.
    kwargs = dict(kwargs, _return_http_data_only=True)
    return self.find_batches_by_project_with_http_info(id, **kwargs)  # noqa: E501
def find_batches_by_project_with_http_info(self, id, **kwargs):  # noqa: E501
    """Retrieve all batches by project.

    Returns all batches for the given project.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.find_batches_by_project_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will
        return their full attributes. Attribute names can be dotted (up to
        3 levels) to included deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will
        return only the href attribute. Attribute names can be dotted (up
        to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: return the response payload only,
        without the HTTP status code and headers.
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout for this request; a single number for
        a total timeout, or a (connection, read) tuple.
    :param _request_auth: overrides the spec's auth_settings for this
        single request.
    :type _request_auth: dict, optional
    :return: Returns the result object. If the method is called
        asynchronously, returns the request thread.
    :rtype: tuple(BatchesList, status_code(int), headers(HTTPHeaderDict))
    """
    # Snapshot the call arguments before any new locals are created.
    local_var_params = locals()

    # Every keyword the caller is allowed to supply.
    all_params = [
        'id',
        'include',
        'exclude',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    ]

    # Reject unknown keywords, then fold the known ones into the map.
    extra_kwargs = local_var_params.pop('kwargs')
    for key, val in six.iteritems(extra_kwargs):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_batches_by_project" % key
            )
        local_var_params[key] = val

    # Client-side validation of the required parameter.
    if self.api_client.client_side_validation and local_var_params.get('id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `find_batches_by_project`")  # noqa: E501

    collection_formats = {}
    path_params = {'id': local_var_params['id']}

    # Optional include/exclude filters are multi-valued, csv-joined
    # query parameters.
    query_params = []
    for opt in ('include', 'exclude'):
        if local_var_params.get(opt) is not None:
            query_params.append((opt, local_var_params[opt]))
            collection_formats[opt] = 'csv'

    form_params = []
    local_var_files = {}

    # GET carries no request body.
    body_params = None

    # Only an `Accept` header is negotiated; there is no payload.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }

    # Authentication scheme declared for this endpoint.
    auth_settings = ['x_auth_token']

    # Status-code -> model mapping used for response deserialization.
    response_types_map = {
        200: "BatchesList",
        401: "Error",
        403: "Error",
        404: "Error",
    }

    return self.api_client.call_api(
        '/projects/{id}/batches', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def find_bgp_config_by_project(self, id, **kwargs):  # noqa: E501
    """Retrieve a bgp config.

    Returns a bgp config.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.find_bgp_config_by_project(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will
        return their full attributes. Attribute names can be dotted (up to
        3 levels) to included deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will
        return only the href attribute. Attribute names can be dotted (up
        to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout for this request; a single number for
        a total timeout, or a (connection, read) tuple.
    :return: Returns the result object. If the method is called
        asynchronously, returns the request thread.
    :rtype: BgpConfig
    """
    # Delegate to the *_with_http_info variant, forcing it to strip the
    # (status, headers) envelope and hand back only the deserialized body.
    kwargs = dict(kwargs, _return_http_data_only=True)
    return self.find_bgp_config_by_project_with_http_info(id, **kwargs)  # noqa: E501
def find_bgp_config_by_project_with_http_info(self, id, **kwargs):  # noqa: E501
    """Retrieve a bgp config  # noqa: E501

    Returns a bgp config  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.find_bgp_config_by_project_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to include deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: if True, return the response data without
                                   the HTTP status code and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(BgpConfig, status_code(int), headers(HTTPHeaderDict))
    """
    # Snapshot the declared arguments (self, id, kwargs) before any other
    # local variable is created; optional parameters arrive via 'kwargs'
    # and are merged into this dict below.
    local_var_params = locals()

    all_params = [
        'id',
        'include',
        'exclude'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject keyword arguments this endpoint does not understand, then
    # flatten the accepted ones into local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_bgp_config_by_project" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                   local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `find_bgp_config_by_project`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []
    # 'csv' collection format: list values are serialized comma-separated.
    if 'include' in local_var_params and local_var_params['include'] is not None:  # noqa: E501
        query_params.append(('include', local_var_params['include']))  # noqa: E501
        collection_formats['include'] = 'csv'  # noqa: E501
    if 'exclude' in local_var_params and local_var_params['exclude'] is not None:  # noqa: E501
        query_params.append(('exclude', local_var_params['exclude']))  # noqa: E501
        collection_formats['exclude'] = 'csv'  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['x_auth_token']  # noqa: E501

    # Maps HTTP status code -> model name used to deserialize the body.
    response_types_map = {
        200: "BgpConfig",
        401: "Error",
        403: "Error",
        404: "Error",
    }

    return self.api_client.call_api(
        '/projects/{id}/bgp-config', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def find_device_ssh_keys(self, id, **kwargs):  # noqa: E501
    """Retrieve a device's ssh keys  # noqa: E501

    Returns a collection of the device's ssh keys.  # noqa: E501

    By default this call blocks until the response arrives; pass
    ``async_req=True`` to get a request thread back instead:

    >>> thread = api.find_device_ssh_keys(id, async_req=True)
    >>> result = thread.get()

    :param id: Device UUID (required)
    :type id: str
    :param search_string: Search by key, label, or fingerprint
    :type search_string: str
    :param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to include deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: The deserialized SSHKeyList, or the request thread when
             called asynchronously.
    :rtype: SSHKeyList
    """
    # Convenience wrapper: force the *_with_http_info variant to hand back
    # only the deserialized body (no status code or headers).
    params = {**kwargs, '_return_http_data_only': True}
    return self.find_device_ssh_keys_with_http_info(id, **params)  # noqa: E501
def find_device_ssh_keys_with_http_info(self, id, **kwargs):  # noqa: E501
    """Retrieve a device's ssh keys  # noqa: E501

    Returns a collection of the device's ssh keys.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.find_device_ssh_keys_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param id: Device UUID (required)
    :type id: str
    :param search_string: Search by key, label, or fingerprint
    :type search_string: str
    :param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to include deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: if True, return the response data without
                                   the HTTP status code and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(SSHKeyList, status_code(int), headers(HTTPHeaderDict))
    """
    # Snapshot the declared arguments (self, id, kwargs) before any other
    # local variable is created; optional parameters arrive via 'kwargs'
    # and are merged into this dict below.
    local_var_params = locals()

    all_params = [
        'id',
        'search_string',
        'include',
        'exclude'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject keyword arguments this endpoint does not understand, then
    # flatten the accepted ones into local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_device_ssh_keys" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                   local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `find_device_ssh_keys`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []
    # NOTE(review): the query key 'Search string' (with a space) comes from
    # the generated spec — presumably intentional; verify against the API
    # definition if the search filter is ignored server-side.
    if 'search_string' in local_var_params and local_var_params['search_string'] is not None:  # noqa: E501
        query_params.append(('Search string', local_var_params['search_string']))  # noqa: E501
    # 'csv' collection format: list values are serialized comma-separated.
    if 'include' in local_var_params and local_var_params['include'] is not None:  # noqa: E501
        query_params.append(('include', local_var_params['include']))  # noqa: E501
        collection_formats['include'] = 'csv'  # noqa: E501
    if 'exclude' in local_var_params and local_var_params['exclude'] is not None:  # noqa: E501
        query_params.append(('exclude', local_var_params['exclude']))  # noqa: E501
        collection_formats['exclude'] = 'csv'  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['x_auth_token']  # noqa: E501

    # Maps HTTP status code -> model name used to deserialize the body.
    response_types_map = {
        200: "SSHKeyList",
        401: "Error",
    }

    return self.api_client.call_api(
        '/devices/{id}/ssh-keys', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def find_facilities_by_project(self, id, **kwargs):  # noqa: E501
    """Retrieve all facilities visible by the project  # noqa: E501

    Returns a listing of available datacenters for the given project  # noqa: E501

    By default this call blocks until the response arrives; pass
    ``async_req=True`` to get a request thread back instead:

    >>> thread = api.find_facilities_by_project(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to include deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: The deserialized FacilityList, or the request thread when
             called asynchronously.
    :rtype: FacilityList
    """
    # Convenience wrapper: force the *_with_http_info variant to hand back
    # only the deserialized body (no status code or headers).
    params = {**kwargs, '_return_http_data_only': True}
    return self.find_facilities_by_project_with_http_info(id, **params)  # noqa: E501
def find_facilities_by_project_with_http_info(self, id, **kwargs):  # noqa: E501
    """Retrieve all facilities visible by the project  # noqa: E501

    Returns a listing of available datacenters for the given project  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.find_facilities_by_project_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to include deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: if True, return the response data without
                                   the HTTP status code and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(FacilityList, status_code(int), headers(HTTPHeaderDict))
    """
    # Snapshot the declared arguments (self, id, kwargs) before any other
    # local variable is created; optional parameters arrive via 'kwargs'
    # and are merged into this dict below.
    local_var_params = locals()

    all_params = [
        'id',
        'include',
        'exclude'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject keyword arguments this endpoint does not understand, then
    # flatten the accepted ones into local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_facilities_by_project" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                   local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `find_facilities_by_project`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []
    # 'csv' collection format: list values are serialized comma-separated.
    if 'include' in local_var_params and local_var_params['include'] is not None:  # noqa: E501
        query_params.append(('include', local_var_params['include']))  # noqa: E501
        collection_formats['include'] = 'csv'  # noqa: E501
    if 'exclude' in local_var_params and local_var_params['exclude'] is not None:  # noqa: E501
        query_params.append(('exclude', local_var_params['exclude']))  # noqa: E501
        collection_formats['exclude'] = 'csv'  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['x_auth_token']  # noqa: E501

    # Maps HTTP status code -> model name used to deserialize the body.
    response_types_map = {
        200: "FacilityList",
        401: "Error",
        403: "Error",
        404: "Error",
    }

    return self.api_client.call_api(
        '/projects/{id}/facilities', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def find_global_bgp_ranges(self, id, **kwargs):  # noqa: E501
    """Retrieve all global bgp ranges  # noqa: E501

    Returns all global bgp ranges for a project  # noqa: E501

    By default this call blocks until the response arrives; pass
    ``async_req=True`` to get a request thread back instead:

    >>> thread = api.find_global_bgp_ranges(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: The deserialized GlobalBgpRangeList, or the request thread
             when called asynchronously.
    :rtype: GlobalBgpRangeList
    """
    # Convenience wrapper: force the *_with_http_info variant to hand back
    # only the deserialized body (no status code or headers).
    params = {**kwargs, '_return_http_data_only': True}
    return self.find_global_bgp_ranges_with_http_info(id, **params)  # noqa: E501
def find_global_bgp_ranges_with_http_info(self, id, **kwargs):  # noqa: E501
    """Retrieve all global bgp ranges  # noqa: E501

    Returns all global bgp ranges for a project  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.find_global_bgp_ranges_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: if True, return the response data without
                                   the HTTP status code and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(GlobalBgpRangeList, status_code(int), headers(HTTPHeaderDict))
    """
    # Snapshot the declared arguments (self, id, kwargs) before any other
    # local variable is created; optional parameters arrive via 'kwargs'
    # and are merged into this dict below.
    local_var_params = locals()

    all_params = [
        'id'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject keyword arguments this endpoint does not understand, then
    # flatten the accepted ones into local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_global_bgp_ranges" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                   local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `find_global_bgp_ranges`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    # This endpoint takes no query parameters.
    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['x_auth_token']  # noqa: E501

    # Maps HTTP status code -> model name used to deserialize the body.
    response_types_map = {
        200: "GlobalBgpRangeList",
        401: "Error",
        403: "Error",
        404: "Error",
    }

    return self.api_client.call_api(
        '/projects/{id}/global-bgp-ranges', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def find_ip_reservation_customdata(self, project_id, id, **kwargs):  # noqa: E501
    """Retrieve the custom metadata of an IP Reservation  # noqa: E501

    Provides the custom metadata stored for this IP Reservation in json format  # noqa: E501

    By default this call blocks until the response arrives; pass
    ``async_req=True`` to get a request thread back instead:

    >>> thread = api.find_ip_reservation_customdata(project_id, id, async_req=True)
    >>> result = thread.get()

    :param project_id: Project UUID (required)
    :type project_id: str
    :param id: Ip Reservation UUID (required)
    :type id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: None, or the request thread when called asynchronously.
    :rtype: None
    """
    # Convenience wrapper: force the *_with_http_info variant to hand back
    # only the deserialized body (no status code or headers).
    params = {**kwargs, '_return_http_data_only': True}
    return self.find_ip_reservation_customdata_with_http_info(project_id, id, **params)  # noqa: E501
def find_ip_reservation_customdata_with_http_info(self, project_id, id, **kwargs):  # noqa: E501
    """Retrieve the custom metadata of an IP Reservation  # noqa: E501

    Provides the custom metadata stored for this IP Reservation in json format  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.find_ip_reservation_customdata_with_http_info(project_id, id, async_req=True)
    >>> result = thread.get()

    :param project_id: Project UUID (required)
    :type project_id: str
    :param id: Ip Reservation UUID (required)
    :type id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: if True, return the response data without
                                   the HTTP status code and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: None
    """
    # Snapshot the declared arguments (self, project_id, id, kwargs) before
    # any other local variable is created; optional parameters arrive via
    # 'kwargs' and are merged into this dict below.
    local_var_params = locals()

    all_params = [
        'project_id',
        'id'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject keyword arguments this endpoint does not understand, then
    # flatten the accepted ones into local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_ip_reservation_customdata" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'project_id' is set
    if self.api_client.client_side_validation and ('project_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['project_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `project_id` when calling `find_ip_reservation_customdata`")  # noqa: E501
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                   local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `find_ip_reservation_customdata`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'project_id' in local_var_params:
        path_params['project_id'] = local_var_params['project_id']  # noqa: E501
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['x_auth_token']  # noqa: E501

    # No response models declared for this endpoint (see :rtype: None above).
    response_types_map = {}

    return self.api_client.call_api(
        '/projects/{project_id}/ips/{id}/customdata', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def find_ip_reservations(self, id, **kwargs):  # noqa: E501
    """Retrieve all ip reservations  # noqa: E501

    Provides a list of IP resevations for a single project.  # noqa: E501

    By default this call blocks until the response arrives; pass
    ``async_req=True`` to get a request thread back instead:

    >>> thread = api.find_ip_reservations(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to include deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: The deserialized IPReservationList, or the request thread
             when called asynchronously.
    :rtype: IPReservationList
    """
    # Convenience wrapper: force the *_with_http_info variant to hand back
    # only the deserialized body (no status code or headers).
    params = {**kwargs, '_return_http_data_only': True}
    return self.find_ip_reservations_with_http_info(id, **params)  # noqa: E501
def find_ip_reservations_with_http_info(self, id, **kwargs):  # noqa: E501
    """Retrieve all ip reservations  # noqa: E501

    Provides a list of IP resevations for a single project.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.find_ip_reservations_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to include deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: if True, return the response data without
                                   the HTTP status code and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(IPReservationList, status_code(int), headers(HTTPHeaderDict))
    """
    # Snapshot the declared arguments (self, id, kwargs) before any other
    # local variable is created; optional parameters arrive via 'kwargs'
    # and are merged into this dict below.
    local_var_params = locals()

    all_params = [
        'id',
        'include',
        'exclude'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject keyword arguments this endpoint does not understand, then
    # flatten the accepted ones into local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_ip_reservations" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                   local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `find_ip_reservations`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []
    # 'csv' collection format: list values are serialized comma-separated.
    if 'include' in local_var_params and local_var_params['include'] is not None:  # noqa: E501
        query_params.append(('include', local_var_params['include']))  # noqa: E501
        collection_formats['include'] = 'csv'  # noqa: E501
    if 'exclude' in local_var_params and local_var_params['exclude'] is not None:  # noqa: E501
        query_params.append(('exclude', local_var_params['exclude']))  # noqa: E501
        collection_formats['exclude'] = 'csv'  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['x_auth_token']  # noqa: E501

    # Maps HTTP status code -> model name used to deserialize the body.
    response_types_map = {
        200: "IPReservationList",
        401: "Error",
        403: "Error",
        404: "Error",
    }

    return self.api_client.call_api(
        '/projects/{id}/ips', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def find_organization_projects(self, id, **kwargs):  # noqa: E501
    """Retrieve all projects of an organization  # noqa: E501

    Returns a collection of projects that belong to the organization.  # noqa: E501

    By default this call blocks until the response arrives; pass
    ``async_req=True`` to get a request thread back instead:

    >>> thread = api.find_organization_projects(id, async_req=True)
    >>> result = thread.get()

    :param id: Organization UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to include deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param page: Page to return
    :type page: int
    :param per_page: Items returned per page
    :type per_page: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: The deserialized ProjectList, or the request thread when
             called asynchronously.
    :rtype: ProjectList
    """
    # Convenience wrapper: force the *_with_http_info variant to hand back
    # only the deserialized body (no status code or headers).
    params = {**kwargs, '_return_http_data_only': True}
    return self.find_organization_projects_with_http_info(id, **params)  # noqa: E501
def find_organization_projects_with_http_info(self, id, **kwargs):  # noqa: E501
    """Retrieve all projects of an organization  # noqa: E501

    Returns a collection of projects that belong to the organization.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.find_organization_projects_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param id: Organization UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to include deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param page: Page to return
    :type page: int
    :param per_page: Items returned per page
    :type per_page: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
        and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one number
        provided, it will be total request timeout. It can also be a pair
        (tuple) of (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
        request; this effectively ignores the authentication in the spec for
        a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object. If the method is called
        asynchronously, returns the request thread.
    :rtype: tuple(ProjectList, status_code(int), headers(HTTPHeaderDict))
    """
    # Snapshot named arguments before any helper locals are created.
    params = locals()
    # Every argument this endpoint accepts, including client-control kwargs.
    accepted = [
        'id', 'include', 'exclude', 'page', 'per_page',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout', '_request_auth',
    ]
    # dict.items() replaces the py2-era six.iteritems() — same behavior on
    # Python 3, one less third-party dependency in this code path.
    for name, value in params['kwargs'].items():
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_organization_projects" % name
            )
        params[name] = value
    del params['kwargs']

    if self.api_client.client_side_validation:
        # 'id' is a required path parameter and may not be None.
        if params.get('id') is None:
            raise ApiValueError("Missing the required parameter `id` when calling `find_organization_projects`")  # noqa: E501
        if 'page' in params:
            if params['page'] > 100000:
                raise ApiValueError("Invalid value for parameter `page` when calling `find_organization_projects`, must be a value less than or equal to `100000`")  # noqa: E501
            if params['page'] < 1:
                raise ApiValueError("Invalid value for parameter `page` when calling `find_organization_projects`, must be a value greater than or equal to `1`")  # noqa: E501
        if 'per_page' in params:
            if params['per_page'] > 1000:
                raise ApiValueError("Invalid value for parameter `per_page` when calling `find_organization_projects`, must be a value less than or equal to `1000`")  # noqa: E501
            if params['per_page'] < 1:
                raise ApiValueError("Invalid value for parameter `per_page` when calling `find_organization_projects`, must be a value greater than or equal to `1`")  # noqa: E501

    path_params = {'id': params['id']}

    # List-valued query params are serialized comma-separated ('csv').
    collection_formats = {}
    query_params = []
    for list_param in ('include', 'exclude'):
        if params.get(list_param) is not None:
            query_params.append((list_param, params[list_param]))
            collection_formats[list_param] = 'csv'
    for scalar_param in ('page', 'per_page'):
        if params.get(scalar_param) is not None:
            query_params.append((scalar_param, params[scalar_param]))

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }

    # Map HTTP status codes to response model class names.
    response_types_map = {
        200: "ProjectList",
        401: "Error",
    }

    return self.api_client.call_api(
        '/organizations/{id}/projects', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,           # GET: no request body
        post_params=[],
        files={},
        response_types_map=response_types_map,
        auth_settings=['x_auth_token'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=params.get('_request_auth'))
def find_plans_by_project(self, id, **kwargs):  # noqa: E501
    """Retrieve all plans visible by the project  # noqa: E501

    Returns a listing of available plans for the given project.
    Data-only convenience wrapper: delegates to
    find_plans_by_project_with_http_info and returns just the deserialized
    body (no status code or headers).

    Synchronous by default; pass async_req=True to get back a thread whose
    .get() yields the result.
    >>> thread = api.find_plans_by_project(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to include deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object is
        returned without reading/decoding response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout for this request: a single number for a
        total timeout, or a (connection, read) tuple.
    :return: PlanList, or the request thread when called asynchronously.
    :rtype: PlanList
    """
    # Force data-only so callers receive the deserialized model directly.
    return self.find_plans_by_project_with_http_info(
        id, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def find_plans_by_project_with_http_info(self, id, **kwargs):  # noqa: E501
    """Retrieve all plans visible by the project  # noqa: E501

    Returns a listing of available plans for the given project.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.find_plans_by_project_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to include deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
        and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one number
        provided, it will be total request timeout. It can also be a pair
        (tuple) of (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
        request; this effectively ignores the authentication in the spec for
        a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object. If the method is called
        asynchronously, returns the request thread.
    :rtype: tuple(PlanList, status_code(int), headers(HTTPHeaderDict))
    """
    # Snapshot named arguments before any helper locals are created.
    params = locals()
    # Every argument this endpoint accepts, including client-control kwargs.
    accepted = [
        'id', 'include', 'exclude',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout', '_request_auth',
    ]
    # dict.items() replaces the py2-era six.iteritems() — same behavior on
    # Python 3, one less third-party dependency in this code path.
    for name, value in params['kwargs'].items():
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_plans_by_project" % name
            )
        params[name] = value
    del params['kwargs']

    # 'id' is a required path parameter and may not be None.
    if self.api_client.client_side_validation and params.get('id') is None:
        raise ApiValueError("Missing the required parameter `id` when calling `find_plans_by_project`")  # noqa: E501

    path_params = {'id': params['id']}

    # List-valued query params are serialized comma-separated ('csv').
    collection_formats = {}
    query_params = []
    for list_param in ('include', 'exclude'):
        if params.get(list_param) is not None:
            query_params.append((list_param, params[list_param]))
            collection_formats[list_param] = 'csv'

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }

    # Map HTTP status codes to response model class names.
    response_types_map = {
        200: "PlanList",
        401: "Error",
        403: "Error",
        404: "Error",
    }

    return self.api_client.call_api(
        '/projects/{id}/plans', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,           # GET: no request body
        post_params=[],
        files={},
        response_types_map=response_types_map,
        auth_settings=['x_auth_token'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=params.get('_request_auth'))
def find_project_bgp_sessions(self, id, **kwargs):  # noqa: E501
    """Retrieve all BGP sessions for project  # noqa: E501

    Provides a listing of available BGP sessions for the project.
    Data-only convenience wrapper: delegates to
    find_project_bgp_sessions_with_http_info and returns just the
    deserialized body (no status code or headers).

    Synchronous by default; pass async_req=True to get back a thread whose
    .get() yields the result.
    >>> thread = api.find_project_bgp_sessions(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object is
        returned without reading/decoding response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout for this request: a single number for a
        total timeout, or a (connection, read) tuple.
    :return: BgpSessionList, or the request thread when called asynchronously.
    :rtype: BgpSessionList
    """
    # Force data-only so callers receive the deserialized model directly.
    return self.find_project_bgp_sessions_with_http_info(
        id, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def find_project_bgp_sessions_with_http_info(self, id, **kwargs):  # noqa: E501
    """Retrieve all BGP sessions for project  # noqa: E501

    Provides a listing of available BGP sessions for the project.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.find_project_bgp_sessions_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
        and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one number
        provided, it will be total request timeout. It can also be a pair
        (tuple) of (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
        request; this effectively ignores the authentication in the spec for
        a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object. If the method is called
        asynchronously, returns the request thread.
    :rtype: tuple(BgpSessionList, status_code(int), headers(HTTPHeaderDict))
    """
    # Snapshot named arguments before any helper locals are created.
    params = locals()
    # Every argument this endpoint accepts, including client-control kwargs.
    accepted = [
        'id',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout', '_request_auth',
    ]
    # dict.items() replaces the py2-era six.iteritems() — same behavior on
    # Python 3, one less third-party dependency in this code path.
    for name, value in params['kwargs'].items():
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_project_bgp_sessions" % name
            )
        params[name] = value
    del params['kwargs']

    # 'id' is a required path parameter and may not be None.
    if self.api_client.client_side_validation and params.get('id') is None:
        raise ApiValueError("Missing the required parameter `id` when calling `find_project_bgp_sessions`")  # noqa: E501

    path_params = {'id': params['id']}

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }

    # Map HTTP status codes to response model class names.
    response_types_map = {
        200: "BgpSessionList",
        401: "Error",
        404: "Error",
    }

    return self.api_client.call_api(
        '/projects/{id}/bgp/sessions', 'GET',
        path_params,
        [],                  # no query parameters for this endpoint
        header_params,
        body=None,           # GET: no request body
        post_params=[],
        files={},
        response_types_map=response_types_map,
        auth_settings=['x_auth_token'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        _request_auth=params.get('_request_auth'))
def find_project_by_id(self, id, **kwargs):  # noqa: E501
    """Retrieve a project  # noqa: E501

    Returns a single project if the user has access.
    Data-only convenience wrapper: delegates to
    find_project_by_id_with_http_info and returns just the deserialized
    body (no status code or headers).

    Synchronous by default; pass async_req=True to get back a thread whose
    .get() yields the result.
    >>> thread = api.find_project_by_id(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to include deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object is
        returned without reading/decoding response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout for this request: a single number for a
        total timeout, or a (connection, read) tuple.
    :return: Project, or the request thread when called asynchronously.
    :rtype: Project
    """
    # Force data-only so callers receive the deserialized model directly.
    return self.find_project_by_id_with_http_info(
        id, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def find_project_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
    """Retrieve a project  # noqa: E501

    Returns a single project if the user has access.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.find_project_by_id_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to include deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
        and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one number
        provided, it will be total request timeout. It can also be a pair
        (tuple) of (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
        request; this effectively ignores the authentication in the spec for
        a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object. If the method is called
        asynchronously, returns the request thread.
    :rtype: tuple(Project, status_code(int), headers(HTTPHeaderDict))
    """
    # Snapshot named arguments before any helper locals are created.
    params = locals()
    # Every argument this endpoint accepts, including client-control kwargs.
    accepted = [
        'id', 'include', 'exclude',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout', '_request_auth',
    ]
    # dict.items() replaces the py2-era six.iteritems() — same behavior on
    # Python 3, one less third-party dependency in this code path.
    for name, value in params['kwargs'].items():
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_project_by_id" % name
            )
        params[name] = value
    del params['kwargs']

    # 'id' is a required path parameter and may not be None.
    if self.api_client.client_side_validation and params.get('id') is None:
        raise ApiValueError("Missing the required parameter `id` when calling `find_project_by_id`")  # noqa: E501

    path_params = {'id': params['id']}

    # List-valued query params are serialized comma-separated ('csv').
    collection_formats = {}
    query_params = []
    for list_param in ('include', 'exclude'):
        if params.get(list_param) is not None:
            query_params.append((list_param, params[list_param]))
            collection_formats[list_param] = 'csv'

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }

    # Map HTTP status codes to response model class names.
    response_types_map = {
        200: "Project",
        401: "Error",
        403: "Error",
        404: "Error",
    }

    return self.api_client.call_api(
        '/projects/{id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,           # GET: no request body
        post_params=[],
        files={},
        response_types_map=response_types_map,
        auth_settings=['x_auth_token'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=params.get('_request_auth'))
def find_project_customdata(self, id, **kwargs):  # noqa: E501
    """Retrieve the custom metadata of a project  # noqa: E501

    Provides the custom metadata stored for this project in json format.
    Data-only convenience wrapper: delegates to
    find_project_customdata_with_http_info and returns just the response
    data (no status code or headers).

    Synchronous by default; pass async_req=True to get back a thread whose
    .get() yields the result.
    >>> thread = api.find_project_customdata(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object is
        returned without reading/decoding response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout for this request: a single number for a
        total timeout, or a (connection, read) tuple.
    :return: None, or the request thread when called asynchronously.
    :rtype: None
    """
    # Force data-only so callers receive the response data directly.
    return self.find_project_customdata_with_http_info(
        id, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def find_project_customdata_with_http_info(self, id, **kwargs):  # noqa: E501
    """Retrieve the custom metadata of a project  # noqa: E501

    Provides the custom metadata stored for this project in json format.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.find_project_customdata_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
        and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one number
        provided, it will be total request timeout. It can also be a pair
        (tuple) of (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
        request; this effectively ignores the authentication in the spec for
        a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object. If the method is called
        asynchronously, returns the request thread.
    :rtype: None
    """
    # Snapshot named arguments before any helper locals are created.
    params = locals()
    # Every argument this endpoint accepts, including client-control kwargs.
    accepted = [
        'id',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout', '_request_auth',
    ]
    # dict.items() replaces the py2-era six.iteritems() — same behavior on
    # Python 3, one less third-party dependency in this code path.
    for name, value in params['kwargs'].items():
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_project_customdata" % name
            )
        params[name] = value
    del params['kwargs']

    # 'id' is a required path parameter and may not be None.
    if self.api_client.client_side_validation and params.get('id') is None:
        raise ApiValueError("Missing the required parameter `id` when calling `find_project_customdata`")  # noqa: E501

    path_params = {'id': params['id']}

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/projects/{id}/customdata', 'GET',
        path_params,
        [],                  # no query parameters for this endpoint
        header_params,
        body=None,           # GET: no request body
        post_params=[],
        files={},
        response_types_map={},  # no typed response declared in the spec
        auth_settings=['x_auth_token'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        _request_auth=params.get('_request_auth'))
def find_project_devices(self, id, **kwargs):  # noqa: E501
    """Retrieve all devices of a project  # noqa: E501

    Provides a collection of devices for a given project.
    Data-only convenience wrapper: delegates to
    find_project_devices_with_http_info and returns just the deserialized
    body (no status code or headers).

    Synchronous by default; pass async_req=True to get back a thread whose
    .get() yields the result.
    >>> thread = api.find_project_devices(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to include deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param page: Page to return
    :type page: int
    :param per_page: Items returned per page
    :type per_page: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object is
        returned without reading/decoding response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout for this request: a single number for a
        total timeout, or a (connection, read) tuple.
    :return: DeviceList, or the request thread when called asynchronously.
    :rtype: DeviceList
    """
    # Force data-only so callers receive the deserialized model directly.
    return self.find_project_devices_with_http_info(
        id, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def find_project_devices_with_http_info(self, id, **kwargs):  # noqa: E501
    """Retrieve all devices of a project  # noqa: E501

    Provides a collection of devices for a given project.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.find_project_devices_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to include deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param page: Page to return
    :type page: int
    :param per_page: Items returned per page
    :type per_page: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
        and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one number
        provided, it will be total request timeout. It can also be a pair
        (tuple) of (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
        request; this effectively ignores the authentication in the spec for
        a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object. If the method is called
        asynchronously, returns the request thread.
    :rtype: tuple(DeviceList, status_code(int), headers(HTTPHeaderDict))
    """
    # Snapshot named arguments before any helper locals are created.
    params = locals()
    # Every argument this endpoint accepts, including client-control kwargs.
    accepted = [
        'id', 'include', 'exclude', 'page', 'per_page',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout', '_request_auth',
    ]
    # dict.items() replaces the py2-era six.iteritems() — same behavior on
    # Python 3, one less third-party dependency in this code path.
    for name, value in params['kwargs'].items():
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_project_devices" % name
            )
        params[name] = value
    del params['kwargs']

    if self.api_client.client_side_validation:
        # 'id' is a required path parameter and may not be None.
        if params.get('id') is None:
            raise ApiValueError("Missing the required parameter `id` when calling `find_project_devices`")  # noqa: E501
        if 'page' in params:
            if params['page'] > 100000:
                raise ApiValueError("Invalid value for parameter `page` when calling `find_project_devices`, must be a value less than or equal to `100000`")  # noqa: E501
            if params['page'] < 1:
                raise ApiValueError("Invalid value for parameter `page` when calling `find_project_devices`, must be a value greater than or equal to `1`")  # noqa: E501
        if 'per_page' in params:
            if params['per_page'] > 1000:
                raise ApiValueError("Invalid value for parameter `per_page` when calling `find_project_devices`, must be a value less than or equal to `1000`")  # noqa: E501
            if params['per_page'] < 1:
                raise ApiValueError("Invalid value for parameter `per_page` when calling `find_project_devices`, must be a value greater than or equal to `1`")  # noqa: E501

    path_params = {'id': params['id']}

    # List-valued query params are serialized comma-separated ('csv').
    collection_formats = {}
    query_params = []
    for list_param in ('include', 'exclude'):
        if params.get(list_param) is not None:
            query_params.append((list_param, params[list_param]))
            collection_formats[list_param] = 'csv'
    for scalar_param in ('page', 'per_page'):
        if params.get(scalar_param) is not None:
            query_params.append((scalar_param, params[scalar_param]))

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }

    # Map HTTP status codes to response model class names.
    response_types_map = {
        200: "DeviceList",
        401: "Error",
        403: "Error",
        404: "Error",
    }

    return self.api_client.call_api(
        '/projects/{id}/devices', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,           # GET: no request body
        post_params=[],
        files={},
        response_types_map=response_types_map,
        auth_settings=['x_auth_token'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=params.get('_request_auth'))
def find_project_events(self, id, **kwargs):  # noqa: E501
    """Retrieve project's events.  # noqa: E501

    Returns a list of events for a single project. Convenience wrapper
    around :meth:`find_project_events_with_http_info` that returns only
    the deserialized response data (no status code or headers).

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.find_project_events(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will
        return their full attributes. Attribute names can be dotted (up to
        3 levels) to include deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will
        return only the href attribute. Attribute names can be dotted (up
        to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param page: Page to return
    :type page: int
    :param per_page: Items returned per page
    :type per_page: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: EventList
    """
    # Force data-only responses; dict(kwargs, ...) overrides any value the
    # caller supplied for '_return_http_data_only' without mutating kwargs.
    return self.find_project_events_with_http_info(
        id, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def find_project_events_with_http_info(self, id, **kwargs):  # noqa: E501
    """Retrieve project's events.  # noqa: E501

    Returns a list of events for a single project.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.find_project_events_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will
        return their full attributes. Attribute names can be dotted (up to
        3 levels) to include deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will
        return only the href attribute. Attribute names can be dotted (up
        to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param page: Page to return (1..100000)
    :type page: int
    :param per_page: Items returned per page (1..1000)
    :type per_page: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the
                          authentication in the spec for a single request.
    :type _request_auth: dict, optional
    :raises ApiTypeError: if an unexpected keyword argument is supplied
    :raises ApiValueError: if `id` is missing, or `page`/`per_page` are out
        of range (when client-side validation is enabled)
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(EventList, status_code(int), headers(HTTPHeaderDict))
    """
    # Capture locals() first so it holds exactly `self`, `id` and `kwargs`.
    local_var_params = locals()

    all_params = [
        'id',
        'include',
        'exclude',
        'page',
        'per_page'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject unknown keyword arguments, then flatten kwargs into the
    # parameter dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_project_events" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and local_var_params.get('id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `find_project_events`")  # noqa: E501
    # Range-check pagination parameters. The `is not None` guards fix a
    # TypeError that the generated comparisons raised under Python 3 when a
    # caller explicitly passed page=None or per_page=None (None values are
    # simply omitted from the query string below).
    if self.api_client.client_side_validation and local_var_params.get('page') is not None:  # noqa: E501
        if local_var_params['page'] > 100000:
            raise ApiValueError("Invalid value for parameter `page` when calling `find_project_events`, must be a value less than or equal to `100000`")  # noqa: E501
        if local_var_params['page'] < 1:
            raise ApiValueError("Invalid value for parameter `page` when calling `find_project_events`, must be a value greater than or equal to `1`")  # noqa: E501
    if self.api_client.client_side_validation and local_var_params.get('per_page') is not None:  # noqa: E501
        if local_var_params['per_page'] > 1000:
            raise ApiValueError("Invalid value for parameter `per_page` when calling `find_project_events`, must be a value less than or equal to `1000`")  # noqa: E501
        if local_var_params['per_page'] < 1:
            raise ApiValueError("Invalid value for parameter `per_page` when calling `find_project_events`, must be a value greater than or equal to `1`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    # Only parameters that were supplied (and are not None) become query
    # parameters; list-valued ones are serialized as comma-separated values.
    query_params = []
    if local_var_params.get('include') is not None:  # noqa: E501
        query_params.append(('include', local_var_params['include']))  # noqa: E501
        collection_formats['include'] = 'csv'  # noqa: E501
    if local_var_params.get('exclude') is not None:  # noqa: E501
        query_params.append(('exclude', local_var_params['exclude']))  # noqa: E501
        collection_formats['exclude'] = 'csv'  # noqa: E501
    if local_var_params.get('page') is not None:  # noqa: E501
        query_params.append(('page', local_var_params['page']))  # noqa: E501
    if local_var_params.get('per_page') is not None:  # noqa: E501
        query_params.append(('per_page', local_var_params['per_page']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['x_auth_token']  # noqa: E501

    # Maps HTTP status codes to the model used to deserialize the body.
    response_types_map = {
        200: "EventList",
        401: "Error",
        403: "Error",
        404: "Error",
    }

    return self.api_client.call_api(
        '/projects/{id}/events', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def find_project_hardware_reservations(self, id, **kwargs):  # noqa: E501
    """Retrieve all hardware reservations for a given project.  # noqa: E501

    Provides a collection of hardware reservations for a given project.
    Convenience wrapper around
    :meth:`find_project_hardware_reservations_with_http_info` that returns
    only the deserialized response data (no status code or headers).

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.find_project_hardware_reservations(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will
        return their full attributes. Attribute names can be dotted (up to
        3 levels) to include deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will
        return only the href attribute. Attribute names can be dotted (up
        to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param page: Page to return
    :type page: int
    :param per_page: Items returned per page
    :type per_page: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: HardwareReservationList
    """
    # Force data-only responses; dict(kwargs, ...) overrides any value the
    # caller supplied for '_return_http_data_only' without mutating kwargs.
    return self.find_project_hardware_reservations_with_http_info(
        id, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def find_project_hardware_reservations_with_http_info(self, id, **kwargs):  # noqa: E501
    """Retrieve all hardware reservations for a given project.  # noqa: E501

    Provides a collection of hardware reservations for a given project.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.find_project_hardware_reservations_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will
        return their full attributes. Attribute names can be dotted (up to
        3 levels) to include deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will
        return only the href attribute. Attribute names can be dotted (up
        to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param page: Page to return (1..100000)
    :type page: int
    :param per_page: Items returned per page (1..1000)
    :type per_page: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the
                          authentication in the spec for a single request.
    :type _request_auth: dict, optional
    :raises ApiTypeError: if an unexpected keyword argument is supplied
    :raises ApiValueError: if `id` is missing, or `page`/`per_page` are out
        of range (when client-side validation is enabled)
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(HardwareReservationList, status_code(int), headers(HTTPHeaderDict))
    """
    # Capture locals() first so it holds exactly `self`, `id` and `kwargs`.
    local_var_params = locals()

    all_params = [
        'id',
        'include',
        'exclude',
        'page',
        'per_page'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject unknown keyword arguments, then flatten kwargs into the
    # parameter dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_project_hardware_reservations" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and local_var_params.get('id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `find_project_hardware_reservations`")  # noqa: E501
    # Range-check pagination parameters. The `is not None` guards fix a
    # TypeError that the generated comparisons raised under Python 3 when a
    # caller explicitly passed page=None or per_page=None (None values are
    # simply omitted from the query string below).
    if self.api_client.client_side_validation and local_var_params.get('page') is not None:  # noqa: E501
        if local_var_params['page'] > 100000:
            raise ApiValueError("Invalid value for parameter `page` when calling `find_project_hardware_reservations`, must be a value less than or equal to `100000`")  # noqa: E501
        if local_var_params['page'] < 1:
            raise ApiValueError("Invalid value for parameter `page` when calling `find_project_hardware_reservations`, must be a value greater than or equal to `1`")  # noqa: E501
    if self.api_client.client_side_validation and local_var_params.get('per_page') is not None:  # noqa: E501
        if local_var_params['per_page'] > 1000:
            raise ApiValueError("Invalid value for parameter `per_page` when calling `find_project_hardware_reservations`, must be a value less than or equal to `1000`")  # noqa: E501
        if local_var_params['per_page'] < 1:
            raise ApiValueError("Invalid value for parameter `per_page` when calling `find_project_hardware_reservations`, must be a value greater than or equal to `1`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    # Only parameters that were supplied (and are not None) become query
    # parameters; list-valued ones are serialized as comma-separated values.
    query_params = []
    if local_var_params.get('include') is not None:  # noqa: E501
        query_params.append(('include', local_var_params['include']))  # noqa: E501
        collection_formats['include'] = 'csv'  # noqa: E501
    if local_var_params.get('exclude') is not None:  # noqa: E501
        query_params.append(('exclude', local_var_params['exclude']))  # noqa: E501
        collection_formats['exclude'] = 'csv'  # noqa: E501
    if local_var_params.get('page') is not None:  # noqa: E501
        query_params.append(('page', local_var_params['page']))  # noqa: E501
    if local_var_params.get('per_page') is not None:  # noqa: E501
        query_params.append(('per_page', local_var_params['per_page']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['x_auth_token']  # noqa: E501

    # Maps HTTP status codes to the model used to deserialize the body.
    response_types_map = {
        200: "HardwareReservationList",
        401: "Error",
        403: "Error",
        404: "Error",
    }

    return self.api_client.call_api(
        '/projects/{id}/hardware-reservations', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def find_project_invitations(self, project_id, **kwargs):  # noqa: E501
    """Retrieve project invitations.  # noqa: E501

    Returns all invitations in a project. Convenience wrapper around
    :meth:`find_project_invitations_with_http_info` that returns only the
    deserialized response data (no status code or headers).

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.find_project_invitations(project_id, async_req=True)
    >>> result = thread.get()

    :param project_id: Project UUID (required)
    :type project_id: str
    :param include: Nested attributes to include. Included objects will
        return their full attributes. Attribute names can be dotted (up to
        3 levels) to include deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will
        return only the href attribute. Attribute names can be dotted (up
        to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param page: Page to return
    :type page: int
    :param per_page: Items returned per page
    :type per_page: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: InvitationList
    """
    # Force data-only responses; dict(kwargs, ...) overrides any value the
    # caller supplied for '_return_http_data_only' without mutating kwargs.
    return self.find_project_invitations_with_http_info(
        project_id, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def find_project_invitations_with_http_info(self, project_id, **kwargs):  # noqa: E501
    """Retrieve project invitations.  # noqa: E501

    Returns all invitations in a project.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.find_project_invitations_with_http_info(project_id, async_req=True)
    >>> result = thread.get()

    :param project_id: Project UUID (required)
    :type project_id: str
    :param include: Nested attributes to include. Included objects will
        return their full attributes. Attribute names can be dotted (up to
        3 levels) to include deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will
        return only the href attribute. Attribute names can be dotted (up
        to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param page: Page to return (1..100000)
    :type page: int
    :param per_page: Items returned per page (1..1000)
    :type per_page: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the
                          authentication in the spec for a single request.
    :type _request_auth: dict, optional
    :raises ApiTypeError: if an unexpected keyword argument is supplied
    :raises ApiValueError: if `project_id` is missing, or `page`/`per_page`
        are out of range (when client-side validation is enabled)
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(InvitationList, status_code(int), headers(HTTPHeaderDict))
    """
    # Capture locals() first so it holds exactly `self`, `project_id` and
    # `kwargs`.
    local_var_params = locals()

    all_params = [
        'project_id',
        'include',
        'exclude',
        'page',
        'per_page'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject unknown keyword arguments, then flatten kwargs into the
    # parameter dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_project_invitations" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'project_id' is set
    if self.api_client.client_side_validation and local_var_params.get('project_id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `project_id` when calling `find_project_invitations`")  # noqa: E501
    # Range-check pagination parameters. The `is not None` guards fix a
    # TypeError that the generated comparisons raised under Python 3 when a
    # caller explicitly passed page=None or per_page=None (None values are
    # simply omitted from the query string below).
    if self.api_client.client_side_validation and local_var_params.get('page') is not None:  # noqa: E501
        if local_var_params['page'] > 100000:
            raise ApiValueError("Invalid value for parameter `page` when calling `find_project_invitations`, must be a value less than or equal to `100000`")  # noqa: E501
        if local_var_params['page'] < 1:
            raise ApiValueError("Invalid value for parameter `page` when calling `find_project_invitations`, must be a value greater than or equal to `1`")  # noqa: E501
    if self.api_client.client_side_validation and local_var_params.get('per_page') is not None:  # noqa: E501
        if local_var_params['per_page'] > 1000:
            raise ApiValueError("Invalid value for parameter `per_page` when calling `find_project_invitations`, must be a value less than or equal to `1000`")  # noqa: E501
        if local_var_params['per_page'] < 1:
            raise ApiValueError("Invalid value for parameter `per_page` when calling `find_project_invitations`, must be a value greater than or equal to `1`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'project_id' in local_var_params:
        path_params['project_id'] = local_var_params['project_id']  # noqa: E501

    # Only parameters that were supplied (and are not None) become query
    # parameters; list-valued ones are serialized as comma-separated values.
    query_params = []
    if local_var_params.get('include') is not None:  # noqa: E501
        query_params.append(('include', local_var_params['include']))  # noqa: E501
        collection_formats['include'] = 'csv'  # noqa: E501
    if local_var_params.get('exclude') is not None:  # noqa: E501
        query_params.append(('exclude', local_var_params['exclude']))  # noqa: E501
        collection_formats['exclude'] = 'csv'  # noqa: E501
    if local_var_params.get('page') is not None:  # noqa: E501
        query_params.append(('page', local_var_params['page']))  # noqa: E501
    if local_var_params.get('per_page') is not None:  # noqa: E501
        query_params.append(('per_page', local_var_params['per_page']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['x_auth_token']  # noqa: E501

    # Maps HTTP status codes to the model used to deserialize the body.
    response_types_map = {
        200: "InvitationList",
        401: "Error",
        403: "Error",
        404: "Error",
    }

    return self.api_client.call_api(
        '/projects/{project_id}/invitations', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def find_project_licenses(self, id, **kwargs):  # noqa: E501
    """Retrieve all licenses.  # noqa: E501

    Provides a collection of licenses for a given project. Convenience
    wrapper around :meth:`find_project_licenses_with_http_info` that
    returns only the deserialized response data (no status code or
    headers).

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.find_project_licenses(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will
        return their full attributes. Attribute names can be dotted (up to
        3 levels) to include deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will
        return only the href attribute. Attribute names can be dotted (up
        to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param page: Page to return
    :type page: int
    :param per_page: Items returned per page
    :type per_page: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: LicenseList
    """
    # Force data-only responses; dict(kwargs, ...) overrides any value the
    # caller supplied for '_return_http_data_only' without mutating kwargs.
    return self.find_project_licenses_with_http_info(
        id, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def find_project_licenses_with_http_info(self, id, **kwargs):  # noqa: E501
    """Retrieve all licenses.  # noqa: E501

    Provides a collection of licenses for a given project.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.find_project_licenses_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will
        return their full attributes. Attribute names can be dotted (up to
        3 levels) to include deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will
        return only the href attribute. Attribute names can be dotted (up
        to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param page: Page to return (1..100000)
    :type page: int
    :param per_page: Items returned per page (1..1000)
    :type per_page: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the
                          authentication in the spec for a single request.
    :type _request_auth: dict, optional
    :raises ApiTypeError: if an unexpected keyword argument is supplied
    :raises ApiValueError: if `id` is missing, or `page`/`per_page` are out
        of range (when client-side validation is enabled)
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(LicenseList, status_code(int), headers(HTTPHeaderDict))
    """
    # Capture locals() first so it holds exactly `self`, `id` and `kwargs`.
    local_var_params = locals()

    all_params = [
        'id',
        'include',
        'exclude',
        'page',
        'per_page'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject unknown keyword arguments, then flatten kwargs into the
    # parameter dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_project_licenses" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and local_var_params.get('id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `find_project_licenses`")  # noqa: E501
    # Range-check pagination parameters. The `is not None` guards fix a
    # TypeError that the generated comparisons raised under Python 3 when a
    # caller explicitly passed page=None or per_page=None (None values are
    # simply omitted from the query string below).
    if self.api_client.client_side_validation and local_var_params.get('page') is not None:  # noqa: E501
        if local_var_params['page'] > 100000:
            raise ApiValueError("Invalid value for parameter `page` when calling `find_project_licenses`, must be a value less than or equal to `100000`")  # noqa: E501
        if local_var_params['page'] < 1:
            raise ApiValueError("Invalid value for parameter `page` when calling `find_project_licenses`, must be a value greater than or equal to `1`")  # noqa: E501
    if self.api_client.client_side_validation and local_var_params.get('per_page') is not None:  # noqa: E501
        if local_var_params['per_page'] > 1000:
            raise ApiValueError("Invalid value for parameter `per_page` when calling `find_project_licenses`, must be a value less than or equal to `1000`")  # noqa: E501
        if local_var_params['per_page'] < 1:
            raise ApiValueError("Invalid value for parameter `per_page` when calling `find_project_licenses`, must be a value greater than or equal to `1`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    # Only parameters that were supplied (and are not None) become query
    # parameters; list-valued ones are serialized as comma-separated values.
    query_params = []
    if local_var_params.get('include') is not None:  # noqa: E501
        query_params.append(('include', local_var_params['include']))  # noqa: E501
        collection_formats['include'] = 'csv'  # noqa: E501
    if local_var_params.get('exclude') is not None:  # noqa: E501
        query_params.append(('exclude', local_var_params['exclude']))  # noqa: E501
        collection_formats['exclude'] = 'csv'  # noqa: E501
    if local_var_params.get('page') is not None:  # noqa: E501
        query_params.append(('page', local_var_params['page']))  # noqa: E501
    if local_var_params.get('per_page') is not None:  # noqa: E501
        query_params.append(('per_page', local_var_params['per_page']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['x_auth_token']  # noqa: E501

    # Maps HTTP status codes to the model used to deserialize the body.
    response_types_map = {
        200: "LicenseList",
        401: "Error",
        403: "Error",
        404: "Error",
    }

    return self.api_client.call_api(
        '/projects/{id}/licenses', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def find_project_memberships(self, project_id, **kwargs):  # noqa: E501
    """Retrieve project memberships.  # noqa: E501

    Returns all memberships in a project. Convenience wrapper around
    :meth:`find_project_memberships_with_http_info` that returns only the
    deserialized response data (no status code or headers).

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.find_project_memberships(project_id, async_req=True)
    >>> result = thread.get()

    :param project_id: Project UUID (required)
    :type project_id: str
    :param include: Nested attributes to include. Included objects will
        return their full attributes. Attribute names can be dotted (up to
        3 levels) to include deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will
        return only the href attribute. Attribute names can be dotted (up
        to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param page: Page to return
    :type page: int
    :param per_page: Items returned per page
    :type per_page: int
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: MembershipList
    """
    # Force data-only responses; dict(kwargs, ...) overrides any value the
    # caller supplied for '_return_http_data_only' without mutating kwargs.
    return self.find_project_memberships_with_http_info(
        project_id, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def find_project_memberships_with_http_info(self, project_id, **kwargs): # noqa: E501
"""Retrieve project memberships # noqa: E501
Returns all memberships in a project. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_project_memberships_with_http_info(project_id, async_req=True)
>>> result = thread.get()
:param project_id: Project UUID (required)
:type project_id: str
:param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to included deeply nested objects.
:type include: list[str]
:param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
:type exclude: list[str]
:param page: Page to return
:type page: int
:param per_page: Items returned per page
:type per_page: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(MembershipList, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'project_id',
'include',
'exclude',
'page',
'per_page'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method find_project_memberships" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'project_id' is set
if self.api_client.client_side_validation and ('project_id' not in local_var_params or # noqa: E501
local_var_params['project_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `project_id` when calling `find_project_memberships`") # noqa: E501
if self.api_client.client_side_validation and 'page' in local_var_params and local_var_params['page'] > 100000: # noqa: E501
raise ApiValueError("Invalid value for parameter `page` when calling `find_project_memberships`, must be a value less than or equal to `100000`") # noqa: E501
if self.api_client.client_side_validation and 'page' in local_var_params and local_var_params['page'] < 1: # noqa: E501
raise ApiValueError("Invalid value for parameter `page` when calling `find_project_memberships`, must be a value greater than or equal to `1`") # noqa: E501
if self.api_client.client_side_validation and 'per_page' in local_var_params and local_var_params['per_page'] > 1000: # noqa: E501
raise ApiValueError("Invalid value for parameter `per_page` when calling `find_project_memberships`, must be a value less than or equal to `1000`") # noqa: E501
if self.api_client.client_side_validation and 'per_page' in local_var_params and local_var_params['per_page'] < 1: # noqa: E501
raise ApiValueError("Invalid value for parameter `per_page` when calling `find_project_memberships`, must be a value greater than or equal to `1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'project_id' in local_var_params:
path_params['project_id'] = local_var_params['project_id'] # noqa: E501
query_params = []
if 'include' in local_var_params and local_var_params['include'] is not None: # noqa: E501
query_params.append(('include', local_var_params['include'])) # noqa: E501
collection_formats['include'] = 'csv' # noqa: E501
if 'exclude' in local_var_params and local_var_params['exclude'] is not None: # noqa: E501
query_params.append(('exclude', local_var_params['exclude'])) # noqa: E501
collection_formats['exclude'] = 'csv' # noqa: E501
if 'page' in local_var_params and local_var_params['page'] is not None: # noqa: E501
query_params.append(('page', local_var_params['page'])) # noqa: E501
if 'per_page' in local_var_params and local_var_params['per_page'] is not None: # noqa: E501
query_params.append(('per_page', local_var_params['per_page'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {
200: "MembershipList",
401: "Error",
403: "Error",
404: "Error",
}
return self.api_client.call_api(
'/projects/{project_id}/memberships', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def find_project_ssh_keys(self, id, **kwargs): # noqa: E501
"""Retrieve a project's ssh keys # noqa: E501
Returns a collection of the project's ssh keys. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_project_ssh_keys(id, async_req=True)
>>> result = thread.get()
:param id: Project UUID (required)
:type id: str
:param search_string: Search by key, label, or fingerprint
:type search_string: str
:param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to included deeply nested objects.
:type include: list[str]
:param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
:type exclude: list[str]
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: SSHKeyList
"""
kwargs['_return_http_data_only'] = True
return self.find_project_ssh_keys_with_http_info(id, **kwargs) # noqa: E501
def find_project_ssh_keys_with_http_info(self, id, **kwargs): # noqa: E501
"""Retrieve a project's ssh keys # noqa: E501
Returns a collection of the project's ssh keys. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_project_ssh_keys_with_http_info(id, async_req=True)
>>> result = thread.get()
:param id: Project UUID (required)
:type id: str
:param search_string: Search by key, label, or fingerprint
:type search_string: str
:param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to included deeply nested objects.
:type include: list[str]
:param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
:type exclude: list[str]
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(SSHKeyList, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'id',
'search_string',
'include',
'exclude'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method find_project_ssh_keys" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `find_project_ssh_keys`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
if 'search_string' in local_var_params and local_var_params['search_string'] is not None: # noqa: E501
query_params.append(('Search string', local_var_params['search_string'])) # noqa: E501
if 'include' in local_var_params and local_var_params['include'] is not None: # noqa: E501
query_params.append(('include', local_var_params['include'])) # noqa: E501
collection_formats['include'] = 'csv' # noqa: E501
if 'exclude' in local_var_params and local_var_params['exclude'] is not None: # noqa: E501
query_params.append(('exclude', local_var_params['exclude'])) # noqa: E501
collection_formats['exclude'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {
200: "SSHKeyList",
401: "Error",
}
return self.api_client.call_api(
'/projects/{id}/ssh-keys', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def find_projects(self, **kwargs): # noqa: E501
"""Retrieve all projects # noqa: E501
Returns a collection of projects that the current user is a member of. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_projects(async_req=True)
>>> result = thread.get()
:param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to included deeply nested objects.
:type include: list[str]
:param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
:type exclude: list[str]
:param page: Page to return
:type page: int
:param per_page: Items returned per page
:type per_page: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: ProjectList
"""
kwargs['_return_http_data_only'] = True
return self.find_projects_with_http_info(**kwargs) # noqa: E501
def find_projects_with_http_info(self, **kwargs): # noqa: E501
"""Retrieve all projects # noqa: E501
Returns a collection of projects that the current user is a member of. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_projects_with_http_info(async_req=True)
>>> result = thread.get()
:param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to included deeply nested objects.
:type include: list[str]
:param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
:type exclude: list[str]
:param page: Page to return
:type page: int
:param per_page: Items returned per page
:type per_page: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(ProjectList, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'include',
'exclude',
'page',
'per_page'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method find_projects" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
if self.api_client.client_side_validation and 'page' in local_var_params and local_var_params['page'] > 100000: # noqa: E501
raise ApiValueError("Invalid value for parameter `page` when calling `find_projects`, must be a value less than or equal to `100000`") # noqa: E501
if self.api_client.client_side_validation and 'page' in local_var_params and local_var_params['page'] < 1: # noqa: E501
raise ApiValueError("Invalid value for parameter `page` when calling `find_projects`, must be a value greater than or equal to `1`") # noqa: E501
if self.api_client.client_side_validation and 'per_page' in local_var_params and local_var_params['per_page'] > 1000: # noqa: E501
raise ApiValueError("Invalid value for parameter `per_page` when calling `find_projects`, must be a value less than or equal to `1000`") # noqa: E501
if self.api_client.client_side_validation and 'per_page' in local_var_params and local_var_params['per_page'] < 1: # noqa: E501
raise ApiValueError("Invalid value for parameter `per_page` when calling `find_projects`, must be a value greater than or equal to `1`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'include' in local_var_params and local_var_params['include'] is not None: # noqa: E501
query_params.append(('include', local_var_params['include'])) # noqa: E501
collection_formats['include'] = 'csv' # noqa: E501
if 'exclude' in local_var_params and local_var_params['exclude'] is not None: # noqa: E501
query_params.append(('exclude', local_var_params['exclude'])) # noqa: E501
collection_formats['exclude'] = 'csv' # noqa: E501
if 'page' in local_var_params and local_var_params['page'] is not None: # noqa: E501
query_params.append(('page', local_var_params['page'])) # noqa: E501
if 'per_page' in local_var_params and local_var_params['per_page'] is not None: # noqa: E501
query_params.append(('per_page', local_var_params['per_page'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {
200: "ProjectList",
401: "Error",
}
return self.api_client.call_api(
'/projects', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def find_virtual_networks(self, id, **kwargs): # noqa: E501
"""Retrieve all virtual networks # noqa: E501
Provides a list of virtual networks for a single project. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_virtual_networks(id, async_req=True)
>>> result = thread.get()
:param id: Project UUID (required)
:type id: str
:param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to included deeply nested objects.
:type include: list[str]
:param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
:type exclude: list[str]
:param facility: Filter by Facility ID (uuid) or Facility Code
:type facility: str
:param metro: Filter by Metro ID (uuid) or Metro Code
:type metro: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: VirtualNetworkList
"""
kwargs['_return_http_data_only'] = True
return self.find_virtual_networks_with_http_info(id, **kwargs) # noqa: E501
def find_virtual_networks_with_http_info(self, id, **kwargs): # noqa: E501
"""Retrieve all virtual networks # noqa: E501
Provides a list of virtual networks for a single project. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_virtual_networks_with_http_info(id, async_req=True)
>>> result = thread.get()
:param id: Project UUID (required)
:type id: str
:param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to included deeply nested objects.
:type include: list[str]
:param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
:type exclude: list[str]
:param facility: Filter by Facility ID (uuid) or Facility Code
:type facility: str
:param metro: Filter by Metro ID (uuid) or Metro Code
:type metro: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(VirtualNetworkList, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'id',
'include',
'exclude',
'facility',
'metro'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method find_virtual_networks" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `find_virtual_networks`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
if 'include' in local_var_params and local_var_params['include'] is not None: # noqa: E501
query_params.append(('include', local_var_params['include'])) # noqa: E501
collection_formats['include'] = 'csv' # noqa: E501
if 'exclude' in local_var_params and local_var_params['exclude'] is not None: # noqa: E501
query_params.append(('exclude', local_var_params['exclude'])) # noqa: E501
collection_formats['exclude'] = 'csv' # noqa: E501
if 'facility' in local_var_params and local_var_params['facility'] is not None: # noqa: E501
query_params.append(('facility', local_var_params['facility'])) # noqa: E501
if 'metro' in local_var_params and local_var_params['metro'] is not None: # noqa: E501
query_params.append(('metro', local_var_params['metro'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {
200: "VirtualNetworkList",
401: "Error",
403: "Error",
404: "Error",
}
return self.api_client.call_api(
'/projects/{id}/virtual-networks', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def list_spot_market_requests(self, id, **kwargs): # noqa: E501
"""List spot market requests # noqa: E501
View all spot market requests for a given project. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_spot_market_requests(id, async_req=True)
>>> result = thread.get()
:param id: Project UUID (required)
:type id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: SpotMarketRequestList
"""
kwargs['_return_http_data_only'] = True
return self.list_spot_market_requests_with_http_info(id, **kwargs) # noqa: E501
def list_spot_market_requests_with_http_info(self, id, **kwargs): # noqa: E501
"""List spot market requests # noqa: E501
View all spot market requests for a given project. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_spot_market_requests_with_http_info(id, async_req=True)
>>> result = thread.get()
:param id: Project UUID (required)
:type id: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(SpotMarketRequestList, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_spot_market_requests" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `list_spot_market_requests`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {
200: "SpotMarketRequestList",
401: "Error",
404: "Error",
}
return self.api_client.call_api(
'/projects/{id}/spot-market-requests', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def request_bgp_config(self, id, bgp_config_request, **kwargs): # noqa: E501
"""Requesting bgp config # noqa: E501
Requests to enable bgp configuration for a project. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.request_bgp_config(id, bgp_config_request, async_req=True)
>>> result = thread.get()
:param id: Project UUID (required)
:type id: str
:param bgp_config_request: BGP config Request to create (required)
:type bgp_config_request: BgpConfigRequestInput
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: None
"""
kwargs['_return_http_data_only'] = True
return self.request_bgp_config_with_http_info(id, bgp_config_request, **kwargs) # noqa: E501
def request_bgp_config_with_http_info(self, id, bgp_config_request, **kwargs):  # noqa: E501
    """Requesting bgp config  # noqa: E501

    Requests to enable bgp configuration for a project.

    Synchronous by default; pass ``async_req=True`` to get back a thread
    whose ``.get()`` yields the result instead.

    :param id: Project UUID (required)
    :type id: str
    :param bgp_config_request: BGP config Request to create (required)
    :type bgp_config_request: BgpConfigRequestInput
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
        and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the raw urllib3.HTTPResponse object
        is returned without reading/decoding the body. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout in seconds, or a
        (connection, read) tuple of timeouts.
    :param _request_auth: set to override the auth_settings for a single
        request; this effectively ignores the authentication in the spec
        for that one request.
    :type _request_auth: dict, optional
    :return: Returns the result object (the request thread when async).
    :rtype: None
    """
    # Accepted option names: the two positional parameters plus the
    # generic per-request options understood by call_api.
    accepted = (
        'id',
        'bgp_config_request',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    )
    params = {'id': id, 'bgp_config_request': bgp_config_request}
    for key, val in six.iteritems(kwargs):
        if key not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method request_bgp_config" % key
            )
        params[key] = val
    # Client-side validation of the required parameters.
    if self.api_client.client_side_validation and params.get('id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `request_bgp_config`")  # noqa: E501
    if self.api_client.client_side_validation and params.get('bgp_config_request') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `bgp_config_request` when calling `request_bgp_config`")  # noqa: E501

    path_params = {'id': params['id']}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }
    body_params = params.get('bgp_config_request')

    return self.api_client.call_api(
        '/projects/{id}/bgp-configs', 'POST',
        path_params,
        [],                      # query params (none for this endpoint)
        header_params,
        body=body_params,
        post_params=[],          # form params (none)
        files={},                # multipart files (none)
        response_types_map={},   # endpoint declares no typed responses
        auth_settings=['x_auth_token'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        _request_auth=params.get('_request_auth'))
def request_ip_reservation(self, id, ip_reservation_request, **kwargs):  # noqa: E501
    """Requesting IP reservations  # noqa: E501

    Request more IP space for a project. Within quota, an IP reservation is
    created immediately; over quota, the request goes to review and returns
    a reservation with `state` of `pending` (or fails with HTTP 422 when
    `fail_on_approval_required` is set to `true`).

    Synchronous by default; pass ``async_req=True`` to get back a thread
    whose ``.get()`` yields the result instead.

    :param id: Project UUID (required)
    :type id: str
    :param ip_reservation_request: IP Reservation Request to create (required)
    :type ip_reservation_request: IPReservationRequestInput
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the raw urllib3.HTTPResponse object
        is returned without reading/decoding the body. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout in seconds, or a
        (connection, read) tuple of timeouts.
    :return: Returns the result object (the request thread when async).
    :rtype: IPReservation
    """
    # Delegate to the *_with_http_info variant, always forcing the
    # data-only form of the response for this convenience wrapper.
    opts = dict(kwargs, _return_http_data_only=True)
    return self.request_ip_reservation_with_http_info(id, ip_reservation_request, **opts)  # noqa: E501
def request_ip_reservation_with_http_info(self, id, ip_reservation_request, **kwargs):  # noqa: E501
    """Requesting IP reservations  # noqa: E501

    Request more IP space for a project. Within quota, an IP reservation is
    created immediately; over quota, the request goes to review and returns
    a reservation with `state` of `pending` (or fails with HTTP 422 when
    `fail_on_approval_required` is set to `true`).

    Synchronous by default; pass ``async_req=True`` to get back a thread
    whose ``.get()`` yields the result instead.

    :param id: Project UUID (required)
    :type id: str
    :param ip_reservation_request: IP Reservation Request to create (required)
    :type ip_reservation_request: IPReservationRequestInput
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
        and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the raw urllib3.HTTPResponse object
        is returned without reading/decoding the body. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout in seconds, or a
        (connection, read) tuple of timeouts.
    :param _request_auth: set to override the auth_settings for a single
        request; this effectively ignores the authentication in the spec
        for that one request.
    :type _request_auth: dict, optional
    :return: Returns the result object (the request thread when async).
    :rtype: tuple(IPReservation, status_code(int), headers(HTTPHeaderDict))
    """
    # Accepted option names: the two positional parameters plus the
    # generic per-request options understood by call_api.
    accepted = (
        'id',
        'ip_reservation_request',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    )
    params = {'id': id, 'ip_reservation_request': ip_reservation_request}
    for key, val in six.iteritems(kwargs):
        if key not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method request_ip_reservation" % key
            )
        params[key] = val
    # Client-side validation of the required parameters.
    if self.api_client.client_side_validation and params.get('id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `request_ip_reservation`")  # noqa: E501
    if self.api_client.client_side_validation and params.get('ip_reservation_request') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `ip_reservation_request` when calling `request_ip_reservation`")  # noqa: E501

    path_params = {'id': params['id']}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }
    body_params = params.get('ip_reservation_request')

    # Status codes this endpoint can answer with, per the API spec.
    response_types_map = {
        201: "IPReservation",
        401: "Error",
        403: "Error",
        404: "Error",
        422: "Error",
    }

    return self.api_client.call_api(
        '/projects/{id}/ips', 'POST',
        path_params,
        [],                      # query params (none for this endpoint)
        header_params,
        body=body_params,
        post_params=[],          # form params (none)
        files={},                # multipart files (none)
        response_types_map=response_types_map,
        auth_settings=['x_auth_token'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        _request_auth=params.get('_request_auth'))
def update_project(self, id, project, **kwargs):  # noqa: E501
    """Update the project  # noqa: E501

    Updates the project.

    Synchronous by default; pass ``async_req=True`` to get back a thread
    whose ``.get()`` yields the result instead.

    :param id: Project UUID (required)
    :type id: str
    :param project: Project to update (required)
    :type project: ProjectUpdateInput
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the raw urllib3.HTTPResponse object
        is returned without reading/decoding the body. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout in seconds, or a
        (connection, read) tuple of timeouts.
    :return: Returns the result object (the request thread when async).
    :rtype: Project
    """
    # Delegate to the *_with_http_info variant, always forcing the
    # data-only form of the response for this convenience wrapper.
    opts = dict(kwargs, _return_http_data_only=True)
    return self.update_project_with_http_info(id, project, **opts)  # noqa: E501
def update_project_with_http_info(self, id, project, **kwargs):  # noqa: E501
    """Update the project  # noqa: E501

    Updates the project.

    Synchronous by default; pass ``async_req=True`` to get back a thread
    whose ``.get()`` yields the result instead.

    :param id: Project UUID (required)
    :type id: str
    :param project: Project to update (required)
    :type project: ProjectUpdateInput
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
        and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the raw urllib3.HTTPResponse object
        is returned without reading/decoding the body. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: total request timeout in seconds, or a
        (connection, read) tuple of timeouts.
    :param _request_auth: set to override the auth_settings for a single
        request; this effectively ignores the authentication in the spec
        for that one request.
    :type _request_auth: dict, optional
    :return: Returns the result object (the request thread when async).
    :rtype: tuple(Project, status_code(int), headers(HTTPHeaderDict))
    """
    # Accepted option names: the two positional parameters plus the
    # generic per-request options understood by call_api.
    accepted = (
        'id',
        'project',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
        '_request_auth',
    )
    params = {'id': id, 'project': project}
    for key, val in six.iteritems(kwargs):
        if key not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_project" % key
            )
        params[key] = val
    # Client-side validation of the required parameters.
    if self.api_client.client_side_validation and params.get('id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `update_project`")  # noqa: E501
    if self.api_client.client_side_validation and params.get('project') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `project` when calling `update_project`")  # noqa: E501

    path_params = {'id': params['id']}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }
    body_params = params.get('project')

    # Status codes this endpoint can answer with, per the API spec.
    response_types_map = {
        200: "Project",
        401: "Error",
        403: "Error",
        404: "Error",
        422: "Error",
    }

    return self.api_client.call_api(
        '/projects/{id}', 'PUT',
        path_params,
        [],                      # query params (none for this endpoint)
        header_params,
        body=body_params,
        post_params=[],          # form params (none)
        files={},                # multipart files (none)
        response_types_map=response_types_map,
        auth_settings=['x_auth_token'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        _request_auth=params.get('_request_auth'))
| 48.07398
| 858
| 0.604457
| 30,425
| 263,830
| 5.010781
| 0.0141
| 0.041718
| 0.066578
| 0.024795
| 0.981935
| 0.978413
| 0.972615
| 0.96761
| 0.964094
| 0.956439
| 0
| 0.01658
| 0.323079
| 263,830
| 5,487
| 859
| 48.082741
| 0.837059
| 0.477038
| 0
| 0.78221
| 0
| 0.012764
| 0.196006
| 0.045538
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028321
| false
| 0
| 0.001994
| 0
| 0.058636
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f9acae6d637b169d910d91f5808b26a4420bd437
| 3,240
|
py
|
Python
|
tests/api_url_test.py
|
PiyushDeshmukh/camgor
|
bcf070840fa9e9df62dfceb73166244076bc3f43
|
[
"MIT"
] | 2
|
2016-04-03T11:09:17.000Z
|
2016-12-25T07:25:28.000Z
|
tests/api_url_test.py
|
PiyushDeshmukh/locawecomm
|
bcf070840fa9e9df62dfceb73166244076bc3f43
|
[
"MIT"
] | null | null | null |
tests/api_url_test.py
|
PiyushDeshmukh/locawecomm
|
bcf070840fa9e9df62dfceb73166244076bc3f43
|
[
"MIT"
] | null | null | null |
from camgor.camgor import *
"""
Tests whether Camgor can generate the api url corresponding to the
mentioned git url
"""
def test_accepts_contributors():
    """
    checks if contributor urls are generated
    """
    cases = [
        ("https://github.com/python/pythondotorg.git",
         "https://api.github.com/repos/python/pythondotorg/contributors"),
        ("https://github.com/facebook/react.git",
         "https://api.github.com/repos/facebook/react/contributors"),
        ("https://github.com/torvalds/linux.git",
         "https://api.github.com/repos/torvalds/linux/contributors"),
        ("https://github.com/PiyushDeshmukh/camgor.git",
         "https://api.github.com/repos/PiyushDeshmukh/camgor/contributors"),
    ]
    for git_url, expected in cases:
        api_url = get_api_url(git_url, "contributors")
        # drop the trailing 54 chars — presumably a 14-char query prefix
        # plus a 40-char API key; verify against get_api_url
        api_url_without_key = api_url[:len(api_url) - (40 + 14)]
        assert api_url_without_key == expected
def test_accepts_stargazers():
    """
    checks if stargazers urls are generated
    """
    cases = [
        ("https://github.com/python/pythondotorg.git",
         "https://api.github.com/repos/python/pythondotorg/stargazers"),
        ("https://github.com/facebook/react.git",
         "https://api.github.com/repos/facebook/react/stargazers"),
        ("https://github.com/torvalds/linux.git",
         "https://api.github.com/repos/torvalds/linux/stargazers"),
        ("https://github.com/PiyushDeshmukh/camgor.git",
         "https://api.github.com/repos/PiyushDeshmukh/camgor/stargazers"),
    ]
    for git_url, expected in cases:
        api_url = get_api_url(git_url, "stargazers")
        # drop the trailing 54 chars — presumably a 14-char query prefix
        # plus a 40-char API key; verify against get_api_url
        api_url_without_key = api_url[:len(api_url) - (40 + 14)]
        assert api_url_without_key == expected
def test_accepts_watchers():
    """
    checks if watchers urls are generated
    """
    # Fixed: docstring previously said "stargazers" (copy-paste error).
    cases = [
        ("https://github.com/python/pythondotorg.git",
         "https://api.github.com/repos/python/pythondotorg/watchers"),
        ("https://github.com/facebook/react.git",
         "https://api.github.com/repos/facebook/react/watchers"),
        ("https://github.com/torvalds/linux.git",
         "https://api.github.com/repos/torvalds/linux/watchers"),
        ("https://github.com/PiyushDeshmukh/camgor.git",
         "https://api.github.com/repos/PiyushDeshmukh/camgor/watchers"),
    ]
    for git_url, expected in cases:
        api_url = get_api_url(git_url, "watchers")
        # drop the trailing 54 chars — presumably a 14-char query prefix
        # plus a 40-char API key; verify against get_api_url
        api_url_without_key = api_url[:len(api_url) - (40 + 14)]
        assert api_url_without_key == expected
| 48.358209
| 100
| 0.729012
| 480
| 3,240
| 4.633333
| 0.091667
| 0.196942
| 0.140288
| 0.172662
| 0.903777
| 0.903777
| 0.903777
| 0.903777
| 0.903777
| 0.863309
| 0
| 0.016824
| 0.119444
| 3,240
| 66
| 101
| 49.090909
| 0.762706
| 0.037037
| 0
| 0.3
| 1
| 0
| 0.430584
| 0
| 0
| 0
| 0
| 0
| 0.3
| 1
| 0.075
| false
| 0
| 0.025
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
dded76cb837db208879939476d456f11e028cf8b
| 3,731
|
py
|
Python
|
python/data_structures/tree/fizz_buzz_tree.py
|
M7madMomani2/data-structures-and-algorithms
|
35ba48973d45f6972d097e4aaac7cfb7147a83a2
|
[
"MIT"
] | null | null | null |
python/data_structures/tree/fizz_buzz_tree.py
|
M7madMomani2/data-structures-and-algorithms
|
35ba48973d45f6972d097e4aaac7cfb7147a83a2
|
[
"MIT"
] | null | null | null |
python/data_structures/tree/fizz_buzz_tree.py
|
M7madMomani2/data-structures-and-algorithms
|
35ba48973d45f6972d097e4aaac7cfb7147a83a2
|
[
"MIT"
] | 1
|
2021-08-29T20:16:19.000Z
|
2021-08-29T20:16:19.000Z
|
class Node:
    """A single binary-tree node: a payload value plus two child links."""

    def __init__(self, value):
        self.value = value  # payload stored at this node
        self.right = None   # right child (Node or None)
        self.left = None    # left child (Node or None)

    def __repr__(self):
        # Added for debuggability; shows only the payload, not the subtree.
        return "Node(%r)" % self.value
class BinaryTree:
    """Binary tree traversals over nodes exposing .value/.left/.right.

    Fixes over the previous version:
    * traversals return [] instead of crashing on an empty tree
    * breadth_first uses a FIFO queue — pop() from the end made it
      depth-first, contradicting the method name
    * find_maximum_value no longer seeds the search from a stale
      ``self.maxVal`` (0), which was wrong for all-negative trees and
      leaked state across calls
    """

    def __init__(self):
        self.root = None  # top Node of the tree (None when empty)
        self.maxVal = 0   # last maximum found by find_maximum_value

    def pre_order(self):
        """Return node values in root -> left -> right order."""
        output = []

        def _walk(node):
            if node:
                output.append(node.value)
                _walk(node.left)
                _walk(node.right)

        _walk(self.root)
        return output

    def in_order(self):
        """Return node values in left -> root -> right order."""
        output = []

        def _walk(node):
            if node:
                _walk(node.left)
                output.append(node.value)
                _walk(node.right)

        _walk(self.root)
        return output

    def post_order(self):
        """Return node values in left -> right -> root order."""
        output = []

        def _walk(node):
            if node:
                _walk(node.left)
                _walk(node.right)
                output.append(node.value)

        _walk(self.root)
        return output

    def find_maximum_value(self, tree):
        """Return the largest value in the tree (None when empty).

        ``tree`` is unused and kept only for interface compatibility;
        ``self.maxVal`` is refreshed as a side effect.
        """
        values = self.pre_order()
        if not values:
            return None
        self.maxVal = max(values)
        return self.maxVal

    def breadth_first(self, tree):
        """Return values level by level, or 'Empty' for an empty tree.

        ``tree`` is unused and kept only for interface compatibility.
        """
        if not self.root:
            return 'Empty'
        queue = [self.root]
        results = []
        while queue:
            node = queue.pop(0)  # FIFO; O(n) pop is fine at these sizes
            results.append(node.value)
            if node.left:
                queue.append(node.left)
            if node.right:
                queue.append(node.right)
        return results
class BinarySearchTree(BinaryTree):
    """Binary search tree; adds insertion and membership testing.

    Fixes over the previous version:
    * __init__ delegates to super() instead of duplicating attribute setup
    * breadth_first uses a FIFO queue — pop() made it depth-first
    * find_maximum_value no longer seeds from a stale ``self.maxVal``
    * contains returns False instead of crashing on an empty tree
    """

    def __init__(self):
        super().__init__()  # sets root=None, maxVal=0

    def add(self, value):
        """Insert value, keeping BST order (duplicates go right)."""
        if not self.root:
            self.root = Node(value)
            return
        node = self.root
        while True:
            if value < node.value:
                if node.left:
                    node = node.left
                else:
                    node.left = Node(value)
                    return
            else:
                if node.right:
                    node = node.right
                else:
                    node.right = Node(value)
                    return

    def contains(self, value):
        """Return True if value is stored anywhere in the tree."""
        # Full-tree walk (not a BST descent) so the answer is correct even
        # if nodes were attached manually without the BST invariant.
        stack = [self.root] if self.root else []
        while stack:
            node = stack.pop()
            if node.value == value:
                return True
            if node.left:
                stack.append(node.left)
            if node.right:
                stack.append(node.right)
        return False

    def breadth_first(self, tree):
        """Return values level by level, or 'Empty' for an empty tree.

        ``tree`` is unused and kept only for interface compatibility.
        """
        if not self.root:
            return 'Empty'
        queue = [self.root]
        results = []
        while queue:
            node = queue.pop(0)  # FIFO fixes the old pop() depth-first bug
            results.append(node.value)
            if node.left:
                queue.append(node.left)
            if node.right:
                queue.append(node.right)
        return results

    def find_maximum_value(self, tree):
        """Return the largest value in the tree (None when empty).

        ``tree`` is unused; ``self.maxVal`` is refreshed as a side effect.
        """
        best = None
        stack = [self.root] if self.root else []
        while stack:
            node = stack.pop()
            if best is None or node.value > best:
                best = node.value
            if node.left:
                stack.append(node.left)
            if node.right:
                stack.append(node.right)
        if best is not None:
            self.maxVal = best
        return best
| 21.32
| 48
| 0.441705
| 371
| 3,731
| 4.309973
| 0.107817
| 0.105066
| 0.097561
| 0.065666
| 0.801126
| 0.801126
| 0.722952
| 0.722952
| 0.722952
| 0.722952
| 0
| 0.001019
| 0.473868
| 3,731
| 174
| 49
| 21.442529
| 0.813551
| 0.004288
| 0
| 0.822581
| 0
| 0
| 0.002694
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.153226
| false
| 0
| 0
| 0
| 0.282258
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
34b6a4121a24dd9ada2b5e7bcb6c50ea1f57af84
| 54,795
|
py
|
Python
|
simulation/Tempo2.py
|
vivirodrigues/carrinheiros-heuristics
|
92c8c4a8384f8e3a86e9c53b41bcb2ab001de5f5
|
[
"MIT"
] | null | null | null |
simulation/Tempo2.py
|
vivirodrigues/carrinheiros-heuristics
|
92c8c4a8384f8e3a86e9c53b41bcb2ab001de5f5
|
[
"MIT"
] | null | null | null |
simulation/Tempo2.py
|
vivirodrigues/carrinheiros-heuristics
|
92c8c4a8384f8e3a86e9c53b41bcb2ab001de5f5
|
[
"MIT"
] | null | null | null |
import json
import scipy.stats
import matplotlib.pyplot as plt
import numpy as np
def open_file(nameFile):
    """Load and return the parsed contents of ``<nameFile>.json``.

    :param nameFile: path to the JSON file, without the ".json" suffix
    :return: the decoded JSON value, or 0 (the historical sentinel) when
        the file is missing, unreadable, or not valid JSON
    """
    try:
        # 'with' guarantees the handle is closed even if parsing raises;
        # the old version leaked the handle on a json.loads failure.
        with open(nameFile + ".json", "r") as f:
            return json.loads(f.read())
    except (OSError, ValueError):
        # ValueError covers json.JSONDecodeError. The previous bare
        # 'except:' also swallowed KeyboardInterrupt/SystemExit — a bug.
        return 0
def mean_confidence_interval(data, confidence=0.90):
    """Return (mean, half-width) of the Student-t confidence interval.

    :param data: sequence of numeric samples; needs at least two points,
        otherwise the standard error is undefined (NaN)
    :param confidence: confidence level, e.g. 0.90 for a 90% interval
    :return: tuple ``(m, h)``; the interval is ``m - h .. m + h``
    """
    a = 1.0 * np.array(data)
    n = len(a)
    m, se = np.mean(a), scipy.stats.sem(a)
    # Two-sided critical value of the t distribution, n-1 degrees of freedom.
    h = se * scipy.stats.t.ppf((1 + confidence) / 2., n - 1)
    return m, h
# All 31 simulation runs share the same start/end-coordinate prefixes;
# only the shortest-path algorithm (dijkstra / SPFA / astar) and the TSP
# construction heuristic (nearest_neighbor / closest_insertion) differ in
# the file-name suffix, so the six result lists are generated from one
# prefix table instead of being written out six times by hand.
_PREFIXES = [
    '../data/results/m38.49999603681327_m12.962358080558504_m38.47398437502447_m12.932893255527242_0_30',
    '../data/results/m38.500671812913836_m12.96339552158351_m38.47352508877093_m12.932765988234031_1_30',
    '../data/results/m38.50194412971296_m12.961982380453897_m38.472997875909336_m12.933973466644028_2_30',
    '../data/results/m38.5014109499298_m12.960872502034725_m38.47423998586774_m12.935033565792027_3_30',
    '../data/results/m38.50102106363388_m12.962638092503209_m38.474525144844954_m12.932374557163948_4_30',
    '../data/results/m38.49922134252434_m12.962995897766534_m38.47172032605714_m12.933032796134958_5_30',
    '../data/results/m38.49989452416794_m12.961981434109553_m38.47288011285585_m12.932171368514155_6_30',
    '../data/results/m38.50237887905613_m12.960648819826947_m38.472913582758295_m12.934273386456828_7_30',
    '../data/results/m38.5015370998344_m12.962186005531471_m38.47261478466609_m12.934002015361491_8_30',
    '../data/results/m38.50073006631474_m12.961333960783888_m38.4725327574897_m12.932373724953635_9_30',
    '../data/results/m38.50096584572687_m12.96121100042776_m38.47440076442133_m12.934017719276726_10_30',
    '../data/results/m38.50082829471482_m12.960720017172312_m38.47384043859295_m12.933596799909374_11_30',
    '../data/results/m38.501118552381065_m12.962947784137462_m38.47426226643149_m12.932564078786635_12_30',
    '../data/results/m38.502373456830234_m12.962333491657414_m38.47477812160141_m12.93271906374045_13_30',
    '../data/results/m38.50148403583942_m12.965290796965846_m38.471909395581456_m12.932729360653218_14_30',
    '../data/results/m38.501890924160584_m12.961062102765782_m38.4732392389171_m12.933884816602236_15_30',
    '../data/results/m38.5007597052321_m12.961099590741043_m38.473517022103756_m12.933269493665131_16_30',
    '../data/results/m38.50151426278066_m12.96224952417061_m38.473343947418165_m12.932595128870267_17_30',
    '../data/results/m38.50340379765633_m12.963068504924866_m38.473898022861405_m12.932939179700924_18_30',
    '../data/results/m38.501402782516365_m12.962743981859667_m38.47361068224981_m12.929892203606808_19_30',
    '../data/results/m38.500951062259055_m12.964628446152132_m38.47375669394401_m12.93455351878407_20_30',
    '../data/results/m38.500486678608006_m12.963212145332431_m38.474758327361364_m12.933328833777356_21_30',
    '../data/results/m38.50234447884447_m12.961648999633914_m38.474661277554_m12.93489642987398_22_30',
    '../data/results/m38.50229159113205_m12.961490473565357_m38.474209563384555_m12.933428060221484_23_30',
    '../data/results/m38.500568338650666_m12.963562146885746_m38.47357849097421_m12.93225101151055_24_30',
    '../data/results/m38.50115701483925_m12.9612635544437_m38.47509217365817_m12.933188948092502_25_30',
    '../data/results/m38.50186554346796_m12.961718758432754_m38.47355380440904_m12.934289622568668_26_30',
    '../data/results/m38.50165434807298_m12.96187628063375_m38.47332172286755_m12.933277161490693_27_30',
    '../data/results/m38.50177737556065_m12.962596650290932_m38.472904517360526_m12.933331456516722_28_30',
    '../data/results/m38.50009702898103_m12.96036292373261_m38.47412281703678_m12.934711892250165_29_30',
    '../data/results/m38.500734794836475_m12.961295117029927_m38.473498428492356_m12.932937589096973_30_30'
]


def _result_files(algorithm, tsp_heuristic):
    # One result-file path per run prefix for the given shortest-path
    # algorithm / TSP-heuristic combination.
    suffix = ('_length_heuristic_' + algorithm + '_' + tsp_heuristic +
              '_coords_length_heuristic_' + algorithm)
    return [prefix + suffix for prefix in _PREFIXES]


files = _result_files('dijkstra', 'nearest_neighbor')
files_s = _result_files('SPFA', 'nearest_neighbor')
files_a = _result_files('astar', 'nearest_neighbor')
files_ci = _result_files('dijkstra', 'closest_insertion')
files_s_ci = _result_files('SPFA', 'closest_insertion')
files_a_ci = _result_files('astar', 'closest_insertion')
# Result files for Dijkstra routing + "further insertion" tour construction
# (31 runs; each basename encodes start/end coordinates, run index, and run
# count — the leading 'm' presumably encodes a minus sign; TODO confirm with
# the result-file writer).
files_fi = [
'../data/results/m38.49999603681327_m12.962358080558504_m38.47398437502447_m12.932893255527242_0_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.500671812913836_m12.96339552158351_m38.47352508877093_m12.932765988234031_1_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.50194412971296_m12.961982380453897_m38.472997875909336_m12.933973466644028_2_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.5014109499298_m12.960872502034725_m38.47423998586774_m12.935033565792027_3_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.50102106363388_m12.962638092503209_m38.474525144844954_m12.932374557163948_4_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.49922134252434_m12.962995897766534_m38.47172032605714_m12.933032796134958_5_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.49989452416794_m12.961981434109553_m38.47288011285585_m12.932171368514155_6_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.50237887905613_m12.960648819826947_m38.472913582758295_m12.934273386456828_7_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.5015370998344_m12.962186005531471_m38.47261478466609_m12.934002015361491_8_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.50073006631474_m12.961333960783888_m38.4725327574897_m12.932373724953635_9_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.50096584572687_m12.96121100042776_m38.47440076442133_m12.934017719276726_10_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.50082829471482_m12.960720017172312_m38.47384043859295_m12.933596799909374_11_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.501118552381065_m12.962947784137462_m38.47426226643149_m12.932564078786635_12_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.502373456830234_m12.962333491657414_m38.47477812160141_m12.93271906374045_13_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.50148403583942_m12.965290796965846_m38.471909395581456_m12.932729360653218_14_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.501890924160584_m12.961062102765782_m38.4732392389171_m12.933884816602236_15_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.5007597052321_m12.961099590741043_m38.473517022103756_m12.933269493665131_16_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.50151426278066_m12.96224952417061_m38.473343947418165_m12.932595128870267_17_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.50340379765633_m12.963068504924866_m38.473898022861405_m12.932939179700924_18_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.501402782516365_m12.962743981859667_m38.47361068224981_m12.929892203606808_19_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.500951062259055_m12.964628446152132_m38.47375669394401_m12.93455351878407_20_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.500486678608006_m12.963212145332431_m38.474758327361364_m12.933328833777356_21_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.50234447884447_m12.961648999633914_m38.474661277554_m12.93489642987398_22_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.50229159113205_m12.961490473565357_m38.474209563384555_m12.933428060221484_23_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.500568338650666_m12.963562146885746_m38.47357849097421_m12.93225101151055_24_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.50115701483925_m12.9612635544437_m38.47509217365817_m12.933188948092502_25_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.50186554346796_m12.961718758432754_m38.47355380440904_m12.934289622568668_26_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.50165434807298_m12.96187628063375_m38.47332172286755_m12.933277161490693_27_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.50177737556065_m12.962596650290932_m38.472904517360526_m12.933331456516722_28_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.50009702898103_m12.96036292373261_m38.47412281703678_m12.934711892250165_29_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra',
'../data/results/m38.500734794836475_m12.961295117029927_m38.473498428492356_m12.932937589096973_30_30_length_heuristic_dijkstra_further_insertion_coords_length_heuristic_dijkstra'
]
# Result files for SPFA routing + "further insertion" tour construction
# (same 31 coordinate pairs / run indices as the other lists, only the
# algorithm segment of the basename differs).
files_s_fi = [#'../../data/results/m43.957018117658315_m19.931545102455843_m43.931890481507786_m19.907162672548026_0_distance_heuristic_SPFA_nn'
'../data/results/m38.49999603681327_m12.962358080558504_m38.47398437502447_m12.932893255527242_0_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.500671812913836_m12.96339552158351_m38.47352508877093_m12.932765988234031_1_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.50194412971296_m12.961982380453897_m38.472997875909336_m12.933973466644028_2_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.5014109499298_m12.960872502034725_m38.47423998586774_m12.935033565792027_3_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.50102106363388_m12.962638092503209_m38.474525144844954_m12.932374557163948_4_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.49922134252434_m12.962995897766534_m38.47172032605714_m12.933032796134958_5_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.49989452416794_m12.961981434109553_m38.47288011285585_m12.932171368514155_6_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.50237887905613_m12.960648819826947_m38.472913582758295_m12.934273386456828_7_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.5015370998344_m12.962186005531471_m38.47261478466609_m12.934002015361491_8_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.50073006631474_m12.961333960783888_m38.4725327574897_m12.932373724953635_9_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.50096584572687_m12.96121100042776_m38.47440076442133_m12.934017719276726_10_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.50082829471482_m12.960720017172312_m38.47384043859295_m12.933596799909374_11_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.501118552381065_m12.962947784137462_m38.47426226643149_m12.932564078786635_12_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.502373456830234_m12.962333491657414_m38.47477812160141_m12.93271906374045_13_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.50148403583942_m12.965290796965846_m38.471909395581456_m12.932729360653218_14_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.501890924160584_m12.961062102765782_m38.4732392389171_m12.933884816602236_15_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.5007597052321_m12.961099590741043_m38.473517022103756_m12.933269493665131_16_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.50151426278066_m12.96224952417061_m38.473343947418165_m12.932595128870267_17_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.50340379765633_m12.963068504924866_m38.473898022861405_m12.932939179700924_18_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.501402782516365_m12.962743981859667_m38.47361068224981_m12.929892203606808_19_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.500951062259055_m12.964628446152132_m38.47375669394401_m12.93455351878407_20_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.500486678608006_m12.963212145332431_m38.474758327361364_m12.933328833777356_21_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.50234447884447_m12.961648999633914_m38.474661277554_m12.93489642987398_22_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.50229159113205_m12.961490473565357_m38.474209563384555_m12.933428060221484_23_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.500568338650666_m12.963562146885746_m38.47357849097421_m12.93225101151055_24_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.50115701483925_m12.9612635544437_m38.47509217365817_m12.933188948092502_25_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.50186554346796_m12.961718758432754_m38.47355380440904_m12.934289622568668_26_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.50165434807298_m12.96187628063375_m38.47332172286755_m12.933277161490693_27_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.50177737556065_m12.962596650290932_m38.472904517360526_m12.933331456516722_28_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.50009702898103_m12.96036292373261_m38.47412281703678_m12.934711892250165_29_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA',
'../data/results/m38.500734794836475_m12.961295117029927_m38.473498428492356_m12.932937589096973_30_30_length_heuristic_SPFA_further_insertion_coords_length_heuristic_SPFA'
]
# Result files for A* routing + "further insertion" tour construction
# (same 31 coordinate pairs / run indices as the other lists).
# FIX: run 29 previously read 'm38.5000970289810360...' — two stray digits
# appended to the latitude that no other list has (cf. the dijkstra/SPFA
# entries for run 29); corrected to 'm38.50009702898103' so the path matches
# the file actually written for that run.
files_a_fi = [
'../data/results/m38.49999603681327_m12.962358080558504_m38.47398437502447_m12.932893255527242_0_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.500671812913836_m12.96339552158351_m38.47352508877093_m12.932765988234031_1_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.50194412971296_m12.961982380453897_m38.472997875909336_m12.933973466644028_2_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.5014109499298_m12.960872502034725_m38.47423998586774_m12.935033565792027_3_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.50102106363388_m12.962638092503209_m38.474525144844954_m12.932374557163948_4_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.49922134252434_m12.962995897766534_m38.47172032605714_m12.933032796134958_5_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.49989452416794_m12.961981434109553_m38.47288011285585_m12.932171368514155_6_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.50237887905613_m12.960648819826947_m38.472913582758295_m12.934273386456828_7_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.5015370998344_m12.962186005531471_m38.47261478466609_m12.934002015361491_8_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.50073006631474_m12.961333960783888_m38.4725327574897_m12.932373724953635_9_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.50096584572687_m12.96121100042776_m38.47440076442133_m12.934017719276726_10_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.50082829471482_m12.960720017172312_m38.47384043859295_m12.933596799909374_11_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.501118552381065_m12.962947784137462_m38.47426226643149_m12.932564078786635_12_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.502373456830234_m12.962333491657414_m38.47477812160141_m12.93271906374045_13_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.50148403583942_m12.965290796965846_m38.471909395581456_m12.932729360653218_14_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.501890924160584_m12.961062102765782_m38.4732392389171_m12.933884816602236_15_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.5007597052321_m12.961099590741043_m38.473517022103756_m12.933269493665131_16_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.50151426278066_m12.96224952417061_m38.473343947418165_m12.932595128870267_17_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.50340379765633_m12.963068504924866_m38.473898022861405_m12.932939179700924_18_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.501402782516365_m12.962743981859667_m38.47361068224981_m12.929892203606808_19_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.500951062259055_m12.964628446152132_m38.47375669394401_m12.93455351878407_20_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.500486678608006_m12.963212145332431_m38.474758327361364_m12.933328833777356_21_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.50234447884447_m12.961648999633914_m38.474661277554_m12.93489642987398_22_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.50229159113205_m12.961490473565357_m38.474209563384555_m12.933428060221484_23_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.500568338650666_m12.963562146885746_m38.47357849097421_m12.93225101151055_24_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.50115701483925_m12.9612635544437_m38.47509217365817_m12.933188948092502_25_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.50186554346796_m12.961718758432754_m38.47355380440904_m12.934289622568668_26_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.50165434807298_m12.96187628063375_m38.47332172286755_m12.933277161490693_27_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.50177737556065_m12.962596650290932_m38.472904517360526_m12.933331456516722_28_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.50009702898103_m12.96036292373261_m38.47412281703678_m12.934711892250165_29_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar',
'../data/results/m38.500734794836475_m12.961295117029927_m38.473498428492356_m12.932937589096973_30_30_length_heuristic_astar_further_insertion_coords_length_heuristic_astar'
]
# ---------------------------------------------------------------------------
# Aggregate per-run runtimes and plot mean +/- 95% CI for each combination of
# shortest-path algorithm (Bidirectional Dijkstra / SPFA / A*) and tour
# construction heuristic (Nearest Neighbor / Closest Insertion / Further
# Insertion).
#
# Improvements over the original: the nine copy-pasted accumulation loops are
# collapsed into two small helpers, and the misleading local names are gone
# (the original bound `dados_ci` to files_s data and `dados_fi` to files_a
# data inside the loop). Behavior is unchanged.
# ---------------------------------------------------------------------------

def _total_times_ms(paths):
    """Return the 'total_time' value of every result file in *paths*, in ms.

    open_file() is assumed to yield key/value pairs with 'total_time' in
    seconds — TODO confirm against its definition earlier in this file.
    """
    return [float(dict(open_file(p)).get('total_time')) * 1000 for p in paths]

# Nearest Neighbor construction, one sample list per routing algorithm.
todas_medias = _total_times_ms(files)
todas_medias_s = _total_times_ms(files_s)
todas_medias_a = _total_times_ms(files_a)
# Closest Insertion construction.
todas_medias_ci = _total_times_ms(files_ci)
todas_medias_s_ci = _total_times_ms(files_s_ci)
todas_medias_a_ci = _total_times_ms(files_a_ci)
# Further (farthest) Insertion construction.
todas_medias_fi = _total_times_ms(files_fi)
todas_medias_s_fi = _total_times_ms(files_s_fi)
todas_medias_a_fi = _total_times_ms(files_a_fi)

def _means_and_errors(sample_lists):
    """Return ([means], [95% CI half-widths]) for each list in *sample_lists*."""
    means, errors = [], []
    for samples in sample_lists:
        m, h = mean_confidence_interval(samples, 0.95)
        means.append(m)
        errors.append(h)
    return means, errors

medias, erros = _means_and_errors((todas_medias, todas_medias_s, todas_medias_a))
medias_ci, erros_ci = _means_and_errors((todas_medias_ci, todas_medias_s_ci, todas_medias_a_ci))
medias_fi, erros_fi = _means_and_errors((todas_medias_fi, todas_medias_s_fi, todas_medias_a_fi))

labels = ['Bidirectional Dijkstra', 'SPFA', 'A-star']
x = np.arange(len(labels))  # the label locations
width = 0.45  # total width budget per label; each of the 3 bars gets width/3
print(medias, medias_ci, medias_fi)
fig, ax = plt.subplots()
# Three grouped bars per routing algorithm, one per construction heuristic,
# with 95% CI error bars; zorder keeps the bars above the grid lines.
r1 = ax.bar(x - width/3, medias, width/3, yerr=erros, label='Nearest Neighbor', zorder=10)
r2 = ax.bar(x, medias_ci, width/3, yerr=erros_ci, label='Closest Insertion', zorder=10)
r3 = ax.bar(x + width/3, medias_fi, width/3, yerr=erros_fi, label='Further Insertion', zorder=10)
plt.yscale('log')  # runtimes span several orders of magnitude
plt.ylabel('Time [ms]', fontweight="bold", fontsize=11)
plt.ylim(10**(1), 10**(6))
plt.grid(True, which="both", ls="-", linewidth=0.1, color='0.10', zorder=0.5)
ax.set_xticks(x)
ax.set_xticklabels(labels)
ax.legend(numpoints=1, loc="upper right", ncol=3, prop={'size': 9})
fig.tight_layout()
plt.show()
| 129.539007
| 189
| 0.871594
| 6,516
| 54,795
| 6.698895
| 0.046961
| 0.191753
| 0.089485
| 0.127835
| 0.9652
| 0.95803
| 0.948729
| 0.946896
| 0.944238
| 0.944238
| 0
| 0.380698
| 0.056775
| 54,795
| 423
| 190
| 129.539007
| 0.463857
| 0.006479
| 0
| 0
| 0
| 0
| 0.879207
| 0.875202
| 0
| 0
| 0
| 0
| 0
| 1
| 0.005319
| false
| 0.00266
| 0.010638
| 0
| 0.021277
| 0.00266
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
34d2758eee80150f0f5d6c1bd0eead887b4b6947
| 2,924
|
py
|
Python
|
backend/stock/migrations/0008_dailypricehistory_hourlypricehistory_monthlypricehistory_weeklypricehistory.py
|
TopWebFullDev/Stock-chart-1
|
947f72fbb71af9762a12aa2b2c7d4aa2824cd165
|
[
"MIT"
] | null | null | null |
backend/stock/migrations/0008_dailypricehistory_hourlypricehistory_monthlypricehistory_weeklypricehistory.py
|
TopWebFullDev/Stock-chart-1
|
947f72fbb71af9762a12aa2b2c7d4aa2824cd165
|
[
"MIT"
] | null | null | null |
backend/stock/migrations/0008_dailypricehistory_hourlypricehistory_monthlypricehistory_weeklypricehistory.py
|
TopWebFullDev/Stock-chart-1
|
947f72fbb71af9762a12aa2b2c7d4aa2824cd165
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.2.10 on 2020-09-30 21:58
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create four OHLCV price-history tables (daily/hourly/monthly/weekly).

    All four models share an identical column layout (symbol, volume, high,
    low, open, datetime, close), so the operations list is generated from
    (model name, db_table) pairs instead of repeating the field definitions
    four times. The resulting operations are identical to the original
    hand-expanded version, in the same order.
    """

    dependencies = [
        ('stock', '0007_pricehistory_close'),
    ]

    # One CreateModel per granularity; the fields list is rebuilt for each
    # model so no field instances are shared between operations.
    operations = [
        migrations.CreateModel(
            name=model_name,
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('symbol', models.CharField(max_length=50)),
                ('volume', models.FloatField()),
                ('high', models.FloatField()),
                ('low', models.FloatField()),
                ('open', models.FloatField()),
                # NOTE(review): stored as an integer — presumably a Unix
                # timestamp; confirm against the ingestion code.
                ('datetime', models.IntegerField()),
                ('close', models.FloatField()),
            ],
            options={
                'db_table': db_table,
            },
        )
        for model_name, db_table in [
            ('DailyPriceHistory', 'daily_price_history'),
            ('HourlyPriceHistory', 'hourly_price_history'),
            ('MonthlyPriceHistory', 'monthly_price_history'),
            ('WeeklyPriceHistory', 'weekly_price_history'),
        ]
    ]
| 37.487179
| 114
| 0.491108
| 224
| 2,924
| 6.276786
| 0.272321
| 0.227596
| 0.071124
| 0.065434
| 0.79872
| 0.719772
| 0.719772
| 0.719772
| 0.719772
| 0.719772
| 0
| 0.014909
| 0.357729
| 2,924
| 77
| 115
| 37.974026
| 0.733759
| 0.015732
| 0
| 0.732394
| 1
| 0
| 0.129346
| 0.015299
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.014085
| 0
| 0.056338
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
551d206c196facc257f726dcaf7f8ae2b096b5cc
| 56,597
|
py
|
Python
|
tests/bugs/core_5970_test.py
|
FirebirdSQL/firebird-qa
|
96af2def7f905a06f178e2a80a2c8be4a4b44782
|
[
"MIT"
] | 1
|
2022-02-05T11:37:13.000Z
|
2022-02-05T11:37:13.000Z
|
tests/bugs/core_5970_test.py
|
FirebirdSQL/firebird-qa
|
96af2def7f905a06f178e2a80a2c8be4a4b44782
|
[
"MIT"
] | 1
|
2021-09-03T11:47:00.000Z
|
2021-09-03T12:42:10.000Z
|
tests/bugs/core_5970_test.py
|
FirebirdSQL/firebird-qa
|
96af2def7f905a06f178e2a80a2c8be4a4b44782
|
[
"MIT"
] | 1
|
2021-06-30T14:14:16.000Z
|
2021-06-30T14:14:16.000Z
|
#coding:utf-8
#
# id: bugs.core_5970
# title: Built-in cryptographic functions
# description:
# Issues found during implementing this test - see CORE-6185, CORE-6186.
# This test checks only ability to call ENCRYPT()/DECRYPT() functions with different parameters.
# Also, it checks that <source> -> encrypt(<source>) -> decrypt(encrypted_source) gives the same <source>.
#
# Checked on:
# 4.0.0.1646 SS: 3.657s.
# 4.0.0.1637 SC: 3.271s.
# 4.0.0.1633 CS: 4.191s.
#
# tracker_id: CORE-5970
# min_versions: ['4.0.0']
# versions: 4.0
# qmid: None
import pytest
from firebird.qa import db_factory, isql_act, Action
# version: 4.0
# resources: None
# Collapse runs of spaces/tabs in actual vs. expected output before comparing.
substitutions_1 = [('[ \t]+', ' ')]
# No pre-created objects: the test script below builds its own schema.
init_script_1 = """"""
db_1 = db_factory(sql_dialect=3, init=init_script_1)
test_script_1 = """
set blob all;
set list on;
create or alter procedure sp_block_test(a_alg varchar(30)) as begin end;
create or alter procedure sp_stream_test(a_alg varchar(30)) as begin end;
commit;
recreate table test( crypto_alg varchar(30), source_text blob, crypto_key varchar(128), crypto_iv varchar(128) );
recreate global temporary table gtt_tmp(
source_text blob
,encrypted_text blob
) on commit delete rows;
commit;
recreate table secure_table(secret_field varchar(1000), init_vector varchar(16) );
insert into secure_table(secret_field, init_vector) values( lpad('',1000, 'A'), '1234567890123456');
commit;
--set echo on;
-- Should NOT cause any errors when call encrypt() decrypt() for these params:
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'AES', lpad('', 65535, gen_uuid()), lpad('',16, uuid_to_char( gen_uuid() )), lpad('',16, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'ANUBIS', lpad('', 65535, gen_uuid()), lpad('',16, uuid_to_char( gen_uuid() )), lpad('',16, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'BLOWFISH', lpad('', 65535, gen_uuid()), lpad('',16, uuid_to_char( gen_uuid() )), lpad('', 8, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'KHAZAD', lpad('', 65535, gen_uuid()), lpad('',16, uuid_to_char( gen_uuid() )), lpad('', 8, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'RC5', lpad('', 65535, gen_uuid()), lpad('',16, uuid_to_char( gen_uuid() )), lpad('', 8, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'RC6', lpad('', 65535, gen_uuid()), lpad('',16, uuid_to_char( gen_uuid() )), lpad('',16, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( '"SAFER+"', lpad('', 65535, gen_uuid()), lpad('',16, uuid_to_char( gen_uuid() )), lpad('',16, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'TWOFISH', lpad('', 65535, gen_uuid()), lpad('',16, uuid_to_char( gen_uuid() )), lpad('',16, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'XTEA', lpad('', 65535, gen_uuid()), lpad('',16, uuid_to_char( gen_uuid() )), lpad('', 8, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'CHACHA20', lpad('', 65535, gen_uuid()), lpad('',16, uuid_to_char( gen_uuid() )), lpad('', 8, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'RC4', lpad('', 65535, gen_uuid()), lpad('',16, uuid_to_char( gen_uuid() )), null );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'SOBER128', lpad('', 65535, gen_uuid()), lpad('',16, uuid_to_char( gen_uuid() )), lpad('', 8, uuid_to_char( gen_uuid() )) );
-- Should cause FAILS: invalid length of keys:
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'AES', lpad('', 65535, gen_uuid()), lpad('',11, uuid_to_char( gen_uuid() )), lpad('',16, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'ANUBIS', lpad('', 65535, gen_uuid()), lpad('',12, uuid_to_char( gen_uuid() )), lpad('',16, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'BLOWFISH', lpad('', 65535, gen_uuid()), lpad('',13, uuid_to_char( gen_uuid() )), lpad('', 8, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'KHAZAD', lpad('', 65535, gen_uuid()), lpad('',14, uuid_to_char( gen_uuid() )), lpad('', 8, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'RC5', lpad('', 65535, gen_uuid()), lpad('',15, uuid_to_char( gen_uuid() )), lpad('', 8, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'RC6', lpad('', 65535, gen_uuid()), lpad('',17, uuid_to_char( gen_uuid() )), lpad('',16, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( '"SAFER+"', lpad('', 65535, gen_uuid()), lpad('',18, uuid_to_char( gen_uuid() )), lpad('',16, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'TWOFISH', lpad('', 65535, gen_uuid()), lpad('',19, uuid_to_char( gen_uuid() )), lpad('',16, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'XTEA', lpad('', 65535, gen_uuid()), lpad('',20, uuid_to_char( gen_uuid() )), lpad('', 8, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'CHACHA20', lpad('', 65535, gen_uuid()), lpad('',21, uuid_to_char( gen_uuid() )), lpad('', 8, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'RC4', lpad('', 65535, gen_uuid()), lpad('',22, uuid_to_char( gen_uuid() )), null );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'SOBER128', lpad('', 65535, gen_uuid()), lpad('',23, uuid_to_char( gen_uuid() )), lpad('', 8, uuid_to_char( gen_uuid() )) );
-- Should cause FAILS: invalid length of IVs:
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'AES', lpad('', 65535, gen_uuid()), lpad('',16, uuid_to_char( gen_uuid() )), lpad('',11, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'ANUBIS', lpad('', 65535, gen_uuid()), lpad('',16, uuid_to_char( gen_uuid() )), lpad('',13, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'BLOWFISH', lpad('', 65535, gen_uuid()), lpad('',16, uuid_to_char( gen_uuid() )), lpad('',15, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'KHAZAD', lpad('', 65535, gen_uuid()), lpad('',16, uuid_to_char( gen_uuid() )), lpad('',17, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'RC5', lpad('', 65535, gen_uuid()), lpad('',16, uuid_to_char( gen_uuid() )), lpad('',19, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'RC6', lpad('', 65535, gen_uuid()), lpad('',16, uuid_to_char( gen_uuid() )), lpad('',21, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( '"SAFER+"', lpad('', 65535, gen_uuid()), lpad('',16, uuid_to_char( gen_uuid() )), lpad('',23, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'TWOFISH', lpad('', 65535, gen_uuid()), lpad('',16, uuid_to_char( gen_uuid() )), lpad('',25, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'XTEA', lpad('', 65535, gen_uuid()), lpad('',26, uuid_to_char( gen_uuid() )), lpad('',27, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'CHACHA20', lpad('', 65535, gen_uuid()), lpad('',16, uuid_to_char( gen_uuid() )), lpad('',29, uuid_to_char( gen_uuid() )) );
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'RC4', lpad('', 65535, gen_uuid()), lpad('',16, uuid_to_char( gen_uuid() )), lpad('',31, uuid_to_char( gen_uuid() )) ); -- IV not needed for this alg
insert into test( crypto_alg, source_text, crypto_key, crypto_iv) values( 'SOBER128', lpad('', 65535, gen_uuid()), lpad('',16, uuid_to_char( gen_uuid() )), lpad('',33, uuid_to_char( gen_uuid() )) );
commit;
set term ^;
create or alter procedure sp_block_test(a_alg varchar(30))
returns(
encryption_algorithm varchar(30)
,encryption_mode varchar(10)
,enc_key_octet_length int
,enc_init_vector_octet_length int
,encrypted_equals_to_decrypted boolean
,encryption_finish_gdscode int
) as
declare v_encrypted blob;
declare v_encrypt_sttm blob;
declare v_decrypt_sttm blob;
declare s_source_text blob;
declare s_decrypted_text blob;
begin
delete from gtt_tmp;
for
select
t.source_text
,t.crypto_alg
,t.crypto_key
,t.crypto_iv
from test t
where upper( t.crypto_alg ) = upper( :a_alg )
as cursor c
do begin
encryption_algorithm = c.crypto_alg;
enc_key_octet_length = octet_length( c.crypto_key );
enc_init_vector_octet_length = octet_length( c.crypto_iv );
-- block_cipher ::= { AES | ANUBIS | BLOWFISH | KHAZAD | RC5 | RC6 | SAFER+ | TWOFISH | XTEA }
-- mode ::= { CBC | CFB | CTR | ECB | OFB }
for
select 'CBC' as mode from rdb$database union all
select 'CFB' from rdb$database union all -- AES
select 'CTR' from rdb$database union all -- AES
select 'ECB' from rdb$database union all
select 'OFB' from rdb$database -- AES
as cursor cm
do begin
encryption_mode = cm.mode;
encrypted_equals_to_decrypted = null;
encryption_finish_gdscode = null;
begin
-- Mode should be specified for block ciphers.
-- Initialization vector (IV) should be specified for block ciphers in all modes except ECB and all stream ciphers except RC4.
insert into gtt_tmp(source_text) values(c.source_text);
s_source_text = c.source_text;
-- This caused crash when length of string was 65535; sent letter to Alex et al, 11.11.2019:
-- v_encrypt_sttm = 'select encrypt( q''{' || c.source_text || '}'' using ' || c.crypto_alg || ' mode ofb key q''{' || c.crypto_key || '}'' iv q''{' || c.crypto_iv || '}'' ) from rdb$database';
v_encrypt_sttm = 'select encrypt( t.source_text using ' || c.crypto_alg || ' mode ' || cm.mode || ' key q''{' || c.crypto_key || '}'' iv q''{' || c.crypto_iv || '}'' ) from gtt_tmp t';
execute statement v_encrypt_sttm into v_encrypted;
update gtt_tmp t set t.encrypted_text = :v_encrypted;
v_decrypt_sttm = 'select decrypt( t.encrypted_text using ' || c.crypto_alg || ' mode ' || cm.mode || ' key q''{' || c.crypto_key || '}'' iv q''{' || c.crypto_iv || '}'' ) from gtt_tmp t';
execute statement v_decrypt_sttm into s_decrypted_text;
encrypted_equals_to_decrypted = false;
if ( hash(s_source_text) = hash(s_decrypted_text) ) then
if (s_source_text = s_decrypted_text) then
encrypted_equals_to_decrypted = true;
when any do
begin
-- 335545230 : TomCrypt library error: Invalid argument provided.
-- 335545234 : Encrypting in CBC mode
-- 335545224 : Initialization vector (IV) makes no sense for chosen cipher and/or mode
encryption_finish_gdscode = gdscode;
end
end
suspend;
delete from gtt_tmp;
end
end
end
^
create or alter procedure sp_stream_test(a_alg varchar(30))
returns(
encryption_algorithm varchar(30)
,enc_key_octet_length int
,enc_init_vector_octet_length int
,encrypted_equals_to_decrypted boolean
,encryption_finish_gdscode int
) as
declare v_encrypted blob;
declare v_encrypt_sttm blob;
declare v_decrypt_sttm blob;
declare s_source_text blob;
declare s_decrypted_text blob;
declare iv_suffix blob;
begin
delete from gtt_tmp;
for
select
t.source_text
,t.crypto_alg
,t.crypto_key
,t.crypto_iv
from test t
where upper( t.crypto_alg ) = upper( :a_alg )
as cursor c
do begin
-- stream_cipher ::= { CHACHA20 | RC4 | SOBER128 }
encryption_algorithm = c.crypto_alg;
enc_key_octet_length = octet_length( c.crypto_key );
encryption_finish_gdscode = null;
begin
-- Mode should be specified for block ciphers.
-- Initialization vector (IV) should be specified for block ciphers in all modes except ECB and all stream ciphers except RC4.
insert into gtt_tmp(source_text) values(c.source_text);
s_source_text = c.source_text;
enc_init_vector_octet_length = 0;
if ( upper( :a_alg ) = upper('RC4') ) then
iv_suffix= '';
else
begin
iv_suffix= ' iv q''{' || c.crypto_iv || '}'' ';
enc_init_vector_octet_length = octet_length(c.crypto_iv);
end
v_encrypt_sttm = 'select encrypt( t.source_text using ' || c.crypto_alg || ' key q''{' || c.crypto_key || '}'' ' || iv_suffix || ') from gtt_tmp t';
execute statement v_encrypt_sttm into v_encrypted;
update gtt_tmp t set t.encrypted_text = :v_encrypted;
v_decrypt_sttm = 'select decrypt( t.encrypted_text using ' || c.crypto_alg || ' key q''{' || c.crypto_key || '}'' ' || iv_suffix || ') from gtt_tmp t';
execute statement v_decrypt_sttm into s_decrypted_text;
encrypted_equals_to_decrypted = false;
if ( hash(s_source_text) = hash(s_decrypted_text) ) then
if (s_source_text = s_decrypted_text) then
encrypted_equals_to_decrypted = true;
when any do
begin
encryption_finish_gdscode = gdscode;
end
end
suspend;
delete from gtt_tmp;
end
end
^
set term ;^
commit;
---------------------------------------
set bail off;
-- 1. Main checks:
-- ###############
-- 1.1 Block cipher:
select * from sp_block_test('aes');
select * from sp_block_test('anubis');
select * from sp_block_test('blowfish');
select * from sp_block_test('khazad');
select * from sp_block_test('rc5');
select * from sp_block_test('rc6');
select * from sp_block_test('"safer+"');
select * from sp_block_test('twofish');
select * from sp_block_test('xtea');
-- 1.2 Stream cipher:
select * from sp_stream_test('chacha20');
select * from sp_stream_test('rc4');
select * from sp_stream_test('sober128');
-- 2. Auxiliary checks:
-- ####################
-- 2.1. "Counter length (CTR_LENGTH, bytes) may be specified only in CTR mode, default is the size of IV."
select encrypt( 'fooriobar' using AES mode CTR key q'{A8586F1E-DB13-4D}' iv q'{D2FF255D-EDE3-44}' CTR_LENGTH -123 ) as ctr_clause_case_1 from rdb$database;
select encrypt( 'fooriobar' using AES mode CTR key q'{A8586F1E-DB13-4D}' iv q'{D2FF255D-EDE3-44}' CTR_LENGTH 0 ) as ctr_clause_case_2 from rdb$database;
select encrypt( 'fooriobar' using AES mode CTR key q'{A8586F1E-DB13-4D}' iv q'{D2FF255D-EDE3-44}' CTR_LENGTH 16 ) as ctr_clause_case_3 from rdb$database;
select encrypt( 'fooriobar' using AES mode CTR key q'{A8586F1E-DB13-4D}' iv q'{D2FF255D-EDE3-44}' CTR_LENGTH 123 ) as ctr_clause_case_4 from rdb$database;
select encrypt( 'fooriobar' using AES mode OFB key q'{A8586F1E-DB13-4D}' iv q'{D2FF255D-EDE3-44}' CTR_LENGTH 16 ) as ctr_clause_case_5 from rdb$database;
-- 2.2. "Initial counter value (COUNTER) may be specified only for CHACHA20 cipher, default is 0."
select encrypt( 'fooriobar' using CHACHA20 key q'{1110FB89-AD32-4E}' iv q'{114E811E}' counter 0 ) from rdb$database;
-- lead to crash, letter 11.11.2019 15:35 --> select encrypt( 'fooriobar' using CHACHA20 key q'{1110FB89-AD32-4E}' iv q'{114E811E}' counter cast(null as bigint) ) from rdb$database;
select encrypt( 'fooriobar' using CHACHA20 key q'{1110FB89-AD32-4E}' iv q'{114E811E}' counter 1 ) from rdb$database;
select encrypt( 'fooriobar' using CHACHA20 key q'{1110FB89-AD32-4E}' iv q'{114E811E}' counter -9223372036854775808 ) from rdb$database;
select encrypt( 'fooriobar' using CHACHA20 key q'{1110FB89-AD32-4E}' iv q'{114E811E}' counter 9223372036854775807 ) from rdb$database;
-- 2.3. Following query led to crash, see letter to Alex, 30.12.2018 00:15
-- Expected STDERR:
-- Statement failed, SQLSTATE = 22023
-- Invalid key length 9, need 16 or 32
select encrypt('QweRtYUioP' using chacha20 key '192837465' iv '777555333') as invalid_params from rdb$database;
-- 4. "Functions return BLOB when first argument is blob and varbinary for all text types."
set sqlda_display on;
with
d as (
select
cast('Functions return BLOB when first argument is blob and varbinary for all text types.' as blob) as d_blob
,cast('Functions return BLOB when first argument is blob and varbinary for all text types.' as varchar(255) ) as d_char
,x'0154090759DF' as e_bin
from rdb$database
)
select
encrypt( d.d_blob using AES mode CTR key q'{A8586F1E-DB13-4D}' iv q'{D2FF255D-EDE3-44}' ) as e_blob
,encrypt( d.d_char using AES mode CTR key q'{A8586F1E-DB13-4D}' iv q'{D2FF255D-EDE3-44}' ) as e_char
,decrypt( d.e_bin using sober128 key 'AbcdAbcdAbcdAbcd' iv '01234567') as d_bin
from d
rows 0;
set sqlda_display off;
"""
# ISQL-based test action: runs test_script_1 against the 'db_1' fixture database,
# applying substitutions_1 to normalize volatile output before comparison.
act_1 = isql_act('db_1', test_script_1, substitutions=substitutions_1)
expected_stdout_1 = """
ENCRYPTION_ALGORITHM AES
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM AES
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM AES
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM AES
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM AES
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM AES
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 11
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM AES
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 11
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM AES
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 11
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM AES
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 11
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM AES
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 11
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM AES
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 11
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM AES
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 11
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM AES
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 11
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM AES
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 11
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM AES
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 11
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM ANUBIS
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM ANUBIS
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM ANUBIS
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM ANUBIS
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM ANUBIS
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM ANUBIS
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 12
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM ANUBIS
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 12
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM ANUBIS
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 12
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM ANUBIS
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 12
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM ANUBIS
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 12
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM ANUBIS
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 13
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM ANUBIS
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 13
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM ANUBIS
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 13
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM ANUBIS
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 13
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM ANUBIS
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 13
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM BLOWFISH
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM BLOWFISH
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM BLOWFISH
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM BLOWFISH
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM BLOWFISH
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM BLOWFISH
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 13
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM BLOWFISH
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 13
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM BLOWFISH
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 13
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM BLOWFISH
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 13
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM BLOWFISH
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 13
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM BLOWFISH
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 15
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM BLOWFISH
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 15
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM BLOWFISH
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 15
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM BLOWFISH
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 15
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM BLOWFISH
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 15
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM KHAZAD
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM KHAZAD
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM KHAZAD
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM KHAZAD
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM KHAZAD
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM KHAZAD
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 14
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM KHAZAD
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 14
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM KHAZAD
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 14
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM KHAZAD
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 14
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM KHAZAD
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 14
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM KHAZAD
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 17
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM KHAZAD
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 17
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM KHAZAD
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 17
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM KHAZAD
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 17
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM KHAZAD
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 17
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM RC5
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM RC5
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM RC5
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM RC5
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM RC5
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM RC5
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 15
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM RC5
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 15
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM RC5
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 15
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM RC5
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 15
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM RC5
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 15
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM RC5
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 19
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM RC5
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 19
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM RC5
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 19
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM RC5
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 19
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM RC5
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 19
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM RC6
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM RC6
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM RC6
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM RC6
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM RC6
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM RC6
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 17
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM RC6
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 17
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM RC6
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 17
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM RC6
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 17
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM RC6
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 17
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM RC6
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 21
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM RC6
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 21
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM RC6
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 21
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM RC6
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 21
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM RC6
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 21
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM "SAFER+"
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM "SAFER+"
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM "SAFER+"
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM "SAFER+"
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM "SAFER+"
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM "SAFER+"
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 18
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM "SAFER+"
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 18
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM "SAFER+"
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 18
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM "SAFER+"
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 18
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM "SAFER+"
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 18
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM "SAFER+"
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 23
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM "SAFER+"
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 23
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM "SAFER+"
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 23
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM "SAFER+"
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 23
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM "SAFER+"
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 23
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM TWOFISH
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM TWOFISH
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM TWOFISH
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM TWOFISH
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM TWOFISH
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM TWOFISH
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 19
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM TWOFISH
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 19
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM TWOFISH
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 19
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM TWOFISH
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 19
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM TWOFISH
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 19
ENC_INIT_VECTOR_OCTET_LENGTH 16
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM TWOFISH
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 25
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM TWOFISH
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 25
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM TWOFISH
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 25
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM TWOFISH
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 25
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM TWOFISH
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 25
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM XTEA
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM XTEA
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM XTEA
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM XTEA
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM XTEA
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM XTEA
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 20
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM XTEA
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 20
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM XTEA
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 20
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM XTEA
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 20
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM XTEA
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 20
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM XTEA
ENCRYPTION_MODE CBC
ENC_KEY_OCTET_LENGTH 26
ENC_INIT_VECTOR_OCTET_LENGTH 27
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM XTEA
ENCRYPTION_MODE CFB
ENC_KEY_OCTET_LENGTH 26
ENC_INIT_VECTOR_OCTET_LENGTH 27
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM XTEA
ENCRYPTION_MODE CTR
ENC_KEY_OCTET_LENGTH 26
ENC_INIT_VECTOR_OCTET_LENGTH 27
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM XTEA
ENCRYPTION_MODE ECB
ENC_KEY_OCTET_LENGTH 26
ENC_INIT_VECTOR_OCTET_LENGTH 27
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545224
ENCRYPTION_ALGORITHM XTEA
ENCRYPTION_MODE OFB
ENC_KEY_OCTET_LENGTH 26
ENC_INIT_VECTOR_OCTET_LENGTH 27
ENCRYPTED_EQUALS_TO_DECRYPTED <null>
ENCRYPTION_FINISH_GDSCODE 335545229
ENCRYPTION_ALGORITHM CHACHA20
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM CHACHA20
ENC_KEY_OCTET_LENGTH 21
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE 335545250
ENCRYPTION_ALGORITHM CHACHA20
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 29
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE 335545240
ENCRYPTION_ALGORITHM RC4
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 0
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM RC4
ENC_KEY_OCTET_LENGTH 22
ENC_INIT_VECTOR_OCTET_LENGTH 0
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM RC4
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 0
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM SOBER128
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE <null>
ENCRYPTION_ALGORITHM SOBER128
ENC_KEY_OCTET_LENGTH 23
ENC_INIT_VECTOR_OCTET_LENGTH 8
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE 335545230
ENCRYPTION_ALGORITHM SOBER128
ENC_KEY_OCTET_LENGTH 16
ENC_INIT_VECTOR_OCTET_LENGTH 33
ENCRYPTED_EQUALS_TO_DECRYPTED <true>
ENCRYPTION_FINISH_GDSCODE 335545230
CTR_CLAUSE_CASE_2 E813A50C069FC418AA
CTR_CLAUSE_CASE_3 E813A50C069FC418AA
ENCRYPT 8E709DDA89912F172C
ENCRYPT BC3604C147B53D3BDD
ENCRYPT C8051FB1A2581EA9A1
ENCRYPT 2E2298CF4C2B81AD54
INPUT message field count: 0
OUTPUT message field count: 3
01: sqltype: 520 BLOB scale: 0 subtype: 0 len: 8
: name: ENCRYPT alias: E_BLOB
: table: owner:
02: sqltype: 448 VARYING scale: 0 subtype: 0 len: 255 charset: 1 OCTETS
: name: ENCRYPT alias: E_CHAR
: table: owner:
03: sqltype: 448 VARYING scale: 0 subtype: 0 len: 6 charset: 1 OCTETS
: name: DECRYPT alias: D_BIN
: table: owner:
"""
# Expected error output (SQLSTATE 22023) from the auxiliary checks in test_script_1:
# the out-of-range / wrong-mode CTR_LENGTH cases (2.1) and the invalid
# CHACHA20 key length case (2.3). Compared via Action.clean_stderr.
expected_stderr_1 = """
Statement failed, SQLSTATE = 22023
Too big counter value -123, maximum 16 can be used
Statement failed, SQLSTATE = 22023
Too big counter value 123, maximum 16 can be used
Statement failed, SQLSTATE = 22023
Counter length/value parameter is not used with mode OFB
Statement failed, SQLSTATE = 22023
Invalid key length 9, need 16 or 32
"""
@pytest.mark.version('>=4.0')
def test_1(act_1: Action):
act_1.expected_stdout = expected_stdout_1
act_1.expected_stderr = expected_stderr_1
act_1.execute()
assert act_1.clean_stderr == act_1.clean_expected_stderr
assert act_1.clean_stdout == act_1.clean_expected_stdout
| 40.426429
| 234
| 0.635422
| 6,599
| 56,597
| 5.034702
| 0.053493
| 0.099657
| 0.077263
| 0.118168
| 0.909102
| 0.897935
| 0.894805
| 0.891253
| 0.886137
| 0.881682
| 0
| 0.063883
| 0.30855
| 56,597
| 1,399
| 235
| 40.455325
| 0.785097
| 0.012439
| 0
| 0.845415
| 0
| 0.055895
| 0.988706
| 0.236934
| 0
| 0
| 0
| 0
| 0.001747
| 1
| 0.000873
| false
| 0
| 0.001747
| 0
| 0.00262
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9b9215eb4a23e5091d119468e3e50fb809e78378
| 14,916
|
py
|
Python
|
stats/ISCs_sign_one_sample_perm/plot_ISC_subplots_T_vals.py
|
athiede13/free_speech
|
bde32c2d48724c98f089376876cf9888f67a9f20
|
[
"MIT"
] | null | null | null |
stats/ISCs_sign_one_sample_perm/plot_ISC_subplots_T_vals.py
|
athiede13/free_speech
|
bde32c2d48724c98f089376876cf9888f67a9f20
|
[
"MIT"
] | null | null | null |
stats/ISCs_sign_one_sample_perm/plot_ISC_subplots_T_vals.py
|
athiede13/free_speech
|
bde32c2d48724c98f089376876cf9888f67a9f20
|
[
"MIT"
] | null | null | null |
"""
Plot subplots.
Created on 6.5.2019
@author: Anja Thiede <anja.thiede@helsinki.fi>
"""
import matplotlib.pyplot as plt
#%matplotlib qt
#%matplotlib inline
#to fill
# Base directory holding the per-band brain images and receiving the output
# figures.
filepath = '/media/cbru/SMEDY/results/ISCs_comp_against_0/'
con = 'speech'

# Frequency-band tags exactly as they appear in the image file names,
# ordered from delta to high gamma.  The first band's odd tag is the
# literal "5.000000e-01" (0.5 Hz) produced by the upstream pipeline.
_bands = ('5.000000e-01-4Hz', '4-8Hz', '8-12Hz', '12-25Hz',
          '25-45Hz', '55-90Hz')
# View/hemisphere suffixes: lateral and medial views, left and right
# hemisphere, in the fixed column order used by the subplot grids.
_views = ('lat-lh', 'lat-rh', 'med-lh', 'med-rh')


def _group_images(group, band):
    """Return the four view image paths (lat/med x lh/rh) for one group/band.

    *group* is 'con' (controls) or 'dys' (dyslexics); *band* is one of
    the tags in ``_bands``.
    """
    return tuple('%s%s_%s_613_1_%s.jpg' % (filepath, group, band, view)
                 for view in _views)


# Full layout: per band, 4 control views, a white spacer, 4 dyslexic views
# and that band's colorbar legend -- 10 images x 6 bands = 60 paths.
# (Previously these 60 paths were written out by hand; generating them
# removes the duplication and the chance of a per-line typo.)
files = tuple(
    path
    for idx, band in enumerate(_bands, start=1)
    for path in (_group_images('con', band)
                 + (filepath + 'white.jpg',)
                 + _group_images('dys', band)
                 + (filepath + 'legend/T-stats-colorbar%d.jpg' % idx,))
)
# Control-group images only: 4 views per band, 24 paths.
files_con = tuple(p for band in _bands for p in _group_images('con', band))
# Dyslexic-group images only: 4 views per band, 24 paths.
files_dys = tuple(p for band in _bands for p in _group_images('dys', band))
# One colorbar legend image per band, numbered 1..6.
files_leg = tuple(filepath + 'legend/T-stats-colorbar%d.jpg' % i
                  for i in range(1, 7))
#plot subplots
plt.rcParams['font.family'] = "serif"
# One figure holds all three grids (controls, dyslexics, colorbars).
fig = plt.figure(figsize=(17, 13))
# Cell indices (row-major in a 6x4 GridSpec) that receive the band label:
# the last column of each of the six rows.
positions = {3, 7, 11, 15, 19, 23}
# Band labels for the subplot grid: within each row of four cells only the
# last cell (index 3) carries a visible label, the first three stay empty.
# \u2013 is an en dash for the frequency ranges.
_band_labels = ('delta (0.5\u20134 Hz)', 'theta (4\u20138 Hz)',
                'alpha (8\u201312 Hz)', 'beta (12\u201325 Hz)',
                'low gamma (25\u201345 Hz)', 'high gamma (55\u201390 Hz)')
legend = tuple(label if col == 3 else ''
               for label in _band_labels for col in range(4))
# delta \u03B4
# theta \u03B8
# alpha \u03B1
# beta \u03B2
# gamma \u03B3
#for file in files:
# img = plt.imread(file, format='jpg')
# ax = fig.add_subplot(6,10,i)
# ax.imshow(img, aspect = 'equal')
# ax.axis('on')
# if i in positions:
# ax.text(0.6, 1.05, legend[i], horizontalalignment='center',
# verticalalignment='center', transform=ax.transAxes, fontsize=17)
# if i==3:
# ax.text(0, 1.9, 'control group', horizontalalignment='center',
# verticalalignment='center', transform=ax.transAxes, fontsize=22)
# if i==8:
# ax.text(0, 1.9, 'dyslexic group', horizontalalignment='center',
# verticalalignment='center', transform=ax.transAxes, fontsize=22)
# if i in {1,3,6,8}:
# ax.text(0.5, 1.5, 'lh', horizontalalignment='center',
# verticalalignment='center', transform=ax.transAxes, fontsize=15)
# if i in {2,4,7,9}:
# ax.text(0.5, 1.5, 'rh', horizontalalignment='center',
# verticalalignment='center', transform=ax.transAxes, fontsize=15)
# if i==5:
# left, width = 0.07, 0.1
# bottom, height = 0.1, .8
# ax.axis([left, bottom, width, height], {'on','image'})
# plt.set_cmap('hot')
# i=i+1
# Rightmost column of the figure: the six per-band colorbar legends,
# stacked vertically in a 6x1 grid.
gs3 = plt.GridSpec(6, 1)
gs3.update(left=0.74, right=0.84)
for cell, legend_path in enumerate(files_leg):
    colorbar_img = plt.imread(legend_path, format='jpg')
    legend_ax = fig.add_subplot(gs3[cell])
    legend_ax.imshow(colorbar_img)
    legend_ax.axis('off')
    legend_ax.set_aspect('equal')
i = 0
# Left third of the figure: control-group images in a 6x4 grid
# (rows = frequency bands, columns = lat-lh, lat-rh, med-lh, med-rh).
gs1 = plt.GridSpec(6, 4)
gs1.update(left=0, right=0.38)
for file in files_con:
    img = plt.imread(file, format='jpg')
    ax = fig.add_subplot(gs1[i])
    ax.imshow(img, aspect='equal')
    ax.axis('off')
    if i in positions:
        # Last cell of each row: draw the band label to the right of the cell
        # (x=1.3 in axes coordinates, i.e. outside the axes).
        ax.text(1.3, 1.05, legend[i], horizontalalignment='center',
                verticalalignment='center', transform=ax.transAxes, fontsize=17)
    if i == 2:
        # Anchor the group header and the lateral/medial column headers on
        # the third cell of the top row, using offsets in axes coordinates.
        ax.text(0, 2, 'control group', horizontalalignment='center',
                verticalalignment='center', transform=ax.transAxes, fontsize=22)
        ax.text(-1, 1.6, 'lateral', horizontalalignment='center',
                verticalalignment='center', transform=ax.transAxes, fontsize=22)
        ax.text(1, 1.6, 'medial', horizontalalignment='center',
                verticalalignment='center', transform=ax.transAxes, fontsize=22)
    if i in {0, 2}:
        # Columns 0 and 2 of the top row show left-hemisphere images.
        ax.text(0.5, 1.3, 'lh', horizontalalignment='center',
                verticalalignment='center', transform=ax.transAxes, fontsize=15)
    if i in {1, 3}:
        # Columns 1 and 3 of the top row show right-hemisphere images.
        ax.text(0.5, 1.3, 'rh', horizontalalignment='center',
                verticalalignment='center', transform=ax.transAxes, fontsize=15)
    i = i+1
ii = 0
# Middle third of the figure: dyslexic-group images, same 6x4 layout and
# labelling scheme as the control grid above.
gs2 = plt.GridSpec(6, 4)
gs2.update(left=0.4, right=0.78)
for file in files_dys:
    img = plt.imread(file, format='jpg')
    ax2 = fig.add_subplot(gs2[ii])
    ax2.imshow(img, aspect='equal')
    ax2.axis('off')
    if ii == 2:
        # Group header plus lateral/medial column headers over the top row.
        ax2.text(0, 2, 'dyslexic group', horizontalalignment='center',
                 verticalalignment='center', transform=ax2.transAxes, fontsize=22)
        ax2.text(-1, 1.6, 'lateral', horizontalalignment='center',
                 verticalalignment='center', transform=ax2.transAxes, fontsize=22)
        ax2.text(1, 1.6, 'medial', horizontalalignment='center',
                 verticalalignment='center', transform=ax2.transAxes, fontsize=22)
    if ii in {0, 2}:
        # Left-hemisphere tags over columns 0 and 2 of the top row.
        ax2.text(0.5, 1.3, 'lh', horizontalalignment='center',
                 verticalalignment='center', transform=ax2.transAxes, fontsize=15)
    if ii in {1, 3}:
        # Right-hemisphere tags over columns 1 and 3 of the top row.
        ax2.text(0.5, 1.3, 'rh', horizontalalignment='center',
                 verticalalignment='center', transform=ax2.transAxes, fontsize=15)
    ii = ii+1
#fig.tight_layout()
fig.subplots_adjust(wspace=0, hspace=0.12)
fig.suptitle("T-statistics of ISCs during speech", fontsize=25, x=0.4, y=1.06)
# Save BEFORE plt.show(): with interactive backends, show() blocks and the
# figure may be torn down when the window closes, which previously could
# produce blank/empty output files.
fig.savefig(filepath + 'summary_T_' + con + '_ISCs2.pdf', dpi=600, bbox_inches='tight')
fig.savefig(filepath + 'summary_T_' + con + '_ISCs2.png', dpi=600, bbox_inches='tight')
plt.show()
| 57.369231
| 99
| 0.687785
| 2,432
| 14,916
| 3.909539
| 0.072368
| 0.108856
| 0.169331
| 0.253997
| 0.904186
| 0.896403
| 0.886411
| 0.877051
| 0.872949
| 0.855069
| 0
| 0.096849
| 0.161706
| 14,916
| 259
| 100
| 57.590734
| 0.663548
| 0.09158
| 0
| 0.658031
| 0
| 0.082902
| 0.660966
| 0.626684
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.005181
| 0
| 0.005181
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b5db0d2015d2107e7ea954526af082a93180936f
| 6,360
|
py
|
Python
|
pycql/lextab.py
|
tomkralidis/pycql
|
35cea1ee462e06b75bbe47bf766e8f0bf79b0018
|
[
"MIT"
] | 10
|
2019-09-18T13:29:17.000Z
|
2021-07-15T18:21:54.000Z
|
pycql/lextab.py
|
tomkralidis/pycql
|
35cea1ee462e06b75bbe47bf766e8f0bf79b0018
|
[
"MIT"
] | 6
|
2019-09-18T14:55:57.000Z
|
2021-08-30T21:43:46.000Z
|
pycql/lextab.py
|
tomkralidis/pycql
|
35cea1ee462e06b75bbe47bf766e8f0bf79b0018
|
[
"MIT"
] | 4
|
2020-05-13T12:35:12.000Z
|
2021-06-29T10:37:11.000Z
|
# lextab.py. This file automatically created by PLY (version 3.11). Don't edit!
# NOTE(review): machine-generated lexer tables for pycql; regenerate with PLY
# instead of editing by hand.
_tabversion = '3.10'  # PLY table-format version these tables were written for
# Complete set of token names the generated lexer can emit.
_lextokens = set(('AFTER', 'AND', 'ATTRIBUTE', 'BBOX', 'BEFORE', 'BETWEEN', 'BEYOND', 'COMMA', 'CONTAINS', 'CROSSES', 'DISJOINT', 'DIVIDE', 'DURATION', 'DURING', 'DWITHIN', 'ENVELOPE', 'EQ', 'EQUALS', 'FLOAT', 'GE', 'GEOMETRY', 'GT', 'ILIKE', 'IN', 'INTEGER', 'INTERSECTS', 'IS', 'LBRACKET', 'LE', 'LIKE', 'LPAREN', 'LT', 'MINUS', 'NE', 'NOT', 'NULL', 'OR', 'OVERLAPS', 'PLUS', 'QUOTED', 'RBRACKET', 'RELATE', 'RPAREN', 'TIME', 'TIMES', 'TOUCHES', 'UNITS', 'WITHIN', 'feet', 'kilometers', 'meters', 'nautical miles', 'statute miles'))
_lexreflags = 64  # regex compile flags (64 == re.VERBOSE)
_lexliterals = ''  # no single-character literal tokens
# Lexer states: only the default inclusive INITIAL state is used.
_lexstateinfo = {'INITIAL': 'inclusive'}
_lexstatere = {'INITIAL': [('(?P<t_GEOMETRY>(POINT\\s*\\(((-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*))\\))|((MULTIPOINT|LINESTRING)\\s*\\(((-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*))(\\s*,\\s*((-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*)))*\\))|((MULTIPOINT|MULTILINESTRING|POLYGON)\\s*\\(\\(\\s*((-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*))(\\s*,\\s*((-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*)))*\\s*\\)(\\s*,\\s*\\(\\s*((-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*))(\\s*,\\s*((-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*)))*\\s*\\))*\\))|(MULTIPOLYGON\\s*\\(\\(\\s*\\(\\s*((-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]
*\\.?[0-9]+\\s*))(\\s*,\\s*((-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*)))*\\s*\\)(\\s*,\\s*\\(\\s*((-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*))(\\s*,\\s*((-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*)))*\\s*\\))*\\s*\\)(\\s*,\\s*\\(\\s*\\(\\s*((-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*))(\\s*,\\s*((-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*)))*\\s*\\)(\\s*,\\s*\\(\\s*((-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*))(\\s*,\\s*((-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*)|(-?[0-9]*\\.?[0-9]+\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*\\s+-?[0-9]*\\.?[0-9]+\\s*)))*\\s*\\))*\\s*\\))*\\)))|(?P<t_ENVELOPE>ENVELOPE\\s*\\((\\s*-?[0-9]*\\.?[0-9]+\\s*){4}\\))|(?P<t_UNITS>(feet)|(meters)|(statute miles)|(nautical 
miles)|(kilometers))|(?P<t_TIME>\\d{4}-\\d{2}-\\d{2}T[0-2][0-9]:[0-5][0-9]:[0-5][0-9]Z)|(?P<t_DURATION>P((\\d+Y)?(\\d+M)?(\\d+D)?)?(T(\\d+H)?(\\d+M)?(\\d+S)?)?)|(?P<t_FLOAT>[0-9]*\\.?[0-9]+([eE][-+]?[0-9]+)?)|(?P<t_INTEGER>-?[0-9]+)|(?P<t_QUOTED>(\\"[^"]*\\")|(\\\'[^\\\']*\\\'))|(?P<t_ATTRIBUTE>[a-zA-Z_$][0-9a-zA-Z_$]*)|(?P<t_newline>\\n+)|(?P<t_AND>AND)|(?P<t_GE>>=)|(?P<t_LBRACKET>\\[)|(?P<t_LE><=)|(?P<t_LPAREN>\\()|(?P<t_NE><>)|(?P<t_OR>OR)|(?P<t_PLUS>\\+)|(?P<t_RBRACKET>\\])|(?P<t_RPAREN>\\))|(?P<t_TIMES>\\*)|(?P<t_COMMA>,)|(?P<t_DIVIDE>/)|(?P<t_EQ>=)|(?P<t_GT>>)|(?P<t_LT><)|(?P<t_MINUS>-)', [None, ('t_GEOMETRY', 'GEOMETRY'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_ENVELOPE', 'ENVELOPE'), None, ('t_UNITS', 'UNITS'), None, None, None, None, None, ('t_TIME', 'TIME'), ('t_DURATION', 'DURATION'), None, None, None, None, None, None, None, None, ('t_FLOAT', 'FLOAT'), None, ('t_INTEGER', 'INTEGER'), ('t_QUOTED', 'QUOTED'), None, None, ('t_ATTRIBUTE', 'ATTRIBUTE'), ('t_newline', 'newline'), (None, 'AND'), (None, 'GE'), (None, 'LBRACKET'), (None, 'LE'), (None, 'LPAREN'), (None, 'NE'), (None, 'OR'), (None, 'PLUS'), (None, 'RBRACKET'), (None, 'RPAREN'), (None, 'TIMES'), (None, 'COMMA'), (None, 'DIVIDE'), (None, 'EQ'), (None, 'GT'), (None, 'LT'), (None, 'MINUS')])]}
# Characters silently skipped between tokens in the INITIAL state.
_lexstateignore = {'INITIAL': ' \t'}
# Name of the error-handler function looked up in the defining module.
_lexstateerrorf = {'INITIAL': 't_error'}
# No per-state EOF handlers defined.
_lexstateeoff = {}
| 578.181818
| 5,542
| 0.372484
| 1,168
| 6,360
| 1.986301
| 0.092466
| 0.240517
| 0.179741
| 0.236207
| 0.500862
| 0.500862
| 0.4875
| 0.484483
| 0.484483
| 0.469828
| 0
| 0.094739
| 0.040723
| 6,360
| 10
| 5,543
| 636
| 0.285527
| 0.012107
| 0
| 0
| 0
| 0.222222
| 0.832988
| 0.645439
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.