code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
"""retina_resnet50"""
import keras
from perceptron.zoo.retinanet_resnet_50 import models
import tensorflow as tf
def Retina_Resnet50(weights_file="resnet50_coco_best_v2.1.0.h5"):
    """Build a RetinaNet-ResNet50 detector, fetching pretrained COCO weights on demand.

    Parameters
    ----------
    weights_file : str
        Local filename for the weight file; downloaded if not cached.

    Returns
    -------
    A Keras model loaded with the ResNet50 backbone.
    """
    from perceptron.utils.func import maybe_download_model_data

    # The TF session must be installed into Keras before the graph is built.
    keras.backend.tensorflow_backend.set_session(get_session())
    weights_url = ('https://perceptron-benchmark.s3-us-west-1.amazonaws.com'
                   '/models/coco/resnet50_coco_best_v2.1.0.h5')
    local_path = maybe_download_model_data(weights_file, weights_url)
    return models.load_model(local_path, backbone_name='resnet50')
def get_session():
    """Create a TensorFlow 1.x session whose GPU memory grows on demand
    instead of being fully pre-allocated."""
    session_config = tf.ConfigProto()
    session_config.gpu_options.allow_growth = True
    return tf.Session(config=session_config)
|
[
"perceptron.zoo.retinanet_resnet_50.models.load_model",
"tensorflow.ConfigProto",
"tensorflow.Session",
"perceptron.utils.func.maybe_download_model_data"
] |
[((337, 485), 'perceptron.utils.func.maybe_download_model_data', 'maybe_download_model_data', (['weights_file', '"""https://perceptron-benchmark.s3-us-west-1.amazonaws.com/models/coco/resnet50_coco_best_v2.1.0.h5"""'], {}), "(weights_file,\n 'https://perceptron-benchmark.s3-us-west-1.amazonaws.com/models/coco/resnet50_coco_best_v2.1.0.h5'\n )\n", (362, 485), False, 'from perceptron.utils.func import maybe_download_model_data\n'), ((499, 556), 'perceptron.zoo.retinanet_resnet_50.models.load_model', 'models.load_model', (['weight_fpath'], {'backbone_name': '"""resnet50"""'}), "(weight_fpath, backbone_name='resnet50')\n", (516, 556), False, 'from perceptron.zoo.retinanet_resnet_50 import models\n'), ((613, 629), 'tensorflow.ConfigProto', 'tf.ConfigProto', ([], {}), '()\n', (627, 629), True, 'import tensorflow as tf\n'), ((686, 711), 'tensorflow.Session', 'tf.Session', ([], {'config': 'config'}), '(config=config)\n', (696, 711), True, 'import tensorflow as tf\n')]
|
from datetime import datetime
from time import localtime
from time import sleep
from win10toast import ToastNotifier
import kthread
import os
from tkinter import *
import sys
# Single shared Windows toast notifier, used by Terminkalender.check_dates.
toaster = ToastNotifier()
class Terminkalender:
    """Tkinter appointment reminder.

    Reads appointments from ``termine.txt`` (format: ``<date/time>_<message>``)
    and, from a background thread, shows a Windows toast for every appointment
    whose time is within ``seconds_before_and_after_to_display`` of now.
    """

    def __init__(self):
        self.dead = False  # set True on exit to stop the checker loop
        self.text_auf_editbutton = "Edit"
        self.buttonschrift = "Helvetica 16 bold"
        self.root = Tk(className='Terminkalender')
        self.haupframe_wo_alles_ist = Frame(self.root)
        # Toast window: |now - appointment| must be below this many seconds.
        self.seconds_before_and_after_to_display = 900
        self.sleeptime = 1  # pause between check passes (seconds)
        self.canvas_im_hauptframe = Canvas(self.haupframe_wo_alles_ist)
        self.button_edit = Button(
            self.canvas_im_hauptframe,
            text=self.text_auf_editbutton,
            command=lambda: self.open_termin_file(),
            font=self.buttonschrift,
        )
        # kthread.KThread so the checker can be force-terminated on exit.
        self.t = kthread.KThread(target=self.check_dates_inf, name="checkinf")
        self.t.start()

    def open_termin_file(self):
        """Open termine.txt with the associated editor (Windows shell)."""
        os.popen(r"termine.txt")

    def read_txt_file(self):
        """Return all lines of termine.txt, stripped of surrounding whitespace."""
        with open("termine.txt", mode="r", encoding="utf-8") as f:
            data = [x.strip() for x in f.readlines()]
        return data

    def mainloop(self):
        """Lay out the widgets and enter the Tk event loop."""
        self.haupframe_wo_alles_ist.pack(fill=BOTH, expand=1)
        self.canvas_im_hauptframe.pack(side=LEFT, fill=BOTH, expand=1)
        self.button_edit.pack()
        # Route the window-close button through onexit so the thread is stopped.
        self.root.protocol("WM_DELETE_WINDOW", self.onexit)
        self.root.mainloop()

    def onexit(self):
        """Shut down the GUI and the background thread, then exit the process."""
        try:
            self.root.quit()
        except Exception as Fehler:
            print(Fehler)
        self.dead = True
        try:
            self.t.terminate()
        except Exception as Fehler:
            print(Fehler)
        sys.exit()

    def check_dates_inf(self):
        """Run check_dates repeatedly until the app is closed."""
        while not self.dead:
            try:
                self.check_dates()
            except Exception as Fehler:
                # Keep the checker thread alive on malformed files / parse errors.
                # (Previously `try/finally: pass`, which does NOT swallow
                # exceptions, so any error killed the thread.)
                print(Fehler)

    def check_dates(self):
        # BUGFIX: `re` was used below but never imported anywhere in the file,
        # so every call raised NameError and the checker thread died instantly.
        import re
        zeitmeldung = [zeit.split("_", maxsplit=1) for zeit in self.read_txt_file()]
        alldatestocheck = []
        for alltermine in zeitmeldung:
            try:
                print(f'xxx{alltermine[0]}')
                # Pull up to six numbers (d, m, y, H, M, S) out of the date part;
                # the lstrip('0') dance avoids int() issues with leading zeros.
                allezeiten = [
                    [
                        int(x[:-1].lstrip('0') + x[-1])
                        for x in re.findall(r"\d+", alltermine[0])
                    ][:6]
                ]
                print(allezeiten)
                for einzelne_zeit in allezeiten:
                    day, month, year, hour, minute, second = einzelne_zeit
                    datumadden = datetime(year, month, day, hour, minute, second)
                    alldatestocheck.append((datumadden, alltermine[1]))
            except Exception as Fehler:
                print(Fehler)
        for einzelner_event in alldatestocheck:
            aktuellesdatum = datetime(
                localtime().tm_year,
                localtime().tm_mon,
                localtime().tm_mday,
                localtime().tm_hour,
                localtime().tm_min,
                localtime().tm_sec,
            )
            differenz = aktuellesdatum - einzelner_event[0]
            if abs(differenz.total_seconds()) < self.seconds_before_and_after_to_display:
                toaster.show_toast(
                    str(einzelner_event[0]),
                    einzelner_event[1],
                    icon_path=None,
                    duration=5,
                    threaded=True,
                )
        if not self.dead:
            sleep(self.sleeptime)
if __name__ == "__main__":
    # Start the reminder app and hand control to the Tk event loop.
    kalender = Terminkalender()
    kalender.mainloop()
|
[
"kthread.KThread",
"os.popen",
"time.sleep",
"datetime.datetime",
"time.localtime",
"win10toast.ToastNotifier",
"sys.exit"
] |
[((185, 200), 'win10toast.ToastNotifier', 'ToastNotifier', ([], {}), '()\n', (198, 200), False, 'from win10toast import ToastNotifier\n'), ((860, 921), 'kthread.KThread', 'kthread.KThread', ([], {'target': 'self.check_dates_inf', 'name': '"""checkinf"""'}), "(target=self.check_dates_inf, name='checkinf')\n", (875, 921), False, 'import kthread\n'), ((987, 1010), 'os.popen', 'os.popen', (['"""termine.txt"""'], {}), "('termine.txt')\n", (995, 1010), False, 'import os\n'), ((1728, 1738), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1736, 1738), False, 'import sys\n'), ((3490, 3511), 'time.sleep', 'sleep', (['self.sleeptime'], {}), '(self.sleeptime)\n', (3495, 3511), False, 'from time import sleep\n'), ((2549, 2597), 'datetime.datetime', 'datetime', (['year', 'month', 'day', 'hour', 'minute', 'second'], {}), '(year, month, day, hour, minute, second)\n', (2557, 2597), False, 'from datetime import datetime\n'), ((2843, 2854), 'time.localtime', 'localtime', ([], {}), '()\n', (2852, 2854), False, 'from time import localtime\n'), ((2880, 2891), 'time.localtime', 'localtime', ([], {}), '()\n', (2889, 2891), False, 'from time import localtime\n'), ((2916, 2927), 'time.localtime', 'localtime', ([], {}), '()\n', (2925, 2927), False, 'from time import localtime\n'), ((2953, 2964), 'time.localtime', 'localtime', ([], {}), '()\n', (2962, 2964), False, 'from time import localtime\n'), ((2990, 3001), 'time.localtime', 'localtime', ([], {}), '()\n', (2999, 3001), False, 'from time import localtime\n'), ((3026, 3037), 'time.localtime', 'localtime', ([], {}), '()\n', (3035, 3037), False, 'from time import localtime\n')]
|
from main import app as application
from api.utils.database import db
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
# Attach Flask-Migrate (Alembic) to the imported Flask app and its SQLAlchemy db.
migrate = Migrate(application, db)
# Flask-Script command runner; exposes the `db` migration sub-commands.
manager = Manager(application)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
    # python manage.py db init
    # python manage.py db migrate
    # python manage.py db upgrade
    # python manage.py db -help
    manager.run()
|
[
"flask_script.Manager",
"flask_migrate.Migrate"
] |
[((164, 188), 'flask_migrate.Migrate', 'Migrate', (['application', 'db'], {}), '(application, db)\n', (171, 188), False, 'from flask_migrate import Migrate, MigrateCommand\n'), ((200, 220), 'flask_script.Manager', 'Manager', (['application'], {}), '(application)\n', (207, 220), False, 'from flask_script import Manager\n')]
|
import numpy as np
import argparse
from maci.learners import MAVBAC, MASQL, ROMMEO
from maci.misc.sampler import MASampler
from maci.environments import PBeautyGame, MatrixGame, DifferentialGame
from maci.environments import make_particle_env
from maci.misc import logger
import gtimer as gt
import datetime
from copy import deepcopy
from maci.get_agents import ddpg_agent, masql_agent, pr2ac_agent, rom_agent
import maci.misc.tf_utils as U
import os
from keras.backend.tensorflow_backend import set_session
import tensorflow as tf
# TF1-style global session setup, registered as the Keras backend session.
config = tf.ConfigProto()
config.gpu_options.allow_growth = True  # dynamically grow the memory used on the GPU
sess = tf.Session(config=config)
set_session(sess)  # make this session Keras' default
def get_particle_game(particle_game_name, arglist):
    """Build a multi-agent particle environment and per-agent model names.

    Adversary agents (1 for simple_push/simple_adversary, 3 for simple_tag)
    use the SECOND model in `--model_names_setting`; the remaining agents use
    the first. Returns (env, agent_num, model_name, model_names).
    """
    env = make_particle_env(game_name=particle_game_name)
    print(env.action_space, env.observation_space)
    agent_num = env.n
    adv_agent_num = 0
    if particle_game_name == 'simple_push' or particle_game_name == 'simple_adversary':
        adv_agent_num = 1
    elif particle_game_name == 'simple_tag':
        adv_agent_num = 3
    model_names_setting = arglist.model_names_setting.split('_')
    model_name = '_'.join(model_names_setting)
    # Adversaries first (setting[1]), then cooperative agents (setting[0]).
    model_names = [model_names_setting[1]] * adv_agent_num + [model_names_setting[0]] * (agent_num - adv_agent_num)
    return env, agent_num, model_name, model_names
def _parse_bool(value):
    """Parse a textual boolean from the command line.

    argparse's ``type=bool`` is broken for flags: ``bool('False')`` is True
    because any non-empty string is truthy, so ``-re False`` used to enable
    the flag. Accepts true/t/yes/y/1 (case-insensitive) as True.
    """
    if isinstance(value, bool):
        return value
    return str(value).strip().lower() in ('true', 't', 'yes', 'y', '1')


def parse_args():
    """Build and parse the command-line arguments for the MARL experiments."""
    parser = argparse.ArgumentParser("Reinforcement Learning experiments for multiagent environments")
    # Environment
    # ['particle-simple_spread', 'particle-simple_adversary', 'particle-simple_tag', 'particle-simple_push']
    parser.add_argument('-g', "--game_name", type=str, default="diff-ma_softq", help="name of the game")
    parser.add_argument('-p', "--p", type=float, default=1.1, help="p")
    parser.add_argument('-mu', "--mu", type=float, default=1.5, help="mu")
    parser.add_argument('-r', "--reward_type", type=str, default="abs", help="reward type")
    parser.add_argument('-mp', "--max_path_length", type=int, default=1, help="reward type")
    parser.add_argument('-ms', "--max_steps", type=int, default=10000, help="number of epochs")
    parser.add_argument('-me', "--memory", type=int, default=0, help="reward type")
    parser.add_argument('-n', "--n", type=int, default=2, help="name of the game")
    parser.add_argument('-bs', "--batch_size", type=int, default=512, help="name of the game")
    parser.add_argument('-hm', "--hidden_size", type=int, default=100, help="name of the game")
    parser.add_argument('-ti', "--training_interval", type=int, default=1, help="name of the game")
    # BUGFIX: these three used type=bool, which maps the string 'False' to True.
    parser.add_argument('-re', "--repeat", type=_parse_bool, default=False, help="name of the game")
    parser.add_argument('-a', "--aux", type=_parse_bool, default=True, help="name of the game")
    parser.add_argument('-gr', "--global_reward", type=_parse_bool, default=False, help="name of the game")
    parser.add_argument('-m', "--model_names_setting", type=str, default='PR2AC1_PR2AC1', help="models setting agent vs adv")
    return parser.parse_args()
def main(arglist):
    """Run one multi-agent RL experiment described by the parsed arguments.

    Builds the environment (pbeauty / matrix / differential / particle),
    instantiates one agent per model name, then trains them together with a
    joint sampler, logging to ./log, ./snapshot and ./policy. Runs until
    `--max_steps` epochs complete.
    """
    game_name = arglist.game_name
    # 'abs', 'one'
    reward_type = arglist.reward_type
    p = arglist.p
    agent_num = arglist.n
    u_range = 1.
    k = 0
    print(arglist.aux, 'arglist.aux')
    # Agent 0 uses the first model name, all others use the second.
    model_names_setting = arglist.model_names_setting.split('_')
    model_names = [model_names_setting[0]] + [model_names_setting[1]] * (agent_num - 1)
    model_name = '_'.join(model_names)
    path_prefix = game_name
    # --- Environment construction (path_prefix doubles as the log directory key).
    if game_name == 'pbeauty':
        env = PBeautyGame(agent_num=agent_num, reward_type=reward_type, p=p)
        path_prefix = game_name + '-' + reward_type + '-' + str(p)
    elif 'matrix' in game_name:
        matrix_game_name = game_name.split('-')[-1]
        repeated = arglist.repeat
        max_step = arglist.max_path_length
        memory = arglist.memory
        env = MatrixGame(game=matrix_game_name, agent_num=agent_num,
                         action_num=2, repeated=repeated,
                         max_step=max_step, memory=memory,
                         discrete_action=False, tuple_obs=False)
        path_prefix = '{}-{}-{}-{}'.format(game_name, repeated, max_step, memory)
    elif 'diff' in game_name:
        diff_game_name = game_name.split('-')[-1]
        agent_num = 2  # differential games are fixed at two players
        env = DifferentialGame(diff_game_name, agent_num)
    elif 'particle' in game_name:
        particle_game_name = game_name.split('-')[-1]
        env, agent_num, model_name, model_names = get_particle_game(particle_game_name, arglist)
    # --- Logging / output directories, keyed by game, agent count, model, time.
    now = datetime.datetime.now()
    timestamp = now.strftime('%Y-%m-%d %H:%M:%S.%f %Z')
    if 'CG' in model_name:
        model_name = model_name + '-{}'.format(arglist.mu)
    if not arglist.aux:
        model_name = model_name + '-{}'.format(arglist.aux)
    suffix = '{}/{}/{}/{}'.format(path_prefix, agent_num, model_name, timestamp)
    print(suffix)
    logger.add_tabular_output('./log/{}.csv'.format(suffix))
    snapshot_dir = './snapshot/{}'.format(suffix)
    policy_dir = './policy/{}'.format(suffix)
    os.makedirs(snapshot_dir, exist_ok=True)
    os.makedirs(policy_dir, exist_ok=True)
    logger.set_snapshot_dir(snapshot_dir)
    agents = []
    M = arglist.hidden_size
    batch_size = arglist.batch_size
    # MultiAgent sampler
    sampler = MASampler(agent_num=agent_num, joint=True, global_reward=arglist.global_reward, max_path_length=25, min_pool_size=100, batch_size=batch_size)
    base_kwargs = {
        'sampler': sampler,
        'epoch_length': 100,
        'n_epochs': arglist.max_steps,
        'n_train_repeat': 1,
        'eval_render': True,
        'eval_n_episodes': 10
    }
    with U.single_threaded_session():
        # --- Agent construction, one per model name.
        for i, model_name in enumerate(model_names):
            if 'PR2AC' in model_name:
                k = int(model_name[-1])  # recursion depth encoded in the name, e.g. PR2AC1
                g = False
                mu = arglist.mu
                if 'G' in model_name:
                    g = True
                agent = pr2ac_agent(model_name, i, env, M, u_range, base_kwargs, k=k, g=g, mu=mu, game_name=game_name, aux=arglist.aux)
            elif model_name == 'MASQL':
                agent = masql_agent(model_name, i, env, M, u_range, base_kwargs, game_name=game_name)
            elif model_name == 'ROMMEO':
                agent = rom_agent(model_name, i, env, M, u_range, base_kwargs, game_name=game_name)
            else:
                if model_name == 'DDPG':
                    joint = False
                    opponent_modelling = False
                elif model_name == 'MADDPG':  # Multi-Agent Deep Deterministic Policy Gradient
                    joint = True
                    opponent_modelling = False
                elif model_name == 'DDPG-OM':
                    joint = True
                    opponent_modelling = True
                agent = ddpg_agent(joint, opponent_modelling, model_names, i, env, M, u_range, base_kwargs, game_name=game_name)
            agents.append(agent)
        sampler.initialize(env, agents)
        for agent in agents:
            agent._init_training()
        gt.rename_root('MARLAlgorithm')
        gt.reset()
        gt.set_def_unique(False)
        initial_exploration_done = False
        # noise = .1
        noise = .5  # exploration noise, annealed by the schedule below
        alpha = .1  # annealing temperature passed to MAVBAC/MASQL/ROMMEO training
        for agent in agents:
            try:
                agent.policy.set_noise_level(noise)
            except:
                pass
        # alpha = .5
        for steps in gt.timed_for(range(base_kwargs['n_epochs'] + 1)):
            # alpha = .1 + np.exp(-0.1 * max(steps-10, 0)) * 500.
            logger.push_prefix('Epoch #%d | ' % steps)
            if steps % (25*1000) == 0:
                print(suffix)
            for t in range(base_kwargs['epoch_length']):
                # TODO.code consolidation: Add control interval to sampler
                # Pure exploration for the first 1000 epochs: sample but skip training.
                if not initial_exploration_done:
                    if steps >= 1000:
                        initial_exploration_done = True
                sampler.sample()
                if not initial_exploration_done:
                    continue
                gt.stamp('sample')
                print('Sample Done')
                # Noise annealing schedule: .5 -> .1 -> .05 -> .01 over training.
                if steps == 1000:
                    noise = 0.1
                    for agent in agents:
                        try:
                            agent.policy.set_noise_level(noise)
                        except:
                            pass
                    # alpha = 10.
                if steps == 2000:
                    noise = 0.1
                    for agent in agents:
                        try:
                            agent.policy.set_noise_level(noise)
                        except:
                            pass
                    # alpha = .1
                if steps == 3000:
                    noise = 0.05
                    for agent in agents:
                        try:
                            agent.policy.set_noise_level(noise)
                        except:
                            pass
                if steps > base_kwargs['n_epochs'] / 6:
                    noise = 0.01
                    for agent in agents:
                        try:
                            agent.policy.set_noise_level(noise)
                        except:
                            pass
                if steps % arglist.training_interval != 0:
                    continue
                for j in range(base_kwargs['n_train_repeat']):
                    batch_n = []
                    recent_batch_n = []
                    indices = None
                    receent_indices = None
                    # Agent 0 picks the (shared) replay indices; all agents are
                    # sampled at the SAME indices so the joint batches line up.
                    for i, agent in enumerate(agents):
                        if i == 0:
                            batch = agent.pool.random_batch(batch_size)
                            indices = agent.pool.indices
                            receent_indices = list(range(agent.pool._top-batch_size, agent.pool._top))
                        batch_n.append(agent.pool.random_batch_by_indices(indices))
                        recent_batch_n.append(agent.pool.random_batch_by_indices(receent_indices))
                    # print(len(batch_n))
                    target_next_actions_n = []
                    # try:
                    # Concatenated observations of all agents (joint state view).
                    all_obs = np.array(np.concatenate([batch['observations'] for batch in batch_n], axis=-1))
                    all_next_obs = np.array(np.concatenate([batch['next_observations'] for batch in batch_n], axis=-1))
                    # print(all_obs[0])
                    for batch in batch_n:
                        # print('making all obs')
                        batch['all_observations'] = deepcopy(all_obs)
                        batch['all_next_observations'] = deepcopy(all_next_obs)
                    opponent_current_actions_n = []
                    for agent, batch in zip(agents, batch_n):
                        target_next_actions_n.append(agent.target_policy.get_actions(batch['next_observations']))
                        opponent_current_actions_n.append(agent.policy.get_actions(batch['observations']))
                    # update opponent actions
                    for i, agent in enumerate(agents):
                        # np.delete(..., i, 0) removes agent i's own actions,
                        # leaving only the opponents' actions for its batch.
                        batch_n[i]['opponent_current_actions'] = np.reshape(
                            np.delete(deepcopy(opponent_current_actions_n), i, 0), (-1, agent._opponent_action_dim))
                    opponent_actions_n = np.array([batch['actions'] for batch in batch_n])
                    recent_opponent_actions_n = np.array([batch['actions'] for batch in recent_batch_n])
                    ####### figure out
                    recent_opponent_observations_n = []
                    for batch in recent_batch_n:
                        recent_opponent_observations_n.append(batch['observations'])
                    current_actions = [agents[i].policy.get_actions(batch_n[i]['next_observations'])[0][0] for i in range(agent_num)]
                    all_actions_k = []
                    # MAVBAC agents expose their k-level recursive action stack; log it.
                    for i, agent in enumerate(agents):
                        if isinstance(agent, MAVBAC):
                            if agent._k > 0:
                                batch_actions_k = agent.policy.get_all_actions(batch_n[i]['next_observations'])
                                actions_k = [a[0][0] for a in batch_actions_k]
                                all_actions_k.append(';'.join(list(map(str, actions_k))))
                    if len(all_actions_k) > 0:
                        with open('{}/all_actions.csv'.format(policy_dir), 'a') as f:
                            f.write(','.join(list(map(str, all_actions_k))) + '\n')
                    with open('{}/policy.csv'.format(policy_dir), 'a') as f:
                        f.write(','.join(list(map(str, current_actions)))+'\n')
                    # print('============')
                    for i, agent in enumerate(agents):
                        try:
                            batch_n[i]['next_actions'] = deepcopy(target_next_actions_n[i])
                        except:
                            pass
                        batch_n[i]['opponent_actions'] = np.reshape(np.delete(deepcopy(opponent_actions_n), i, 0), (-1, agent._opponent_action_dim))
                        if agent.joint:
                            if agent.opponent_modelling:
                                batch_n[i]['recent_opponent_observations'] = recent_opponent_observations_n[i]
                                batch_n[i]['recent_opponent_actions'] = np.reshape(np.delete(deepcopy(recent_opponent_actions_n), i, 0), (-1, agent._opponent_action_dim))
                                batch_n[i]['opponent_next_actions'] = agent.opponent_policy.get_actions(batch_n[i]['next_observations'])
                            else:
                                batch_n[i]['opponent_next_actions'] = np.reshape(np.delete(deepcopy(target_next_actions_n), i, 0), (-1, agent._opponent_action_dim))
                        # Soft-Q style learners take the annealing temperature.
                        if isinstance(agent, MAVBAC) or isinstance(agent, MASQL) or isinstance(agent, ROMMEO):
                            agent._do_training(iteration=t + steps * agent._epoch_length, batch=batch_n[i], annealing=alpha)
                        else:
                            agent._do_training(iteration=t + steps * agent._epoch_length, batch=batch_n[i])
                gt.stamp('train')
        sampler.terminate()
if __name__ == '__main__':
    # Parse CLI options and launch the experiment.
    main(parse_args())
|
[
"argparse.ArgumentParser",
"tensorflow.ConfigProto",
"keras.backend.tensorflow_backend.set_session",
"maci.misc.logger.set_snapshot_dir",
"maci.misc.tf_utils.single_threaded_session",
"gtimer.set_def_unique",
"gtimer.rename_root",
"gtimer.stamp",
"gtimer.reset",
"datetime.datetime.now",
"maci.environments.make_particle_env",
"maci.environments.DifferentialGame",
"copy.deepcopy",
"tensorflow.Session",
"maci.environments.MatrixGame",
"maci.get_agents.rom_agent",
"numpy.concatenate",
"maci.get_agents.ddpg_agent",
"maci.misc.sampler.MASampler",
"os.makedirs",
"maci.environments.PBeautyGame",
"maci.get_agents.masql_agent",
"numpy.array",
"maci.misc.logger.push_prefix",
"maci.get_agents.pr2ac_agent"
] |
[((544, 560), 'tensorflow.ConfigProto', 'tf.ConfigProto', ([], {}), '()\n', (558, 560), True, 'import tensorflow as tf\n'), ((654, 679), 'tensorflow.Session', 'tf.Session', ([], {'config': 'config'}), '(config=config)\n', (664, 679), True, 'import tensorflow as tf\n'), ((680, 697), 'keras.backend.tensorflow_backend.set_session', 'set_session', (['sess'], {}), '(sess)\n', (691, 697), False, 'from keras.backend.tensorflow_backend import set_session\n'), ((762, 809), 'maci.environments.make_particle_env', 'make_particle_env', ([], {'game_name': 'particle_game_name'}), '(game_name=particle_game_name)\n', (779, 809), False, 'from maci.environments import make_particle_env\n'), ((1401, 1495), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""Reinforcement Learning experiments for multiagent environments"""'], {}), "(\n 'Reinforcement Learning experiments for multiagent environments')\n", (1424, 1495), False, 'import argparse\n'), ((4551, 4574), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4572, 4574), False, 'import datetime\n'), ((5064, 5104), 'os.makedirs', 'os.makedirs', (['snapshot_dir'], {'exist_ok': '(True)'}), '(snapshot_dir, exist_ok=True)\n', (5075, 5104), False, 'import os\n'), ((5109, 5147), 'os.makedirs', 'os.makedirs', (['policy_dir'], {'exist_ok': '(True)'}), '(policy_dir, exist_ok=True)\n', (5120, 5147), False, 'import os\n'), ((5152, 5189), 'maci.misc.logger.set_snapshot_dir', 'logger.set_snapshot_dir', (['snapshot_dir'], {}), '(snapshot_dir)\n', (5175, 5189), False, 'from maci.misc import logger\n'), ((5310, 5461), 'maci.misc.sampler.MASampler', 'MASampler', ([], {'agent_num': 'agent_num', 'joint': '(True)', 'global_reward': 'arglist.global_reward', 'max_path_length': '(25)', 'min_pool_size': '(100)', 'batch_size': 'batch_size'}), '(agent_num=agent_num, joint=True, global_reward=arglist.\n global_reward, max_path_length=25, min_pool_size=100, batch_size=batch_size\n )\n', (5319, 5461), False, 'from maci.misc.sampler 
import MASampler\n'), ((3536, 3598), 'maci.environments.PBeautyGame', 'PBeautyGame', ([], {'agent_num': 'agent_num', 'reward_type': 'reward_type', 'p': 'p'}), '(agent_num=agent_num, reward_type=reward_type, p=p)\n', (3547, 3598), False, 'from maci.environments import PBeautyGame, MatrixGame, DifferentialGame\n'), ((5673, 5700), 'maci.misc.tf_utils.single_threaded_session', 'U.single_threaded_session', ([], {}), '()\n', (5698, 5700), True, 'import maci.misc.tf_utils as U\n'), ((7093, 7124), 'gtimer.rename_root', 'gt.rename_root', (['"""MARLAlgorithm"""'], {}), "('MARLAlgorithm')\n", (7107, 7124), True, 'import gtimer as gt\n'), ((7133, 7143), 'gtimer.reset', 'gt.reset', ([], {}), '()\n', (7141, 7143), True, 'import gtimer as gt\n'), ((7152, 7176), 'gtimer.set_def_unique', 'gt.set_def_unique', (['(False)'], {}), '(False)\n', (7169, 7176), True, 'import gtimer as gt\n'), ((3874, 4044), 'maci.environments.MatrixGame', 'MatrixGame', ([], {'game': 'matrix_game_name', 'agent_num': 'agent_num', 'action_num': '(2)', 'repeated': 'repeated', 'max_step': 'max_step', 'memory': 'memory', 'discrete_action': '(False)', 'tuple_obs': '(False)'}), '(game=matrix_game_name, agent_num=agent_num, action_num=2,\n repeated=repeated, max_step=max_step, memory=memory, discrete_action=\n False, tuple_obs=False)\n', (3884, 4044), False, 'from maci.environments import PBeautyGame, MatrixGame, DifferentialGame\n'), ((7588, 7630), 'maci.misc.logger.push_prefix', 'logger.push_prefix', (["('Epoch #%d | ' % steps)"], {}), "('Epoch #%d | ' % steps)\n", (7606, 7630), False, 'from maci.misc import logger\n'), ((4310, 4353), 'maci.environments.DifferentialGame', 'DifferentialGame', (['diff_game_name', 'agent_num'], {}), '(diff_game_name, agent_num)\n', (4326, 4353), False, 'from maci.environments import PBeautyGame, MatrixGame, DifferentialGame\n'), ((5982, 6097), 'maci.get_agents.pr2ac_agent', 'pr2ac_agent', (['model_name', 'i', 'env', 'M', 'u_range', 'base_kwargs'], {'k': 'k', 'g': 'g', 'mu': 'mu', 
'game_name': 'game_name', 'aux': 'arglist.aux'}), '(model_name, i, env, M, u_range, base_kwargs, k=k, g=g, mu=mu,\n game_name=game_name, aux=arglist.aux)\n', (5993, 6097), False, 'from maci.get_agents import ddpg_agent, masql_agent, pr2ac_agent, rom_agent\n'), ((8102, 8120), 'gtimer.stamp', 'gt.stamp', (['"""sample"""'], {}), "('sample')\n", (8110, 8120), True, 'import gtimer as gt\n'), ((14329, 14346), 'gtimer.stamp', 'gt.stamp', (['"""train"""'], {}), "('train')\n", (14337, 14346), True, 'import gtimer as gt\n'), ((6158, 6235), 'maci.get_agents.masql_agent', 'masql_agent', (['model_name', 'i', 'env', 'M', 'u_range', 'base_kwargs'], {'game_name': 'game_name'}), '(model_name, i, env, M, u_range, base_kwargs, game_name=game_name)\n', (6169, 6235), False, 'from maci.get_agents import ddpg_agent, masql_agent, pr2ac_agent, rom_agent\n'), ((11420, 11469), 'numpy.array', 'np.array', (["[batch['actions'] for batch in batch_n]"], {}), "([batch['actions'] for batch in batch_n])\n", (11428, 11469), True, 'import numpy as np\n'), ((11518, 11574), 'numpy.array', 'np.array', (["[batch['actions'] for batch in recent_batch_n]"], {}), "([batch['actions'] for batch in recent_batch_n])\n", (11526, 11574), True, 'import numpy as np\n'), ((6301, 6376), 'maci.get_agents.rom_agent', 'rom_agent', (['model_name', 'i', 'env', 'M', 'u_range', 'base_kwargs'], {'game_name': 'game_name'}), '(model_name, i, env, M, u_range, base_kwargs, game_name=game_name)\n', (6310, 6376), False, 'from maci.get_agents import ddpg_agent, masql_agent, pr2ac_agent, rom_agent\n'), ((6840, 6948), 'maci.get_agents.ddpg_agent', 'ddpg_agent', (['joint', 'opponent_modelling', 'model_names', 'i', 'env', 'M', 'u_range', 'base_kwargs'], {'game_name': 'game_name'}), '(joint, opponent_modelling, model_names, i, env, M, u_range,\n base_kwargs, game_name=game_name)\n', (6850, 6948), False, 'from maci.get_agents import ddpg_agent, masql_agent, pr2ac_agent, rom_agent\n'), ((10274, 10343), 'numpy.concatenate', 'np.concatenate', 
(["[batch['observations'] for batch in batch_n]"], {'axis': '(-1)'}), "([batch['observations'] for batch in batch_n], axis=-1)\n", (10288, 10343), True, 'import numpy as np\n'), ((10389, 10463), 'numpy.concatenate', 'np.concatenate', (["[batch['next_observations'] for batch in batch_n]"], {'axis': '(-1)'}), "([batch['next_observations'] for batch in batch_n], axis=-1)\n", (10403, 10463), True, 'import numpy as np\n'), ((10649, 10666), 'copy.deepcopy', 'deepcopy', (['all_obs'], {}), '(all_obs)\n', (10657, 10666), False, 'from copy import deepcopy\n'), ((10724, 10746), 'copy.deepcopy', 'deepcopy', (['all_next_obs'], {}), '(all_next_obs)\n', (10732, 10746), False, 'from copy import deepcopy\n'), ((12974, 13008), 'copy.deepcopy', 'deepcopy', (['target_next_actions_n[i]'], {}), '(target_next_actions_n[i])\n', (12982, 13008), False, 'from copy import deepcopy\n'), ((11299, 11335), 'copy.deepcopy', 'deepcopy', (['opponent_current_actions_n'], {}), '(opponent_current_actions_n)\n', (11307, 11335), False, 'from copy import deepcopy\n'), ((13152, 13180), 'copy.deepcopy', 'deepcopy', (['opponent_actions_n'], {}), '(opponent_actions_n)\n', (13160, 13180), False, 'from copy import deepcopy\n'), ((13524, 13559), 'copy.deepcopy', 'deepcopy', (['recent_opponent_actions_n'], {}), '(recent_opponent_actions_n)\n', (13532, 13559), False, 'from copy import deepcopy\n'), ((13864, 13895), 'copy.deepcopy', 'deepcopy', (['target_next_actions_n'], {}), '(target_next_actions_n)\n', (13872, 13895), False, 'from copy import deepcopy\n')]
|
# Sibling project modules to load dynamically, relative to the project package.
to_import = ['mlmodels.modelutils',
             'mlmodels.search.bayesian',
             'mlmodels.search.hparameters.lgbm_params']
import logging
logger = logging.getLogger()
from os.path import dirname, abspath, split
# Project package name = name of the directory containing this file.
project_name = split(dirname(abspath(__file__)))[1]
logger.info(f'{__file__} module: project directory: {project_name}')
import sys
import importlib
# Import each module and expose it here under its short name
# (e.g. `bayesian`, `lgbm_params`) so the rest of the file can use it directly.
for module in to_import:
    module_name = module.split('.')[-1]
    new_module = importlib.import_module(name = f'.{module}', package = project_name)
    sys.modules[__name__].__dict__.update({module_name: new_module})
################################################################################
# Optional third-party dependencies: importing this module succeeds without
# them, but fit() needs lightgbm and explain_shap() needs shap.
try:
    from lightgbm import LGBMClassifier, LGBMRegressor
except Exception as e:
    print('Package missing for model lgbm: lightgbm')
try:
    import shap
except Exception as e:
    print('Package missing for model lgbm: shap (for feature importance)')
import scipy
import pandas as pd
import numpy as np
#shap.initjs()
from sklearn.model_selection import KFold
from sklearn.metrics import accuracy_score
# Wrapper-level defaults for LGBM; every key can be overridden via LGBM(**kwargs).
def_params = dict(
    models = [],
    features = None,
    metric_func = None,
    minimize_metric = False,
    is_walk_forward = False,
    transform_walk_forward = None,
    target_name = None,
    transform_cols = None,
    optimize_on_val = True
)
# Hparameters of lgbm
# Keys recognized as native LightGBM hyperparameters; LGBM.__init__ splits
# these out of kwargs and passes them straight to the booster.
lgbm_hparam_keys = [
    'num_leaves',
    'max_depth',
    'min_data_in_leaf',
    'bagging_fraction',
    'learning_rate',
    'reg_alpha',
    'reg_lambda',
    'min_sum_hessian_in_leaf',
    'feature_fraction',
    'unbalanced_sets',
    'num_iterations',
    'random_state',
    'bagging_freq',
    'bagging_seed',
    'early_stopping_round',
    'objective',
    'metric',
    'verbose',
    'num_class'
]
class LGBM:
def __init__(self, **kwargs):
lgbm_hparams = {}
for k in lgbm_hparam_keys:
if k in kwargs:
lgbm_hparams[k] = kwargs[k]
self.lgbm_hparams = lgbm_hparams
self.__dict__.update(def_params)
self.__dict__.update(kwargs)
self.walk_forward_features = None
assert(self.objective in ['binary', 'multiclass', 'regression', 'multiclassova'])
self.feature_importances = None
def test(self, test_data):
X_test = test_data[0]
y_test = test_data[1]
test_metric = None
test_preds = self.predict(X_test)
return self.get_metric(test_preds, y_test)
def get_metric(self, y_true, y_pred):
if self.metric_func is None:
if self.metric == 'accuracy':
if self.objective in ['multiclass', 'multiclassova']:
return accuracy_score(y_true, np.argmax(y_pred, axis = 1))
elif self.objecetive == 'binary':
return accuracy_score(y_true, y_pred > self.binary_threshold)
else:
return self.metric_func(y_true, y_pred)
def predict(self, x, debug = False, **kwargs):
assert(len(self.models) > 0)
# Predict for each model
model_preds = []
for model in self.models:
try:
model_preds.append(model.predict_proba(x))
except:
model_preds.append(model.predict(x))
preds = np.mean(model_preds, axis = 0)
# Return results of each prediction
return preds
def predict_walk_forward(self, X, X_independent, y, n_pred, clf = None):
"""X_independent is a dataframe that consists of features that dont depend on target. (ex: date)
It must be given in X_val.
X_independent must have enough examples for prediction.
"""
assert(len(self.models) > 0)
assert(isinstance(X, pd.DataFrame))
assert(isinstance(X_independent, pd.DataFrame))
assert(len(X) + n_pred >= len(X_independent))
#assert(target_col not in X.columns)
if self.walk_forward_features is None:
self.walk_forward_features = X.columns
train_size = len(X)
data = X.copy().reset_index(drop = True)
data[self.target_name] = y.values
#display(X_independent)
data = pd.concat([data, X_independent], axis = 0, ignore_index=True)
data = data.reset_index(drop = True)
for i in range(n_pred):
new_example_i = train_size + i
# 1- Calculate new features
data.loc[new_example_i, self.transform_cols] = self.transform_walk_forward(data.iloc[:new_example_i])
# 2- Make a prediction (if a model was not specified, predictions from all CV models will be averaged.)
last_example = data.loc[new_example_i, self.walk_forward_features].values.reshape(1, -1)
data.loc[new_example_i, self.target_name] = self.predict(last_example) if clf is None else clf.predict(last_example)
return data.loc[data.index >= train_size, self.target_name].values
def fit(self, x):
""" X can be pd.Dataframe, np.ndarray or sparse.
y has to be pd.series
"""
train_data = x['train_data']
val_data = x['val_data']
self.models = []
# For CV
oof_preds = np.zeros(len(train_data[0]))
X_data = train_data[0]
y_data = train_data[1]
# Validate after CV
X_val = val_data[0]
try:
y_val = np.array(val_data[1].todense()).ravel()
except:
y_val = np.array(val_data[1]).ravel()
is_sparse = scipy.sparse.issparse(X_data)
# Create dataframe to keep feature importances for each fold
feature_importances = pd.DataFrame()
if not is_sparse:
self.features = X_data.columns
if self.features is not None:
if not len(self.features) == X_data.shape[1]:
raise ValueError(
'Number of features must be the same as n_columns in X.')
# Create column for features
feature_importances['feature'] = self.features
cv_metrics = list()
n_folds = 0
folds = None
val_preds = None
if not isinstance(self.folds, list):
folds = self.folds.split(X_data, y_data)
else:
folds = self.folds
oof_idx = []
for i_fold, (trn_idx, val_idx) in enumerate(folds):
# We can calculate an oof score only on oof examples.
# In time series CV schemes some examples will never become oof.
oof_idx.extend(val_idx)
n_folds += 1
X_trn_fold = X_data[trn_idx] if is_sparse else X_data.iloc[trn_idx]
X_val_fold = X_data[val_idx] if is_sparse else X_data.iloc[val_idx]
y_val_fold = None
y_trn_fold = None
if isinstance(y_data, pd.Series):
y_trn_fold = y_data.iloc[trn_idx]
y_val_fold = y_data.iloc[val_idx]
else:
y_trn_fold = y_data[trn_idx]
y_val_fold = y_data[val_idx]
try:
y_trn_fold = np.array(y_trn_fold.todense()).ravel()
y_val_fold = np.array(y_val_fold.todense()).ravel()
except:
y_trn_fold = np.array(y_trn_fold).ravel()
y_val_fold = np.array(y_val_fold).ravel()
logger.info('Training on fold {}'.format(i_fold))
# Training for this fold
clf = LGBMRegressor(**self.lgbm_hparams) if self.objective == 'regression' else LGBMClassifier(**self.lgbm_hparams)
clf = clf.fit(X = X_trn_fold, y = y_trn_fold,
eval_set = [(X_trn_fold, y_trn_fold),
(X_val_fold, y_val_fold)],
early_stopping_rounds = 250,
verbose = 200)
# Keep models of each fold
self.models.append(clf)
feature_importances['fold_{}'.format(i_fold)] = clf.feature_importances_
try:
oof_preds[val_idx] = clf.predict_proba(X_val_fold)
except:
oof_preds[val_idx] = clf.predict(X_val_fold) if not self.is_walk_forward else \
self.predict_walk_forward(X_trn_fold, X_val_fold, y_trn_fold, len(y_val_fold), clf)
# Validation for this fold
if X_val is not None:
if val_preds is None:
try:
val_preds = clf.predict_proba(X_val)
except:
val_preds = clf.predict(X_val) if not self.is_walk_forward else self.predict_walk_forward(X_data, X_val, y_data, len(y_val), clf)
else:
try:
val_preds += clf.predict_proba(X_val)
except:
val_preds += clf.predict(X_val) if not self.is_walk_forward else self.predict_walk_forward(X_data, X_val, y_data, len(y_val), clf)
logger.info('Training has finished.')
# Validation
val_metric = None
if X_val is not None:
val_preds /= n_folds
logger.info('Calculating validation metric...')
val_metric = self.get_metric(y_val, val_preds)
logger.info(f'Validation {self.metric}: {val_metric}')
feature_importances['importance'] = \
feature_importances[[f'fold_{i}' for i in range(n_folds)]].sum(axis = 1)
cols_to_keep = [col for col in feature_importances.columns if 'fold' not in col]
self.feature_importances = feature_importances[cols_to_keep]
if 'feature' in self.feature_importances.columns:
self.feature_importances.sort_values(by = 'importance',
ascending = False,
inplace = True)
return {
#'cv_metrics': cv_metrics,
'feature_importances': feature_importances,
'val_preds' : val_preds,
'oof_preds': oof_preds,
'metric': self.get_metric(train_data[1][oof_idx], oof_preds[oof_idx]) if not self.optimize_on_val else val_metric,
'val_metric': val_metric
}
    def display_feature_importances(self):
        """Render the stored feature-importance table in a notebook.

        Relies on IPython's ``display`` being available in the calling
        environment; the background gradient visually ranks importances.
        """
        display(self.feature_importances.style.background_gradient(cmap = 'coolwarm'))
def explain_shap(self, data, features = None, class_names = None, which_class = None, return_importances = True, plot_type = None):
X, y = data
explainer = shap.TreeExplainer(self.models[0])
shap_values = explainer.shap_values(X)
if which_class is not None:
assert(class_names is not None)
assert(which_class in class_names)
class_i = class_names.index(which_class)
shap_values = shap_values[class_i]
shap.summary_plot(shap_values,
X,
feature_names = features,
class_names = class_names,
plot_type = plot_type)
if return_importances:
return shap_values
    def search(self, x, num_iter = 3, trials_path = 'trials_lgbm', fixed_hparams = None, search_space = None):
        """Run a Bayesian hyper-parameter search over the LightGBM model.

        Parameters
        ----------
        x :
            Training data passed through to ``self.fit`` by the searcher.
        num_iter : int
            Number of search iterations.
        trials_path : str
            Path where search trials are persisted / resumed from.
        fixed_hparams : dict, optional
            Overrides for the default fixed parameters
            (``lgbm_params.search_fixed``).
        search_space : dict, optional
            Overrides for the default search space
            (``lgbm_params.search_space``).

        Returns
        -------
        The result object produced by ``bayesian.bayesian_search``.
        """
        # Get default hparams
        search_space = lgbm_params.search_space if search_space is None else search_space
        fixed_params = lgbm_params.search_fixed if fixed_hparams is None else fixed_hparams
        print("Fixed hparameters:")
        print(fixed_params)
        # NOTE: the fixed params are applied directly onto this instance's
        # attributes, so they remain in effect after the search returns.
        self.__dict__.update(fixed_params)
        # Search
        print(f'Minimize metric: {self.minimize_metric}')
        res_search = bayesian.bayesian_search(
            self.__init__,
            self.fit,
            x,
            search_space,
            fixed_params,
            num_iter = num_iter,
            mode = 'bayesian',
            minimize = self.minimize_metric,
            trials_path = trials_path,
            model_tag = 'lgbm')
        return res_search
|
[
"pandas.DataFrame",
"os.path.abspath",
"lightgbm.LGBMClassifier",
"importlib.import_module",
"numpy.argmax",
"scipy.sparse.issparse",
"sklearn.metrics.accuracy_score",
"shap.TreeExplainer",
"numpy.mean",
"numpy.array",
"lightgbm.LGBMRegressor",
"shap.summary_plot",
"pandas.concat",
"logging.getLogger"
] |
[((158, 177), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (175, 177), False, 'import logging\n'), ((455, 519), 'importlib.import_module', 'importlib.import_module', ([], {'name': 'f""".{module}"""', 'package': 'project_name'}), "(name=f'.{module}', package=project_name)\n", (478, 519), False, 'import importlib\n'), ((3381, 3409), 'numpy.mean', 'np.mean', (['model_preds'], {'axis': '(0)'}), '(model_preds, axis=0)\n', (3388, 3409), True, 'import numpy as np\n'), ((4320, 4379), 'pandas.concat', 'pd.concat', (['[data, X_independent]'], {'axis': '(0)', 'ignore_index': '(True)'}), '([data, X_independent], axis=0, ignore_index=True)\n', (4329, 4379), True, 'import pandas as pd\n'), ((5721, 5750), 'scipy.sparse.issparse', 'scipy.sparse.issparse', (['X_data'], {}), '(X_data)\n', (5742, 5750), False, 'import scipy\n'), ((5859, 5873), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (5871, 5873), True, 'import pandas as pd\n'), ((11018, 11052), 'shap.TreeExplainer', 'shap.TreeExplainer', (['self.models[0]'], {}), '(self.models[0])\n', (11036, 11052), False, 'import shap\n'), ((11348, 11456), 'shap.summary_plot', 'shap.summary_plot', (['shap_values', 'X'], {'feature_names': 'features', 'class_names': 'class_names', 'plot_type': 'plot_type'}), '(shap_values, X, feature_names=features, class_names=\n class_names, plot_type=plot_type)\n', (11365, 11456), False, 'import shap\n'), ((252, 269), 'os.path.abspath', 'abspath', (['__file__'], {}), '(__file__)\n', (259, 269), False, 'from os.path import dirname, abspath, split\n'), ((7774, 7808), 'lightgbm.LGBMRegressor', 'LGBMRegressor', ([], {}), '(**self.lgbm_hparams)\n', (7787, 7808), False, 'from lightgbm import LGBMClassifier, LGBMRegressor\n'), ((7848, 7883), 'lightgbm.LGBMClassifier', 'LGBMClassifier', ([], {}), '(**self.lgbm_hparams)\n', (7862, 7883), False, 'from lightgbm import LGBMClassifier, LGBMRegressor\n'), ((2777, 2802), 'numpy.argmax', 'np.argmax', (['y_pred'], {'axis': '(1)'}), '(y_pred, 
axis=1)\n', (2786, 2802), True, 'import numpy as np\n'), ((2883, 2937), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['y_true', '(y_pred > self.binary_threshold)'], {}), '(y_true, y_pred > self.binary_threshold)\n', (2897, 2937), False, 'from sklearn.metrics import accuracy_score\n'), ((5662, 5683), 'numpy.array', 'np.array', (['val_data[1]'], {}), '(val_data[1])\n', (5670, 5683), True, 'import numpy as np\n'), ((7540, 7560), 'numpy.array', 'np.array', (['y_trn_fold'], {}), '(y_trn_fold)\n', (7548, 7560), True, 'import numpy as np\n'), ((7602, 7622), 'numpy.array', 'np.array', (['y_val_fold'], {}), '(y_val_fold)\n', (7610, 7622), True, 'import numpy as np\n')]
|
from django.forms import RadioSelect
from .base import WidgetTest
class RadioSelectTest(WidgetTest):
    """Rendering tests for RadioSelect (ul/li-based radio button markup)."""
    # Widget class under test; ``check_html`` instantiates/renders it and
    # compares the produced markup against the expected HTML.
    widget = RadioSelect
    def test_render(self):
        """The selected value ('J') renders with a ``checked`` attribute."""
        self.check_html(self.widget(choices=self.beatles), 'beatle', 'J', html=(
            """<ul>
<li><label><input checked type="radio" name="beatle" value="J" /> John</label></li>
<li><label><input type="radio" name="beatle" value="P" /> Paul</label></li>
<li><label><input type="radio" name="beatle" value="G" /> George</label></li>
<li><label><input type="radio" name="beatle" value="R" /> Ringo</label></li>
</ul>"""
        ))
    def test_nested_choices(self):
        """Optgroup-style nested choices render as nested <ul> lists with
        hierarchical ids (media_1, media_1_0, ...)."""
        nested_choices = (
            ('unknown', 'Unknown'),
            ('Audio', (('vinyl', 'Vinyl'), ('cd', 'CD'))),
            ('Video', (('vhs', 'VHS'), ('dvd', 'DVD'))),
        )
        html = """
        <ul id="media">
        <li>
        <label for="media_0"><input id="media_0" name="nestchoice" type="radio" value="unknown" /> Unknown</label>
        </li>
        <li>Audio<ul id="media_1">
        <li>
        <label for="media_1_0"><input id="media_1_0" name="nestchoice" type="radio" value="vinyl" /> Vinyl</label>
        </li>
        <li><label for="media_1_1"><input id="media_1_1" name="nestchoice" type="radio" value="cd" /> CD</label></li>
        </ul></li>
        <li>Video<ul id="media_2">
        <li><label for="media_2_0"><input id="media_2_0" name="nestchoice" type="radio" value="vhs" /> VHS</label></li>
        <li>
        <label for="media_2_1">
        <input checked id="media_2_1" name="nestchoice" type="radio" value="dvd" /> DVD
        </label>
        </li>
        </ul></li>
        </ul>
        """
        self.check_html(
            self.widget(choices=nested_choices), 'nestchoice', 'dvd',
            attrs={'id': 'media'}, html=html,
        )
    def test_constructor_attrs(self):
        """
        Attributes provided at instantiation are passed to the constituent
        inputs.
        """
        widget = RadioSelect(attrs={'id': 'foo'}, choices=self.beatles)
        html = """
        <ul id="foo">
        <li>
        <label for="foo_0"><input checked type="radio" id="foo_0" value="J" name="beatle" /> John</label>
        </li>
        <li><label for="foo_1"><input type="radio" id="foo_1" value="P" name="beatle" /> Paul</label></li>
        <li><label for="foo_2"><input type="radio" id="foo_2" value="G" name="beatle" /> George</label></li>
        <li><label for="foo_3"><input type="radio" id="foo_3" value="R" name="beatle" /> Ringo</label></li>
        </ul>
        """
        self.check_html(widget, 'beatle', 'J', html=html)
    def test_render_attrs(self):
        """
        Attributes provided at render-time are passed to the constituent
        inputs.
        """
        html = """
        <ul id="bar">
        <li>
        <label for="bar_0"><input checked type="radio" id="bar_0" value="J" name="beatle" /> John</label>
        </li>
        <li><label for="bar_1"><input type="radio" id="bar_1" value="P" name="beatle" /> Paul</label></li>
        <li><label for="bar_2"><input type="radio" id="bar_2" value="G" name="beatle" /> George</label></li>
        <li><label for="bar_3"><input type="radio" id="bar_3" value="R" name="beatle" /> Ringo</label></li>
        </ul>
        """
        self.check_html(self.widget(choices=self.beatles), 'beatle', 'J', attrs={'id': 'bar'}, html=html)
|
[
"django.forms.RadioSelect"
] |
[((2063, 2117), 'django.forms.RadioSelect', 'RadioSelect', ([], {'attrs': "{'id': 'foo'}", 'choices': 'self.beatles'}), "(attrs={'id': 'foo'}, choices=self.beatles)\n", (2074, 2117), False, 'from django.forms import RadioSelect\n')]
|
from http import HTTPStatus
from typing import List
from fastapi import APIRouter, Depends, Query, Request
from fastapi.concurrency import run_in_threadpool
from sqlalchemy.orm import Session
import mlrun.api.crud
import mlrun.api.utils.auth.verifier
import mlrun.api.utils.singletons.project_member
from mlrun.api import schemas
from mlrun.api.api import deps
from mlrun.api.api.utils import log_and_raise
from mlrun.api.schemas.artifact import ArtifactsFormat
from mlrun.api.utils.singletons.db import get_db
from mlrun.config import config
from mlrun.utils import is_legacy_artifact, logger
# Shared router; the artifact endpoints below are mounted onto the main app.
router = APIRouter()
@router.post("/artifact/{project}/{uid}/{key:path}")
async def store_artifact(
    request: Request,
    project: str,
    uid: str,
    key: str,
    tag: str = "",
    # ``iter`` intentionally shadows the builtin -- it is the public query
    # parameter name and cannot be renamed without breaking clients.
    iter: int = 0,
    auth_info: mlrun.api.schemas.AuthInfo = Depends(deps.authenticate_request),
    db_session: Session = Depends(deps.get_db_session),
):
    """Store an artifact under ``project/uid/key`` from the request's JSON body.

    Ensures the project exists, verifies the caller's store permission, then
    persists the artifact.  Responds 400 when the body is not valid JSON.
    """
    # The project-member and DB calls are synchronous; run them in a
    # threadpool so they do not block the event loop.
    await run_in_threadpool(
        mlrun.api.utils.singletons.project_member.get_project_member().ensure_project,
        db_session,
        project,
        auth_info=auth_info,
    )
    await run_in_threadpool(
        mlrun.api.utils.auth.verifier.AuthVerifier().query_project_resource_permissions,
        mlrun.api.schemas.AuthorizationResourceTypes.artifact,
        project,
        key,
        mlrun.api.schemas.AuthorizationAction.store,
        auth_info,
    )
    data = None
    try:
        data = await request.json()
    except ValueError:
        # request.json() raises ValueError on malformed JSON.
        log_and_raise(HTTPStatus.BAD_REQUEST.value, reason="bad JSON body")
    logger.debug("Storing artifact", data=data)
    await run_in_threadpool(
        mlrun.api.crud.Artifacts().store_artifact,
        db_session,
        key,
        data,
        uid,
        tag,
        iter,
        project,
    )
    return {}
@router.get("/projects/{project}/artifact-tags")
def list_artifact_tags(
    project: str,
    auth_info: mlrun.api.schemas.AuthInfo = Depends(deps.authenticate_request),
    db_session: Session = Depends(deps.get_db_session),
):
    """List a project's artifact tags, filtered to artifacts the caller may read."""
    mlrun.api.utils.auth.verifier.AuthVerifier().query_project_permissions(
        project,
        mlrun.api.schemas.AuthorizationAction.read,
        auth_info,
    )
    tag_tuples = get_db().list_artifact_tags(db_session, project)
    # Tuples are indexed with [1] as the artifact key and [2] as the tag --
    # presumably [0] is the project; verify against get_db().list_artifact_tags.
    artifact_key_to_tag = {tag_tuple[1]: tag_tuple[2] for tag_tuple in tag_tuples}
    allowed_artifact_keys = mlrun.api.utils.auth.verifier.AuthVerifier().filter_project_resources_by_permissions(
        mlrun.api.schemas.AuthorizationResourceTypes.artifact,
        list(artifact_key_to_tag.keys()),
        lambda artifact_key: (
            project,
            artifact_key,
        ),
        auth_info,
    )
    # Keep only tags whose artifact key survived the permission filter.
    tags = [
        tag_tuple[2]
        for tag_tuple in tag_tuples
        if tag_tuple[1] in allowed_artifact_keys
    ]
    return {
        "project": project,
        "tags": tags,
    }
@router.get("/projects/{project}/artifact/{key:path}")
def get_artifact(
    project: str,
    key: str,
    tag: str = "latest",
    iter: int = 0,
    format_: ArtifactsFormat = Query(ArtifactsFormat.legacy, alias="format"),
    auth_info: mlrun.api.schemas.AuthInfo = Depends(deps.authenticate_request),
    db_session: Session = Depends(deps.get_db_session),
):
    """Fetch a single artifact, then verify the caller is allowed to read it."""
    artifact = mlrun.api.crud.Artifacts().get_artifact(
        db_session, key, tag, iter, project, format_
    )
    verifier = mlrun.api.utils.auth.verifier.AuthVerifier()
    verifier.query_project_resource_permissions(
        mlrun.api.schemas.AuthorizationResourceTypes.artifact,
        project,
        key,
        mlrun.api.schemas.AuthorizationAction.read,
        auth_info,
    )
    return {"data": artifact}
@router.delete("/artifact/{project}/{uid}")
def delete_artifact(
    project: str,
    uid: str,
    key: str,
    tag: str = "",
    auth_info: mlrun.api.schemas.AuthInfo = Depends(deps.authenticate_request),
    db_session: Session = Depends(deps.get_db_session),
):
    """Delete one artifact by project/key/tag after a permission check.

    NOTE(review): the ``uid`` path parameter is accepted but never passed to
    ``delete_artifact`` -- confirm whether deletion should be uid-scoped.
    """
    mlrun.api.utils.auth.verifier.AuthVerifier().query_project_resource_permissions(
        mlrun.api.schemas.AuthorizationResourceTypes.artifact,
        project,
        key,
        mlrun.api.schemas.AuthorizationAction.delete,
        auth_info,
    )
    mlrun.api.crud.Artifacts().delete_artifact(db_session, key, tag, project)
    return {}
@router.get("/artifacts")
def list_artifacts(
    project: str = None,
    name: str = None,
    tag: str = None,
    kind: str = None,
    category: schemas.ArtifactCategories = None,
    labels: List[str] = Query([], alias="label"),
    # ``iter`` shadows the builtin but is the public query-parameter name.
    iter: int = Query(None, ge=0),
    best_iteration: bool = Query(False, alias="best-iteration"),
    format_: ArtifactsFormat = Query(ArtifactsFormat.legacy, alias="format"),
    auth_info: mlrun.api.schemas.AuthInfo = Depends(deps.authenticate_request),
    db_session: Session = Depends(deps.get_db_session),
):
    """List artifacts matching the given filters, restricted by permissions."""
    if project is None:
        project = config.default_project
    mlrun.api.utils.auth.verifier.AuthVerifier().query_project_permissions(
        project,
        mlrun.api.schemas.AuthorizationAction.read,
        auth_info,
    )
    artifacts = mlrun.api.crud.Artifacts().list_artifacts(
        db_session,
        project,
        name,
        tag,
        labels,
        kind=kind,
        category=category,
        iter=iter,
        best_iteration=best_iteration,
        format_=format_,
    )
    # Drop artifacts the caller is not allowed to read.
    artifacts = mlrun.api.utils.auth.verifier.AuthVerifier().filter_project_resources_by_permissions(
        mlrun.api.schemas.AuthorizationResourceTypes.artifact,
        artifacts,
        _artifact_project_and_resource_name_extractor,
        auth_info,
    )
    return {
        "artifacts": artifacts,
    }
@router.delete("/artifacts")
def delete_artifacts(
    project: str = mlrun.mlconf.default_project,
    name: str = "",
    tag: str = "",
    labels: List[str] = Query([], alias="label"),
    auth_info: mlrun.api.schemas.AuthInfo = Depends(deps.authenticate_request),
    db_session: Session = Depends(deps.get_db_session),
):
    """Delete all artifacts matching the filters, after checking delete
    permission on every matched artifact (the list is fetched first so the
    permission check can cover each one)."""
    artifacts = mlrun.api.crud.Artifacts().list_artifacts(
        db_session, project, name, tag, labels
    )
    mlrun.api.utils.auth.verifier.AuthVerifier().query_project_resources_permissions(
        mlrun.api.schemas.AuthorizationResourceTypes.artifact,
        artifacts,
        _artifact_project_and_resource_name_extractor,
        mlrun.api.schemas.AuthorizationAction.delete,
        auth_info,
    )
    mlrun.api.crud.Artifacts().delete_artifacts(db_session, project, name, tag, labels)
    return {}
# Extract project and resource name from legacy artifact structure as well as from new format
def _artifact_project_and_resource_name_extractor(artifact):
    """Return ``(project, db_key)`` for a legacy- or new-format artifact dict."""
    default_project = mlrun.mlconf.default_project
    if is_legacy_artifact(artifact):
        # Legacy artifacts keep project/db_key at the top level.
        return artifact.get("project", default_project), artifact["db_key"]
    # New-format artifacts nest them under metadata/spec.
    metadata = artifact.get("metadata")
    spec = artifact.get("spec")
    return metadata.get("project", default_project), spec["db_key"]
|
[
"mlrun.api.api.utils.log_and_raise",
"mlrun.api.utils.singletons.db.get_db",
"fastapi.Query",
"mlrun.utils.is_legacy_artifact",
"fastapi.Depends",
"fastapi.APIRouter",
"mlrun.utils.logger.debug"
] |
[((606, 617), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (615, 617), False, 'from fastapi import APIRouter, Depends, Query, Request\n'), ((849, 883), 'fastapi.Depends', 'Depends', (['deps.authenticate_request'], {}), '(deps.authenticate_request)\n', (856, 883), False, 'from fastapi import APIRouter, Depends, Query, Request\n'), ((911, 939), 'fastapi.Depends', 'Depends', (['deps.get_db_session'], {}), '(deps.get_db_session)\n', (918, 939), False, 'from fastapi import APIRouter, Depends, Query, Request\n'), ((1587, 1630), 'mlrun.utils.logger.debug', 'logger.debug', (['"""Storing artifact"""'], {'data': 'data'}), "('Storing artifact', data=data)\n", (1599, 1630), False, 'from mlrun.utils import is_legacy_artifact, logger\n'), ((1972, 2006), 'fastapi.Depends', 'Depends', (['deps.authenticate_request'], {}), '(deps.authenticate_request)\n', (1979, 2006), False, 'from fastapi import APIRouter, Depends, Query, Request\n'), ((2034, 2062), 'fastapi.Depends', 'Depends', (['deps.get_db_session'], {}), '(deps.get_db_session)\n', (2041, 2062), False, 'from fastapi import APIRouter, Depends, Query, Request\n'), ((3095, 3140), 'fastapi.Query', 'Query', (['ArtifactsFormat.legacy'], {'alias': '"""format"""'}), "(ArtifactsFormat.legacy, alias='format')\n", (3100, 3140), False, 'from fastapi import APIRouter, Depends, Query, Request\n'), ((3186, 3220), 'fastapi.Depends', 'Depends', (['deps.authenticate_request'], {}), '(deps.authenticate_request)\n', (3193, 3220), False, 'from fastapi import APIRouter, Depends, Query, Request\n'), ((3248, 3276), 'fastapi.Depends', 'Depends', (['deps.get_db_session'], {}), '(deps.get_db_session)\n', (3255, 3276), False, 'from fastapi import APIRouter, Depends, Query, Request\n'), ((3864, 3898), 'fastapi.Depends', 'Depends', (['deps.authenticate_request'], {}), '(deps.authenticate_request)\n', (3871, 3898), False, 'from fastapi import APIRouter, Depends, Query, Request\n'), ((3926, 3954), 'fastapi.Depends', 'Depends', (['deps.get_db_session'], 
{}), '(deps.get_db_session)\n', (3933, 3954), False, 'from fastapi import APIRouter, Depends, Query, Request\n'), ((4519, 4543), 'fastapi.Query', 'Query', (['[]'], {'alias': '"""label"""'}), "([], alias='label')\n", (4524, 4543), False, 'from fastapi import APIRouter, Depends, Query, Request\n'), ((4561, 4578), 'fastapi.Query', 'Query', (['None'], {'ge': '(0)'}), '(None, ge=0)\n', (4566, 4578), False, 'from fastapi import APIRouter, Depends, Query, Request\n'), ((4607, 4643), 'fastapi.Query', 'Query', (['(False)'], {'alias': '"""best-iteration"""'}), "(False, alias='best-iteration')\n", (4612, 4643), False, 'from fastapi import APIRouter, Depends, Query, Request\n'), ((4676, 4721), 'fastapi.Query', 'Query', (['ArtifactsFormat.legacy'], {'alias': '"""format"""'}), "(ArtifactsFormat.legacy, alias='format')\n", (4681, 4721), False, 'from fastapi import APIRouter, Depends, Query, Request\n'), ((4767, 4801), 'fastapi.Depends', 'Depends', (['deps.authenticate_request'], {}), '(deps.authenticate_request)\n', (4774, 4801), False, 'from fastapi import APIRouter, Depends, Query, Request\n'), ((4829, 4857), 'fastapi.Depends', 'Depends', (['deps.get_db_session'], {}), '(deps.get_db_session)\n', (4836, 4857), False, 'from fastapi import APIRouter, Depends, Query, Request\n'), ((5853, 5877), 'fastapi.Query', 'Query', (['[]'], {'alias': '"""label"""'}), "([], alias='label')\n", (5858, 5877), False, 'from fastapi import APIRouter, Depends, Query, Request\n'), ((5923, 5957), 'fastapi.Depends', 'Depends', (['deps.authenticate_request'], {}), '(deps.authenticate_request)\n', (5930, 5957), False, 'from fastapi import APIRouter, Depends, Query, Request\n'), ((5985, 6013), 'fastapi.Depends', 'Depends', (['deps.get_db_session'], {}), '(deps.get_db_session)\n', (5992, 6013), False, 'from fastapi import APIRouter, Depends, Query, Request\n'), ((6698, 6726), 'mlrun.utils.is_legacy_artifact', 'is_legacy_artifact', (['artifact'], {}), '(artifact)\n', (6716, 6726), False, 'from mlrun.utils 
import is_legacy_artifact, logger\n'), ((1514, 1581), 'mlrun.api.api.utils.log_and_raise', 'log_and_raise', (['HTTPStatus.BAD_REQUEST.value'], {'reason': '"""bad JSON body"""'}), "(HTTPStatus.BAD_REQUEST.value, reason='bad JSON body')\n", (1527, 1581), False, 'from mlrun.api.api.utils import log_and_raise\n'), ((2254, 2262), 'mlrun.api.utils.singletons.db.get_db', 'get_db', ([], {}), '()\n', (2260, 2262), False, 'from mlrun.api.utils.singletons.db import get_db\n')]
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from pedal_settings import views
from django.utils.translation import ugettext_lazy as _
# Custom error views: Django looks these module-level names up by convention.
handler404 = views.error404
handler500 = views.error500

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'harp.views.dashboard', name='dashboard'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^$', views.home, name='home'),
    url(_(r'^about/$'), views.about, name='about'),
    url(_(r'^dashboard/$'), views.dashboard, name='dashboard'),
    url(_(r'^dashboard/all/$'), views.show_all_settings, name='show_all_settings'),
    url(r'^dashboard/download/$', views.download_all_settings, name='download_all_settings'),
    url(_(r'^dashboard/index$'), views.get_by_index, name='get_by_index'),
    url(_(r'^dashboard/prime$'), views.get_by_prime, name='get_by_prime'),
    url(_(r'^dashboard/accidents$'), views.get_by_accidents, name='get_by_accidents'),
    url(_(r'^dashboard/index/(?P<pedal_index>\d+)/$'), views.show_settings_by_index, name='show_settings_by_number'),
    url(_(r'^dashboard/prime/(?P<pedal_prime>\w+)/$'), views.show_settings_by_prime, name='show_settings_by_prime'),
    url(_(r'^dashboard/accidents/(?P<accidents>\w+)/$'), views.show_settings_by_accidents, name='show_settings_by_accidents'),
    url(_(r'^dashboard/statistics/$'), views.show_statistics, name='show_statistics'),
    # BUGFIX: a pattern handed to include() must NOT be anchored with '$';
    # with r'^admin/$' none of the admin app's sub-URLs (e.g. admin/login/)
    # could ever match, because nothing may follow a '$'.
    url(r'^admin/', include(admin.site.urls)),
)
|
[
"django.utils.translation.ugettext_lazy",
"django.conf.urls.include",
"django.conf.urls.url"
] |
[((385, 419), 'django.conf.urls.url', 'url', (['"""^$"""', 'views.home'], {'name': '"""home"""'}), "('^$', views.home, name='home')\n", (388, 419), False, 'from django.conf.urls import patterns, include, url\n'), ((626, 718), 'django.conf.urls.url', 'url', (['"""^dashboard/download/$"""', 'views.download_all_settings'], {'name': '"""download_all_settings"""'}), "('^dashboard/download/$', views.download_all_settings, name=\n 'download_all_settings')\n", (629, 718), False, 'from django.conf.urls import patterns, include, url\n'), ((430, 443), 'django.utils.translation.ugettext_lazy', '_', (['"""^about/$"""'], {}), "('^about/$')\n", (431, 443), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((482, 499), 'django.utils.translation.ugettext_lazy', '_', (['"""^dashboard/$"""'], {}), "('^dashboard/$')\n", (483, 499), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((546, 567), 'django.utils.translation.ugettext_lazy', '_', (['"""^dashboard/all/$"""'], {}), "('^dashboard/all/$')\n", (547, 567), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((724, 746), 'django.utils.translation.ugettext_lazy', '_', (['"""^dashboard/index$"""'], {}), "('^dashboard/index$')\n", (725, 746), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((799, 821), 'django.utils.translation.ugettext_lazy', '_', (['"""^dashboard/prime$"""'], {}), "('^dashboard/prime$')\n", (800, 821), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((874, 900), 'django.utils.translation.ugettext_lazy', '_', (['"""^dashboard/accidents$"""'], {}), "('^dashboard/accidents$')\n", (875, 900), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((961, 1006), 'django.utils.translation.ugettext_lazy', '_', (['"""^dashboard/index/(?P<pedal_index>\\\\d+)/$"""'], {}), "('^dashboard/index/(?P<pedal_index>\\\\d+)/$')\n", (962, 1006), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1079, 1124), 
'django.utils.translation.ugettext_lazy', '_', (['"""^dashboard/prime/(?P<pedal_prime>\\\\w+)/$"""'], {}), "('^dashboard/prime/(?P<pedal_prime>\\\\w+)/$')\n", (1080, 1124), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1196, 1243), 'django.utils.translation.ugettext_lazy', '_', (['"""^dashboard/accidents/(?P<accidents>\\\\w+)/$"""'], {}), "('^dashboard/accidents/(?P<accidents>\\\\w+)/$')\n", (1197, 1243), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1323, 1351), 'django.utils.translation.ugettext_lazy', '_', (['"""^dashboard/statistics/$"""'], {}), "('^dashboard/statistics/$')\n", (1324, 1351), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1423, 1447), 'django.conf.urls.include', 'include', (['admin.site.urls'], {}), '(admin.site.urls)\n', (1430, 1447), False, 'from django.conf.urls import patterns, include, url\n')]
|
import os
import tempfile
import unittest
from dango import config
from dango import utils
import ruamel.yaml
SAMPLE_CONFIG = """
a:
stuff: 1
unicode: ( ͡° ͜ʖ ͡°)
"""
SAMPLE_COMMENTED_CONFIG="""
# Header comment
a: 1
b:
- one
- two # Comment
# commented: 123
unicode: ( ͡° ͜ʖ ͡°)
"""[1:]
class ConfigTest(unittest.TestCase):
    """Behavioural tests for config.StringConfiguration registration/lookup."""
    def test_simple(self):
        """A value present in the YAML overrides the registered default."""
        c = config.StringConfiguration(SAMPLE_CONFIG)
        a = c.root.add_group("a")
        stuff = a.register("stuff", default=2)
        self.assertEqual(stuff(), 1)
    def test_unicode(self):
        """Non-ASCII values round-trip unchanged through the config."""
        c = config.StringConfiguration(SAMPLE_CONFIG)
        a = c.root.add_group("a")
        stuff = a.register("unicode", default=2)
        self.assertEqual(stuff(), "( ͡° ͜ʖ ͡°)")
    def test_default(self):
        """With an empty config, lookups fall back to the registered default."""
        c = config.StringConfiguration("")
        a = c.root.add_group("a")
        stuff = a.register("stuff", default=2)
        self.assertEqual(stuff(), 2)
    def test_default_dump(self):
        """Dumping an empty config serializes defaults with a marker comment."""
        c = config.StringConfiguration("")
        a = c.root.add_group("a")
        a.register("stuff", default=2)
        self.assertEqual("a:\n stuff: 2 # Default value\n", c.dumps())
    def test_invalid_dump(self):
        """Registering without a default raises, and dumps a 'Required' stub."""
        c = config.StringConfiguration("")
        a = c.root.add_group("a")
        with self.assertRaises(config.InvalidConfig):
            a.register("stuff")
        self.assertEqual("a:\n stuff: # Required value\n", c.dumps())
    def test_simple_change(self):
        """Registered accessors see later mutations of the backing data."""
        c = config.StringConfiguration(SAMPLE_CONFIG)
        a = c.root.add_group("a")
        stuff = a.register("stuff", default=2)
        self.assertEqual(stuff(), 1)
        c._data['a']['stuff'] = 3
        self.assertEqual(stuff(), 3)
    def test_class(self):
        """A class can register accessors via a group named after it
        (snake_cased); accessors stay live against the config data."""
        class A:
            def __init__(self, config):
                self.stuff = config.register("stuff")
        c = config.StringConfiguration(SAMPLE_CONFIG)
        a_group = c.root.add_group(utils.snakify(A.__name__))
        a = A(a_group)
        self.assertEqual(a.stuff(), 1)
        c._data['a']['stuff'] = 3
        self.assertEqual(a.stuff(), 3)
class FileConfigurationTest(unittest.TestCase):
def setUp(self):
self.tmpfile = tempfile.mktemp()
def tearDown(self):
os.remove(self.tmpfile)
def test_roundtrip(self):
with open(self.tmpfile, 'w', encoding="utf8") as f:
f.write(SAMPLE_COMMENTED_CONFIG)
fconf = config.FileConfiguration(self.tmpfile)
fconf.load()
fconf.save()
with open(self.tmpfile, encoding="utf8") as f:
self.assertEqual(SAMPLE_COMMENTED_CONFIG, f.read())
|
[
"os.remove",
"dango.utils.snakify",
"dango.config.register",
"dango.config.FileConfiguration",
"tempfile.mktemp",
"dango.config.StringConfiguration"
] |
[((375, 416), 'dango.config.StringConfiguration', 'config.StringConfiguration', (['SAMPLE_CONFIG'], {}), '(SAMPLE_CONFIG)\n', (401, 416), False, 'from dango import config\n'), ((577, 618), 'dango.config.StringConfiguration', 'config.StringConfiguration', (['SAMPLE_CONFIG'], {}), '(SAMPLE_CONFIG)\n', (603, 618), False, 'from dango import config\n'), ((793, 823), 'dango.config.StringConfiguration', 'config.StringConfiguration', (['""""""'], {}), "('')\n", (819, 823), False, 'from dango import config\n'), ((989, 1019), 'dango.config.StringConfiguration', 'config.StringConfiguration', (['""""""'], {}), "('')\n", (1015, 1019), False, 'from dango import config\n'), ((1213, 1243), 'dango.config.StringConfiguration', 'config.StringConfiguration', (['""""""'], {}), "('')\n", (1239, 1243), False, 'from dango import config\n'), ((1484, 1525), 'dango.config.StringConfiguration', 'config.StringConfiguration', (['SAMPLE_CONFIG'], {}), '(SAMPLE_CONFIG)\n', (1510, 1525), False, 'from dango import config\n'), ((1869, 1910), 'dango.config.StringConfiguration', 'config.StringConfiguration', (['SAMPLE_CONFIG'], {}), '(SAMPLE_CONFIG)\n', (1895, 1910), False, 'from dango import config\n'), ((2207, 2224), 'tempfile.mktemp', 'tempfile.mktemp', ([], {}), '()\n', (2222, 2224), False, 'import tempfile\n'), ((2258, 2281), 'os.remove', 'os.remove', (['self.tmpfile'], {}), '(self.tmpfile)\n', (2267, 2281), False, 'import os\n'), ((2435, 2473), 'dango.config.FileConfiguration', 'config.FileConfiguration', (['self.tmpfile'], {}), '(self.tmpfile)\n', (2459, 2473), False, 'from dango import config\n'), ((1946, 1971), 'dango.utils.snakify', 'utils.snakify', (['A.__name__'], {}), '(A.__name__)\n', (1959, 1971), False, 'from dango import utils\n'), ((1831, 1855), 'dango.config.register', 'config.register', (['"""stuff"""'], {}), "('stuff')\n", (1846, 1855), False, 'from dango import config\n')]
|
from easydict import EasyDict as edict
import gflags
import tabulate
# --- Paths, maps and model selection ---------------------------------------
gflags.DEFINE_string('assets_dir', '', '')
gflags.DEFINE_string('dataset_dir', '', '')
gflags.DEFINE_string('maps', '', 'Comma separated map names')
gflags.DEFINE_string('model_spec', '', '')
gflags.DEFINE_string('model_file', '', '')
gflags.DEFINE_boolean('resume', False, 'Resume training.')
# --- Dataset sampling and labels -------------------------------------------
gflags.DEFINE_float('dmin', None, '')
gflags.DEFINE_float('dmax', None, '')
gflags.DEFINE_float('overlap_ratio', 0.3, '')
gflags.DEFINE_boolean('normalize_wp', False, '')
gflags.DEFINE_boolean('use_gt_wp', True, '')
gflags.DEFINE_boolean('check_wp', False, '')
gflags.DEFINE_boolean('jitter', False, 'Jitter agent initial position and heading')
gflags.DEFINE_boolean('proximity_label', False, 'True if model requires proximity labels')
gflags.DEFINE_boolean('heading_diff_label', False, 'True if model requires heading_diff labels')
# --- Camera / rendering ----------------------------------------------------
gflags.DEFINE_integer('resolution', 64, 'Image resolution')
gflags.DEFINE_float('camera_x', 0.065, 'Camera x position in agent local coord system')
gflags.DEFINE_float('camera_y', 0.0, 'Camera y position in agent local coord system')
gflags.DEFINE_float('camera_z', 1.0, 'Camera z position in agent local coord system')
gflags.DEFINE_float('hfov', 118.6, '')
gflags.DEFINE_float('vfov', 106.9, '')
# --- Training hyper-parameters ---------------------------------------------
gflags.DEFINE_integer('batch_size', 64, '')
gflags.DEFINE_integer('samples_per_epoch', 1000000, '')
gflags.DEFINE_integer('max_epochs', 30, '')
gflags.DEFINE_integer('lr_decay_epoch', 100, '')
gflags.DEFINE_float('lr_decay_rate', 0.7, '')
gflags.DEFINE_integer('n_dataset_worker', 2, '')
# --- Infrastructure and logging --------------------------------------------
gflags.DEFINE_string('train_device', 'cuda', '')
gflags.DEFINE_integer('log_interval', 10, '')
gflags.DEFINE_integer('save_interval', 100, '')
gflags.DEFINE_string('persistent_server_cfg', '../gibson_persistent_servers_cfg.yaml', '')
gflags.DEFINE_boolean('trial', False, 'True to enable trial run (smaller datasets).')
# --- Visdom visualization --------------------------------------------------
gflags.DEFINE_string('visdom_env', 'main', '')
gflags.DEFINE_string('visdom_server', 'http://localhost', '')
gflags.DEFINE_integer('visdom_port', 5001, '')
gflags.DEFINE_integer('vis_interval', 100, '')
# --- Command-line overrides (consumed by set_s below) ----------------------
gflags.DEFINE_string('set', '', 'A comma separated assignment string that overwrites flag values.')
defaults = {}

def helper(d):
    """Recursively render a (possibly nested) dict as a plain-text table.

    Nested dicts are rendered first, so their table text becomes the value
    cell of the parent table.
    """
    rows = {key: helper(val) if isinstance(val, dict) else val
            for key, val in d.items()}
    return tabulate.tabulate(rows.items())
# The global flag tree: a dot-accessible dict (EasyDict) seeded from
# `defaults` and later augmented by fill()/set_s() or external config files.
g = edict(defaults)
# Because g's values are exposed as attributes, adding methods to g itself
# would risk name collisions with flags -- manipulate it via the module-level
# functions below instead.
def fill(args):
    """Copy every parsed gflags entry's value into the global flag tree ``g``."""
    for key in args.keys():
        flag = args[key]
        g[key] = flag.value
def set_s(set_str):
    """Apply a comma-separated list of ``a.b.c=value`` assignments to ``g``.

    Each value is evaluated as a Python literal when possible (with builtins
    disabled); otherwise the raw string is stored verbatim.  Empty segments
    are ignored, so trailing commas are harmless.
    """
    for assignment in set_str.split(','):
        if assignment == '':
            continue
        field, value = assignment.split('=')
        try:
            # SECURITY: eval() on user-supplied text.  Builtins are disabled,
            # which limits but does not eliminate the risk; prefer
            # ast.literal_eval if stricter parsing is ever required.
            value = eval(value, {'__builtins__': None})
        except Exception:
            # Not a Python literal (e.g. a bare string) -- keep it as-is.
            # BUGFIX: was a bare ``except:``, which would also swallow
            # KeyboardInterrupt/SystemExit.
            pass
        # Walk down the dotted path, creating nothing: intermediate groups
        # must already exist in g.
        attrs = field.split('.')
        node = g
        for attr in attrs[:-1]:
            node = node[attr]
        node[attrs[-1]] = value
def repr(fmt='plain'):
    """Render the whole flag tree ``g`` as a table in the given tabulate format.

    NOTE: this module-level function shadows the builtin ``repr`` inside this
    module; the name is kept because callers use it via the module namespace.
    """
    def render(d):
        # Render nested dicts first so they appear as sub-tables.
        rows = {key: render(val) if isinstance(val, dict) else val
                for key, val in d.items()}
        return tabulate.tabulate(rows.items(), tablefmt=fmt)
    return render(g)
|
[
"gflags.DEFINE_string",
"gflags.DEFINE_boolean",
"gflags.DEFINE_float",
"easydict.EasyDict",
"gflags.DEFINE_integer"
] |
[((71, 113), 'gflags.DEFINE_string', 'gflags.DEFINE_string', (['"""assets_dir"""', '""""""', '""""""'], {}), "('assets_dir', '', '')\n", (91, 113), False, 'import gflags\n'), ((114, 157), 'gflags.DEFINE_string', 'gflags.DEFINE_string', (['"""dataset_dir"""', '""""""', '""""""'], {}), "('dataset_dir', '', '')\n", (134, 157), False, 'import gflags\n'), ((158, 219), 'gflags.DEFINE_string', 'gflags.DEFINE_string', (['"""maps"""', '""""""', '"""Comma separated map names"""'], {}), "('maps', '', 'Comma separated map names')\n", (178, 219), False, 'import gflags\n'), ((220, 262), 'gflags.DEFINE_string', 'gflags.DEFINE_string', (['"""model_spec"""', '""""""', '""""""'], {}), "('model_spec', '', '')\n", (240, 262), False, 'import gflags\n'), ((263, 305), 'gflags.DEFINE_string', 'gflags.DEFINE_string', (['"""model_file"""', '""""""', '""""""'], {}), "('model_file', '', '')\n", (283, 305), False, 'import gflags\n'), ((306, 364), 'gflags.DEFINE_boolean', 'gflags.DEFINE_boolean', (['"""resume"""', '(False)', '"""Resume training."""'], {}), "('resume', False, 'Resume training.')\n", (327, 364), False, 'import gflags\n'), ((365, 402), 'gflags.DEFINE_float', 'gflags.DEFINE_float', (['"""dmin"""', 'None', '""""""'], {}), "('dmin', None, '')\n", (384, 402), False, 'import gflags\n'), ((403, 440), 'gflags.DEFINE_float', 'gflags.DEFINE_float', (['"""dmax"""', 'None', '""""""'], {}), "('dmax', None, '')\n", (422, 440), False, 'import gflags\n'), ((441, 486), 'gflags.DEFINE_float', 'gflags.DEFINE_float', (['"""overlap_ratio"""', '(0.3)', '""""""'], {}), "('overlap_ratio', 0.3, '')\n", (460, 486), False, 'import gflags\n'), ((487, 535), 'gflags.DEFINE_boolean', 'gflags.DEFINE_boolean', (['"""normalize_wp"""', '(False)', '""""""'], {}), "('normalize_wp', False, '')\n", (508, 535), False, 'import gflags\n'), ((536, 580), 'gflags.DEFINE_boolean', 'gflags.DEFINE_boolean', (['"""use_gt_wp"""', '(True)', '""""""'], {}), "('use_gt_wp', True, '')\n", (557, 580), False, 'import gflags\n'), ((581, 
625), 'gflags.DEFINE_boolean', 'gflags.DEFINE_boolean', (['"""check_wp"""', '(False)', '""""""'], {}), "('check_wp', False, '')\n", (602, 625), False, 'import gflags\n'), ((626, 713), 'gflags.DEFINE_boolean', 'gflags.DEFINE_boolean', (['"""jitter"""', '(False)', '"""Jitter agent initial position and heading"""'], {}), "('jitter', False,\n 'Jitter agent initial position and heading')\n", (647, 713), False, 'import gflags\n'), ((710, 804), 'gflags.DEFINE_boolean', 'gflags.DEFINE_boolean', (['"""proximity_label"""', '(False)', '"""True if model requires proximity labels"""'], {}), "('proximity_label', False,\n 'True if model requires proximity labels')\n", (731, 804), False, 'import gflags\n'), ((801, 901), 'gflags.DEFINE_boolean', 'gflags.DEFINE_boolean', (['"""heading_diff_label"""', '(False)', '"""True if model requires heading_diff labels"""'], {}), "('heading_diff_label', False,\n 'True if model requires heading_diff labels')\n", (822, 901), False, 'import gflags\n'), ((898, 957), 'gflags.DEFINE_integer', 'gflags.DEFINE_integer', (['"""resolution"""', '(64)', '"""Image resolution"""'], {}), "('resolution', 64, 'Image resolution')\n", (919, 957), False, 'import gflags\n'), ((958, 1049), 'gflags.DEFINE_float', 'gflags.DEFINE_float', (['"""camera_x"""', '(0.065)', '"""Camera x position in agent local coord system"""'], {}), "('camera_x', 0.065,\n 'Camera x position in agent local coord system')\n", (977, 1049), False, 'import gflags\n'), ((1046, 1135), 'gflags.DEFINE_float', 'gflags.DEFINE_float', (['"""camera_y"""', '(0.0)', '"""Camera y position in agent local coord system"""'], {}), "('camera_y', 0.0,\n 'Camera y position in agent local coord system')\n", (1065, 1135), False, 'import gflags\n'), ((1132, 1221), 'gflags.DEFINE_float', 'gflags.DEFINE_float', (['"""camera_z"""', '(1.0)', '"""Camera z position in agent local coord system"""'], {}), "('camera_z', 1.0,\n 'Camera z position in agent local coord system')\n", (1151, 1221), False, 'import gflags\n'), 
((1218, 1256), 'gflags.DEFINE_float', 'gflags.DEFINE_float', (['"""hfov"""', '(118.6)', '""""""'], {}), "('hfov', 118.6, '')\n", (1237, 1256), False, 'import gflags\n'), ((1257, 1295), 'gflags.DEFINE_float', 'gflags.DEFINE_float', (['"""vfov"""', '(106.9)', '""""""'], {}), "('vfov', 106.9, '')\n", (1276, 1295), False, 'import gflags\n'), ((1296, 1339), 'gflags.DEFINE_integer', 'gflags.DEFINE_integer', (['"""batch_size"""', '(64)', '""""""'], {}), "('batch_size', 64, '')\n", (1317, 1339), False, 'import gflags\n'), ((1340, 1395), 'gflags.DEFINE_integer', 'gflags.DEFINE_integer', (['"""samples_per_epoch"""', '(1000000)', '""""""'], {}), "('samples_per_epoch', 1000000, '')\n", (1361, 1395), False, 'import gflags\n'), ((1396, 1439), 'gflags.DEFINE_integer', 'gflags.DEFINE_integer', (['"""max_epochs"""', '(30)', '""""""'], {}), "('max_epochs', 30, '')\n", (1417, 1439), False, 'import gflags\n'), ((1440, 1488), 'gflags.DEFINE_integer', 'gflags.DEFINE_integer', (['"""lr_decay_epoch"""', '(100)', '""""""'], {}), "('lr_decay_epoch', 100, '')\n", (1461, 1488), False, 'import gflags\n'), ((1489, 1534), 'gflags.DEFINE_float', 'gflags.DEFINE_float', (['"""lr_decay_rate"""', '(0.7)', '""""""'], {}), "('lr_decay_rate', 0.7, '')\n", (1508, 1534), False, 'import gflags\n'), ((1535, 1583), 'gflags.DEFINE_integer', 'gflags.DEFINE_integer', (['"""n_dataset_worker"""', '(2)', '""""""'], {}), "('n_dataset_worker', 2, '')\n", (1556, 1583), False, 'import gflags\n'), ((1584, 1632), 'gflags.DEFINE_string', 'gflags.DEFINE_string', (['"""train_device"""', '"""cuda"""', '""""""'], {}), "('train_device', 'cuda', '')\n", (1604, 1632), False, 'import gflags\n'), ((1633, 1678), 'gflags.DEFINE_integer', 'gflags.DEFINE_integer', (['"""log_interval"""', '(10)', '""""""'], {}), "('log_interval', 10, '')\n", (1654, 1678), False, 'import gflags\n'), ((1679, 1726), 'gflags.DEFINE_integer', 'gflags.DEFINE_integer', (['"""save_interval"""', '(100)', '""""""'], {}), "('save_interval', 100, '')\n", (1700, 
1726), False, 'import gflags\n'), ((1727, 1821), 'gflags.DEFINE_string', 'gflags.DEFINE_string', (['"""persistent_server_cfg"""', '"""../gibson_persistent_servers_cfg.yaml"""', '""""""'], {}), "('persistent_server_cfg',\n '../gibson_persistent_servers_cfg.yaml', '')\n", (1747, 1821), False, 'import gflags\n'), ((1818, 1907), 'gflags.DEFINE_boolean', 'gflags.DEFINE_boolean', (['"""trial"""', '(False)', '"""True to enable trial run (smaller datasets)."""'], {}), "('trial', False,\n 'True to enable trial run (smaller datasets).')\n", (1839, 1907), False, 'import gflags\n'), ((1904, 1950), 'gflags.DEFINE_string', 'gflags.DEFINE_string', (['"""visdom_env"""', '"""main"""', '""""""'], {}), "('visdom_env', 'main', '')\n", (1924, 1950), False, 'import gflags\n'), ((1951, 2012), 'gflags.DEFINE_string', 'gflags.DEFINE_string', (['"""visdom_server"""', '"""http://localhost"""', '""""""'], {}), "('visdom_server', 'http://localhost', '')\n", (1971, 2012), False, 'import gflags\n'), ((2013, 2059), 'gflags.DEFINE_integer', 'gflags.DEFINE_integer', (['"""visdom_port"""', '(5001)', '""""""'], {}), "('visdom_port', 5001, '')\n", (2034, 2059), False, 'import gflags\n'), ((2060, 2106), 'gflags.DEFINE_integer', 'gflags.DEFINE_integer', (['"""vis_interval"""', '(100)', '""""""'], {}), "('vis_interval', 100, '')\n", (2081, 2106), False, 'import gflags\n'), ((2107, 2210), 'gflags.DEFINE_string', 'gflags.DEFINE_string', (['"""set"""', '""""""', '"""A comma separated assignment string that overwrites flag values."""'], {}), "('set', '',\n 'A comma separated assignment string that overwrites flag values.')\n", (2127, 2210), False, 'import gflags\n'), ((2508, 2523), 'easydict.EasyDict', 'edict', (['defaults'], {}), '(defaults)\n', (2513, 2523), True, 'from easydict import EasyDict as edict\n')]
|
import argparse
import sys
sys.path.insert(0, '../../../../common/')
import PipelineImporter as PipelineImporter
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--continent', help='Continent of the file')
parser.add_argument('--country', help='Country of the file')
parser.add_argument('--country_region', help='Country region of the file')
args = parser.parse_args()
PipelineImporter.start(args)
|
[
"PipelineImporter.start",
"sys.path.insert",
"argparse.ArgumentParser"
] |
[((28, 69), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""../../../../common/"""'], {}), "(0, '../../../../common/')\n", (43, 69), False, 'import sys\n'), ((156, 181), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (179, 181), False, 'import argparse\n'), ((431, 459), 'PipelineImporter.start', 'PipelineImporter.start', (['args'], {}), '(args)\n', (453, 459), True, 'import PipelineImporter as PipelineImporter\n')]
|
from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, \
chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, \
roots, sympify, S
x = Symbol('x')
def test_legendre():
assert legendre(0, x) == 1
assert legendre(1, x) == x
assert legendre(2, x) == ((3*x**2-1)/2).expand()
assert legendre(3, x) == ((5*x**3-3*x)/2).expand()
assert legendre(4, x) == ((35*x**4-30*x**2+3)/8).expand()
assert legendre(5, x) == ((63*x**5-70*x**3+15*x)/8).expand()
assert legendre(6, x) == ((231*x**6-315*x**4+105*x**2-5)/16).expand()
assert legendre(10, -1) == 1
assert legendre(11, -1) == -1
assert legendre(10, 1) == 1
assert legendre(11, 1) == 1
assert legendre(10, 0) != 0
assert legendre(11, 0) == 0
assert roots(legendre(4,x), x) == {
(Rational(3, 7) - Rational(2, 35)*30**S.Half)**S.Half: 1,
-(Rational(3, 7) - Rational(2, 35)*30**S.Half)**S.Half: 1,
(Rational(3, 7) + Rational(2, 35)*30**S.Half)**S.Half: 1,
-(Rational(3, 7) + Rational(2, 35)*30**S.Half)**S.Half: 1,
}
def test_assoc_legendre():
Plm=assoc_legendre
Q=(1-x**2)**Rational(1,2)
assert Plm(0, 0, x) == 1
assert Plm(1, 0, x) == x
assert Plm(1, 1, x) == -Q
assert Plm(2, 0, x) == (3*x**2-1)/2
assert Plm(2, 1, x) == -3*x*Q
assert Plm(2, 2, x) == 3*Q**2
assert Plm(3, 0, x) == (5*x**3-3*x)/2
assert Plm(3, 1, x).expand() == (( 3*(1-5*x**2)/2 ).expand() * Q).expand()
assert Plm(3, 2, x) == 15*x * Q**2
assert Plm(3, 3, x) == -15 * Q**3
# negative m
assert Plm(1,-1, x) == -Plm(1, 1, x)/2
assert Plm(2,-2, x) == Plm(2, 2, x)/24
assert Plm(2,-1, x) == -Plm(2, 1, x)/6
assert Plm(3,-3, x) == -Plm(3, 3, x)/720
assert Plm(3,-2, x) == Plm(3, 2, x)/120
assert Plm(3,-1, x) == -Plm(3, 1, x)/12
def test_chebyshev():
assert chebyshevt(0, x) == 1
assert chebyshevt(1, x) == x
assert chebyshevt(2, x) == 2*x**2-1
assert chebyshevt(3, x) == 4*x**3-3*x
for n in range(1, 4):
for k in range(n):
z = chebyshevt_root(n, k)
assert chebyshevt(n, z) == 0
for n in range(1, 4):
for k in range(n):
z = chebyshevu_root(n, k)
assert chebyshevu(n, z) == 0
def test_hermite():
assert hermite(6, x) == 64*x**6 - 480*x**4 + 720*x**2 - 120
|
[
"sympy.Symbol",
"sympy.Rational",
"sympy.chebyshevu_root",
"sympy.hermite",
"sympy.legendre",
"sympy.chebyshevu",
"sympy.chebyshevt",
"sympy.chebyshevt_root"
] |
[((173, 184), 'sympy.Symbol', 'Symbol', (['"""x"""'], {}), "('x')\n", (179, 184), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((218, 232), 'sympy.legendre', 'legendre', (['(0)', 'x'], {}), '(0, x)\n', (226, 232), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((249, 263), 'sympy.legendre', 'legendre', (['(1)', 'x'], {}), '(1, x)\n', (257, 263), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((280, 294), 'sympy.legendre', 'legendre', (['(2)', 'x'], {}), '(2, x)\n', (288, 294), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((333, 347), 'sympy.legendre', 'legendre', (['(3)', 'x'], {}), '(3, x)\n', (341, 347), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((388, 402), 'sympy.legendre', 'legendre', (['(4)', 'x'], {}), '(4, x)\n', (396, 402), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((450, 464), 'sympy.legendre', 'legendre', (['(5)', 'x'], {}), '(5, x)\n', (458, 464), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((515, 529), 'sympy.legendre', 'legendre', (['(6)', 'x'], {}), '(6, x)\n', (523, 529), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((590, 606), 
'sympy.legendre', 'legendre', (['(10)', '(-1)'], {}), '(10, -1)\n', (598, 606), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((623, 639), 'sympy.legendre', 'legendre', (['(11)', '(-1)'], {}), '(11, -1)\n', (631, 639), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((657, 672), 'sympy.legendre', 'legendre', (['(10)', '(1)'], {}), '(10, 1)\n', (665, 672), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((689, 704), 'sympy.legendre', 'legendre', (['(11)', '(1)'], {}), '(11, 1)\n', (697, 704), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((721, 736), 'sympy.legendre', 'legendre', (['(10)', '(0)'], {}), '(10, 0)\n', (729, 736), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((753, 768), 'sympy.legendre', 'legendre', (['(11)', '(0)'], {}), '(11, 0)\n', (761, 768), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((1156, 1170), 'sympy.Rational', 'Rational', (['(1)', '(2)'], {}), '(1, 2)\n', (1164, 1170), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((1889, 1905), 'sympy.chebyshevt', 'chebyshevt', (['(0)', 'x'], {}), '(0, x)\n', (1899, 1905), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, 
S\n'), ((1922, 1938), 'sympy.chebyshevt', 'chebyshevt', (['(1)', 'x'], {}), '(1, x)\n', (1932, 1938), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((1955, 1971), 'sympy.chebyshevt', 'chebyshevt', (['(2)', 'x'], {}), '(2, x)\n', (1965, 1971), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((1995, 2011), 'sympy.chebyshevt', 'chebyshevt', (['(3)', 'x'], {}), '(3, x)\n', (2005, 2011), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((2322, 2335), 'sympy.hermite', 'hermite', (['(6)', 'x'], {}), '(6, x)\n', (2329, 2335), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((792, 806), 'sympy.legendre', 'legendre', (['(4)', 'x'], {}), '(4, x)\n', (800, 806), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((2095, 2116), 'sympy.chebyshevt_root', 'chebyshevt_root', (['n', 'k'], {}), '(n, k)\n', (2110, 2116), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((2227, 2248), 'sympy.chebyshevu_root', 'chebyshevu_root', (['n', 'k'], {}), '(n, k)\n', (2242, 2248), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((2136, 2152), 'sympy.chebyshevt', 'chebyshevt', (['n', 'z'], {}), '(n, z)\n', (2146, 2152), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, 
assoc_legendre, Rational, roots, sympify, S\n'), ((2268, 2284), 'sympy.chebyshevu', 'chebyshevu', (['n', 'z'], {}), '(n, z)\n', (2278, 2284), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((825, 839), 'sympy.Rational', 'Rational', (['(3)', '(7)'], {}), '(3, 7)\n', (833, 839), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((959, 973), 'sympy.Rational', 'Rational', (['(3)', '(7)'], {}), '(3, 7)\n', (967, 973), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((842, 857), 'sympy.Rational', 'Rational', (['(2)', '(35)'], {}), '(2, 35)\n', (850, 857), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((892, 906), 'sympy.Rational', 'Rational', (['(3)', '(7)'], {}), '(3, 7)\n', (900, 906), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((976, 991), 'sympy.Rational', 'Rational', (['(2)', '(35)'], {}), '(2, 35)\n', (984, 991), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((1026, 1040), 'sympy.Rational', 'Rational', (['(3)', '(7)'], {}), '(3, 7)\n', (1034, 1040), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((909, 924), 'sympy.Rational', 'Rational', (['(2)', '(35)'], {}), '(2, 35)\n', (917, 924), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, 
chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n'), ((1043, 1058), 'sympy.Rational', 'Rational', (['(2)', '(35)'], {}), '(2, 35)\n', (1051, 1058), False, 'from sympy import legendre, Symbol, hermite, chebyshevu, chebyshevt, chebyshevt_root, chebyshevu_root, assoc_legendre, Rational, roots, sympify, S\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_commandline
----------------------------------
Tests for `orpheum` module.
"""
from click.testing import CliRunner
def test_cli():
from orpheum.commandline import cli
runner = CliRunner()
result = runner.invoke(cli)
assert result.exit_code == 0
def test_cli_short_help():
from orpheum.commandline import cli
runner = CliRunner()
result = runner.invoke(cli, ["-h"])
assert result.exit_code == 0
def test_cli_long_help():
from orpheum.commandline import cli
runner = CliRunner()
result = runner.invoke(cli, ["--help"])
assert result.exit_code == 0
|
[
"click.testing.CliRunner"
] |
[((244, 255), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (253, 255), False, 'from click.testing import CliRunner\n'), ((405, 416), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (414, 416), False, 'from click.testing import CliRunner\n'), ((573, 584), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (582, 584), False, 'from click.testing import CliRunner\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Fri Apr 16 13:12:07 2021
@author: <NAME>
"""
import warnings
warnings.filterwarnings("ignore")
import time
import unittest
import numpy as np
from smoot.smoot import MOO
from smoot.zdt import ZDT
from smt.sampling_methods import LHS
from smt.problems import Branin
from smt.utils.sm_test_case import SMTestCase
from pymoo.factory import get_performance_indicator
class TestMOO(SMTestCase):
def test_Branin(self):
n_iter = 10
fun = Branin()
criterion = "EI"
mo = MOO(
n_iter=n_iter,
criterion=criterion,
xlimits=fun.xlimits,
random_state=42,
)
print("running test Branin 2D -> 1D")
start = time.time()
mo.optimize(fun=fun)
x_opt, y_opt = mo.result.X[0][0], mo.result.F[0][0]
print("x_opt :", x_opt)
print("y_opt :", y_opt)
print("seconds taken Branin: ", time.time() - start, "\n")
self.assertTrue(
np.allclose([[-3.14, 12.275]], x_opt, rtol=0.2)
or np.allclose([[3.14, 2.275]], x_opt, rtol=0.2)
or np.allclose([[9.42, 2.475]], x_opt, rtol=0.2)
)
self.assertAlmostEqual(0.39, float(y_opt), delta=1)
def test_zdt(self, type=1, criterion="EHVI", ndim=2, n_iter=10):
fun = ZDT(type=type, ndim=ndim)
mo = MOO(
n_iter=n_iter,
criterion=criterion,
random_state=1,
)
print("running test ZDT", type, ": " + str(ndim) + "D -> 2D,", criterion)
start = time.time()
mo.optimize(fun=fun)
print("seconds taken :", time.time() - start)
exact = fun.pareto(random_state=1)[1]
gd = get_performance_indicator("gd", exact)
dist = gd.calc(mo.result.F)
print("distance to the exact Pareto front", dist, "\n")
self.assertLess(dist, 1)
def test_zdt_2(self):
self.test_zdt(type=2, criterion="WB2S")
def test_zdt_3(self):
self.test_zdt(type=3, criterion="PI", n_iter=20)
def test_zdt_2_3Dto2D(self):
self.test_zdt(type=2, criterion="EHVI", ndim=3)
def test_train_pts_known(self):
fun = ZDT()
xlimits = fun.xlimits
sampling = LHS(xlimits=xlimits, random_state=42)
xt = sampling(20) # generating data as if it were known data
yt = fun(xt) # idem : "known" datapoint for training
mo = MOO(n_iter=10, criterion="MPI", xdoe=xt, ydoe=yt, random_state=42)
print("running test ZDT with known training points")
start = time.time()
mo.optimize(fun=fun)
print("seconds taken :", time.time() - start)
exact = fun.pareto(random_state=1)[1]
gd = get_performance_indicator("gd", exact)
dist = gd.calc(mo.result.F)
print("distance to the exact Pareto front", dist, "\n")
self.assertLess(dist, 1)
if __name__ == "__main__":
unittest.main()
|
[
"unittest.main",
"warnings.filterwarnings",
"pymoo.factory.get_performance_indicator",
"smt.problems.Branin",
"numpy.allclose",
"smoot.zdt.ZDT",
"time.time",
"smt.sampling_methods.LHS",
"smoot.smoot.MOO"
] |
[((102, 135), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (125, 135), False, 'import warnings\n'), ((2946, 2961), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2959, 2961), False, 'import unittest\n'), ((498, 506), 'smt.problems.Branin', 'Branin', ([], {}), '()\n', (504, 506), False, 'from smt.problems import Branin\n'), ((546, 623), 'smoot.smoot.MOO', 'MOO', ([], {'n_iter': 'n_iter', 'criterion': 'criterion', 'xlimits': 'fun.xlimits', 'random_state': '(42)'}), '(n_iter=n_iter, criterion=criterion, xlimits=fun.xlimits, random_state=42)\n', (549, 623), False, 'from smoot.smoot import MOO\n'), ((745, 756), 'time.time', 'time.time', ([], {}), '()\n', (754, 756), False, 'import time\n'), ((1338, 1363), 'smoot.zdt.ZDT', 'ZDT', ([], {'type': 'type', 'ndim': 'ndim'}), '(type=type, ndim=ndim)\n', (1341, 1363), False, 'from smoot.zdt import ZDT\n'), ((1378, 1433), 'smoot.smoot.MOO', 'MOO', ([], {'n_iter': 'n_iter', 'criterion': 'criterion', 'random_state': '(1)'}), '(n_iter=n_iter, criterion=criterion, random_state=1)\n', (1381, 1433), False, 'from smoot.smoot import MOO\n'), ((1579, 1590), 'time.time', 'time.time', ([], {}), '()\n', (1588, 1590), False, 'import time\n'), ((1733, 1771), 'pymoo.factory.get_performance_indicator', 'get_performance_indicator', (['"""gd"""', 'exact'], {}), "('gd', exact)\n", (1758, 1771), False, 'from pymoo.factory import get_performance_indicator\n'), ((2205, 2210), 'smoot.zdt.ZDT', 'ZDT', ([], {}), '()\n', (2208, 2210), False, 'from smoot.zdt import ZDT\n'), ((2260, 2297), 'smt.sampling_methods.LHS', 'LHS', ([], {'xlimits': 'xlimits', 'random_state': '(42)'}), '(xlimits=xlimits, random_state=42)\n', (2263, 2297), False, 'from smt.sampling_methods import LHS\n'), ((2443, 2509), 'smoot.smoot.MOO', 'MOO', ([], {'n_iter': '(10)', 'criterion': '"""MPI"""', 'xdoe': 'xt', 'ydoe': 'yt', 'random_state': '(42)'}), "(n_iter=10, criterion='MPI', xdoe=xt, ydoe=yt, random_state=42)\n", (2446, 2509), 
False, 'from smoot.smoot import MOO\n'), ((2587, 2598), 'time.time', 'time.time', ([], {}), '()\n', (2596, 2598), False, 'import time\n'), ((2741, 2779), 'pymoo.factory.get_performance_indicator', 'get_performance_indicator', (['"""gd"""', 'exact'], {}), "('gd', exact)\n", (2766, 2779), False, 'from pymoo.factory import get_performance_indicator\n'), ((950, 961), 'time.time', 'time.time', ([], {}), '()\n', (959, 961), False, 'import time\n'), ((1014, 1061), 'numpy.allclose', 'np.allclose', (['[[-3.14, 12.275]]', 'x_opt'], {'rtol': '(0.2)'}), '([[-3.14, 12.275]], x_opt, rtol=0.2)\n', (1025, 1061), True, 'import numpy as np\n'), ((1077, 1122), 'numpy.allclose', 'np.allclose', (['[[3.14, 2.275]]', 'x_opt'], {'rtol': '(0.2)'}), '([[3.14, 2.275]], x_opt, rtol=0.2)\n', (1088, 1122), True, 'import numpy as np\n'), ((1138, 1183), 'numpy.allclose', 'np.allclose', (['[[9.42, 2.475]]', 'x_opt'], {'rtol': '(0.2)'}), '([[9.42, 2.475]], x_opt, rtol=0.2)\n', (1149, 1183), True, 'import numpy as np\n'), ((1653, 1664), 'time.time', 'time.time', ([], {}), '()\n', (1662, 1664), False, 'import time\n'), ((2661, 2672), 'time.time', 'time.time', ([], {}), '()\n', (2670, 2672), False, 'import time\n')]
|
# Copyright 2020 QuantumBlack Visual Analytics Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, AND
# NONINFRINGEMENT. IN NO EVENT WILL THE LICENSOR OR OTHER CONTRIBUTORS
# BE LIABLE FOR ANY CLAIM, DAMAGES, OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF, OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# The QuantumBlack Visual Analytics Limited ("QuantumBlack") name and logo
# (either separately or in combination, "QuantumBlack Trademarks") are
# trademarks of QuantumBlack. The License does not grant you any right or
# license to the QuantumBlack Trademarks. You may not use the QuantumBlack
# Trademarks or any confusingly similar mark as a trademark for your product,
# or use the QuantumBlack Trademarks in any other manner that might cause
# confusion in the marketplace, including but not limited to in advertising,
# on websites, or on software.
#
# See the License for the specific language governing permissions and
# limitations under the License.
from contextlib import contextmanager
import pandas as pd
import pytest
from pandas.util.testing import assert_frame_equal
from kedro.contrib.io.gcs.csv_gcs import CSVGCSDataSet
from kedro.io import DataSetError, Version
from kedro.io.core import generate_timestamp
from . import gcs_mocks
FILENAME = "test.csv"
BUCKET_NAME = "testbucketkedro"
GCP_PROJECT = "testproject"
class MockGCSFileSystem(gcs_mocks.BasicGCSFileSystemMock):
@contextmanager
def open(self, filepath, *args, **kwargs):
gcs_file = self.files.get(filepath)
if not gcs_file:
gcs_file = gcs_mocks.MockGCSFile()
self.files[filepath] = gcs_file
yield gcs_file
@pytest.fixture
def dummy_dataframe():
return pd.DataFrame({"col1": [1, 2], "col2": [4, 5], "col3": [5, 6]})
@pytest.fixture(params=[None])
def load_args(request):
return request.param
@pytest.fixture(params=[{"index": False}])
def save_args(request):
return request.param
@pytest.fixture(params=[None])
def load_version(request):
return request.param
@pytest.fixture(params=[None])
def save_version(request):
return request.param or generate_timestamp()
@pytest.fixture
def mock_gcs_filesystem(mocker):
mocked = mocker.patch("gcsfs.GCSFileSystem", autospec=True)
mocked.return_value = MockGCSFileSystem()
return mocked
@pytest.fixture
def gcs_data_set(
load_args, save_args, mock_gcs_filesystem
): # pylint: disable=unused-argument
return CSVGCSDataSet(
filepath=FILENAME,
bucket_name=BUCKET_NAME,
credentials=None,
load_args=load_args,
save_args=save_args,
project=GCP_PROJECT,
)
class TestJSONGCSDataSet:
def test_credentials_propagated(self, mocker):
"""Test propagating credentials for connecting to GCS"""
mock_gcs = mocker.patch("gcsfs.GCSFileSystem")
credentials = {"client_email": "<EMAIL>", "whatever": "useless"}
CSVGCSDataSet(
filepath=FILENAME,
bucket_name=BUCKET_NAME,
project=GCP_PROJECT,
credentials=credentials,
)
mock_gcs.assert_called_once_with(project=GCP_PROJECT, token=credentials)
@pytest.mark.usefixtures("mock_gcs_filesystem")
def test_non_existent_bucket(self):
"""Test non-existent bucket"""
pattern = r"Failed while loading data from data set CSVGCSDataSet\(.+\)"
with pytest.raises(DataSetError, match=pattern):
CSVGCSDataSet(
filepath=FILENAME,
bucket_name="not-existing-bucket",
project=GCP_PROJECT,
credentials=None,
).load()
@pytest.mark.usefixtures("mock_gcs_filesystem")
def test_save_load_data(self, gcs_data_set, dummy_dataframe):
assert not gcs_data_set.exists()
gcs_data_set.save(dummy_dataframe)
loaded_data = gcs_data_set.load()
assert_frame_equal(dummy_dataframe, loaded_data)
@pytest.mark.usefixtures("mock_gcs_filesystem")
def test_save_and_load_with_protocol(self, dummy_dataframe, load_args, save_args):
"""Test loading the data from GCS using full path."""
gcs_data_set = CSVGCSDataSet(
filepath="gcs://{}/{}".format(BUCKET_NAME, FILENAME),
credentials=None,
load_args=load_args,
save_args=save_args,
project=GCP_PROJECT,
)
assert not gcs_data_set.exists()
gcs_data_set.save(dummy_dataframe)
loaded_data = gcs_data_set.load()
assert_frame_equal(loaded_data, dummy_dataframe)
assert str(gcs_data_set._filepath) == "{}/{}".format(BUCKET_NAME, FILENAME)
def test_exists(self, gcs_data_set, dummy_dataframe):
"""Test `exists` method invocation for both existing and
nonexistent data set."""
assert not gcs_data_set.exists()
gcs_data_set.save(dummy_dataframe)
assert gcs_data_set.exists()
@pytest.mark.parametrize(
"load_args", [{"k1": "v1", "index": "value"}], indirect=True
)
def test_load_extra_params(self, gcs_data_set, load_args):
"""Test overriding the default load arguments."""
for key, value in load_args.items():
assert gcs_data_set._load_args[key] == value
@pytest.mark.parametrize(
"save_args", [{"k1": "v1", "index": "value"}], indirect=True
)
def test_save_extra_params(self, gcs_data_set, save_args):
"""Test overriding the default save arguments."""
for key, value in save_args.items():
assert gcs_data_set._save_args[key] == value
@pytest.mark.parametrize("save_args", [{"option": "value"}], indirect=True)
def test_str_representation(self, gcs_data_set, save_args):
"""Test string representation of the data set instance."""
str_repr = str(gcs_data_set)
assert "CSVGCSDataSet" in str_repr
for k in save_args.keys():
assert k in str_repr
@pytest.mark.parametrize("load_args", [{"custom": 42}], indirect=True)
def test_load_args_propagated(self, gcs_data_set, load_args, mocker):
mock_read_csv = mocker.patch("kedro.contrib.io.gcs.csv_gcs.pd.read_csv")
gcs_data_set.load()
assert mock_read_csv.call_args_list[0][1] == load_args
@pytest.mark.parametrize("save_args", [{"custom": 45}], indirect=True)
def test_save_args_propagated(self, gcs_data_set, save_args, mocker):
mocked_df = mocker.MagicMock()
mocked_df.to_csv.return_value = "dumpedDF"
gcs_data_set.save(mocked_df)
mocked_df.to_csv.assert_called_once_with(**save_args)
@pytest.fixture
def versioned_gcs_data_set(
load_version,
save_version,
load_args,
save_args,
mock_gcs_filesystem, # pylint: disable=unused-argument
):
return CSVGCSDataSet(
bucket_name=BUCKET_NAME,
filepath=FILENAME,
credentials=None,
load_args=load_args,
save_args=save_args,
version=Version(load_version, save_version),
project=GCP_PROJECT,
)
class TestJSONGCSDataSetVersioned:
def test_no_versions(self, versioned_gcs_data_set):
"""Check the error if no versions are available for load."""
pattern = r"Did not find any versions for CSVGCSDataSet\(.+\)"
with pytest.raises(DataSetError, match=pattern):
versioned_gcs_data_set.load()
def test_save_and_load(self, versioned_gcs_data_set, dummy_dataframe):
"""Test that saved and reloaded data matches the original one for
the versioned data set."""
versioned_gcs_data_set.save(dummy_dataframe)
reloaded_df = versioned_gcs_data_set.load()
assert_frame_equal(dummy_dataframe, reloaded_df)
def test_prevent_override(self, versioned_gcs_data_set, dummy_dataframe):
"""Check the error when attempting to override the data set if the
corresponding dataframe object for a given save version already exists in GCS."""
versioned_gcs_data_set.save(dummy_dataframe)
pattern = (
r"Save path \`.+\` for CSVGCSDataSet\(.+\) must not exist "
r"if versioning is enabled"
)
with pytest.raises(DataSetError, match=pattern):
versioned_gcs_data_set.save(dummy_dataframe)
@pytest.mark.parametrize(
"load_version", ["2019-01-01T23.59.59.999Z"], indirect=True
)
@pytest.mark.parametrize(
"save_version", ["2019-01-02T00.00.00.000Z"], indirect=True
)
def test_save_version_warning(
self, versioned_gcs_data_set, load_version, save_version, dummy_dataframe
):
"""Check the warning when saving to the path that differs from
the subsequent load path."""
pattern = (
r"Save version `{0}` did not match load version `{1}` "
r"for CSVGCSDataSet\(.+\)".format(save_version, load_version)
)
with pytest.warns(UserWarning, match=pattern):
versioned_gcs_data_set.save(dummy_dataframe)
def test_version_str_repr(self, save_version, gcs_data_set, versioned_gcs_data_set):
"""Test that version is in string representation of the class instance
when applicable."""
assert FILENAME in str(gcs_data_set)
assert "version" not in str(gcs_data_set)
assert FILENAME in str(versioned_gcs_data_set)
ver_str = "version=Version(load={}, save='{}')".format(None, save_version)
assert ver_str in str(versioned_gcs_data_set)
assert BUCKET_NAME in str(gcs_data_set)
assert BUCKET_NAME in str(versioned_gcs_data_set)
@pytest.mark.parametrize(
"save_version", ["2019-01-02T00.00.00.000Z"], indirect=True
)
def test_exists_versioned(
self,
versioned_gcs_data_set,
dummy_dataframe,
save_version, # pylint: disable=unused-argument
):
"""Test `exists` method invocation for versioned data set."""
assert not versioned_gcs_data_set.exists()
versioned_gcs_data_set.save(dummy_dataframe)
assert versioned_gcs_data_set.exists()
|
[
"pandas.DataFrame",
"pandas.util.testing.assert_frame_equal",
"kedro.io.core.generate_timestamp",
"pytest.warns",
"pytest.fixture",
"kedro.contrib.io.gcs.csv_gcs.CSVGCSDataSet",
"pytest.raises",
"kedro.io.Version",
"pytest.mark.parametrize",
"pytest.mark.usefixtures"
] |
[((2232, 2261), 'pytest.fixture', 'pytest.fixture', ([], {'params': '[None]'}), '(params=[None])\n', (2246, 2261), False, 'import pytest\n'), ((2314, 2355), 'pytest.fixture', 'pytest.fixture', ([], {'params': "[{'index': False}]"}), "(params=[{'index': False}])\n", (2328, 2355), False, 'import pytest\n'), ((2408, 2437), 'pytest.fixture', 'pytest.fixture', ([], {'params': '[None]'}), '(params=[None])\n', (2422, 2437), False, 'import pytest\n'), ((2493, 2522), 'pytest.fixture', 'pytest.fixture', ([], {'params': '[None]'}), '(params=[None])\n', (2507, 2522), False, 'import pytest\n'), ((2166, 2228), 'pandas.DataFrame', 'pd.DataFrame', (["{'col1': [1, 2], 'col2': [4, 5], 'col3': [5, 6]}"], {}), "({'col1': [1, 2], 'col2': [4, 5], 'col3': [5, 6]})\n", (2178, 2228), True, 'import pandas as pd\n'), ((2909, 3051), 'kedro.contrib.io.gcs.csv_gcs.CSVGCSDataSet', 'CSVGCSDataSet', ([], {'filepath': 'FILENAME', 'bucket_name': 'BUCKET_NAME', 'credentials': 'None', 'load_args': 'load_args', 'save_args': 'save_args', 'project': 'GCP_PROJECT'}), '(filepath=FILENAME, bucket_name=BUCKET_NAME, credentials=None,\n load_args=load_args, save_args=save_args, project=GCP_PROJECT)\n', (2922, 3051), False, 'from kedro.contrib.io.gcs.csv_gcs import CSVGCSDataSet\n'), ((3635, 3681), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""mock_gcs_filesystem"""'], {}), "('mock_gcs_filesystem')\n", (3658, 3681), False, 'import pytest\n'), ((4110, 4156), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""mock_gcs_filesystem"""'], {}), "('mock_gcs_filesystem')\n", (4133, 4156), False, 'import pytest\n'), ((4412, 4458), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""mock_gcs_filesystem"""'], {}), "('mock_gcs_filesystem')\n", (4435, 4458), False, 'import pytest\n'), ((5402, 5491), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""load_args"""', "[{'k1': 'v1', 'index': 'value'}]"], {'indirect': '(True)'}), "('load_args', [{'k1': 'v1', 'index': 'value'}],\n 
indirect=True)\n", (5425, 5491), False, 'import pytest\n'), ((5731, 5820), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""save_args"""', "[{'k1': 'v1', 'index': 'value'}]"], {'indirect': '(True)'}), "('save_args', [{'k1': 'v1', 'index': 'value'}],\n indirect=True)\n", (5754, 5820), False, 'import pytest\n'), ((6060, 6134), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""save_args"""', "[{'option': 'value'}]"], {'indirect': '(True)'}), "('save_args', [{'option': 'value'}], indirect=True)\n", (6083, 6134), False, 'import pytest\n'), ((6420, 6489), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""load_args"""', "[{'custom': 42}]"], {'indirect': '(True)'}), "('load_args', [{'custom': 42}], indirect=True)\n", (6443, 6489), False, 'import pytest\n'), ((6742, 6811), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""save_args"""', "[{'custom': 45}]"], {'indirect': '(True)'}), "('save_args', [{'custom': 45}], indirect=True)\n", (6765, 6811), False, 'import pytest\n'), ((8746, 8834), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""load_version"""', "['2019-01-01T23.59.59.999Z']"], {'indirect': '(True)'}), "('load_version', ['2019-01-01T23.59.59.999Z'],\n indirect=True)\n", (8769, 8834), False, 'import pytest\n'), ((8850, 8938), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""save_version"""', "['2019-01-02T00.00.00.000Z']"], {'indirect': '(True)'}), "('save_version', ['2019-01-02T00.00.00.000Z'],\n indirect=True)\n", (8873, 8938), False, 'import pytest\n'), ((10063, 10151), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""save_version"""', "['2019-01-02T00.00.00.000Z']"], {'indirect': '(True)'}), "('save_version', ['2019-01-02T00.00.00.000Z'],\n indirect=True)\n", (10086, 10151), False, 'import pytest\n'), ((2578, 2598), 'kedro.io.core.generate_timestamp', 'generate_timestamp', ([], {}), '()\n', (2596, 2598), False, 'from kedro.io.core import generate_timestamp\n'), ((3384, 3492), 
'kedro.contrib.io.gcs.csv_gcs.CSVGCSDataSet', 'CSVGCSDataSet', ([], {'filepath': 'FILENAME', 'bucket_name': 'BUCKET_NAME', 'project': 'GCP_PROJECT', 'credentials': 'credentials'}), '(filepath=FILENAME, bucket_name=BUCKET_NAME, project=\n GCP_PROJECT, credentials=credentials)\n', (3397, 3492), False, 'from kedro.contrib.io.gcs.csv_gcs import CSVGCSDataSet\n'), ((4357, 4405), 'pandas.util.testing.assert_frame_equal', 'assert_frame_equal', (['dummy_dataframe', 'loaded_data'], {}), '(dummy_dataframe, loaded_data)\n', (4375, 4405), False, 'from pandas.util.testing import assert_frame_equal\n'), ((4985, 5033), 'pandas.util.testing.assert_frame_equal', 'assert_frame_equal', (['loaded_data', 'dummy_dataframe'], {}), '(loaded_data, dummy_dataframe)\n', (5003, 5033), False, 'from pandas.util.testing import assert_frame_equal\n'), ((8138, 8186), 'pandas.util.testing.assert_frame_equal', 'assert_frame_equal', (['dummy_dataframe', 'reloaded_df'], {}), '(dummy_dataframe, reloaded_df)\n', (8156, 8186), False, 'from pandas.util.testing import assert_frame_equal\n'), ((3855, 3897), 'pytest.raises', 'pytest.raises', (['DataSetError'], {'match': 'pattern'}), '(DataSetError, match=pattern)\n', (3868, 3897), False, 'import pytest\n'), ((7436, 7471), 'kedro.io.Version', 'Version', (['load_version', 'save_version'], {}), '(load_version, save_version)\n', (7443, 7471), False, 'from kedro.io import DataSetError, Version\n'), ((7754, 7796), 'pytest.raises', 'pytest.raises', (['DataSetError'], {'match': 'pattern'}), '(DataSetError, match=pattern)\n', (7767, 7796), False, 'import pytest\n'), ((8639, 8681), 'pytest.raises', 'pytest.raises', (['DataSetError'], {'match': 'pattern'}), '(DataSetError, match=pattern)\n', (8652, 8681), False, 'import pytest\n'), ((9366, 9406), 'pytest.warns', 'pytest.warns', (['UserWarning'], {'match': 'pattern'}), '(UserWarning, match=pattern)\n', (9378, 9406), False, 'import pytest\n'), ((3911, 4022), 'kedro.contrib.io.gcs.csv_gcs.CSVGCSDataSet', 'CSVGCSDataSet', 
([], {'filepath': 'FILENAME', 'bucket_name': '"""not-existing-bucket"""', 'project': 'GCP_PROJECT', 'credentials': 'None'}), "(filepath=FILENAME, bucket_name='not-existing-bucket', project\n =GCP_PROJECT, credentials=None)\n", (3924, 4022), False, 'from kedro.contrib.io.gcs.csv_gcs import CSVGCSDataSet\n')]
|
import alert
import configparser
from datetime import datetime, timedelta
import getopt
import sys
import time
def wait(minutes):
seconds = minutes * 60
time.sleep(seconds)
# Produces from string the longest substring
# that starts at index and is an integer
def get_contiguous_number(string, index):
length = len(string)
end = length
while end > index:
substring = string[index:end]
if substring.isdigit():
return substring
end -= 1
return ''
# Parses a timing pattern and returns a list of (char, int) tuples
# where the char is either 'w' for work or 'b' for break
def parse_pattern(raw_pattern):
pattern = raw_pattern.replace(' ', '')
length = len(pattern)
index = 0
output = []
while index < length:
next_char = pattern[index]
if next_char == 'w' or next_char == 'b':
# work period
num_str = get_contiguous_number(pattern, index + 1)
if len(num_str) == 0:
raise ValueError(
'No number found after char {} at index {} in pattern {}'.format(
next_char,
index,
raw_pattern
)
)
time = int(num_str)
output.append((next_char, time))
index += len(num_str) + 1
else:
raise ValueError(
'Unexpected char {} at index {} in pattern {}'.format(
next_char,
index,
raw_pattern
)
)
return output
# Cleans up a parsed timing pattern in the [('w'|'b', time)*] format
# Return type is the same
def clean_pattern(pattern):
length = len(pattern)
if length == 0 or length == 1:
return pattern
output = []
last_type, last_time = pattern[0]
for i in range(1, length):
current_type, current_time = pattern[i]
if current_type == last_type:
last_time += current_time
else:
output.append((last_type, last_time))
last_type = current_type
last_time = current_time
output.append((last_type, last_time))
return output
def main():
args = sys.argv[1:]
options = 'ne:p:' # empty string would only accept long options
long_options = [
'email-conf=',
'pattern=',
'no-notifcation'
]
config_vals = {
'email_conf': None,
'pattern': '<PASSWORD>',
'no-notification': False
}
try:
opt_vals, args = getopt.getopt(args, options, long_options)
except getopt.GetoptError as err:
print('Encountered option error!')
print(str(err))
sys.exit()
#print(opt_vals)
for opt, arg in opt_vals:
if opt == '--no-notifcation' or opt == '-n':
config_vals['no-notification'] = True
if opt == '--email-conf' or opt == '-e':
config_vals['email_conf'] = arg
if opt == '--pattern' or opt == '-p':
config_vals['pattern'] = arg
if config_vals['pattern'] is None:
print('Pymodoro requires a pattern!')
sys.exit()
try:
parsed_pattern = parse_pattern(config_vals['pattern'])
cleaned_pattern = clean_pattern(parsed_pattern)
except Exception as err:
print("Could not parse pattern!")
print(str(err))
sys.exit()
if config_vals['email_conf'] is not None:
conf_parser = configparser.ConfigParser()
conf_path = r'email.conf'
conf_parser.read(conf_path)
from_address = conf_parser.get('email', 'fromAddress')
to_address = conf_parser.get('email', 'toAddress')
login = conf_parser.get('email', 'login')
password = conf_parser.get('email', 'password')
server = conf_parser.get('email', 'smtpserver')
subject = 'Pymodoro Alert!'
for task, duration in cleaned_pattern:
task_string = ''
if task == 'b':
task_string = 'break period'
elif task == 'w':
task_string = 'work period'
time = datetime.now()
time_str = time.strftime('%H:%M')
endtime = time + timedelta(minutes=duration)
endtime_str = endtime.strftime('%H:%M')
print('[{}] Beginning {} minute {} until {}!'.format(time_str, duration, task_string, endtime_str))
wait(duration)
time_str = datetime.now().strftime('%H:%M')
print('[{}] Completed {}!'.format(time_str, task_string))
notification_string = 'Your {} is complete!'.format(task_string)
if not config_vals['no-notification']:
alert.notify_desktop('Pymodoro', notification_string)
if config_vals['email_conf'] is not None:
alert.send_email(
from_addr = from_address,
to_addr_list = [to_address],
subject = subject,
message = notification_string,
login = login,
password = password,
smtpserver = server
)
if __name__ == '__main__':
main()
|
[
"getopt.getopt",
"alert.send_email",
"time.strftime",
"time.sleep",
"alert.notify_desktop",
"datetime.timedelta",
"configparser.ConfigParser",
"datetime.datetime.now",
"sys.exit"
] |
[((162, 181), 'time.sleep', 'time.sleep', (['seconds'], {}), '(seconds)\n', (172, 181), False, 'import time\n'), ((2608, 2650), 'getopt.getopt', 'getopt.getopt', (['args', 'options', 'long_options'], {}), '(args, options, long_options)\n', (2621, 2650), False, 'import getopt\n'), ((3204, 3214), 'sys.exit', 'sys.exit', ([], {}), '()\n', (3212, 3214), False, 'import sys\n'), ((3528, 3555), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (3553, 3555), False, 'import configparser\n'), ((4164, 4178), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4176, 4178), False, 'from datetime import datetime, timedelta\n'), ((4198, 4220), 'time.strftime', 'time.strftime', (['"""%H:%M"""'], {}), "('%H:%M')\n", (4211, 4220), False, 'import time\n'), ((2764, 2774), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2772, 2774), False, 'import sys\n'), ((3447, 3457), 'sys.exit', 'sys.exit', ([], {}), '()\n', (3455, 3457), False, 'import sys\n'), ((4246, 4273), 'datetime.timedelta', 'timedelta', ([], {'minutes': 'duration'}), '(minutes=duration)\n', (4255, 4273), False, 'from datetime import datetime, timedelta\n'), ((4706, 4759), 'alert.notify_desktop', 'alert.notify_desktop', (['"""Pymodoro"""', 'notification_string'], {}), "('Pymodoro', notification_string)\n", (4726, 4759), False, 'import alert\n'), ((4824, 4997), 'alert.send_email', 'alert.send_email', ([], {'from_addr': 'from_address', 'to_addr_list': '[to_address]', 'subject': 'subject', 'message': 'notification_string', 'login': 'login', 'password': 'password', 'smtpserver': 'server'}), '(from_addr=from_address, to_addr_list=[to_address], subject\n =subject, message=notification_string, login=login, password=password,\n smtpserver=server)\n', (4840, 4997), False, 'import alert\n'), ((4473, 4487), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4485, 4487), False, 'from datetime import datetime, timedelta\n')]
|
# Copyright (C) 2017 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Events for Zebra protocol service.
"""
import inspect
import logging
from ryu import utils
from ryu.controller import event
from ryu.lib.packet import zebra
LOG = logging.getLogger(__name__)
MOD = utils.import_module(__name__)
ZEBRA_EVENTS = []
class EventZebraBase(event.EventBase):
"""
The base class for Zebra protocol service event class.
The subclasses have at least ``zclient`` and the same attributes with
:py:class: `ryu.lib.packet.zebra.ZebraMessage`.
``zclient`` is an instance of Zebra client class. See
:py:class: `ryu.services.protocols.zebra.client.zclient.ZClient` or
:py:class: `ryu.services.protocols.zebra.server.zserver.ZClient`.
The subclasses are named as::
``"Event" + <Zebra message body class name>``
For Example, if the service received ZEBRA_INTERFACE_ADD message,
the body class should be
:py:class: `ryu.lib.packet.zebra.ZebraInterfaceAdd`, then the event
class will be named as::
"Event" + "ZebraInterfaceAdd" = "EventZebraInterfaceAdd"
``msg`` argument must be an instance of
:py:class: `ryu.lib.packet.zebra.ZebraMessage` and used to extract the
attributes for the event classes.
"""
def __init__(self, zclient, msg):
super(EventZebraBase, self).__init__()
assert isinstance(msg, zebra.ZebraMessage)
self.__dict__ = msg.__dict__
self.zclient = zclient
def __repr__(self):
m = ', '.join(
['%s=%r' % (k, v)
for k, v in self.__dict__.items() if not k.startswith('_')])
return "%s(%s)" % (self.__class__.__name__, m)
__str__ = __repr__
def _event_name(body_cls):
return 'Event%s' % body_cls.__name__
def message_to_event(zclient, msg):
"""
Converts Zebra protocol message instance to Zebra protocol service
event instance.
If corresponding event class is not defined, returns None.
:param zclient: Zebra client instance.
:param msg: Zebra protocol message.
:return: Zebra protocol service event.
"""
if not isinstance(msg, zebra.ZebraMessage):
return None
body_cls = msg.get_body_class(msg.version, msg.command)
ev_cls = getattr(MOD, _event_name(body_cls), None)
if ev_cls is None:
return None
return ev_cls(zclient, msg)
def _define_event_class(body_cls):
name = _event_name(body_cls)
event_cls = type(name, (EventZebraBase,), {})
globals()[name] = event_cls
return event_cls
def _generate_event_classes():
for zebra_cls in zebra.__dict__.values():
if (not inspect.isclass(zebra_cls)
or not issubclass(zebra_cls, zebra._ZebraMessageBody)
or zebra_cls.__name__.startswith('_')):
continue
ev = _define_event_class(zebra_cls)
# LOG.debug('Generated Zebra event: %s' % ev)
ZEBRA_EVENTS.append(ev)
_generate_event_classes()
|
[
"inspect.isclass",
"ryu.utils.import_module",
"ryu.lib.packet.zebra.__dict__.values",
"logging.getLogger"
] |
[((811, 838), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (828, 838), False, 'import logging\n'), ((846, 875), 'ryu.utils.import_module', 'utils.import_module', (['__name__'], {}), '(__name__)\n', (865, 875), False, 'from ryu import utils\n'), ((3266, 3289), 'ryu.lib.packet.zebra.__dict__.values', 'zebra.__dict__.values', ([], {}), '()\n', (3287, 3289), False, 'from ryu.lib.packet import zebra\n'), ((3308, 3334), 'inspect.isclass', 'inspect.isclass', (['zebra_cls'], {}), '(zebra_cls)\n', (3323, 3334), False, 'import inspect\n')]
|
import sys
sys.path.append("..")
from utils.api import Api
from utils.db import Database
from datetime import datetime
from utils.db_query import insert_news_query, news_query
class News:
def __init__(self, profile):
self.profile = profile
self.api = Api
@staticmethod
def _normalize(_news):
news = {
"symbol": str(_news.get("symbol")),
"date": datetime.fromtimestamp(_news.get("datetime")/1000.0),
"title": str(_news.get("headline")),
"source": str(_news.get("source")),
"url": _news.get("url"),
"description": str(_news.get("summary")),
"image": _news.get("image"),
}
return news
@staticmethod
def parse_news_result(result):
return {
"symbol": result[1],
"date": result[2],
"title": result[3],
"source": result[4],
"url": result[5],
"description": result[6],
"image": result[7]
}
def news_load(self, symbol):
_news = {
"endpoint": "news",
"symbol": symbol
}
print(f"[API] News - {symbol}")
news = self.api(_news).get()
print(f"[API] SUCCESS")
for n in news:
n["symbol"] = symbol
return news
def insert_news(self, all_news):
normalized_news = list()
for _news in all_news:
news = self._normalize(_news)
normalized_news.append(news)
try:
print(f"[DB] Batch Insert - News")
db = Database(self.profile)
db.batch_insert(insert_news_query, normalized_news)
db.close()
print(f"[DB] SUCCESS")
return True, "Inserção feita com sucesso"
except Exception as e:
print(e)
return False, f"Ocorreu um erro na inserção no banco de dados: {e}"
def select_news(self, symbols):
results = list()
db = Database(self.profile)
for symbol in symbols:
result = db.query_by_id(news_query, (symbol, ))
parsed = self.parse_news_result(result)
results.append(parsed)
db.close()
return results
|
[
"sys.path.append",
"utils.db.Database"
] |
[((11, 32), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (26, 32), False, 'import sys\n'), ((2008, 2030), 'utils.db.Database', 'Database', (['self.profile'], {}), '(self.profile)\n', (2016, 2030), False, 'from utils.db import Database\n'), ((1601, 1623), 'utils.db.Database', 'Database', (['self.profile'], {}), '(self.profile)\n', (1609, 1623), False, 'from utils.db import Database\n')]
|
# Copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=line-too-long
# pylint: disable=missing-docstring
import copy
import textwrap
import unittest
import yaml
from spectator_metric_transformer import (
AggregatedMetricsBuilder,
MetricInfo,
SpectatorMetricTransformer,
TransformationRule)
# This is a sample spectator response containing
# a single value for a single measurmement of a 'jvm.memory.used' meter.
#
# We'll be using it in many of our tests showing how different transforms
# apply to it.
EXAMPLE_MEMORY_USED_RESPONSE = {
'jvm.memory.used': {
'kind': 'Gauge',
'values': [{
'values': [{'t': 1540224536922, 'v': 1489720024.0}],
'tags': [
{'key': 'id', 'value': 'PS Eden Space'},
{'key': 'memtype', 'value': 'HEAP'},
]
}]
}
}
class AggregatedBuilderTest(unittest.TestCase):
TIMESTAMP = 987654321
def _make_values(self, count):
return [{'v': 100 + i, 't': self.TIMESTAMP+i}
for i in range(count)]
def _make_tags(self, **kwargs):
return [{'key': key, 'value': value} for key, value in kwargs.items()]
def _make_simple_rule_builder(self, options=None):
# transformer isnt used for these tests, but is required to construct
options = options or {}
transformer = SpectatorMetricTransformer(options, {})
rule = TransformationRule(
transformer,
{
'rename': 'NewName',
'kind': 'Timer',
'tags': ['status'],
})
# The builder only uses the kind part of the rule.
# The other parts of the rule are used when it is applied
# to preprocess the response before adding to the builder.
return AggregatedMetricsBuilder(rule)
def _make_timer_measurements(self, status='2xx'):
t, v = {'2xx': (0, 0),
'4xx': (0, 400), # different value same time
'5xx': (500, 500), # different value different time
'0xx': (500, 0), # same value different time
}[status]
if status == '0xx':
status = '2xx'
return [
{'values': [{'v':123 + v, 't': self.TIMESTAMP + t}],
'tags': self._make_tags(status=status, statistic='count')},
{'values': [{'v':321 + v, 't': self.TIMESTAMP + t}],
'tags': self._make_tags(status=status, statistic='totalTime')}
]
def _determine_expected_tags(self, measurement):
expect_tags = list(measurement['tags'])
for i, entry in enumerate(expect_tags):
if entry['key'] == 'statistic':
del expect_tags[i]
break
return sorted(expect_tags)
def test_timer(self):
# This test is just showing nothing interesting happening
# and we get out what we put in.
builder = self._make_simple_rule_builder()
for measurement in self._make_timer_measurements():
builder.add(measurement['values'][0], measurement['tags'])
self.assertEquals(
sorted([
{
'values': [{'v': 123, 't': self.TIMESTAMP}],
'tags': sorted([{'key': 'status', 'value': '2xx'},
{'key': 'statistic', 'value': 'count'}])
},
{
'values': [{'v': 321, 't': self.TIMESTAMP}],
'tags': sorted([{'key': 'status', 'value': '2xx'},
{'key': 'statistic', 'value': 'totalTime'}])
}
]),
sorted(builder.build()))
def test_summary(self):
transformer = SpectatorMetricTransformer({}, {})
rule = TransformationRule(
transformer,
{
'rename': 'NewName',
'kind': 'Summary',
'tags': ['status'],
})
builder = AggregatedMetricsBuilder(rule)
for measurement in self._make_timer_measurements():
builder.add(measurement['values'][0], measurement['tags'])
self.assertEquals(
[{
'values': [{'v': {'count': 123, 'totalTime': 321},
't': self.TIMESTAMP}],
'tags': [{'key': 'status', 'value': '2xx'}]
}],
sorted(builder.build())
)
def test_collate_one_measurement(self):
builder = self._make_simple_rule_builder()
output = {}
measurements = self._make_timer_measurements()
for measurement in measurements:
builder._collate_metric_info(
MetricInfo(measurement['values'][0], sorted(measurement['tags'])),
output)
self.assertEquals(
sorted([
MetricInfo({'t': self.TIMESTAMP,
'v': {'count': 123, 'totalTime': 321}},
self._determine_expected_tags(measurements[0]))
]),
sorted(output.values()))
def test_collate_multiple_measurements(self):
builder = self._make_simple_rule_builder()
output = {}
measurements2xx = self._make_timer_measurements(status='2xx')
measurements4xx = self._make_timer_measurements(status='4xx')
measurements5xx = self._make_timer_measurements(status='5xx')
measurements = [
measurements2xx[0],
measurements4xx[1],
measurements5xx[0],
measurements4xx[0],
measurements2xx[1],
measurements5xx[1],
]
for measurement in measurements:
builder._collate_metric_info(
MetricInfo(measurement['values'][0], sorted(measurement['tags'])),
output)
expect_values = [
MetricInfo({'t': self.TIMESTAMP,
'v': {'count': 123, 'totalTime': 321}},
self._determine_expected_tags(measurements2xx[0])),
# different value, same time
MetricInfo({'t': self.TIMESTAMP,
'v': {'count': 123 + 400, 'totalTime': 321 + 400}},
self._determine_expected_tags(measurements4xx[0])),
# different value, different time
MetricInfo({'t': self.TIMESTAMP + 500,
'v': {'count': 123 + 500, 'totalTime': 321 + 500}},
self._determine_expected_tags(measurements5xx[0]))
]
for got in output.values():
found = False
for index, expect in enumerate(expect_values):
if got == expect:
del expect_values[index]
found = True
break
self.assertTrue(found, msg='Missing %r' % got)
self.assertEquals([], expect_values)
def test_summarize_timers_and_build(self):
options = {'summarize_timers': True}
builder = self._make_simple_rule_builder(options=options)
measurements2xx = self._make_timer_measurements(status='2xx')
measurements4xx = self._make_timer_measurements(status='4xx')
measurements5xx = self._make_timer_measurements(status='5xx')
measurements0xx = self._make_timer_measurements(status='0xx')
measurements = [
measurements2xx[0],
measurements4xx[1],
measurements5xx[0],
measurements0xx[0],
measurements4xx[0],
measurements2xx[1],
measurements0xx[1],
measurements5xx[1],
]
for measurement in measurements:
builder.add(measurement['values'][0], measurement['tags'])
self.assertEquals(
sorted([
# 2xx and 0xx got combined together because tags are same
# timestamp is bumped to 0xx's which was later.
{
'values': [{'t': self.TIMESTAMP + 500,
'v': {'count': 2 * 123, 'totalTime': 2 * 321}}],
'tags': self._determine_expected_tags(measurements2xx[0])
},
# different tags, same timestamp (from 2xx)
{
'values': [{'t': self.TIMESTAMP,
'v': {'count': 123 + 400, 'totalTime': 321 + 400}}],
'tags': self._determine_expected_tags(measurements4xx[0])
},
# different tags, different timestamp (from 2xx)
{
'values': [{'t': self.TIMESTAMP + 500,
'v': {'count': 123 + 500, 'totalTime': 321 + 500}}],
'tags': self._determine_expected_tags(measurements5xx[0])
}
]),
sorted(builder.build())
)
class SpectatorMetricTransformerTest(unittest.TestCase):
def do_test(self, spec_yaml, spectator_response, expect_response,
options=None):
spec = yaml.load(spec_yaml)
options = options or {}
transformer = SpectatorMetricTransformer(options, spec)
got_response = transformer.process_response(spectator_response)
for _, got_meter_data in got_response.items():
values = got_meter_data.get('values')
if values:
values.sort()
self.assertResponseEquals(expect_response, got_response)
def assertResponseEquals(self, expect_response, got_response):
if expect_response != got_response:
print('Expected: %r\n'
'Actual: %r\n'
% (expect_response, got_response))
self.assertEquals(expect_response, got_response)
def test_discard_default(self):
spectator_response = EXAMPLE_MEMORY_USED_RESPONSE
spec = {}
transformer = SpectatorMetricTransformer({}, spec)
got_response = transformer.process_response(spectator_response)
self.assertResponseEquals({}, got_response)
def test_identity_default(self):
spectator_response = EXAMPLE_MEMORY_USED_RESPONSE
spec = {}
options = {'default_is_identity': True}
transformer = SpectatorMetricTransformer(options, spec)
got_response = transformer.process_response(spectator_response)
self.assertResponseEquals(spectator_response, got_response)
def test_identity_explicit(self):
spectator_response = EXAMPLE_MEMORY_USED_RESPONSE
spec = {'jvm.memory.used': None}
transformer = SpectatorMetricTransformer({}, spec)
got_response = transformer.process_response(spectator_response)
self.assertResponseEquals(spectator_response, got_response)
def test_stackdriver_timers(self):
spec = {}
transformer = SpectatorMetricTransformer(
{'enforce_stackdriver_names': True}, spec)
do_name = transformer.normalize_meter_name
self.assertEquals('timers', do_name('timers', 'Gauge'))
self.assertEquals('timers', do_name('timers', 'Counter'))
self.assertEquals('timer_latency', do_name('timer', 'Timer'))
self.assertEquals('timer_latency', do_name('timers', 'Timer'))
self.assertEquals('timer_latency', do_name('timer_latency', 'Timer'))
self.assertEquals('timer_latency', do_name('timerLatency', 'Timer'))
do_label = transformer.normalize_label_name
self.assertEquals('status_code', do_label('status_code'))
self.assertEquals('status_code', do_label('statusCode'))
self.assertEquals('status_code_class', do_label('status'))
def test_snakeify(self):
spec = {}
transformer = SpectatorMetricTransformer({'use_snake_case': True}, spec)
snakeify = lambda name: transformer.normalize_text_case(name)
self.assertEquals('test', snakeify('test'))
self.assertEquals('test', snakeify('Test'))
self.assertEquals('test', snakeify('TEST'))
self.assertEquals('camel_case', snakeify('camelCase'))
self.assertEquals('title_case', snakeify('TitleCase'))
self.assertEquals('snake_case', snakeify('Snake_Case'))
self.assertEquals('http_response', snakeify('HTTPResponse'))
self.assertEquals('upper_case', snakeify('UPPER_CASE'))
def test_change_meter_name_explicit(self):
self.do_test(
textwrap.dedent("""\
jvm.memory.used:
rename: platform/java/memory
"""),
EXAMPLE_MEMORY_USED_RESPONSE,
{'platform/java/memory':
EXAMPLE_MEMORY_USED_RESPONSE['jvm.memory.used']}
)
def test_change_meter_name(self):
self.do_test(
textwrap.dedent("""\
jvm.memory.used:
rename: memoryUsed
"""),
EXAMPLE_MEMORY_USED_RESPONSE,
{'memoryUsed': EXAMPLE_MEMORY_USED_RESPONSE['jvm.memory.used']}
)
def test_change_meter_name_snakeify(self):
self.do_test(
textwrap.dedent("""\
jvm.memory.used:
rename: memoryUsed
"""),
EXAMPLE_MEMORY_USED_RESPONSE,
{'memory_used': EXAMPLE_MEMORY_USED_RESPONSE['jvm.memory.used']},
options={'use_snake_case': True}
)
def test_discard_meter_by_name(self):
self.do_test(
textwrap.dedent("""\
jvm.memory.used:
rename:
"""),
EXAMPLE_MEMORY_USED_RESPONSE,
{}
)
def test_change_tag_names(self):
transformed_value = copy.deepcopy(
EXAMPLE_MEMORY_USED_RESPONSE['jvm.memory.used'])
transformed_value['values'][0]['tags'] = sorted([
{'key': 'segment', 'value': 'PS Eden Space'},
{'key': 'scope', 'value': 'HEAP'},
])
self.do_test(
textwrap.dedent("""\
jvm.memory.used:
rename: platform/java/memory
change_tags:
- from: memtype
to: scope
type: STRING
- from: id
to: segment
type: STRING
"""),
EXAMPLE_MEMORY_USED_RESPONSE,
{'platform/java/memory': transformed_value}
)
def test_add_tags(self):
transformed_value = copy.deepcopy(
EXAMPLE_MEMORY_USED_RESPONSE['jvm.memory.used'])
transformed_value['values'][0]['tags'] = sorted(
transformed_value['values'][0]['tags']
+ [
{'key': 'first', 'value': 'FIRST'},
{'key': 'T', 'value': True},
{'key': 'F', 'value': False},
{'key': 'S', 'value': 'true'},
{'key': 'numeric', 'value': 123},
])
self.do_test(
textwrap.dedent("""\
jvm.memory.used:
add_tags:
first: FIRST
T: true
F: false
S: 'true'
numeric: 123
"""),
EXAMPLE_MEMORY_USED_RESPONSE,
{'jvm.memory.used': transformed_value}
)
  def test_consolidate_metrics(self):
    """Two meters renamed to the same target merge into one metric whose
    value lists are combined; the distinct add_tags ('scheduled')
    values keep the original series distinguishable."""
    self.do_test(
        textwrap.dedent("""\
            storageServiceSupport.autoRefreshTime:
              kind: Timer
              rename: front50/cache/refresh
              tags:
                - objectType
                - statistic
              add_tags:
                scheduled: false

            storageServiceSupport.scheduledRefreshTime:
              kind: Timer
              rename: front50/cache/refresh
              tags:
                - objectType
                - statistic
              add_tags:
                scheduled: true
            """),
        {
            'storageServiceSupport.autoRefreshTime': {
                'kind': 'Timer',
                'values': [{
                    'values': [{'t': 1540224536920, 'v': 10000.0}],
                    'tags': [
                        {'key': 'objectType', 'value': 'PIPELINES'},
                        {'key': 'statistic', 'value': 'totalTime'},
                    ]
                }]},
            'storageServiceSupport.scheduledRefreshTime': {
                'kind': 'Timer',
                'values': [{
                    'values': [{'t': 1540224536922, 'v': 1489720024.0}],
                    'tags': [
                        {'key': 'objectType', 'value': 'PIPELINES'},
                        {'key': 'statistic', 'value': 'totalTime'},
                    ]
                }]}
        },
        {'front50/cache/refresh': {
            'kind': 'Timer',
            'values': [{
                'values': [{'t': 1540224536920, 'v': 10000.0}],
                'tags': sorted([
                    {'key': 'objectType', 'value': 'PIPELINES'},
                    {'key': 'statistic', 'value': 'totalTime'},
                    {'key': 'scheduled', 'value': False}
                ])
            }, {
                'values': [{'t': 1540224536922, 'v': 1489720024.0}],
                'tags': sorted([
                    {'key': 'objectType', 'value': 'PIPELINES'},
                    {'key': 'statistic', 'value': 'totalTime'},
                    {'key': 'scheduled', 'value': True}
                ])
            }
            ]
        }}
    )
  def test_change_tag_to_type_bool(self):
    """change_tags with type BOOL converts a string tag into a bool by
    equality against compare_value ('HEAP' -> True, else False)."""
    self.do_test(
        textwrap.dedent("""\
            jvm.memory.used:
              tags:
                - id
              change_tags:
                - from: memtype
                  to: heap
                  type: BOOL
                  compare_value: HEAP
            """),
        {'jvm.memory.used': {
            'kind': 'Gauge',
            'values': sorted([
                {'values': [{'t': 1540224536922, 'v': 1489720024.0}],
                 'tags': [
                     {'key': 'id', 'value': 'PS Eden Space'},
                     {'key': 'memtype', 'value': 'HEAP'},
                 ]},
                {'values': [{'t': 1540224536923, 'v': 12345.0}],
                 'tags': [
                     {'key': 'id', 'value': 'Code Cache'},
                     {'key': 'memtype', 'value': 'NON HEAP'},
                 ]},
            ])},
         },
        {'jvm.memory.used': {
            'kind': 'Gauge',
            'values': sorted([
                {'values': [{'t': 1540224536922, 'v': 1489720024.0}],
                 'tags': sorted([
                     {'key': 'id', 'value': 'PS Eden Space'},
                     {'key': 'heap', 'value': True},
                 ])},
                {'values': [{'t': 1540224536923, 'v': 12345.0}],
                 'tags': sorted([
                     {'key': 'id', 'value': 'Code Cache'},
                     {'key': 'heap', 'value': False},
                 ])},
            ])},
         }
    )
  def test_change_tag_to_type_int(self):
    """change_tags with type INT converts a string tag value ('200')
    into the integer 200."""
    self.do_test(
        textwrap.dedent("""\
            controller.invocations:
              tags:
                - controller
                - method
                - statistic
                - status

              change_tags:
                - from: statusCode
                  to: statusCode
                  type: INT
            """),
        {'controller.invocations': {
            'kind': 'Timer',
            'values': [
                {'values': [{'t': 1540318956420, 'v': 300130409.0}],
                 'tags': [
                     {'key': 'controller', 'value': 'ClusterController'},
                     {'key': 'method', 'value': 'getServerGroup'},
                     {'key': 'statistic', 'value': 'totalTime'},
                     {'key': 'status', 'value': '2xx'},
                     {'key': 'statusCode', 'value': '200'},
                 ]}
            ]},
         },
        {'controller.invocations': {
            'kind': 'Timer',
            'values': [
                {'values': [{'t': 1540318956420, 'v': 300130409.0}],
                 'tags': [
                     {'key': 'controller', 'value': 'ClusterController'},
                     {'key': 'method', 'value': 'getServerGroup'},
                     {'key': 'statistic', 'value': 'totalTime'},
                     {'key': 'status', 'value': '2xx'},
                     {'key': 'statusCode', 'value': 200},
                 ]}
            ]},
         }
    )
  def test_decompose_tag(self):
    """extract_regex splits one composite 'agent' tag into parallel
    provider/account/region/agent tags; regex groups that do not match
    (the optional region here) become the empty string."""
    self.do_test(
        textwrap.dedent("""\
            executionCount:
              tags:
                - status

              change_tags:
                - from: agent
                  to: [provider, account, region, agent]
                  type: [STRING, STRING, STRING, STRING]
                  extract_regex: '([^/]+)/(?:([^/]+)/(?:([^/]+)/)?)?(.+)'
            """),
        {'executionCount': {
            'kind': 'Counter',
            'values': sorted([{
                'values': [{'t': 1540318956422, 'v': 23258.0}],
                'tags': [
                    {'key': 'agent',
                     'value': 'com.netflix.spinnaker.clouddriver.google.provider.GoogleInfrastructureProvider/my-google-account/australia-southeast1/GoogleSubnetCachingAgent'},
                    {'key': 'status', 'value': 'success'}
                ]
            }, {
                'values': [{'t': 1540318956422, 'v': 9.0}],
                'tags': [
                    {'key': 'agent',
                     'value': 'com.netflix.spinnaker.clouddriver.appengine.provider.AppengineProvider/my-appengine-account/AppenginePlatformApplicationCachingAgent'},
                    {'key': 'status', 'value': 'failure'},
                ]
            },
            ])},
         },
        {'executionCount': {
            'kind': 'Counter',
            'values': sorted([{
                'values': [{'t': 1540318956422, 'v': 23258.0}],
                'tags': sorted([
                    {'key': 'provider',
                     'value': 'com.netflix.spinnaker.clouddriver.google.provider.GoogleInfrastructureProvider'},
                    {'key': 'account', 'value': 'my-google-account'},
                    {'key': 'region', 'value': 'australia-southeast1'},
                    {'key': 'agent', 'value': 'GoogleSubnetCachingAgent'},
                    {'key': 'status', 'value': 'success'}
                ])
            }, {
                'values': [{'t': 1540318956422, 'v': 9.0}],
                'tags': sorted([
                    {'key': 'provider',
                     'value': 'com.netflix.spinnaker.clouddriver.appengine.provider.AppengineProvider'},
                    {'key': 'account', 'value': 'my-appengine-account'},
                    {'key': 'region', 'value': ''},
                    {'key': 'agent', 'value': 'AppenginePlatformApplicationCachingAgent'},
                    {'key': 'status', 'value': 'failure'},
                ])
            }
            ])}
         }
    )
  def test_remove_tag(self):
    """Omitting a tag ('statusCode') from the 'tags' list removes it, and
    data points left with identical tag sets are aggregated together:
    1111.0 + 4444.0 -> 5555.0, reported at the later timestamp 12347."""
    self.do_test(
        textwrap.dedent("""\
            controller.invocations:
              tags:
                - controller
                - method
                - status
                # "statistic" is implicit
            """),
        {'controller.invocations': {
            'kind': 'Timer',
            'values': [{
                'values': [{'t': 12345, 'v': 1111.0}],
                'tags': [
                    {'key': 'controller', 'value': 'ClusterController'},
                    {'key': 'method', 'value': 'getServerGroup'},
                    {'key': 'statistic', 'value': 'totalTime'},
                    {'key': 'status', 'value': '4xx'},
                    {'key': 'statusCode', 'value': '400'},
                ]
            }, {
                'values': [{'t': 12346, 'v': 2222.0}],
                'tags': [
                    {'key': 'controller', 'value': 'ClusterController'},
                    {'key': 'method', 'value': 'getServerGroup'},
                    {'key': 'statistic', 'value': 'totalTime'},
                    {'key': 'status', 'value': '2xx'},
                    {'key': 'statusCode', 'value': '200'},
                ]
            }, {
                'values': [{'t': 12347, 'v': 4444.0}],
                'tags': [
                    {'key': 'controller', 'value': 'ClusterController'},
                    {'key': 'method', 'value': 'getServerGroup'},
                    {'key': 'statistic', 'value': 'totalTime'},
                    {'key': 'status', 'value': '4xx'},
                    {'key': 'statusCode', 'value': '404'},
                ]
            }, {
                'values': [{'t': 12348, 'v': 8888.0}],
                'tags': [
                    {'key': 'controller', 'value': 'ClusterController'},
                    {'key': 'method', 'value': 'getServerGroup'},
                    {'key': 'statistic', 'value': 'count'},
                    {'key': 'status', 'value': '4xx'},
                    {'key': 'statusCode', 'value': '404'},
                ]
            },
            ]}
         },
        {'controller.invocations': {
            'kind': 'Timer',
            'values': sorted([{
                'values': [{'t': 12347, 'v': 5555.0}],
                'tags': sorted([
                    {'key': 'controller', 'value': 'ClusterController'},
                    {'key': 'method', 'value': 'getServerGroup'},
                    {'key': 'statistic', 'value': 'totalTime'},
                    {'key': 'status', 'value': '4xx'},
                ])
            }, {
                'values': [{'t': 12346, 'v': 2222.0}],
                'tags': sorted([
                    {'key': 'controller', 'value': 'ClusterController'},
                    {'key': 'method', 'value': 'getServerGroup'},
                    {'key': 'statistic', 'value': 'totalTime'},
                    {'key': 'status', 'value': '2xx'},
                ])
            }, {
                'values': [{'t': 12348, 'v': 8888.0}],
                'tags': sorted([
                    {'key': 'controller', 'value': 'ClusterController'},
                    {'key': 'method', 'value': 'getServerGroup'},
                    {'key': 'statistic', 'value': 'count'},
                    {'key': 'status', 'value': '4xx'},
                ])
            },
            ])},
         }
    )
  def test_discard_tag_values(self):
    """discard_tag_values drops data points whose (already transformed)
    tag value matches the given regex -- here, points where heap is
    false; only the HEAP point survives."""
    self.do_test(
        textwrap.dedent("""\
            jvm.memory.used:
              tags:
                - id
              change_tags:
                - from: memtype
                  to: heap
                  type: BOOL
                  compare_value: HEAP
              discard_tag_values:
                heap: "(?i)^false$"
            """),
        {'jvm.memory.used': {
            'kind': 'Gauge',
            'values': sorted([{
                'values': [{'t': 1540224536922, 'v': 1489720024.0}],
                'tags': [
                    {'key': 'id', 'value': 'PS Eden Space'},
                    {'key': 'memtype', 'value': 'HEAP'},
                ]
            }, {
                'values': [{'t': 1540224536923, 'v': 12345.0}],
                'tags': [
                    {'key': 'id', 'value': 'Code Cache'},
                    {'key': 'memtype', 'value': 'NON HEAP'},
                ]
            },
            ])},
         },
        {'jvm.memory.used': {
            'kind': 'Gauge',
            'values': sorted([{
                'values': [{'t': 1540224536922, 'v': 1489720024.0}],
                'tags': sorted([
                    {'key': 'id', 'value': 'PS Eden Space'},
                    {'key': 'heap', 'value': True},
                ])
            },
            ])}
         }
    )
  def test_per_application_removed(self):
    """per_application drops the application tag from the main series
    (12.0 + 21.0 aggregated to 33.0) while keeping the per-application
    breakdown under the '__per_tag_values' key."""
    # Test is we transform into an application tag and have per_application
    # then the application tag will be removed (and values aggregated).
    self.do_test(
        textwrap.dedent("""\
            executions.started:
              kind: Counter
              per_application: true
              change_tags:
                - from: source
                  to: application
                  type: STRING
              tags:
                - executionType
            """),
        {'executions.started': {
            'kind': 'Counter',
            'values': sorted([{
                'values': [{'t': 1540224536922, 'v': 12.0}],
                'tags': [
                    {'key': 'source', 'value': 'MyApplication'},
                    {'key': 'executionType', 'value': 'Pipeline'},
                ]
            }, {
                'values': [{'t': 1540224536923, 'v': 21.0}],
                'tags': [
                    {'key': 'source', 'value': 'YourApplication'},
                    {'key': 'executionType', 'value': 'Pipeline'},
                ]
            },
            ])},
         },
        {'executions.started': {
            'kind': 'Counter',
            'values': sorted([{
                'values': [{'t': 1540224536923, 'v': 33.0}],
                'tags': sorted([
                    {'key': 'executionType', 'value': 'Pipeline'},
                ]),
                '__per_tag_values': {
                    'application': sorted([
                        {
                            'values': [{'t': 1540224536922, 'v': 12.0}],
                            'tags': [
                                {'key': 'application',
                                 'value': 'MyApplication'},
                                {'key': 'executionType', 'value': 'Pipeline'}
                            ]
                        }, {
                            'values': [{'t': 1540224536923, 'v': 21.0}],
                            'tags': [
                                {'key': 'application',
                                 'value': 'YourApplication'},
                                {'key': 'executionType', 'value': 'Pipeline'}
                            ]
                        }
                    ]),
                }}]),
        }
        }
    )
# Allow running this test module directly from the command line.
if __name__ == '__main__':
  unittest.main()
|
[
"unittest.main",
"textwrap.dedent",
"yaml.load",
"copy.deepcopy",
"spectator_metric_transformer.SpectatorMetricTransformer",
"spectator_metric_transformer.TransformationRule",
"spectator_metric_transformer.AggregatedMetricsBuilder"
] |
[((29579, 29594), 'unittest.main', 'unittest.main', ([], {}), '()\n', (29592, 29594), False, 'import unittest\n'), ((1894, 1933), 'spectator_metric_transformer.SpectatorMetricTransformer', 'SpectatorMetricTransformer', (['options', '{}'], {}), '(options, {})\n', (1920, 1933), False, 'from spectator_metric_transformer import AggregatedMetricsBuilder, MetricInfo, SpectatorMetricTransformer, TransformationRule\n'), ((1946, 2041), 'spectator_metric_transformer.TransformationRule', 'TransformationRule', (['transformer', "{'rename': 'NewName', 'kind': 'Timer', 'tags': ['status']}"], {}), "(transformer, {'rename': 'NewName', 'kind': 'Timer',\n 'tags': ['status']})\n", (1964, 2041), False, 'from spectator_metric_transformer import AggregatedMetricsBuilder, MetricInfo, SpectatorMetricTransformer, TransformationRule\n'), ((2293, 2323), 'spectator_metric_transformer.AggregatedMetricsBuilder', 'AggregatedMetricsBuilder', (['rule'], {}), '(rule)\n', (2317, 2323), False, 'from spectator_metric_transformer import AggregatedMetricsBuilder, MetricInfo, SpectatorMetricTransformer, TransformationRule\n'), ((4070, 4104), 'spectator_metric_transformer.SpectatorMetricTransformer', 'SpectatorMetricTransformer', (['{}', '{}'], {}), '({}, {})\n', (4096, 4104), False, 'from spectator_metric_transformer import AggregatedMetricsBuilder, MetricInfo, SpectatorMetricTransformer, TransformationRule\n'), ((4116, 4213), 'spectator_metric_transformer.TransformationRule', 'TransformationRule', (['transformer', "{'rename': 'NewName', 'kind': 'Summary', 'tags': ['status']}"], {}), "(transformer, {'rename': 'NewName', 'kind': 'Summary',\n 'tags': ['status']})\n", (4134, 4213), False, 'from spectator_metric_transformer import AggregatedMetricsBuilder, MetricInfo, SpectatorMetricTransformer, TransformationRule\n'), ((4288, 4318), 'spectator_metric_transformer.AggregatedMetricsBuilder', 'AggregatedMetricsBuilder', (['rule'], {}), '(rule)\n', (4312, 4318), False, 'from spectator_metric_transformer import 
AggregatedMetricsBuilder, MetricInfo, SpectatorMetricTransformer, TransformationRule\n'), ((8886, 8906), 'yaml.load', 'yaml.load', (['spec_yaml'], {}), '(spec_yaml)\n', (8895, 8906), False, 'import yaml\n'), ((8953, 8994), 'spectator_metric_transformer.SpectatorMetricTransformer', 'SpectatorMetricTransformer', (['options', 'spec'], {}), '(options, spec)\n', (8979, 8994), False, 'from spectator_metric_transformer import AggregatedMetricsBuilder, MetricInfo, SpectatorMetricTransformer, TransformationRule\n'), ((9644, 9680), 'spectator_metric_transformer.SpectatorMetricTransformer', 'SpectatorMetricTransformer', (['{}', 'spec'], {}), '({}, spec)\n', (9670, 9680), False, 'from spectator_metric_transformer import AggregatedMetricsBuilder, MetricInfo, SpectatorMetricTransformer, TransformationRule\n'), ((9963, 10004), 'spectator_metric_transformer.SpectatorMetricTransformer', 'SpectatorMetricTransformer', (['options', 'spec'], {}), '(options, spec)\n', (9989, 10004), False, 'from spectator_metric_transformer import AggregatedMetricsBuilder, MetricInfo, SpectatorMetricTransformer, TransformationRule\n'), ((10283, 10319), 'spectator_metric_transformer.SpectatorMetricTransformer', 'SpectatorMetricTransformer', (['{}', 'spec'], {}), '({}, spec)\n', (10309, 10319), False, 'from spectator_metric_transformer import AggregatedMetricsBuilder, MetricInfo, SpectatorMetricTransformer, TransformationRule\n'), ((10522, 10591), 'spectator_metric_transformer.SpectatorMetricTransformer', 'SpectatorMetricTransformer', (["{'enforce_stackdriver_names': True}", 'spec'], {}), "({'enforce_stackdriver_names': True}, spec)\n", (10548, 10591), False, 'from spectator_metric_transformer import AggregatedMetricsBuilder, MetricInfo, SpectatorMetricTransformer, TransformationRule\n'), ((11346, 11404), 'spectator_metric_transformer.SpectatorMetricTransformer', 'SpectatorMetricTransformer', (["{'use_snake_case': True}", 'spec'], {}), "({'use_snake_case': True}, spec)\n", (11372, 11404), False, 'from 
spectator_metric_transformer import AggregatedMetricsBuilder, MetricInfo, SpectatorMetricTransformer, TransformationRule\n'), ((13112, 13174), 'copy.deepcopy', 'copy.deepcopy', (["EXAMPLE_MEMORY_USED_RESPONSE['jvm.memory.used']"], {}), "(EXAMPLE_MEMORY_USED_RESPONSE['jvm.memory.used'])\n", (13125, 13174), False, 'import copy\n'), ((13832, 13894), 'copy.deepcopy', 'copy.deepcopy', (["EXAMPLE_MEMORY_USED_RESPONSE['jvm.memory.used']"], {}), "(EXAMPLE_MEMORY_USED_RESPONSE['jvm.memory.used'])\n", (13845, 13894), False, 'import copy\n'), ((11990, 12103), 'textwrap.dedent', 'textwrap.dedent', (['""" jvm.memory.used:\n rename: platform/java/memory\n """'], {}), '(\n """ jvm.memory.used:\n rename: platform/java/memory\n """\n )\n', (12005, 12103), False, 'import textwrap\n'), ((12300, 12403), 'textwrap.dedent', 'textwrap.dedent', (['""" jvm.memory.used:\n rename: memoryUsed\n """'], {}), '(\n """ jvm.memory.used:\n rename: memoryUsed\n """\n )\n', (12315, 12403), False, 'import textwrap\n'), ((12586, 12689), 'textwrap.dedent', 'textwrap.dedent', (['""" jvm.memory.used:\n rename: memoryUsed\n """'], {}), '(\n """ jvm.memory.used:\n rename: memoryUsed\n """\n )\n', (12601, 12689), False, 'import textwrap\n'), ((12910, 12997), 'textwrap.dedent', 'textwrap.dedent', (['""" jvm.memory.used:\n rename:\n """'], {}), '(\n """ jvm.memory.used:\n rename:\n """)\n', (12925, 12997), False, 'import textwrap\n'), ((13369, 13689), 'textwrap.dedent', 'textwrap.dedent', (['""" jvm.memory.used:\n rename: platform/java/memory\n change_tags:\n - from: memtype\n to: scope\n type: STRING\n\n - from: id\n to: segment\n type: STRING\n """'], {}), '(\n """ jvm.memory.used:\n rename: platform/java/memory\n change_tags:\n - from: memtype\n to: scope\n type: STRING\n\n - from: id\n to: segment\n type: STRING\n """\n )\n', (13384, 13689), False, 'import textwrap\n'), ((14274, 14501), 'textwrap.dedent', 'textwrap.dedent', (['""" jvm.memory.used:\n add_tags:\n first: FIRST\n T: true\n F: false\n S: 
\'true\'\n numeric: 123\n """'], {}), '(\n """ jvm.memory.used:\n add_tags:\n first: FIRST\n T: true\n F: false\n S: \'true\'\n numeric: 123\n """\n )\n', (14289, 14501), False, 'import textwrap\n'), ((14652, 15176), 'textwrap.dedent', 'textwrap.dedent', (['""" storageServiceSupport.autoRefreshTime:\n kind: Timer\n rename: front50/cache/refresh\n tags:\n - objectType\n - statistic\n add_tags:\n scheduled: false\n\n storageServiceSupport.scheduledRefreshTime:\n kind: Timer\n rename: front50/cache/refresh\n tags:\n - objectType\n - statistic\n add_tags:\n scheduled: true\n """'], {}), '(\n """ storageServiceSupport.autoRefreshTime:\n kind: Timer\n rename: front50/cache/refresh\n tags:\n - objectType\n - statistic\n add_tags:\n scheduled: false\n\n storageServiceSupport.scheduledRefreshTime:\n kind: Timer\n rename: front50/cache/refresh\n tags:\n - objectType\n - statistic\n add_tags:\n scheduled: true\n """\n )\n', (14667, 15176), False, 'import textwrap\n'), ((16832, 17096), 'textwrap.dedent', 'textwrap.dedent', (['""" jvm.memory.used:\n tags:\n - id\n change_tags:\n - from: memtype\n to: heap\n type: BOOL\n compare_value: HEAP\n """'], {}), '(\n """ jvm.memory.used:\n tags:\n - id\n change_tags:\n - from: memtype\n to: heap\n type: BOOL\n compare_value: HEAP\n """\n )\n', (16847, 17096), False, 'import textwrap\n'), ((18348, 18676), 'textwrap.dedent', 'textwrap.dedent', (['""" controller.invocations:\n tags:\n - controller\n - method\n - statistic\n - status\n\n change_tags:\n - from: statusCode\n to: statusCode\n type: INT\n """'], {}), '(\n """ controller.invocations:\n tags:\n - controller\n - method\n - statistic\n - status\n\n change_tags:\n - from: statusCode\n to: statusCode\n type: INT\n """\n )\n', (18363, 18676), False, 'import textwrap\n'), ((19844, 20204), 'textwrap.dedent', 'textwrap.dedent', (['""" executionCount:\n tags:\n - status\n\n change_tags:\n - from: agent\n to: [provider, account, region, agent]\n type: [STRING, STRING, STRING, STRING]\n 
extract_regex: \'([^/]+)/(?:([^/]+)/(?:([^/]+)/)?)?(.+)\'\n """'], {}), '(\n """ executionCount:\n tags:\n - status\n\n change_tags:\n - from: agent\n to: [provider, account, region, agent]\n type: [STRING, STRING, STRING, STRING]\n extract_regex: \'([^/]+)/(?:([^/]+)/(?:([^/]+)/)?)?(.+)\'\n """\n )\n', (19859, 20204), False, 'import textwrap\n'), ((22420, 22638), 'textwrap.dedent', 'textwrap.dedent', (['""" controller.invocations:\n tags:\n - controller\n - method\n - status\n # "statistic" is implicit\n """'], {}), '(\n """ controller.invocations:\n tags:\n - controller\n - method\n - status\n # "statistic" is implicit\n """\n )\n', (22435, 22638), False, 'import textwrap\n'), ((25839, 26175), 'textwrap.dedent', 'textwrap.dedent', (['""" jvm.memory.used:\n tags:\n - id\n change_tags:\n - from: memtype\n to: heap\n type: BOOL\n compare_value: HEAP\n discard_tag_values:\n heap: "(?i)^false$"\n """'], {}), '(\n """ jvm.memory.used:\n tags:\n - id\n change_tags:\n - from: memtype\n to: heap\n type: BOOL\n compare_value: HEAP\n discard_tag_values:\n heap: "(?i)^false$"\n """\n )\n', (25854, 26175), False, 'import textwrap\n'), ((27403, 27718), 'textwrap.dedent', 'textwrap.dedent', (['""" executions.started:\n kind: Counter\n per_application: true\n change_tags:\n - from: source\n to: application\n type: STRING\n tags:\n - executionType\n """'], {}), '(\n """ executions.started:\n kind: Counter\n per_application: true\n change_tags:\n - from: source\n to: application\n type: STRING\n tags:\n - executionType\n """\n )\n', (27418, 27718), False, 'import textwrap\n')]
|
from datetime import datetime, timedelta
from mock import patch
from django import forms
from django.test import TestCase
from corehq.apps.domain.models import Domain
from corehq.apps.sso.forms import (
CreateIdentityProviderForm,
EditIdentityProviderAdminForm,
SSOEnterpriseSettingsForm,
TIME_FORMAT,
)
from corehq.apps.sso.models import (
IdentityProvider,
AuthenticatedEmailDomain,
UserExemptFromSingleSignOn,
)
from corehq.apps.sso.tests import generator
from corehq.apps.users.models import WebUser
class BaseSSOFormTest(TestCase):
    """Shared fixture for the SSO form tests: creates a billing account,
    a domain, and an accounting-admin web user once per test class."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # Billing account that will own the IdentityProviders under test.
        cls.account = generator.get_billing_account_for_idp()
        cls.domain = Domain.get_or_create_with_name(
            "vaultwax-001",
            is_active=True
        )
        # Web user passed to the forms as the acting admin.
        cls.accounting_admin = WebUser.create(
            cls.domain.name, '<EMAIL>', '<PASSWORD>', None, None
        )

    @classmethod
    def tearDownClass(cls):
        # Delete in reverse dependency order: user, then domain, then account.
        cls.accounting_admin.delete(None)
        cls.domain.delete()
        cls.account.delete()
        super().tearDownClass()
class TestCreateIdentityProviderForm(BaseSSOFormTest):
    """Tests validation and creation side effects of CreateIdentityProviderForm."""

    @classmethod
    def tearDownClass(cls):
        # Remove IdentityProviders before the base class deletes the
        # billing account that owns them.
        IdentityProvider.objects.all().delete()
        super().tearDownClass()

    def test_bad_slug_is_invalid(self):
        """
        Ensure that a poorly formatted slug raises a ValidationError and the
        CreateIdentityProviderForm does not validate.
        """
        post_data = {
            'owner': self.account.id,
            'name': '<NAME>',
            'slug': 'bad slug',
        }
        create_idp_form = CreateIdentityProviderForm(post_data)
        # clean_slug() is invoked directly, so cleaned_data must be primed.
        create_idp_form.cleaned_data = post_data
        with self.assertRaises(forms.ValidationError):
            create_idp_form.clean_slug()
        self.assertFalse(create_idp_form.is_valid())

    def test_created_identity_provider(self):
        """
        Ensure that a valid CreateIdentityProviderForm successfully creates an
        IdentityProvider.
        """
        post_data = {
            'owner': self.account.id,
            'name': 'Azure AD <NAME>',
            'slug': 'vaultwax',
        }
        create_idp_form = CreateIdentityProviderForm(post_data)
        self.assertTrue(create_idp_form.is_valid())
        create_idp_form.create_identity_provider(self.accounting_admin)
        idp = IdentityProvider.objects.get(owner=self.account)
        self.assertEqual(idp.owner, self.account)
        self.assertEqual(idp.slug, post_data['slug'])
        self.assertEqual(idp.name, post_data['name'])
        # The service-provider certificate fields must be populated after
        # creation (generated by create_identity_provider).
        self.assertIsNotNone(idp.sp_cert_public)
        self.assertIsNotNone(idp.sp_cert_private)
        self.assertIsNotNone(idp.date_sp_cert_expiration)
        self.assertEqual(idp.created_by, self.accounting_admin.username)
        self.assertEqual(idp.last_modified_by, self.accounting_admin.username)
@patch('corehq.apps.sso.utils.url_helpers.get_dashboard_link', return_value='#')
class TestEditIdentityProviderAdminForm(BaseSSOFormTest):
    """Tests for EditIdentityProviderAdminForm validation and updates.

    NOTE(review): the class-level patch stubs get_dashboard_link with '#'
    (presumably referenced when rendering form errors -- confirm against the
    form code); each test therefore receives an extra mock argument which is
    absorbed by *args.
    """

    def setUp(self):
        super().setUp()
        self.idp = IdentityProvider.objects.create(
            owner=self.account,
            name='Azure AD for Vault Wax',
            slug='vaultwax',
            created_by='<EMAIL>',
            last_modified_by='<EMAIL>',
        )
        self.idp.create_service_provider_certificate()

    def tearDown(self):
        # Delete in dependency order: SSO exemptions, then email domains,
        # then the IdentityProviders themselves.
        UserExemptFromSingleSignOn.objects.all().delete()
        AuthenticatedEmailDomain.objects.all().delete()
        IdentityProvider.objects.all().delete()
        super().tearDown()

    def _fulfill_all_active_requirements(self, except_entity_id=False, except_login_url=False,
                                         except_logout_url=False, except_certificate=False,
                                         except_certificate_date=False):
        """Populate every field required for activation, optionally leaving
        one of them unset (None) via the except_* flags."""
        self.idp.entity_id = None if except_entity_id else 'https://test.org/metadata'
        self.idp.login_url = None if except_login_url else 'https://test.org/sls'
        self.idp.logout_url = None if except_logout_url else 'https://test.org/slo'
        self.idp.idp_cert_public = None if except_certificate else 'TEST CERTIFICATE'
        self.idp.date_idp_cert_expiration = (None if except_certificate_date
                                             else datetime.utcnow() + timedelta(days=30))
        self.idp.save()

    def _get_post_data(self, name=None, is_editable=False, is_active=False, slug=None):
        """Return form POST data, defaulting name/slug to the current idp's."""
        return {
            'name': name if name is not None else self.idp.name,
            'is_editable': is_editable,
            'is_active': is_active,
            'slug': slug or self.idp.slug,
        }

    def test_bad_slug_update_is_invalid(self, *args):
        """
        Ensure that if passed a bad slug, EditIdentityProviderAdminForm raises
        a ValidationError and does not validate.
        """
        post_data = self._get_post_data(slug='bad slug')
        edit_idp_form = EditIdentityProviderAdminForm(self.idp, post_data)
        edit_idp_form.cleaned_data = post_data
        with self.assertRaises(forms.ValidationError):
            edit_idp_form.clean_slug()
        self.assertFalse(edit_idp_form.is_valid())

    def test_slug_update_conflict(self, *args):
        """
        Ensure that if another IdentityProvider exists with the same slug,
        EditIdentityProviderAdminForm raises a ValidationError and does not
        validate.
        """
        second_idp = IdentityProvider.objects.create(
            owner=self.account,
            name='Azure AD for VWX',
            slug='vwx',
            created_by='<EMAIL>',
            last_modified_by='<EMAIL>',
        )
        post_data = self._get_post_data(slug=second_idp.slug)
        edit_idp_form = EditIdentityProviderAdminForm(self.idp, post_data)
        edit_idp_form.cleaned_data = post_data
        with self.assertRaises(forms.ValidationError):
            edit_idp_form.clean_slug()
        self.assertFalse(edit_idp_form.is_valid())

    def test_slug_and_last_modified_by_updates(self, *args):
        """
        Ensure that the `slug` and `last_modified_by` fields properly update
        when EditIdentityProviderAdminForm validates and calls
        update_identity_provider().
        """
        post_data = self._get_post_data(slug='vaultwax-2')
        edit_idp_form = EditIdentityProviderAdminForm(self.idp, post_data)
        self.assertTrue(edit_idp_form.is_valid())
        edit_idp_form.update_identity_provider(self.accounting_admin)
        idp = IdentityProvider.objects.get(id=self.idp.id)
        self.assertEqual(idp.slug, post_data['slug'])
        self.assertEqual(idp.last_modified_by, self.accounting_admin.username)
        self.assertNotEqual(idp.created_by, self.accounting_admin.username)

    def test_name_updates_and_is_required(self, *args):
        """
        Ensure that the `name` field is both required and updates the name
        of the IdentityProvider when EditIdentityProviderAdminForm
        validates and update_identity_provider() is called.
        """
        bad_post_data = self._get_post_data(name='')
        bad_edit_idp_form = EditIdentityProviderAdminForm(self.idp, bad_post_data)
        self.assertFalse(bad_edit_idp_form.is_valid())
        post_data = self._get_post_data(name='new name test')
        edit_idp_form = EditIdentityProviderAdminForm(self.idp, post_data)
        self.assertTrue(edit_idp_form.is_valid())
        edit_idp_form.update_identity_provider(self.accounting_admin)
        idp = IdentityProvider.objects.get(id=self.idp.id)
        self.assertEqual(idp.name, post_data['name'])

    def test_is_editable_has_met_requirements_and_value_updates(self, *args):
        """
        Ensure that the requirements for `is_editable` are met in order for
        EditIdentityProviderAdminForm to validate and that once it is valid,
        calling update_identity_provider() updates the `is_editable` field on
        the IdentityProvider as expected.
        """
        post_data = self._get_post_data(is_editable=True)
        edit_idp_form = EditIdentityProviderAdminForm(self.idp, post_data)
        edit_idp_form.cleaned_data = post_data
        # No authenticated email domain yet: must fail.
        with self.assertRaises(forms.ValidationError):
            edit_idp_form.clean_is_editable()
        email_domain = AuthenticatedEmailDomain.objects.create(
            identity_provider=self.idp,
            email_domain='vaultwax.com',
        )
        # Email domain alone is not enough: an exempt user is also required.
        with self.assertRaises(forms.ValidationError):
            edit_idp_form.clean_is_editable()
        UserExemptFromSingleSignOn.objects.create(
            username='<EMAIL>',
            email_domain=email_domain,
        )
        self.assertTrue(edit_idp_form.is_valid())
        edit_idp_form.update_identity_provider(self.accounting_admin)
        idp = IdentityProvider.objects.get(id=self.idp.id)
        self.assertTrue(idp.is_editable)

    def test_is_active_has_met_requirements_and_value_updates(self, *args):
        """
        Ensure that the requirements for `is_active` are met in order for
        EditIdentityProviderAdminForm to validate and that once it is valid,
        calling update_identity_provider() updates the `is_active` field on
        the IdentityProvider as expected.
        """
        post_data = self._get_post_data(is_active=True)
        edit_idp_form = EditIdentityProviderAdminForm(self.idp, post_data)
        edit_idp_form.cleaned_data = post_data
        with self.assertRaises(forms.ValidationError):
            edit_idp_form.clean_is_active()
        email_domain = AuthenticatedEmailDomain.objects.create(
            identity_provider=self.idp,
            email_domain='vaultwax.com',
        )
        with self.assertRaises(forms.ValidationError):
            edit_idp_form.clean_is_active()
        UserExemptFromSingleSignOn.objects.create(
            username='<EMAIL>',
            email_domain=email_domain,
        )
        with self.assertRaises(forms.ValidationError):
            edit_idp_form.clean_is_active()
        # Activation also requires every SSO configuration field to be set;
        # leaving out any single one must still raise.
        self._fulfill_all_active_requirements(except_entity_id=True)
        with self.assertRaises(forms.ValidationError):
            edit_idp_form.clean_is_active()
        self._fulfill_all_active_requirements(except_login_url=True)
        with self.assertRaises(forms.ValidationError):
            edit_idp_form.clean_is_active()
        self._fulfill_all_active_requirements(except_logout_url=True)
        with self.assertRaises(forms.ValidationError):
            edit_idp_form.clean_is_active()
        self._fulfill_all_active_requirements(except_certificate=True)
        with self.assertRaises(forms.ValidationError):
            edit_idp_form.clean_is_active()
        self._fulfill_all_active_requirements(except_certificate_date=True)
        with self.assertRaises(forms.ValidationError):
            edit_idp_form.clean_is_active()
        self._fulfill_all_active_requirements()
        self.assertTrue(edit_idp_form.is_valid())
        edit_idp_form.update_identity_provider(self.accounting_admin)
        idp = IdentityProvider.objects.get(id=self.idp.id)
        self.assertTrue(idp.is_active)
class TestSSOEnterpriseSettingsForm(BaseSSOFormTest):
    """Tests for SSOEnterpriseSettingsForm (the enterprise-admin-facing form)."""

    def setUp(self):
        super().setUp()
        self.idp = IdentityProvider.objects.create(
            owner=self.account,
            name='Azure AD for Vault Wax',
            slug='vaultwax',
            created_by='<EMAIL>',
            last_modified_by='<EMAIL>',
        )
        # NOTE(review): is_editable is set on the in-memory instance here
        # without an explicit save(); the tests later observe it persisted --
        # confirm which call actually writes it.
        self.idp.is_editable = True
        self.idp.create_service_provider_certificate()

    def tearDown(self):
        # Delete in dependency order: SSO exemptions, then email domains,
        # then the IdentityProviders themselves.
        UserExemptFromSingleSignOn.objects.all().delete()
        AuthenticatedEmailDomain.objects.all().delete()
        IdentityProvider.objects.all().delete()
        super().tearDown()

    @staticmethod
    def _get_post_data(no_entity_id=False, no_login_url=False, no_logout_url=False,
                       no_certificate=False, no_certificate_date=False, is_active=False):
        """Return form POST data; each no_* flag blanks out one field."""
        expiration_date = datetime.utcnow() + timedelta(days=30)
        return {
            'is_active': is_active,
            'entity_id': '' if no_entity_id else 'https://test.org/metadata',
            'login_url': '' if no_login_url else 'https://test.org/sls',
            'logout_url': '' if no_logout_url else 'https://test.org/slo',
            'idp_cert_public': '' if no_certificate else 'TEST CERTIFICATE',
            'date_idp_cert_expiration': ('' if no_certificate_date
                                         else expiration_date.strftime(TIME_FORMAT)),
        }

    def test_is_active_triggers_required_fields_and_updates(self):
        """
        Test that if `is_active` is set to true, then related required fields
        raise ValidationErrors if left blank. Once the requirements are met and
        SSOEnterpriseSettingsForm validates, ensure that
        update_identity_provider() updates the `is_active` field on
        the IdentityProvider as expected.
        """
        post_data = self._get_post_data(
            is_active=True, no_entity_id=True, no_login_url=True,
            no_logout_url=True, no_certificate=True, no_certificate_date=True
        )
        edit_sso_idp_form = SSOEnterpriseSettingsForm(self.idp, post_data)
        edit_sso_idp_form.cleaned_data = post_data
        with self.assertRaises(forms.ValidationError):
            edit_sso_idp_form.clean_is_active()
        email_domain = AuthenticatedEmailDomain.objects.create(
            identity_provider=self.idp,
            email_domain='vaultwax.com',
        )
        with self.assertRaises(forms.ValidationError):
            edit_sso_idp_form.clean_is_active()
        UserExemptFromSingleSignOn.objects.create(
            username='<EMAIL>',
            email_domain=email_domain,
        )
        # should not raise exception now
        edit_sso_idp_form.clean_is_active()
        # Each blank required field must fail its own clean_* method.
        with self.assertRaises(forms.ValidationError):
            edit_sso_idp_form.clean_entity_id()
        with self.assertRaises(forms.ValidationError):
            edit_sso_idp_form.clean_login_url()
        with self.assertRaises(forms.ValidationError):
            edit_sso_idp_form.clean_logout_url()
        with self.assertRaises(forms.ValidationError):
            edit_sso_idp_form.clean_idp_cert_public()
        with self.assertRaises(forms.ValidationError):
            edit_sso_idp_form.clean_date_idp_cert_expiration()
        self.assertFalse(edit_sso_idp_form.is_valid())
        corrected_post_data = self._get_post_data(is_active=True)
        corrected_edit_sso_idp_form = SSOEnterpriseSettingsForm(self.idp, corrected_post_data)
        self.assertTrue(corrected_edit_sso_idp_form.is_valid())
        corrected_edit_sso_idp_form.update_identity_provider(self.accounting_admin)
        idp = IdentityProvider.objects.get(id=self.idp.id)
        self.assertTrue(idp.is_editable)
        self.assertTrue(idp.is_active)
        self.assertEqual(idp.entity_id, corrected_post_data['entity_id'])
        self.assertEqual(idp.login_url, corrected_post_data['login_url'])
        self.assertEqual(idp.logout_url, corrected_post_data['logout_url'])
        self.assertEqual(idp.idp_cert_public, corrected_post_data['idp_cert_public'])
        self.assertEqual(
            idp.date_idp_cert_expiration.strftime(TIME_FORMAT),
            corrected_post_data['date_idp_cert_expiration']
        )

    def test_last_modified_by_and_fields_update_when_not_active(self):
        """
        Ensure that fields properly update and that `last_modified_by` updates
        as expected when SSOEnterpriseSettingsForm validates and
        update_identity_provider() is called.
        """
        email_domain = AuthenticatedEmailDomain.objects.create(
            identity_provider=self.idp,
            email_domain='vaultwax.com',
        )
        UserExemptFromSingleSignOn.objects.create(
            username='b<EMAIL>',
            email_domain=email_domain,
        )
        post_data = self._get_post_data()
        edit_sso_idp_form = SSOEnterpriseSettingsForm(self.idp, post_data)
        self.assertTrue(edit_sso_idp_form.is_valid())
        edit_sso_idp_form.update_identity_provider(self.accounting_admin)
        idp = IdentityProvider.objects.get(id=self.idp.id)
        self.assertTrue(idp.is_editable)
        self.assertFalse(idp.is_active)
        self.assertEqual(idp.last_modified_by, self.accounting_admin.username)
        self.assertNotEqual(idp.created_by, self.accounting_admin.username)
        self.assertEqual(idp.entity_id, post_data['entity_id'])
        self.assertEqual(idp.login_url, post_data['login_url'])
        self.assertEqual(idp.logout_url, post_data['logout_url'])
        self.assertEqual(idp.idp_cert_public, post_data['idp_cert_public'])
        self.assertEqual(
            idp.date_idp_cert_expiration.strftime(TIME_FORMAT),
            post_data['date_idp_cert_expiration']
        )

    def test_date_idp_cert_expiration_with_bad_value(self):
        """
        Ensure that SSOEnterpriseSettingsForm raises a ValidationError if
        `date_idp_cert_expiration` is provided with a incorrectly formatted date
        string.
        """
        post_data = {
            'is_active': self.idp.is_active,
            'entity_id': self.idp.entity_id,
            'login_url': self.idp.login_url,
            'logout_url': self.idp.logout_url,
            'idp_cert_public': self.idp.idp_cert_public,
            'date_idp_cert_expiration': 'purposefully bad date string',
        }
        edit_sso_idp_form = SSOEnterpriseSettingsForm(self.idp, post_data)
        edit_sso_idp_form.cleaned_data = post_data
        with self.assertRaises(forms.ValidationError):
            edit_sso_idp_form.clean_date_idp_cert_expiration()
|
[
"corehq.apps.sso.models.AuthenticatedEmailDomain.objects.create",
"corehq.apps.sso.models.IdentityProvider.objects.get",
"corehq.apps.sso.models.UserExemptFromSingleSignOn.objects.create",
"corehq.apps.sso.forms.EditIdentityProviderAdminForm",
"corehq.apps.sso.models.AuthenticatedEmailDomain.objects.all",
"corehq.apps.users.models.WebUser.create",
"mock.patch",
"corehq.apps.sso.models.IdentityProvider.objects.all",
"corehq.apps.sso.forms.CreateIdentityProviderForm",
"datetime.datetime.utcnow",
"corehq.apps.sso.models.UserExemptFromSingleSignOn.objects.all",
"corehq.apps.domain.models.Domain.get_or_create_with_name",
"datetime.timedelta",
"corehq.apps.sso.tests.generator.get_billing_account_for_idp",
"corehq.apps.sso.forms.SSOEnterpriseSettingsForm",
"corehq.apps.sso.models.IdentityProvider.objects.create"
] |
[((2932, 3011), 'mock.patch', 'patch', (['"""corehq.apps.sso.utils.url_helpers.get_dashboard_link"""'], {'return_value': '"""#"""'}), "('corehq.apps.sso.utils.url_helpers.get_dashboard_link', return_value='#')\n", (2937, 3011), False, 'from mock import patch\n'), ((663, 702), 'corehq.apps.sso.tests.generator.get_billing_account_for_idp', 'generator.get_billing_account_for_idp', ([], {}), '()\n', (700, 702), False, 'from corehq.apps.sso.tests import generator\n'), ((724, 786), 'corehq.apps.domain.models.Domain.get_or_create_with_name', 'Domain.get_or_create_with_name', (['"""vaultwax-001"""'], {'is_active': '(True)'}), "('vaultwax-001', is_active=True)\n", (754, 786), False, 'from corehq.apps.domain.models import Domain\n'), ((852, 920), 'corehq.apps.users.models.WebUser.create', 'WebUser.create', (['cls.domain.name', '"""<EMAIL>"""', '"""<PASSWORD>"""', 'None', 'None'], {}), "(cls.domain.name, '<EMAIL>', '<PASSWORD>', None, None)\n", (866, 920), False, 'from corehq.apps.users.models import WebUser\n'), ((1657, 1694), 'corehq.apps.sso.forms.CreateIdentityProviderForm', 'CreateIdentityProviderForm', (['post_data'], {}), '(post_data)\n', (1683, 1694), False, 'from corehq.apps.sso.forms import CreateIdentityProviderForm, EditIdentityProviderAdminForm, SSOEnterpriseSettingsForm, TIME_FORMAT\n'), ((2236, 2273), 'corehq.apps.sso.forms.CreateIdentityProviderForm', 'CreateIdentityProviderForm', (['post_data'], {}), '(post_data)\n', (2262, 2273), False, 'from corehq.apps.sso.forms import CreateIdentityProviderForm, EditIdentityProviderAdminForm, SSOEnterpriseSettingsForm, TIME_FORMAT\n'), ((2413, 2461), 'corehq.apps.sso.models.IdentityProvider.objects.get', 'IdentityProvider.objects.get', ([], {'owner': 'self.account'}), '(owner=self.account)\n', (2441, 2461), False, 'from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n'), ((3135, 3293), 'corehq.apps.sso.models.IdentityProvider.objects.create', 
'IdentityProvider.objects.create', ([], {'owner': 'self.account', 'name': '"""Azure AD for Vault Wax"""', 'slug': '"""vaultwax"""', 'created_by': '"""<EMAIL>"""', 'last_modified_by': '"""<EMAIL>"""'}), "(owner=self.account, name=\n 'Azure AD for Vault Wax', slug='vaultwax', created_by='<EMAIL>',\n last_modified_by='<EMAIL>')\n", (3166, 3293), False, 'from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n'), ((5004, 5054), 'corehq.apps.sso.forms.EditIdentityProviderAdminForm', 'EditIdentityProviderAdminForm', (['self.idp', 'post_data'], {}), '(self.idp, post_data)\n', (5033, 5054), False, 'from corehq.apps.sso.forms import CreateIdentityProviderForm, EditIdentityProviderAdminForm, SSOEnterpriseSettingsForm, TIME_FORMAT\n'), ((5510, 5652), 'corehq.apps.sso.models.IdentityProvider.objects.create', 'IdentityProvider.objects.create', ([], {'owner': 'self.account', 'name': '"""Azure AD for VWX"""', 'slug': '"""vwx"""', 'created_by': '"""<EMAIL>"""', 'last_modified_by': '"""<EMAIL>"""'}), "(owner=self.account, name='Azure AD for VWX',\n slug='vwx', created_by='<EMAIL>', last_modified_by='<EMAIL>')\n", (5541, 5652), False, 'from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n'), ((5806, 5856), 'corehq.apps.sso.forms.EditIdentityProviderAdminForm', 'EditIdentityProviderAdminForm', (['self.idp', 'post_data'], {}), '(self.idp, post_data)\n', (5835, 5856), False, 'from corehq.apps.sso.forms import CreateIdentityProviderForm, EditIdentityProviderAdminForm, SSOEnterpriseSettingsForm, TIME_FORMAT\n'), ((6394, 6444), 'corehq.apps.sso.forms.EditIdentityProviderAdminForm', 'EditIdentityProviderAdminForm', (['self.idp', 'post_data'], {}), '(self.idp, post_data)\n', (6423, 6444), False, 'from corehq.apps.sso.forms import CreateIdentityProviderForm, EditIdentityProviderAdminForm, SSOEnterpriseSettingsForm, TIME_FORMAT\n'), ((6580, 6624), 
'corehq.apps.sso.models.IdentityProvider.objects.get', 'IdentityProvider.objects.get', ([], {'id': 'self.idp.id'}), '(id=self.idp.id)\n', (6608, 6624), False, 'from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n'), ((7198, 7252), 'corehq.apps.sso.forms.EditIdentityProviderAdminForm', 'EditIdentityProviderAdminForm', (['self.idp', 'bad_post_data'], {}), '(self.idp, bad_post_data)\n', (7227, 7252), False, 'from corehq.apps.sso.forms import CreateIdentityProviderForm, EditIdentityProviderAdminForm, SSOEnterpriseSettingsForm, TIME_FORMAT\n'), ((7395, 7445), 'corehq.apps.sso.forms.EditIdentityProviderAdminForm', 'EditIdentityProviderAdminForm', (['self.idp', 'post_data'], {}), '(self.idp, post_data)\n', (7424, 7445), False, 'from corehq.apps.sso.forms import CreateIdentityProviderForm, EditIdentityProviderAdminForm, SSOEnterpriseSettingsForm, TIME_FORMAT\n'), ((7581, 7625), 'corehq.apps.sso.models.IdentityProvider.objects.get', 'IdentityProvider.objects.get', ([], {'id': 'self.idp.id'}), '(id=self.idp.id)\n', (7609, 7625), False, 'from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n'), ((8138, 8188), 'corehq.apps.sso.forms.EditIdentityProviderAdminForm', 'EditIdentityProviderAdminForm', (['self.idp', 'post_data'], {}), '(self.idp, post_data)\n', (8167, 8188), False, 'from corehq.apps.sso.forms import CreateIdentityProviderForm, EditIdentityProviderAdminForm, SSOEnterpriseSettingsForm, TIME_FORMAT\n'), ((8361, 8461), 'corehq.apps.sso.models.AuthenticatedEmailDomain.objects.create', 'AuthenticatedEmailDomain.objects.create', ([], {'identity_provider': 'self.idp', 'email_domain': '"""vaultwax.com"""'}), "(identity_provider=self.idp,\n email_domain='vaultwax.com')\n", (8400, 8461), False, 'from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n'), ((8603, 8696), 
'corehq.apps.sso.models.UserExemptFromSingleSignOn.objects.create', 'UserExemptFromSingleSignOn.objects.create', ([], {'username': '"""<EMAIL>"""', 'email_domain': 'email_domain'}), "(username='<EMAIL>', email_domain=\n email_domain)\n", (8644, 8696), False, 'from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n'), ((8862, 8906), 'corehq.apps.sso.models.IdentityProvider.objects.get', 'IdentityProvider.objects.get', ([], {'id': 'self.idp.id'}), '(id=self.idp.id)\n', (8890, 8906), False, 'from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n'), ((9398, 9448), 'corehq.apps.sso.forms.EditIdentityProviderAdminForm', 'EditIdentityProviderAdminForm', (['self.idp', 'post_data'], {}), '(self.idp, post_data)\n', (9427, 9448), False, 'from corehq.apps.sso.forms import CreateIdentityProviderForm, EditIdentityProviderAdminForm, SSOEnterpriseSettingsForm, TIME_FORMAT\n'), ((9620, 9720), 'corehq.apps.sso.models.AuthenticatedEmailDomain.objects.create', 'AuthenticatedEmailDomain.objects.create', ([], {'identity_provider': 'self.idp', 'email_domain': '"""vaultwax.com"""'}), "(identity_provider=self.idp,\n email_domain='vaultwax.com')\n", (9659, 9720), False, 'from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n'), ((9860, 9953), 'corehq.apps.sso.models.UserExemptFromSingleSignOn.objects.create', 'UserExemptFromSingleSignOn.objects.create', ([], {'username': '"""<EMAIL>"""', 'email_domain': 'email_domain'}), "(username='<EMAIL>', email_domain=\n email_domain)\n", (9901, 9953), False, 'from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n'), ((11122, 11166), 'corehq.apps.sso.models.IdentityProvider.objects.get', 'IdentityProvider.objects.get', ([], {'id': 'self.idp.id'}), '(id=self.idp.id)\n', (11150, 11166), False, 'from corehq.apps.sso.models import IdentityProvider, 
AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n'), ((11327, 11485), 'corehq.apps.sso.models.IdentityProvider.objects.create', 'IdentityProvider.objects.create', ([], {'owner': 'self.account', 'name': '"""Azure AD for Vault Wax"""', 'slug': '"""vaultwax"""', 'created_by': '"""<EMAIL>"""', 'last_modified_by': '"""<EMAIL>"""'}), "(owner=self.account, name=\n 'Azure AD for Vault Wax', slug='vaultwax', created_by='<EMAIL>',\n last_modified_by='<EMAIL>')\n", (11358, 11485), False, 'from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n'), ((13270, 13316), 'corehq.apps.sso.forms.SSOEnterpriseSettingsForm', 'SSOEnterpriseSettingsForm', (['self.idp', 'post_data'], {}), '(self.idp, post_data)\n', (13295, 13316), False, 'from corehq.apps.sso.forms import CreateIdentityProviderForm, EditIdentityProviderAdminForm, SSOEnterpriseSettingsForm, TIME_FORMAT\n'), ((13496, 13596), 'corehq.apps.sso.models.AuthenticatedEmailDomain.objects.create', 'AuthenticatedEmailDomain.objects.create', ([], {'identity_provider': 'self.idp', 'email_domain': '"""vaultwax.com"""'}), "(identity_provider=self.idp,\n email_domain='vaultwax.com')\n", (13535, 13596), False, 'from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n'), ((13740, 13833), 'corehq.apps.sso.models.UserExemptFromSingleSignOn.objects.create', 'UserExemptFromSingleSignOn.objects.create', ([], {'username': '"""<EMAIL>"""', 'email_domain': 'email_domain'}), "(username='<EMAIL>', email_domain=\n email_domain)\n", (13781, 13833), False, 'from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n'), ((14647, 14703), 'corehq.apps.sso.forms.SSOEnterpriseSettingsForm', 'SSOEnterpriseSettingsForm', (['self.idp', 'corrected_post_data'], {}), '(self.idp, corrected_post_data)\n', (14672, 14703), False, 'from corehq.apps.sso.forms import CreateIdentityProviderForm, 
EditIdentityProviderAdminForm, SSOEnterpriseSettingsForm, TIME_FORMAT\n'), ((14867, 14911), 'corehq.apps.sso.models.IdentityProvider.objects.get', 'IdentityProvider.objects.get', ([], {'id': 'self.idp.id'}), '(id=self.idp.id)\n', (14895, 14911), False, 'from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n'), ((15771, 15871), 'corehq.apps.sso.models.AuthenticatedEmailDomain.objects.create', 'AuthenticatedEmailDomain.objects.create', ([], {'identity_provider': 'self.idp', 'email_domain': '"""vaultwax.com"""'}), "(identity_provider=self.idp,\n email_domain='vaultwax.com')\n", (15810, 15871), False, 'from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n'), ((15911, 16005), 'corehq.apps.sso.models.UserExemptFromSingleSignOn.objects.create', 'UserExemptFromSingleSignOn.objects.create', ([], {'username': '"""b<EMAIL>"""', 'email_domain': 'email_domain'}), "(username='b<EMAIL>', email_domain\n =email_domain)\n", (15952, 16005), False, 'from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n'), ((16106, 16152), 'corehq.apps.sso.forms.SSOEnterpriseSettingsForm', 'SSOEnterpriseSettingsForm', (['self.idp', 'post_data'], {}), '(self.idp, post_data)\n', (16131, 16152), False, 'from corehq.apps.sso.forms import CreateIdentityProviderForm, EditIdentityProviderAdminForm, SSOEnterpriseSettingsForm, TIME_FORMAT\n'), ((16296, 16340), 'corehq.apps.sso.models.IdentityProvider.objects.get', 'IdentityProvider.objects.get', ([], {'id': 'self.idp.id'}), '(id=self.idp.id)\n', (16324, 16340), False, 'from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n'), ((17624, 17670), 'corehq.apps.sso.forms.SSOEnterpriseSettingsForm', 'SSOEnterpriseSettingsForm', (['self.idp', 'post_data'], {}), '(self.idp, post_data)\n', (17649, 17670), False, 'from corehq.apps.sso.forms import 
CreateIdentityProviderForm, EditIdentityProviderAdminForm, SSOEnterpriseSettingsForm, TIME_FORMAT\n'), ((12072, 12089), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (12087, 12089), False, 'from datetime import datetime, timedelta\n'), ((12092, 12110), 'datetime.timedelta', 'timedelta', ([], {'days': '(30)'}), '(days=30)\n', (12101, 12110), False, 'from datetime import datetime, timedelta\n'), ((1231, 1261), 'corehq.apps.sso.models.IdentityProvider.objects.all', 'IdentityProvider.objects.all', ([], {}), '()\n', (1259, 1261), False, 'from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n'), ((3444, 3484), 'corehq.apps.sso.models.UserExemptFromSingleSignOn.objects.all', 'UserExemptFromSingleSignOn.objects.all', ([], {}), '()\n', (3482, 3484), False, 'from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n'), ((3502, 3540), 'corehq.apps.sso.models.AuthenticatedEmailDomain.objects.all', 'AuthenticatedEmailDomain.objects.all', ([], {}), '()\n', (3538, 3540), False, 'from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n'), ((3558, 3588), 'corehq.apps.sso.models.IdentityProvider.objects.all', 'IdentityProvider.objects.all', ([], {}), '()\n', (3586, 3588), False, 'from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n'), ((4352, 4369), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (4367, 4369), False, 'from datetime import datetime, timedelta\n'), ((4372, 4390), 'datetime.timedelta', 'timedelta', ([], {'days': '(30)'}), '(days=30)\n', (4381, 4390), False, 'from datetime import datetime, timedelta\n'), ((11672, 11712), 'corehq.apps.sso.models.UserExemptFromSingleSignOn.objects.all', 'UserExemptFromSingleSignOn.objects.all', ([], {}), '()\n', (11710, 11712), False, 'from corehq.apps.sso.models import IdentityProvider, 
AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n'), ((11730, 11768), 'corehq.apps.sso.models.AuthenticatedEmailDomain.objects.all', 'AuthenticatedEmailDomain.objects.all', ([], {}), '()\n', (11766, 11768), False, 'from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n'), ((11786, 11816), 'corehq.apps.sso.models.IdentityProvider.objects.all', 'IdentityProvider.objects.all', ([], {}), '()\n', (11814, 11816), False, 'from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain, UserExemptFromSingleSignOn\n')]
|
# -*- coding: utf-8 -*-
# Command line arguments
import argparse
from urllib.parse import parse_qs
from http.cookies import SimpleCookie

from MatchX.utils.user_agents import headers

parser = argparse.ArgumentParser()
parser.add_argument('-u', '--url', help='Target URL.')
parser.add_argument('-l', '--list', help='List of URLs.')

matching_grp = parser.add_argument_group('Matching arguments')
matching_grp.add_argument('-mw', '--match-words', help='Provide a list of words or strings that you want to find in the response (Example: <script>...</script>, token=*, key=*).')
matching_grp.add_argument('-mr', '--match-regex', help='Provide a list of regex patterns that you want to find in the response (Example: root:.*:0:0).')

request_grp = parser.add_argument_group('Request arguments')
request_grp.add_argument('-m', '--method', help='Request method (GET/POST), example: --method POST. Default is GET.', default="GET")
request_grp.add_argument('-d', '--data', help='POST data (Example: --data "squery=google&data=hacked").')
request_grp.add_argument('--user-agent', help='Specify User agent (Example: Mozilla/5.0 (X11; Linux i586; rv:63.0) Gecko/20100101 Firefox/63.0).', default=headers)
# type=float so a CLI-supplied timeout is numeric like the default; without
# it the value stayed a string, which HTTP clients reject as a timeout.
request_grp.add_argument('--timeout', help='Connection timeout, default is 30.', default=30, type=float)
request_grp.add_argument('--cookies', help='Specify cookies if required (Example: --cookies "PASS=TEST; hack=hack").')
request_grp.add_argument('--verify', help='Verify SSL cert. Default is false.', default=False, action='store_true')

other_grp = parser.add_argument_group('Other arguments')
other_grp.add_argument('-t', '--threads', help='Number of concurrent threads, default is 50.', default=50, type=int)
other_grp.add_argument('-v', '--verbose', help='Verbose output', action='store_true')

output_grp = parser.add_argument_group('Output arguments')
output_grp.add_argument('-o', '--output', help='Write json output, default is output.json.', default='output.json')

args = parser.parse_args()

url = args.url
urls = args.list
match_words = args.match_words
match_regex = args.match_regex
user_agent = args.user_agent
timeout = args.timeout
verify = args.verify
threads = args.threads
output = args.output
cookies = args.cookies
verbose = args.verbose
method = args.method
data = args.data

if data:
    # parse_qs yields a list per key; collapse single-value fields to scalars.
    parsed = parse_qs(data, strict_parsing=True)
    data = {key: vals[0] if len(vals) == 1 else vals for key, vals in parsed.items()}

if cookies:
    # Turn a raw "k=v; k2=v2" cookie header into a plain name -> value dict.
    cookie = SimpleCookie()
    cookie.load(cookies)
    cookies = {name: morsel.value for name, morsel in cookie.items()}
|
[
"http.cookies.SimpleCookie",
"argparse.ArgumentParser",
"urllib.parse.parse_qs"
] |
[((194, 219), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (217, 219), False, 'import argparse\n'), ((2303, 2338), 'urllib.parse.parse_qs', 'parse_qs', (['data'], {'strict_parsing': '(True)'}), '(data, strict_parsing=True)\n', (2311, 2338), False, 'from urllib.parse import parse_qs\n'), ((2512, 2526), 'http.cookies.SimpleCookie', 'SimpleCookie', ([], {}), '()\n', (2524, 2526), False, 'from http.cookies import SimpleCookie\n')]
|
import sys
import functools
import contextlib
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
from unittest import mock
except ImportError:
import mock
try:
import pymysql
pymysql.install_as_MySQLdb()
except ImportError:
pass
from architect.commands import main
@contextlib.contextmanager
def capture():
    """Run the ``main`` command while capturing stdout/stderr.

    Yields a ``(stdout, stderr)`` tuple of stripped strings.  ``SystemExit``
    raised by ``main`` (e.g. from argparse) is swallowed so it does not
    abort the test.
    """
    out, err = sys.stdout, sys.stderr
    sys.stdout, sys.stderr = StringIO(), StringIO()
    try:
        try:
            main()
        except SystemExit:
            pass
        sys.stdout.seek(0)
        sys.stderr.seek(0)
        yield sys.stdout.read().strip(), sys.stderr.read().strip()
    finally:
        # Previously the real streams were only restored on a clean exit; an
        # exception from main() or from the with-body left sys.stdout/stderr
        # pointing at the StringIO buffers for the rest of the test run.
        sys.stdout = out
        sys.stderr = err
# unittest's stock skip decorator cannot be applied to a class, which is
# very annoying.  This drop-in replacement handles that case as well.
def skip(reason):
    """Return a decorator marking the decorated item as skipped with *reason*."""
    def decorator(test_item):
        @functools.wraps(test_item)
        def skip_wrapper(*args, **kwargs):
            raise unittest.SkipTest(reason)
        skip_wrapper.__unittest_skip__ = True
        skip_wrapper.__unittest_skip_why__ = reason
        return skip_wrapper
    return decorator
unittest.case.skip = skip
|
[
"io.StringIO",
"sys.stdout.seek",
"unittest.SkipTest",
"sys.stdout.read",
"sys.stderr.read",
"architect.commands.main",
"pymysql.install_as_MySQLdb",
"sys.stderr.seek",
"functools.wraps"
] |
[((315, 343), 'pymysql.install_as_MySQLdb', 'pymysql.install_as_MySQLdb', ([], {}), '()\n', (341, 343), False, 'import pymysql\n'), ((606, 624), 'sys.stderr.seek', 'sys.stderr.seek', (['(0)'], {}), '(0)\n', (621, 624), False, 'import sys\n'), ((629, 647), 'sys.stdout.seek', 'sys.stdout.seek', (['(0)'], {}), '(0)\n', (644, 647), False, 'import sys\n'), ((517, 527), 'io.StringIO', 'StringIO', ([], {}), '()\n', (525, 527), False, 'from io import StringIO\n'), ((529, 539), 'io.StringIO', 'StringIO', ([], {}), '()\n', (537, 539), False, 'from io import StringIO\n'), ((558, 564), 'architect.commands.main', 'main', ([], {}), '()\n', (562, 564), False, 'from architect.commands import main\n'), ((929, 955), 'functools.wraps', 'functools.wraps', (['test_item'], {}), '(test_item)\n', (944, 955), False, 'import functools\n'), ((1017, 1042), 'unittest.SkipTest', 'unittest.SkipTest', (['reason'], {}), '(reason)\n', (1034, 1042), False, 'import unittest\n'), ((659, 676), 'sys.stdout.read', 'sys.stdout.read', ([], {}), '()\n', (674, 676), False, 'import sys\n'), ((686, 703), 'sys.stderr.read', 'sys.stderr.read', ([], {}), '()\n', (701, 703), False, 'import sys\n')]
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from tqdm.auto import tqdm
from typing import Dict, Any
class Conv2dBlock(nn.Module):
    """2-D convolution followed by configurable normalization and activation.

    ``cfg`` may supply keyword overrides for each stage under the keys
    ``"conv"``, ``"norm"`` and ``"activation"``.
    """

    norm_map = {
        "none": nn.Identity,
        "batch": nn.BatchNorm2d,
        "instance": nn.InstanceNorm2d,
    }
    activation_map = {
        "none": nn.Identity,
        "relu": nn.ReLU,
    }

    def __init__(
        self,
        in_channels,
        out_channels,
        kernel_size,
        cfg,
        norm="batch",
        activation="relu",
    ):
        super().__init__()
        conv_kwargs = cfg.get("conv") or {}
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size, **conv_kwargs)
        assert (
            norm in Conv2dBlock.norm_map.keys()
        ), "Chosen normalization method is not implemented."
        norm_kwargs = cfg.get("norm") or {}
        self.norm = Conv2dBlock.norm_map[norm](out_channels, **norm_kwargs)
        assert (
            activation in Conv2dBlock.activation_map.keys()
        ), "Chosen activation method is not implemented."
        activation_kwargs = cfg.get("activation") or {}
        self.activation = Conv2dBlock.activation_map[activation](**activation_kwargs)

    def forward(self, x):
        out = self.conv(x)
        out = self.norm(out)
        return self.activation(out)
class RecurrentBlock(nn.Module):
    """Recurrent conv block: a shared 3x3 conv applied ``t`` times, each pass
    adding a fixed feed-forward residual of the input."""

    def __init__(self, channels, t):
        super().__init__()
        block_cfg = {
            "conv": {"padding": 1,},
            "activation": {"inplace": True,},
        }
        self.t = t
        self.conv_f = Conv2dBlock(channels, channels, kernel_size=3, cfg=block_cfg)
        self.conv_r = Conv2dBlock(channels, channels, kernel_size=3, cfg=block_cfg)

    def forward(self, x):
        residual = self.conv_f(x)
        out = x
        for _ in range(self.t):
            out = self.conv_r(out) + residual
        return out
class RRCNNBlock(nn.Module):
    """Recurrent residual block: a 1x1 channel projection followed by two
    recurrent blocks, with a skip connection around the recurrent part."""

    def __init__(self, in_channels, out_channels, t):
        super().__init__()
        self.conv_1x1 = nn.Conv2d(
            in_channels, out_channels, kernel_size=1, stride=1, padding=0
        )
        self.RCNN = nn.Sequential(
            RecurrentBlock(out_channels, t), RecurrentBlock(out_channels, t)
        )

    def forward(self, x):
        projected = self.conv_1x1(x)
        recurrent = self.RCNN(projected)
        return projected + recurrent
class R2UNetEncoderBlock(nn.Module):
    """One encoder level: RRCNN features plus a 2x2 max-pool downsampling.

    ``forward`` returns ``(features, pooled)`` so the features can be used
    as a skip connection.
    """

    def __init__(self, inputs, outputs, t):
        super().__init__()
        self.conv = RRCNNBlock(inputs, outputs, t)
        self.pool = nn.MaxPool2d(kernel_size=2)

    def forward(self, x):
        features = self.conv(x)
        return features, self.pool(features)
class R2UNetDecoderBlock(nn.Module):
    """Decoder (up-sampling) level of R2U-Net.

    Upsamples the incoming feature map, matches its spatial size to the
    skip-connection tensor, concatenates both along channels and applies
    an RRCNN block.
    """

    def __init__(
        self,
        inputs,
        outputs,
        t,
        upsample_method="deconv",
        sizematch_method="interpolate",
    ):
        super().__init__()
        assert upsample_method in ["deconv", "interpolate"]
        if upsample_method == "deconv":
            self.upsample = nn.ConvTranspose2d(inputs, outputs, kernel_size=2, stride=2)
        elif upsample_method == "interpolate":
            self.upsample = nn.Upsample(
                scale_factor=2, mode="bilinear", align_corners=True
            )
        assert sizematch_method in ["interpolate", "pad"]
        if sizematch_method == "interpolate":
            self.sizematch = self.sizematch_interpolate
        elif sizematch_method == "pad":
            self.sizematch = self.sizematch_pad
        self.conv = RRCNNBlock(inputs, outputs, t)

    def sizematch_interpolate(self, source, target):
        """Resize ``source`` to ``target``'s spatial size via bilinear interpolation."""
        return F.interpolate(
            source,
            size=(target.size(2), target.size(3)),
            mode="bilinear",
            align_corners=True,
        )

    def sizematch_pad(self, source, target):
        """Zero-pad ``source`` so its spatial size matches ``target``'s."""
        diffX = target.size()[3] - source.size()[3]
        diffY = target.size()[2] - source.size()[2]
        # F.pad takes (left, right, top, bottom).  The bottom pad previously
        # used diffX instead of diffY, yielding a wrongly sized output
        # whenever the height and width differences were unequal.
        return F.pad(
            source, (diffX // 2, diffX - diffX // 2, diffY // 2, diffY - diffY // 2)
        )

    def forward(self, x, x_copy):
        x = self.upsample(x)
        x = self.sizematch(x, x_copy)
        x = torch.cat([x_copy, x], dim=1)
        x = self.conv(x)
        return x
class R2UNetMiddle(nn.Module):
    """Bottleneck between encoder and decoder: a single RRCNN block."""

    def __init__(self, in_channels, out_channels, t):
        super().__init__()
        self.conv = RRCNNBlock(in_channels, out_channels, t)

    def forward(self, x):
        return self.conv(x)
class R2UNetEncoder(nn.Module):
    """Stack of ``depth`` encoder blocks, doubling channels at each level.

    Skip features from every level are collected during ``forward`` and
    exposed (deepest first) through ``get_features``.
    """

    def __init__(self, in_channels, depth, first_channels, t):
        super().__init__()
        stages = [R2UNetEncoderBlock(in_channels, first_channels, t)]
        stages.extend(
            R2UNetEncoderBlock(
                first_channels * 2 ** i, first_channels * 2 ** (i + 1), t
            )
            for i in range(depth - 1)
        )
        self.depth = depth
        self.levels = nn.ModuleList(stages)
        self.features = []

    def forward(self, x):
        self.features = []
        for level in self.levels:
            skip, x = level(x)
            self.features.append(skip)
        return x

    def get_features(self):
        """Return the collected skip features, deepest level first."""
        return self.features[::-1]
class R2UNetDecoder(nn.Module):
    """Stack of ``depth`` decoder blocks consuming encoder skip features,
    halving channels at each level."""

    def __init__(self, depth, first_channels, t):
        super().__init__()
        stages = [
            R2UNetDecoderBlock(
                first_channels // 2 ** i, first_channels // 2 ** (i + 1), t
            )
            for i in range(depth)
        ]
        self.depth = depth
        self.levels = nn.ModuleList(stages)

    def forward(self, x, concats):
        for stage, skip in zip(self.levels, concats):
            x = stage(x, skip)
        return x

    def get_prediction(self, adict: Dict[str, Any], device: torch.device):
        """Run inference on ``adict['inputs']`` and return a mask dict.

        NOTE(review): this method reads ``self.num_classes``, which is never
        assigned on R2UNetDecoder (only R2UNet sets it) — it looks misplaced;
        confirm the intended owner before relying on it.
        """
        inputs = adict["inputs"].to(device)
        outputs = self.forward(inputs)
        if self.num_classes == 1:
            thresh = adict["thresh"]
            predicts = (outputs > thresh).float()
        else:
            predicts = torch.argmax(outputs, dim=1)
        predicts = predicts.detach().cpu().squeeze().numpy()
        return {"masks": predicts}
class R2UNet(nn.Module):
    """Recurrent Residual U-Net (R2U-Net) for semantic segmentation.

    Encoder/decoder depth and the recurrence count ``t`` are configurable;
    the first encoder level uses 64 channels.
    """

    def __init__(self, num_classes, in_channels, depth=4, t=2, **kwargs):
        super().__init__()
        self.num_classes = num_classes
        self.encoder = R2UNetEncoder(in_channels, depth, 64, t)
        self.middle_conv = R2UNetMiddle(64 * 2 ** (depth - 1), 64 * 2 ** depth, t)
        self.decoder = R2UNetDecoder(depth, 64 * 2 ** depth, t)
        self.final_conv = nn.Conv2d(64, num_classes, kernel_size=1)

    def forward(self, x):
        encoded = self.encoder(x)
        skip_features = self.encoder.get_features()
        bottleneck = self.middle_conv(encoded)
        decoded = self.decoder(bottleneck, skip_features)
        return self.final_conv(decoded)
if __name__ == "__main__":
    # Smoke test: a few SGD steps on random data to check shapes and backprop.
    dev = torch.device("cpu")
    net = R2UNet(num_classes=2, in_channels=3, depth=4, t=2).to(dev)
    criterion = nn.CrossEntropyLoss()
    optimizer = torch.optim.SGD(net.parameters(), lr=0.001)
    tbar = tqdm(range(10))
    for i in tbar:
        inps = torch.rand(4, 3, 100, 100).to(dev)
        lbls = torch.randint(low=0, high=2, size=(4, 100, 100)).to(dev)
        outs = net(inps)
        loss = criterion(outs, lbls)
        # Clear stale gradients; previously they accumulated across steps.
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        tbar.set_description_str(f"{i}: {loss.item()}")
|
[
"torch.randint",
"torch.nn.ConvTranspose2d",
"torch.rand",
"torch.nn.ModuleList",
"torch.argmax",
"torch.nn.Conv2d",
"torch.nn.CrossEntropyLoss",
"torch.cat",
"torch.nn.Upsample",
"torch.device",
"torch.nn.MaxPool2d",
"torch.nn.functional.pad"
] |
[((6894, 6913), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (6906, 6913), False, 'import torch\n'), ((6999, 7020), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (7018, 7020), True, 'import torch.nn as nn\n'), ((650, 711), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_channels', 'out_channels', 'kernel_size'], {}), '(in_channels, out_channels, kernel_size, **conv_cfg)\n', (659, 711), True, 'import torch.nn as nn\n'), ((2060, 2132), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_channels', 'out_channels'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(in_channels, out_channels, kernel_size=1, stride=1, padding=0)\n', (2069, 2132), True, 'import torch.nn as nn\n'), ((2566, 2593), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(2)'}), '(kernel_size=2)\n', (2578, 2593), True, 'import torch.nn as nn\n'), ((3972, 4051), 'torch.nn.functional.pad', 'F.pad', (['source', '(diffX // 2, diffX - diffX // 2, diffY // 2, diffX - diffY // 2)'], {}), '(source, (diffX // 2, diffX - diffX // 2, diffY // 2, diffX - diffY // 2))\n', (3977, 4051), True, 'import torch.nn.functional as F\n'), ((4188, 4217), 'torch.cat', 'torch.cat', (['[x_copy, x]'], {'dim': '(1)'}), '([x_copy, x], dim=1)\n', (4197, 4217), False, 'import torch\n'), ((4935, 4956), 'torch.nn.ModuleList', 'nn.ModuleList', (['levels'], {}), '(levels)\n', (4948, 4956), True, 'import torch.nn as nn\n'), ((5576, 5597), 'torch.nn.ModuleList', 'nn.ModuleList', (['levels'], {}), '(levels)\n', (5589, 5597), True, 'import torch.nn as nn\n'), ((6588, 6629), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', 'num_classes'], {'kernel_size': '(1)'}), '(64, num_classes, kernel_size=1)\n', (6597, 6629), True, 'import torch.nn as nn\n'), ((3049, 3109), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['inputs', 'outputs'], {'kernel_size': '(2)', 'stride': '(2)'}), '(inputs, outputs, kernel_size=2, stride=2)\n', (3067, 3109), True, 'import torch.nn as nn\n'), ((6058, 6086), 'torch.argmax', 
'torch.argmax', (['outputs'], {'dim': '(1)'}), '(outputs, dim=1)\n', (6070, 6086), False, 'import torch\n'), ((3185, 3249), 'torch.nn.Upsample', 'nn.Upsample', ([], {'scale_factor': '(2)', 'mode': '"""bilinear"""', 'align_corners': '(True)'}), "(scale_factor=2, mode='bilinear', align_corners=True)\n", (3196, 3249), True, 'import torch.nn as nn\n'), ((7143, 7169), 'torch.rand', 'torch.rand', (['(4)', '(3)', '(100)', '(100)'], {}), '(4, 3, 100, 100)\n', (7153, 7169), False, 'import torch\n'), ((7193, 7241), 'torch.randint', 'torch.randint', ([], {'low': '(0)', 'high': '(2)', 'size': '(4, 100, 100)'}), '(low=0, high=2, size=(4, 100, 100))\n', (7206, 7241), False, 'import torch\n')]
|
"""Unit tests for the affine connections."""
import warnings
import geomstats.backend as gs
import geomstats.tests
from geomstats.geometry.connection import Connection
from geomstats.geometry.euclidean import EuclideanMetric
from geomstats.geometry.hypersphere import Hypersphere
class TestConnectionMethods(geomstats.tests.TestCase):
    """Unit tests for the generic affine connection."""

    def setUp(self):
        warnings.simplefilter('ignore', category=UserWarning)

        self.dimension = 4
        self.euc_metric = EuclideanMetric(dimension=self.dimension)

        self.connection = Connection(dimension=2)
        self.hypersphere = Hypersphere(dimension=2)

    def test_metric_matrix(self):
        """The Euclidean inner-product matrix is the identity."""
        point = gs.array([0., 1., 0., 0.])

        computed = self.euc_metric.inner_product_matrix(point)
        identity = gs.array([gs.eye(self.dimension)])
        with self.session():
            self.assertAllClose(computed, identity)

    def test_cometric_matrix(self):
        """The inverse Euclidean inner-product matrix is also the identity."""
        point = gs.array([0., 1., 0., 0.])

        computed = self.euc_metric.inner_product_inverse_matrix(point)
        identity = gs.array([gs.eye(self.dimension)])
        with self.session():
            self.assertAllClose(computed, identity)

    @geomstats.tests.np_only
    def test_metric_derivative(self):
        """The derivative of a flat metric vanishes everywhere."""
        point = gs.array([0., 1., 0., 0.])

        derivative = self.euc_metric.inner_product_derivative_matrix(point)
        zeros = gs.zeros((1,) + (self.dimension, ) * 3)
        self.assertAllClose(derivative, zeros)

    @geomstats.tests.np_only
    def test_christoffels(self):
        """Christoffel symbols of the Euclidean metric vanish."""
        point = gs.array([0., 1., 0., 0.])

        symbols = self.euc_metric.christoffels(point)
        zeros = gs.zeros((1,) + (self.dimension, ) * 3)
        self.assertAllClose(symbols, zeros)

    @geomstats.tests.np_only
    def test_parallel_transport(self):
        """Pole-ladder transport agrees with closed-form transport on S^2."""
        n_samples = 10
        base_point = self.hypersphere.random_uniform(n_samples)
        tan_vec_a = self.hypersphere.projection_to_tangent_space(
            gs.random.rand(n_samples, 3), base_point)
        tan_vec_b = self.hypersphere.projection_to_tangent_space(
            gs.random.rand(n_samples, 3), base_point)

        expected = self.hypersphere.metric.parallel_transport(
            tan_vec_a, tan_vec_b, base_point)
        result = self.hypersphere.metric.pole_ladder_parallel_transport(
            tan_vec_a, tan_vec_b, base_point)
        self.assertAllClose(result, expected, rtol=1e-7, atol=1e-5)

    @geomstats.tests.np_only
    def test_exp(self):
        """Integrating the geodesic equation reproduces the closed-form exp."""
        base_spherical = gs.array([[gs.pi / 2, 0], [gs.pi / 6, gs.pi / 4]])
        tangent_spherical = gs.array([[0.25, 0.5], [0.30, 0.2]])
        base_ext = self.hypersphere.spherical_to_extrinsic(base_spherical)
        tangent_ext = self.hypersphere.tangent_spherical_to_extrinsic(
            tangent_spherical, base_spherical)

        self.connection.christoffels = self.hypersphere.metric.christoffels
        expected = self.hypersphere.metric.exp(tangent_ext, base_ext)
        end_spherical = self.connection.exp(
            tangent_spherical, base_spherical, n_steps=50, step='rk4')
        result = self.hypersphere.spherical_to_extrinsic(end_spherical)
        self.assertAllClose(result, expected, rtol=1e-6)

    @geomstats.tests.np_only
    def test_log(self):
        """Solving the boundary value problem reproduces the closed-form log."""
        base_spherical = gs.array([[gs.pi / 3, gs.pi / 4], [gs.pi / 2, gs.pi / 4]])
        end_spherical = gs.array([[1.0, gs.pi / 2], [gs.pi / 6, gs.pi / 3]])

        self.connection.christoffels = self.hypersphere.metric.christoffels
        tangent_spherical = self.connection.log(
            point=end_spherical, base_point=base_spherical, n_steps=75, step='rk')
        result = self.hypersphere.tangent_spherical_to_extrinsic(
            tangent_spherical, base_spherical)

        base_ext = self.hypersphere.spherical_to_extrinsic(base_spherical)
        end_ext = self.hypersphere.spherical_to_extrinsic(end_spherical)
        expected = self.hypersphere.metric.log(base_point=base_ext, point=end_ext)
        self.assertAllClose(result, expected, rtol=1e-5, atol=1e-5)
if __name__ == '__main__':
geomstats.tests.main()
|
[
"geomstats.geometry.hypersphere.Hypersphere",
"geomstats.backend.random.rand",
"geomstats.backend.array",
"warnings.simplefilter",
"geomstats.geometry.connection.Connection",
"geomstats.geometry.euclidean.EuclideanMetric",
"geomstats.backend.eye",
"geomstats.backend.zeros"
] |
[((368, 421), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {'category': 'UserWarning'}), "('ignore', category=UserWarning)\n", (389, 421), False, 'import warnings\n'), ((476, 517), 'geomstats.geometry.euclidean.EuclideanMetric', 'EuclideanMetric', ([], {'dimension': 'self.dimension'}), '(dimension=self.dimension)\n', (491, 517), False, 'from geomstats.geometry.euclidean import EuclideanMetric\n'), ((545, 568), 'geomstats.geometry.connection.Connection', 'Connection', ([], {'dimension': '(2)'}), '(dimension=2)\n', (555, 568), False, 'from geomstats.geometry.connection import Connection\n'), ((596, 620), 'geomstats.geometry.hypersphere.Hypersphere', 'Hypersphere', ([], {'dimension': '(2)'}), '(dimension=2)\n', (607, 620), False, 'from geomstats.geometry.hypersphere import Hypersphere\n'), ((677, 707), 'geomstats.backend.array', 'gs.array', (['[0.0, 1.0, 0.0, 0.0]'], {}), '([0.0, 1.0, 0.0, 0.0])\n', (685, 707), True, 'import geomstats.backend as gs\n'), ((963, 993), 'geomstats.backend.array', 'gs.array', (['[0.0, 1.0, 0.0, 0.0]'], {}), '([0.0, 1.0, 0.0, 0.0])\n', (971, 993), True, 'import geomstats.backend as gs\n'), ((1288, 1318), 'geomstats.backend.array', 'gs.array', (['[0.0, 1.0, 0.0, 0.0]'], {}), '([0.0, 1.0, 0.0, 0.0])\n', (1296, 1318), True, 'import geomstats.backend as gs\n'), ((1412, 1450), 'geomstats.backend.zeros', 'gs.zeros', (['((1,) + (self.dimension,) * 3)'], {}), '((1,) + (self.dimension,) * 3)\n', (1420, 1450), True, 'import geomstats.backend as gs\n'), ((1583, 1613), 'geomstats.backend.array', 'gs.array', (['[0.0, 1.0, 0.0, 0.0]'], {}), '([0.0, 1.0, 0.0, 0.0])\n', (1591, 1613), True, 'import geomstats.backend as gs\n'), ((1688, 1726), 'geomstats.backend.zeros', 'gs.zeros', (['((1,) + (self.dimension,) * 3)'], {}), '((1,) + (self.dimension,) * 3)\n', (1696, 1726), True, 'import geomstats.backend as gs\n'), ((2538, 2588), 'geomstats.backend.array', 'gs.array', (['[[gs.pi / 2, 0], [gs.pi / 6, gs.pi / 4]]'], {}), '([[gs.pi / 2, 
0], [gs.pi / 6, gs.pi / 4]])\n', (2546, 2588), True, 'import geomstats.backend as gs\n'), ((2606, 2641), 'geomstats.backend.array', 'gs.array', (['[[0.25, 0.5], [0.3, 0.2]]'], {}), '([[0.25, 0.5], [0.3, 0.2]])\n', (2614, 2641), True, 'import geomstats.backend as gs\n'), ((3316, 3374), 'geomstats.backend.array', 'gs.array', (['[[gs.pi / 3, gs.pi / 4], [gs.pi / 2, gs.pi / 4]]'], {}), '([[gs.pi / 3, gs.pi / 4], [gs.pi / 2, gs.pi / 4]])\n', (3324, 3374), True, 'import geomstats.backend as gs\n'), ((3391, 3443), 'geomstats.backend.array', 'gs.array', (['[[1.0, gs.pi / 2], [gs.pi / 6, gs.pi / 3]]'], {}), '([[1.0, gs.pi / 2], [gs.pi / 6, gs.pi / 3]])\n', (3399, 3443), True, 'import geomstats.backend as gs\n'), ((2009, 2037), 'geomstats.backend.random.rand', 'gs.random.rand', (['n_samples', '(3)'], {}), '(n_samples, 3)\n', (2023, 2037), True, 'import geomstats.backend as gs\n'), ((2129, 2157), 'geomstats.backend.random.rand', 'gs.random.rand', (['n_samples', '(3)'], {}), '(n_samples, 3)\n', (2143, 2157), True, 'import geomstats.backend as gs\n'), ((800, 822), 'geomstats.backend.eye', 'gs.eye', (['self.dimension'], {}), '(self.dimension)\n', (806, 822), True, 'import geomstats.backend as gs\n'), ((1094, 1116), 'geomstats.backend.eye', 'gs.eye', (['self.dimension'], {}), '(self.dimension)\n', (1100, 1116), True, 'import geomstats.backend as gs\n')]
|
"""Utilities for dealing with the ADO REST API."""
import logging
import requests
from simple_ado.exceptions import ADOHTTPException
def boolstr(value: bool) -> str:
"""Return a boolean formatted as string for ADO calls
:param value: The value to format
:returns: A string representation of the boolean value
"""
return str(value).lower()
def download_from_response_stream(
*, response: requests.Response, output_path: str, log: logging.Logger
) -> None:
"""Downloads a file from an already open response stream.
:param requests.Response response: The response to download from
:param str output_path: The path to write the file out to
:param logging.Logger log: The log to use for progress updates
:raises ADOHTTPException: If we fail to fetch the file for any reason
"""
# A sensible modern value
chunk_size = 1024 * 16
if response.status_code < 200 or response.status_code >= 300:
raise ADOHTTPException("Failed to fetch file", response)
with open(output_path, "wb") as output_file:
content_length_string = response.headers.get("content-length", "0")
total_size = int(content_length_string)
total_downloaded = 0
for data in response.iter_content(chunk_size=chunk_size):
total_downloaded += len(data)
output_file.write(data)
if total_size != 0:
progress = int((total_downloaded * 100.0) / total_size)
log.info(f"Download progress: {progress}%")
|
[
"simple_ado.exceptions.ADOHTTPException"
] |
[((971, 1021), 'simple_ado.exceptions.ADOHTTPException', 'ADOHTTPException', (['"""Failed to fetch file"""', 'response'], {}), "('Failed to fetch file', response)\n", (987, 1021), False, 'from simple_ado.exceptions import ADOHTTPException\n')]
|
from pymyorm.database import Database
from config import db
from models.user import User
if __name__ == '__main__':
Database.connect(**db)
names = User.find().order('id asc').offset(1).limit(2).column('name')
print(names)
|
[
"models.user.User.find",
"pymyorm.database.Database.connect"
] |
[((123, 145), 'pymyorm.database.Database.connect', 'Database.connect', ([], {}), '(**db)\n', (139, 145), False, 'from pymyorm.database import Database\n'), ((159, 170), 'models.user.User.find', 'User.find', ([], {}), '()\n', (168, 170), False, 'from models.user import User\n')]
|
import pytest
import doctest
from insights.parsers import SkipException, tuned
from insights.parsers.tuned import Tuned
from insights.tests import context_wrap
TUNED_OUTPUT = '''
Available profiles:
- balanced
- desktop
- latency-performance
- network-latency
- network-throughput
- powersave
- throughput-performance
- virtual-guest
- virtual-host
Current active profile: virtual-guest
'''.strip()
TUNED_OUTPUT2 = '''
Available profiles:
- balanced
- desktop
- latency-performance
- network-latency
- network-throughput
- powersave
- throughput-performance
- virtual-guest
- virtual-host
It seems that tuned daemon is not running, preset profile is not activated.
Preset profile: virtual-guest
'''.strip()
TUNED_OUTPUT3 = '''
Available profiles:
- balanced - General non-specialized tuned profile
- desktop - Optimize for the desktop use-case
- hpc-compute - Optimize for HPC compute workloads
- latency-performance - Optimize for deterministic performance at the cost of increased power consumption
- network-latency - Optimize for deterministic performance at the cost of increased power consumption, focused on low latency network performance
- network-throughput - Optimize for streaming network throughput, generally only necessary on older CPUs or 40G+ networks
- powersave - Optimize for low power consumption
- sap-netweaver - Optimize for SAP NetWeaver
- throughput-performance - Broadly applicable tuning that provides excellent performance across a variety of common server workloads
- virtual-guest - Optimize for running inside a virtual guest
- virtual-guest-vmware
- virtual-host - Optimize for running KVM guests
Current active profile: virtual-guest-vmware
'''.strip()
TUNED_OUTPUT4 = '''
'''.strip()
def test_active_profile():
tuned_output = Tuned(context_wrap(TUNED_OUTPUT))
assert len(tuned_output.get('available')) == 9
assert tuned_output.get('active') == 'virtual-guest'
assert tuned_output.get('available') == ['balanced',
'desktop',
'latency-performance',
'network-latency',
'network-throughput',
'powersave',
'throughput-performance',
'virtual-guest',
'virtual-host']
def test_preset_profile():
tuned_output = Tuned(context_wrap(TUNED_OUTPUT2))
assert len(tuned_output.get('available')) == 9
assert tuned_output.get('preset') == 'virtual-guest'
assert tuned_output.get('available') == ['balanced',
'desktop',
'latency-performance',
'network-latency',
'network-throughput',
'powersave',
'throughput-performance',
'virtual-guest',
'virtual-host']
def test_tuned_profile():
tuned_output = Tuned(context_wrap(TUNED_OUTPUT3))
assert len(tuned_output.get('available')) == 12
assert tuned_output.get('preset') is None
assert tuned_output.get('active') == 'virtual-guest-vmware'
assert 'sap-netweaver' in tuned_output.get('available')
assert 'virtual-guest-vmware' in tuned_output.get('available')
with pytest.raises(SkipException):
Tuned(context_wrap(''))
def test_doc_example():
env = {'tuned': Tuned(context_wrap(TUNED_OUTPUT))}
failed, total = doctest.testmod(tuned, globs=env)
assert failed == 0
|
[
"insights.tests.context_wrap",
"pytest.raises",
"doctest.testmod"
] |
[((3927, 3960), 'doctest.testmod', 'doctest.testmod', (['tuned'], {'globs': 'env'}), '(tuned, globs=env)\n', (3942, 3960), False, 'import doctest\n'), ((1928, 1954), 'insights.tests.context_wrap', 'context_wrap', (['TUNED_OUTPUT'], {}), '(TUNED_OUTPUT)\n', (1940, 1954), False, 'from insights.tests import context_wrap\n'), ((2682, 2709), 'insights.tests.context_wrap', 'context_wrap', (['TUNED_OUTPUT2'], {}), '(TUNED_OUTPUT2)\n', (2694, 2709), False, 'from insights.tests import context_wrap\n'), ((3436, 3463), 'insights.tests.context_wrap', 'context_wrap', (['TUNED_OUTPUT3'], {}), '(TUNED_OUTPUT3)\n', (3448, 3463), False, 'from insights.tests import context_wrap\n'), ((3764, 3792), 'pytest.raises', 'pytest.raises', (['SkipException'], {}), '(SkipException)\n', (3777, 3792), False, 'import pytest\n'), ((3808, 3824), 'insights.tests.context_wrap', 'context_wrap', (['""""""'], {}), "('')\n", (3820, 3824), False, 'from insights.tests import context_wrap\n'), ((3878, 3904), 'insights.tests.context_wrap', 'context_wrap', (['TUNED_OUTPUT'], {}), '(TUNED_OUTPUT)\n', (3890, 3904), False, 'from insights.tests import context_wrap\n')]
|
import copy
import random
import smmp
import numpy as np
from math import *
from universe1 import *
from protein1 import *
from mergesort import *
from sklearn import preprocessing
phi = np.concatenate((np.random.uniform(-80,-50,10),np.random.uniform(-160,-120,10)))
psi = np.concatenate((np.random.uniform(-50,-20,10),np.random.uniform(110,140,10)))
class geneticAlgorithmProtein():
'''Genetic Algorithm for python'''
def __init__(self,max_iteration=100,population_size=100,
mut_probout=0.18,mut_probin=0.25,crossover_prob=1.,parents_port=0.5,elit_ratio=0.10):
self.dim = len(smmp.var_r.vlvr);
self.dimcant = smmp.mol_par.nvr
self.max_iter = max_iteration
self.pop_size = population_size
self.mut_prob = mut_probin #0.28
self.mut_probout = mut_probout #0.25
self.cross_prob = crossover_prob
self.parent_port = int(parents_port*self.pop_size)
trl = self.pop_size - self.parent_port
if trl%2!=0:
self.parent_port+=1
trl = self.pop_size*elit_ratio
if trl<1 and elit_ratio>0:
self.num_elit=2
else: self.num_elit=int(trl)
self.datazeros=np.zeros(self.dim-self.dimcant)
def run(self,fitnes):
self.__fitness = fitnes#-15.0
#cant of angles for aminoacids
AnglesRes = []
sumAngle = 0
for val in smmp.res_i.nvrrs:
if val == 0:
break;
AnglesRes.append([val,sumAngle])
sumAngle+=val
######################
# initial population #
######################
pop = [[np.zeros(self.dim),0]]*self.pop_size
datazeros=np.zeros(self.dim-self.dimcant)
for p in range(0,self.pop_size):
val = copy.deepcopy(np.random.uniform(-180,180,self.dimcant))
r = random.random()
val = val + (180-val)*r
smmp.var_r.vlvr = np.concatenate((val,datazeros))
pop[p] = [val,myUniverso.energy()]
pop.sort(key = lambda x: x[1])
# evaluation chromosoma
minfit=pop[0][1]
if self.__fitness >= minfit: return pop[0]
M_Echrom = copy.deepcopy(pop[:self.num_elit])
Echrom = copy.deepcopy(M_Echrom[0])
counter = 0
while(counter<self.max_iter and Echrom[1] >= self.__fitness):
Nchrom = self.roulette_wheel_selection(M_Echrom)
print(counter,Echrom[1])
# crossover
offspring1,offspring2 = self.crossoverOne(Echrom,Nchrom,AnglesRes)
smmp.var_r.vlvr = np.concatenate((offspring1[0],datazeros))
offspring1[1]=myUniverso.energy()
smmp.var_r.vlvr = np.concatenate((offspring2[0],datazeros))
offspring2[1]=myUniverso.energy()
#mutation
if offspring1[1] >= offspring2[1]:
offspring1 = self.mutation(offspring1,AnglesRes)
smmp.var_r.vlvr = np.concatenate((offspring1[0],datazeros))
offspring1[1]=myUniverso.energy()
if offspring2[1] > Echrom[1]:
offspring2 = self.mutation(offspring2,AnglesRes)
smmp.var_r.vlvr = np.concatenate((offspring2[0],datazeros))
offspring2[1]=myUniverso.energy()
else:
offspring2 = self.mutation(offspring2,AnglesRes)
smmp.var_r.vlvr = np.concatenate((offspring2[0],datazeros))
offspring2[1]=myUniverso.energy()
if offspring1[1] > Echrom[1]:
offspring1 = self.mutation(offspring1,AnglesRes)
smmp.var_r.vlvr = np.concatenate((offspring1[0],datazeros))
offspring1[1]=myUniverso.energy()
M_Echrom.append(offspring1)
M_Echrom.append(offspring2)
M_Echrom.sort(key = lambda x: x[1])
M_Echrom = copy.deepcopy(M_Echrom[:self.num_elit])
Echrom=copy.deepcopy(M_Echrom[0])
counter+=1
print("cantidad de iteraciones: ",counter)
smmp.var_r.vlvr = np.concatenate((Echrom[0],datazeros))
return Echrom
def roulette_wheel_selection(self,population):
population = population[1:self.num_elit]
fitness = [x[1] for x in population]
sp = np.sum(fitness)
vp = []
for x in population:
vp.append(x[1]/sp)
r = random.random()
vp = preprocessing.normalize([np.array(vp)])
vp = vp[0]
idd = np.searchsorted(vp,r,side='right')-1
#r = random.random()
#idd = int(idd + (self.num_elit-1-idd)*r)
Nc = copy.deepcopy(population[idd])
return Nc
def crossoverOne(self,x,y,AnglesRes):
ofs1 = copy.deepcopy(x)
ofs2 = copy.deepcopy(y)
# One point
l = len(AnglesRes)
ran1=np.random.randint(1,l-1)
for i in range(ran1,l):
if self.cross_prob > np.random.random() :
ofs1[0][AnglesRes[i][1]:AnglesRes[i][0]+AnglesRes[i][1]]=y[0][
AnglesRes[i][1]:AnglesRes[i][0]+AnglesRes[i][1]]
ofs2[0][AnglesRes[i][1]:AnglesRes[i][0]+AnglesRes[i][1]]=x[0][
AnglesRes[i][1]:AnglesRes[i][0]+AnglesRes[i][1]]
return ofs1,ofs2
def crossoverTwo(self,x,y,AnglesRes):
ofs1 = copy.deepcopy(x)
ofs2 = copy.deepcopy(y)
# two point
l = len(AnglesRes)
ran1=np.random.randint(0,l)
ran2=np.random.randint(ran1,l)
for i in range(ran1,ran2):
if self.cross_prob > np.random.random() :
ofs1[0][AnglesRes[i][1]:AnglesRes[i][0]+AnglesRes[i][1]]=y[0][
AnglesRes[i][1]:AnglesRes[i][0]+AnglesRes[i][1]]
ofs2[0][AnglesRes[i][1]:AnglesRes[i][0]+AnglesRes[i][1]]=x[0][
AnglesRes[i][1]:AnglesRes[i][0]+AnglesRes[i][1]]
return ofs1,ofs2
def crossoverUniform(self,x,y,AnglesRes):
ofs1 = copy.deepcopy(x)
ofs2 = copy.deepcopy(y)
# uniform
l = len(AnglesRes)
for i in range(0,l):
if 0.5 > np.random.random() :
ofs1[0][AnglesRes[i][1]:AnglesRes[i][0]+AnglesRes[i][1]]=y[0][
AnglesRes[i][1]:AnglesRes[i][0]+AnglesRes[i][1]]
ofs2[0][AnglesRes[i][1]:AnglesRes[i][0]+AnglesRes[i][1]]=x[0][
AnglesRes[i][1]:AnglesRes[i][0]+AnglesRes[i][1]]
return ofs1,ofs2
def crossoverBinary(self,x,y,AnglesRes):
ofs1 = copy.deepcopy(x)
ofs2 = copy.deepcopy(y)
# uniform
l = len(AnglesRes)
eta=2
for i in range(0,l):
ran = np.random.random()
if 0.5 > ran :
beta = 2.*ran
else:
beta = 1./(2.*(1.-ran))
eta **= 1. / (eta + 1.)
x1 = y[0][AnglesRes[i][1]:AnglesRes[i][0]+AnglesRes[i][1]]
x2 = x[0][AnglesRes[i][1]:AnglesRes[i][0]+AnglesRes[i][1]]
ofs1[0][AnglesRes[i][1]:AnglesRes[i][0]+AnglesRes[i][1]] = 0.5 * (((
1 + beta) * x1) + ((1 - beta) * x2))
ofs2[0][AnglesRes[i][1]:AnglesRes[i][0]+AnglesRes[i][1]] = 0.5 * (((
1 - beta) * x2) + ((1 + beta) * x1))
return ofs1,ofs2
def mutation(self,x,AnglesRes):
ofs = copy.deepcopy(x)
l = len(AnglesRes)
for i in range(0,l):
if self.mut_probout > np.random.random():
for j in range(AnglesRes[i][0]):
if self.mut_prob > np.random.random():
index = AnglesRes[i][1]+j
#r = random.random()
replace = np.random.uniform(-180,180)
#replace = -180 + (180+180)*r
ofs[0][index]=replace
return ofs
myUniverso = Universe(T=300,st=0)
protA = Protein("EXAMPLES/prueba.seq",'')
myUniverso.add(protA)
GAP = geneticAlgorithmProtein(50000,200,
mut_probout= 0.6,#0.18,#0.2
mut_probin= 0.06, #0.25, #0.15
elit_ratio=0.5)
Echrom = GAP.run(-15)
print(myUniverso.energy(),myUniverso.rgyr(),myUniverso.helix(),protA.hbond())
smmp.outpdb(0,'final.pdb')
|
[
"smmp.outpdb",
"numpy.random.uniform",
"copy.deepcopy",
"numpy.sum",
"numpy.zeros",
"numpy.searchsorted",
"random.random",
"numpy.random.randint",
"numpy.random.random",
"numpy.array",
"numpy.concatenate"
] |
[((8489, 8516), 'smmp.outpdb', 'smmp.outpdb', (['(0)', '"""final.pdb"""'], {}), "(0, 'final.pdb')\n", (8500, 8516), False, 'import smmp\n'), ((204, 235), 'numpy.random.uniform', 'np.random.uniform', (['(-80)', '(-50)', '(10)'], {}), '(-80, -50, 10)\n', (221, 235), True, 'import numpy as np\n'), ((234, 267), 'numpy.random.uniform', 'np.random.uniform', (['(-160)', '(-120)', '(10)'], {}), '(-160, -120, 10)\n', (251, 267), True, 'import numpy as np\n'), ((291, 322), 'numpy.random.uniform', 'np.random.uniform', (['(-50)', '(-20)', '(10)'], {}), '(-50, -20, 10)\n', (308, 322), True, 'import numpy as np\n'), ((321, 352), 'numpy.random.uniform', 'np.random.uniform', (['(110)', '(140)', '(10)'], {}), '(110, 140, 10)\n', (338, 352), True, 'import numpy as np\n'), ((1210, 1243), 'numpy.zeros', 'np.zeros', (['(self.dim - self.dimcant)'], {}), '(self.dim - self.dimcant)\n', (1218, 1243), True, 'import numpy as np\n'), ((1729, 1762), 'numpy.zeros', 'np.zeros', (['(self.dim - self.dimcant)'], {}), '(self.dim - self.dimcant)\n', (1737, 1762), True, 'import numpy as np\n'), ((2222, 2256), 'copy.deepcopy', 'copy.deepcopy', (['pop[:self.num_elit]'], {}), '(pop[:self.num_elit])\n', (2235, 2256), False, 'import copy\n'), ((2274, 2300), 'copy.deepcopy', 'copy.deepcopy', (['M_Echrom[0]'], {}), '(M_Echrom[0])\n', (2287, 2300), False, 'import copy\n'), ((4156, 4194), 'numpy.concatenate', 'np.concatenate', (['(Echrom[0], datazeros)'], {}), '((Echrom[0], datazeros))\n', (4170, 4194), True, 'import numpy as np\n'), ((4375, 4390), 'numpy.sum', 'np.sum', (['fitness'], {}), '(fitness)\n', (4381, 4390), True, 'import numpy as np\n'), ((4479, 4494), 'random.random', 'random.random', ([], {}), '()\n', (4492, 4494), False, 'import random\n'), ((4710, 4740), 'copy.deepcopy', 'copy.deepcopy', (['population[idd]'], {}), '(population[idd])\n', (4723, 4740), False, 'import copy\n'), ((4818, 4834), 'copy.deepcopy', 'copy.deepcopy', (['x'], {}), '(x)\n', (4831, 4834), False, 'import copy\n'), ((4850, 
4866), 'copy.deepcopy', 'copy.deepcopy', (['y'], {}), '(y)\n', (4863, 4866), False, 'import copy\n'), ((4927, 4954), 'numpy.random.randint', 'np.random.randint', (['(1)', '(l - 1)'], {}), '(1, l - 1)\n', (4944, 4954), True, 'import numpy as np\n'), ((5427, 5443), 'copy.deepcopy', 'copy.deepcopy', (['x'], {}), '(x)\n', (5440, 5443), False, 'import copy\n'), ((5459, 5475), 'copy.deepcopy', 'copy.deepcopy', (['y'], {}), '(y)\n', (5472, 5475), False, 'import copy\n'), ((5536, 5559), 'numpy.random.randint', 'np.random.randint', (['(0)', 'l'], {}), '(0, l)\n', (5553, 5559), True, 'import numpy as np\n'), ((5572, 5598), 'numpy.random.randint', 'np.random.randint', (['ran1', 'l'], {}), '(ran1, l)\n', (5589, 5598), True, 'import numpy as np\n'), ((6087, 6103), 'copy.deepcopy', 'copy.deepcopy', (['x'], {}), '(x)\n', (6100, 6103), False, 'import copy\n'), ((6119, 6135), 'copy.deepcopy', 'copy.deepcopy', (['y'], {}), '(y)\n', (6132, 6135), False, 'import copy\n'), ((6651, 6667), 'copy.deepcopy', 'copy.deepcopy', (['x'], {}), '(x)\n', (6664, 6667), False, 'import copy\n'), ((6683, 6699), 'copy.deepcopy', 'copy.deepcopy', (['y'], {}), '(y)\n', (6696, 6699), False, 'import copy\n'), ((7502, 7518), 'copy.deepcopy', 'copy.deepcopy', (['x'], {}), '(x)\n', (7515, 7518), False, 'import copy\n'), ((1892, 1907), 'random.random', 'random.random', ([], {}), '()\n', (1905, 1907), False, 'import random\n'), ((1974, 2006), 'numpy.concatenate', 'np.concatenate', (['(val, datazeros)'], {}), '((val, datazeros))\n', (1988, 2006), True, 'import numpy as np\n'), ((2625, 2667), 'numpy.concatenate', 'np.concatenate', (['(offspring1[0], datazeros)'], {}), '((offspring1[0], datazeros))\n', (2639, 2667), True, 'import numpy as np\n'), ((2748, 2790), 'numpy.concatenate', 'np.concatenate', (['(offspring2[0], datazeros)'], {}), '((offspring2[0], datazeros))\n', (2762, 2790), True, 'import numpy as np\n'), ((3969, 4008), 'copy.deepcopy', 'copy.deepcopy', (['M_Echrom[:self.num_elit]'], {}), 
'(M_Echrom[:self.num_elit])\n', (3982, 4008), False, 'import copy\n'), ((4028, 4054), 'copy.deepcopy', 'copy.deepcopy', (['M_Echrom[0]'], {}), '(M_Echrom[0])\n', (4041, 4054), False, 'import copy\n'), ((4581, 4617), 'numpy.searchsorted', 'np.searchsorted', (['vp', 'r'], {'side': '"""right"""'}), "(vp, r, side='right')\n", (4596, 4617), True, 'import numpy as np\n'), ((6815, 6833), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (6831, 6833), True, 'import numpy as np\n'), ((1834, 1876), 'numpy.random.uniform', 'np.random.uniform', (['(-180)', '(180)', 'self.dimcant'], {}), '(-180, 180, self.dimcant)\n', (1851, 1876), True, 'import numpy as np\n'), ((3005, 3047), 'numpy.concatenate', 'np.concatenate', (['(offspring1[0], datazeros)'], {}), '((offspring1[0], datazeros))\n', (3019, 3047), True, 'import numpy as np\n'), ((3463, 3505), 'numpy.concatenate', 'np.concatenate', (['(offspring2[0], datazeros)'], {}), '((offspring2[0], datazeros))\n', (3477, 3505), True, 'import numpy as np\n'), ((4533, 4545), 'numpy.array', 'np.array', (['vp'], {}), '(vp)\n', (4541, 4545), True, 'import numpy as np\n'), ((5027, 5045), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (5043, 5045), True, 'import numpy as np\n'), ((5675, 5693), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (5691, 5693), True, 'import numpy as np\n'), ((6240, 6258), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (6256, 6258), True, 'import numpy as np\n'), ((7661, 7679), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (7677, 7679), True, 'import numpy as np\n'), ((1673, 1691), 'numpy.zeros', 'np.zeros', (['self.dim'], {}), '(self.dim)\n', (1681, 1691), True, 'import numpy as np\n'), ((3250, 3292), 'numpy.concatenate', 'np.concatenate', (['(offspring2[0], datazeros)'], {}), '((offspring2[0], datazeros))\n', (3264, 3292), True, 'import numpy as np\n'), ((3708, 3750), 'numpy.concatenate', 'np.concatenate', (['(offspring1[0], datazeros)'], {}), 
'((offspring1[0], datazeros))\n', (3722, 3750), True, 'import numpy as np\n'), ((7769, 7787), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (7785, 7787), True, 'import numpy as np\n'), ((7918, 7946), 'numpy.random.uniform', 'np.random.uniform', (['(-180)', '(180)'], {}), '(-180, 180)\n', (7935, 7946), True, 'import numpy as np\n')]
|
# Importing the Keras libraries and packages
import numpy as np
import keras
import tensorflow as tf
from keras.models import load_model
from IPython.display import display
from PIL import Image
from keras.preprocessing import image
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Conv2D
from keras.layers import MaxPooling2D
from keras.layers import Flatten
from keras.layers import Dense
train_datagen = ImageDataGenerator(
rescale=1./255,
shear_range=0.2,
zoom_range=0.2,
horizontal_flip=True
)
test_datagen = ImageDataGenerator(rescale=1./255)
def import_model():
classifier = load_model("./model/best_weight.h5")
return classifier
def test(classifier, test_img):
print("testing")
test_image = prepImage(test_img)
result = classifier.predict(test_image)
return printResult(result)
def prepImage(testImage):
test_image = image.load_img(testImage, target_size=(64, 64))
test_image = image.img_to_array(test_image)
test_image = np.expand_dims(test_image, axis=0)
return test_image
def printResult(result):
if result[0][0] == 1:
prediction = True
else:
prediction = False
return prediction
|
[
"keras.models.load_model",
"keras.preprocessing.image.ImageDataGenerator",
"numpy.expand_dims",
"keras.preprocessing.image.img_to_array",
"keras.preprocessing.image.load_img"
] |
[((478, 574), 'keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'rescale': '(1.0 / 255)', 'shear_range': '(0.2)', 'zoom_range': '(0.2)', 'horizontal_flip': '(True)'}), '(rescale=1.0 / 255, shear_range=0.2, zoom_range=0.2,\n horizontal_flip=True)\n', (496, 574), False, 'from keras.preprocessing.image import ImageDataGenerator\n'), ((602, 639), 'keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'rescale': '(1.0 / 255)'}), '(rescale=1.0 / 255)\n', (620, 639), False, 'from keras.preprocessing.image import ImageDataGenerator\n'), ((676, 712), 'keras.models.load_model', 'load_model', (['"""./model/best_weight.h5"""'], {}), "('./model/best_weight.h5')\n", (686, 712), False, 'from keras.models import load_model\n'), ((948, 995), 'keras.preprocessing.image.load_img', 'image.load_img', (['testImage'], {'target_size': '(64, 64)'}), '(testImage, target_size=(64, 64))\n', (962, 995), False, 'from keras.preprocessing import image\n'), ((1013, 1043), 'keras.preprocessing.image.img_to_array', 'image.img_to_array', (['test_image'], {}), '(test_image)\n', (1031, 1043), False, 'from keras.preprocessing import image\n'), ((1061, 1095), 'numpy.expand_dims', 'np.expand_dims', (['test_image'], {'axis': '(0)'}), '(test_image, axis=0)\n', (1075, 1095), True, 'import numpy as np\n')]
|
"""Convert CrowdHuman format dataset to Scalabel."""
import argparse
import json
import os
from typing import List
from ..common.typing import DictStrAny
from ..label.transforms import bbox_to_box2d
from .io import save
from .typing import Category, Config, Dataset, Frame, Label
def parse_arguments() -> argparse.Namespace:
"""Parse the arguments."""
parser = argparse.ArgumentParser(description="crowdhuman to scalabel")
parser.add_argument(
"--input",
"-i",
help="path to Crowdhuman annotation file.",
)
parser.add_argument(
"--output",
"-o",
default="./annotations_scalabel.json",
help="Output filename for Scalabel format annotations.",
)
return parser.parse_args()
def parse_annotations(annotations: List[DictStrAny]) -> List[Label]:
"""Parse annotations per frame."""
labels = []
for anno in annotations:
if not anno["tag"] == "person":
continue
if anno["extra"].get("ignore", 0) == 1:
continue
box2d = bbox_to_box2d(anno["fbox"])
label = Label(
id=anno["extra"]["box_id"],
category="pedestrian",
box2d=box2d,
)
labels.append(label)
return labels
def from_crowdhuman(input_path: str, image_path: str = "./Images/") -> Dataset:
"""Function converting CrowdHuman annotations to Scalabel format."""
frames = []
with open(input_path, "r", encoding="utf-8") as anno_file:
lines = anno_file.readlines()
for line in lines:
frame_json = json.loads(line)
name = frame_json["ID"]
frame = Frame(
name=name,
url=os.path.join(image_path, name + ".jpg"),
labels=parse_annotations(frame_json["gtboxes"]),
)
frames.append(frame)
dataset = Dataset(
frames=frames, config=Config(categories=[Category(name="pedestrian")])
)
return dataset
def run(args: argparse.Namespace) -> None:
"""Run conversion with command line arguments."""
result = from_crowdhuman(args.input)
save(args.output, result)
if __name__ == "__main__":
run(parse_arguments())
|
[
"os.path.join",
"argparse.ArgumentParser",
"json.loads"
] |
[((372, 433), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""crowdhuman to scalabel"""'}), "(description='crowdhuman to scalabel')\n", (395, 433), False, 'import argparse\n'), ((1592, 1608), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (1602, 1608), False, 'import json\n'), ((1719, 1758), 'os.path.join', 'os.path.join', (['image_path', "(name + '.jpg')"], {}), "(image_path, name + '.jpg')\n", (1731, 1758), False, 'import os\n')]
|
import spacy
import json
class PartsofSpeech:
def __init__(self, *arg,**kwargs):
self.pos = list()
self.nlp = spacy.load("en_core_web_sm")
def get_articles(self,text):
doc = self.nlp(text)
print("Articles:", [token.lemma_ for token in doc if token.pos_ == "DET"])
def get_pos(self,text):
doc = self.nlp(text)
for token in doc:
self.pos.append({token.orth_:token.pos_})
print(" {:<8} : {:<5} : {:<7} : {}".format(token.orth_,token.pos_,token.dep_,token.head))
print(" {:<8} : {:<5} : {:<7} : {}".format("token","POS","dep.","head"))
print("------------------------------------")
return json.dumps(self.pos)
# pos=PartsofSpeech()
# pos.get_pos("I drove home with an joy.")
|
[
"spacy.load",
"json.dumps"
] |
[((116, 144), 'spacy.load', 'spacy.load', (['"""en_core_web_sm"""'], {}), "('en_core_web_sm')\n", (126, 144), False, 'import spacy\n'), ((615, 635), 'json.dumps', 'json.dumps', (['self.pos'], {}), '(self.pos)\n', (625, 635), False, 'import json\n')]
|
__source__ = 'https://leetcode.com/problems/flood-fill/description/'
# Time: O(N)
# Space: O(N)
#
# Description: Leetcode # 733. Flood Fill
#
# An image is represented by a 2-D array of integers,
# each integer representing the pixel value of the image (from 0 to 65535).
#
# Given a coordinate (sr, sc) representing the starting pixel (row and column) of the flood fill,
# and a pixel value newColor, "flood fill" the image.
#
# To perform a "flood fill", consider the starting pixel,
# plus any pixels connected 4-directionally to the starting pixel of the same color as the starting pixel,
# plus any pixels connected 4-directionally to those pixels (also with the same color as the starting pixel),
# and so on. Replace the color of all of the aforementioned pixels with the newColor.
#
# At the end, return the modified image.
#
# Example 1:
# Input:
# image = [[1,1,1],[1,1,0],[1,0,1]]
# sr = 1, sc = 1, newColor = 2
# Output: [[2,2,2],[2,2,0],[2,0,1]]
# Explanation:
# From the center of the image (with position (sr, sc) = (1, 1)), all pixels connected
# by a path of the same color as the starting pixel are colored with the new color.
# Note the bottom corner is not colored 2, because it is not 4-directionally connected
# to the starting pixel.
# Note:
#
# The length of image and image[0] will be in the range [1, 50].
# The given starting pixel will satisfy 0 <= sr < image.length and 0 <= sc < image[0].length.
# The value of each color in image[i][j] and newColor will be an integer in [0, 65535].
#
#
import unittest
# 52ms 100%
class Solution(object):
def floodFill(self, image, sr, sc, newColor):
"""
:type image: List[List[int]]
:type sr: int
:type sc: int
:type newColor: int
:rtype: List[List[int]]
"""
R, C = len(image), len(image[0])
color = image[sr][sc]
if color == newColor: return image
def dfs(r, c):
if image[r][c] == color:
image[r][c] = newColor
if r >= 1: dfs(r-1, c)
if r + 1 < R: dfs(r+1, c)
if c >= 1: dfs(r, c - 1)
if c + 1 < C: dfs(r, c + 1)
dfs(sr, sc)
return image
# 92ms 11.15%
class Solution2(object):
def floodFill(self, image, sr, sc, newColor):
"""
:type image: List[List[int]]
:type sr: int
:type sc: int
:type newColor: int
:rtype: List[List[int]]
"""
row, col = len(image), len(image[0])
if row == 0 or col == 0:
return image
original_color = image[sr][sc]
if original_color == newColor:
return image
directions = [(1, 0), (-1, 0), (0, 1), (0, -1)]
stack = [(sr, sc)]
while stack:
current_r, current_c = stack.pop()
image[current_r][current_c] = newColor
for offset_r, offset_c in directions:
new_r, new_c = current_r + offset_r, current_c + offset_c
if new_r >= 0 and new_r < row and new_c >= 0 and new_c < col and image[new_r][new_c] == original_color:
stack.append((new_r, new_c))
return image
class TestMethods(unittest.TestCase):
def test_Local(self):
self.assertEqual(1, 1)
if __name__ == '__main__':
unittest.main()
Java = '''
# Thought: https://leetcode.com/problems/flood-fill/solution/
# DFS
# 10ms 77.49%
class Solution {
public int[][] floodFill(int[][] image, int sr, int sc, int newColor) {
int originColor = image[sr][sc];
if (originColor==newColor) return image;
dfs(image, sr, sc, newColor, originColor);
return image;
}
private void dfs(int[][] image, int sr, int sc, int newC, int old) {
if (sr<0 || sr>=image.length || sc<0 || sc>=image[0].length || image[sr][sc]!=old) return;
image[sr][sc]=newC;
dfs(image, sr-1, sc, newC, old);
dfs(image, sr+1, sc, newC, old);
dfs(image, sr, sc-1, newC, old);
dfs(image, sr, sc+1, newC, old);
}
}
# 13ms 42.85%
class Solution {
final int[][] dirs ={{0,1}, {0,-1}, {1,0}, {-1,0}};
public int[][] floodFill(int[][] image, int sr, int sc, int newColor) {
int originCol = image[sr][sc];
if (originCol == newColor) return image;
image[sr][sc] = newColor; //need to color it
dfs(image, sr, sc, newColor, originCol);
return image;
}
private void dfs(int[][] image, int sr, int sc, int newColor, int old) {
for (int[] dir : dirs) {
int x = sr + dir[0];
int y = sc + dir[1];
if (x >= 0 && x < image.length && y >= 0 && y < image[0].length && image[x][y] == old) {
image[x][y] = newColor;
dfs(image, x, y, newColor, old);
}
}
}
}
# BFS
# 11ms 68.07%
class Solution {
public int[][] floodFill(int[][] image, int sr, int sc, int newColor) {
Queue<int[]> queue = new LinkedList();
int[] first = {sr, sc};
queue.add(first);
int oldColor = image[sr][sc];
boolean[][] visited = new boolean[image.length][image[0].length];
for (int i = 0; i < visited.length; i++) {
for (int j = 0; j < visited[i].length; j++) {
visited[i][j] = false;
}
}
while(!queue.isEmpty()) {
int row = queue.peek()[0];
int col = queue.peek()[1];
queue.remove();
int val = image[row][col];
if (!visited[row][col] && val == oldColor) {
visited[row][col] = true;
image[row][col] = newColor;
if (row > 0) queue.add(new int[]{row - 1, col});
if (row < image.length - 1) queue.add( new int[]{row + 1, col});
if (col > 0) queue.add(new int[]{row, col -1});
if (col < image[0].length - 1) queue.add(new int[]{row, col + 1});
}
}
return image;
}
}
'''
|
[
"unittest.main"
] |
[((3314, 3329), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3327, 3329), False, 'import unittest\n')]
|
"""
If you want to make a plot, change the folder name
after installing plotly,cufflink and run the following code.
folder_name='new_results'
"""
import pandas as pd
import os,time
import glob
import cufflinks as cf
import plotly.offline
cf.go_offline()
cf.set_config_file(offline=True, world_readable=True)
import torchvision.models as models
MODEL_LIST = {
'mnasnet':models.mnasnet.__all__[1:],
'resnet': models.resnet.__all__[1:],
'densenet': models.densenet.__all__[1:],
'squeezenet': models.squeezenet.__all__[1:],
'vgg': models.vgg.__all__[1:],
#'mobilenet':[m for m in models.mobilenet.__all__[1:] if m.islower()],
'mobilenet':['mobilenet_v2', 'mobilenet_v3_large', 'mobilenet_v3_small'],
'shufflenetv2':models.shufflenetv2.__all__[1:]
}
setattr(plotly.offline, "__PLOTLY_OFFLINE_INITIALIZED", True)
folder_name='result/'
csv_list=glob.glob(folder_name+'/*.csv')
columes=[]
for key,values in MODEL_LIST.items():
for i in values:
columes.append((key,i))
for csv in csv_list:
df=pd.read_csv(csv)
df.columns = pd.MultiIndex.from_tuples(columes)
df.groupby(level=0,axis=1).mean().mean()
# print(csv)
title=csv.split('/')[1].split('_benchmark')[0]
title=title.replace(' ','_')
df.groupby(level=0,axis=1).mean().mean().iplot(kind='scatter',mode='markers',title=title,yTitle='time(ms)',xTitle='models',asImage=True,filename=title)
for model in MODEL_LIST.keys():
df.mean()[model].iplot(kind='scatter',mode='markers',title=model+"_"+title,yTitle='time(ms)',xTitle='models',asImage=True,filename=model+"_"+title)
time.sleep(1)
|
[
"pandas.MultiIndex.from_tuples",
"cufflinks.go_offline",
"pandas.read_csv",
"cufflinks.set_config_file",
"time.sleep",
"glob.glob"
] |
[((239, 254), 'cufflinks.go_offline', 'cf.go_offline', ([], {}), '()\n', (252, 254), True, 'import cufflinks as cf\n'), ((255, 308), 'cufflinks.set_config_file', 'cf.set_config_file', ([], {'offline': '(True)', 'world_readable': '(True)'}), '(offline=True, world_readable=True)\n', (273, 308), True, 'import cufflinks as cf\n'), ((871, 904), 'glob.glob', 'glob.glob', (["(folder_name + '/*.csv')"], {}), "(folder_name + '/*.csv')\n", (880, 904), False, 'import glob\n'), ((1033, 1049), 'pandas.read_csv', 'pd.read_csv', (['csv'], {}), '(csv)\n', (1044, 1049), True, 'import pandas as pd\n'), ((1067, 1101), 'pandas.MultiIndex.from_tuples', 'pd.MultiIndex.from_tuples', (['columes'], {}), '(columes)\n', (1092, 1101), True, 'import pandas as pd\n'), ((1604, 1617), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1614, 1617), False, 'import os, time\n')]
|
import random
import sys
import os
import json
import couchdb
from datetime import date, datetime, timedelta
from flask import Flask, render_template, request
from flask_login import LoginManager, UserMixin, login_user, current_user
from hashlib import sha256
# -----------------------------------------------------------
# user and word database
# -----------------------------------------------------------
class Database:
def __init__(self) -> None:
host = os.environ['COUCHDB_HOST']
username = os.environ['COUCHDB_USERNAME']
password = os.environ['<PASSWORD>']
self.server = couchdb.Server(f'http://{username}:{password}@{host}:5984')
if 'word_lists' in self.server:
self.word_db = self.server['word_lists']
else:
self.word_db = self.server.create('word_lists')
if 'spelling_bee_users' in self.server:
self.user_db = self.server['spelling_bee_users']
else:
self.user_db = self.server.create('spelling_bee_users')
@property
def today_game(self) -> dict:
today = str(date.today())
today_record = self.word_db.get(today)
if today_record is not None:
return today_record
with open(os.path.join('data', 'pangram_sets.txt'), 'r') as f:
pangram_sets = [set(x.strip()) for x in f]
pangram_set = random.choice(pangram_sets)
required_from = pangram_set
if pangram_set.intersection({'i', 'n', 'g'}) == {'i', 'n', 'g'}:
required_from = required_from - {'i', 'n', 'g'}
elif pangram_set.intersection({'e', 'd'}) == {'e', 'd'}:
required_from = required_from - {'e', 'd'}
elif pangram_set.intersection({'e', 'r'}) == {'e', 'r'}:
required_from = required_from - {'e', 'r'}
required_letter = random.choice(tuple(required_from))
with open(os.path.join('data', 'words.txt'), 'r') as f:
all_words = [x.strip() for x in f]
word_list = tuple([x for x in all_words if pangram_set.union(set(x)) == pangram_set and required_letter in x])
pangram_set.remove(required_letter)
game_dict = {
'hive_letters': list(pangram_set),
'queen_letter': required_letter,
'word_list': word_list
}
self.word_db[today] = game_dict
return self.word_db[today]
@property
def yesterday_words(self):
yesterday = str(date.today() - timedelta(days = 1))
yesterday_game = self.word_db.get(yesterday)
if yesterday_game is not None:
word_list = yesterday_game.get('word_list')
return word_list
else:
return False
def get_user(self, user_id):
if user_id in self.user_db:
return User(user_id, self.user_db)
else:
return None
def authenticate_user(self, user_id, secret_word) -> str:
user_info = self.get_user(user_id)
if user_info is None:
return 'no user'
secret_word = sha256(secret_word.encode('utf-8')).hexdigest()
app.logger.debug(f'Authenticating user {user_info.id}')
app.logger.debug(f'Submitted hash: {secret_word}')
app.logger.debug(f'Stored hash: {user_info.user_doc["secret_word"]}')
if secret_word == user_info.user_doc['secret_word']:
app.logger.debug('Match')
return 'success'
else:
app.logger.debug('Mismatch')
return 'bad password'
def create_user(self, user_id) -> str:
if user_id in self.user_db:
return 'user exists'
with open(os.path.join('data', 'words.txt'), 'r') as f:
all_words = [x.strip() for x in f]
secret_word = random.choice(all_words)
secret_hash = sha256(secret_word.encode('utf-8')).hexdigest()
self.user_db[user_id] = {
'secret_word': secret_hash,
'games': {}
}
return secret_word
# -----------------------------------------------------------
# users
# -----------------------------------------------------------
class User(UserMixin):
def __init__(self, user_id, db) -> None:
super().__init__()
self.id = user_id
self.db = db
@property
def user_doc(self):
return self.db[self.id]
@property
def today_game(self) -> dict:
game = self.user_doc['games'].get(str(date.today()))
user_doc = self.user_doc
if game is None:
user_doc['games'][str(date.today())] = {'found_words': []}
self.db[self.id] = user_doc
game = {'found_words': []}
return game
@property
def found_words(self) -> list:
return list(set(self.today_game['found_words']))
@property
def yesterday_found(self) -> list:
found_words = self.user_doc['games'].get(str(date.today() - timedelta(days = 1)))
if found_words is None:
found_words = []
else:
found_words = found_words['found_words']
return found_words
@property
def friends(self) -> list:
friend_list = self.user_doc.get('friends')
if friend_list is not None:
return friend_list
else:
return []
def add_friend(self, friend: str) -> None:
user_doc = self.user_doc
if 'friends' not in user_doc:
user_doc['friends'] = []
if friend not in user_doc['friends']:
user_doc['friends'].append(friend)
self.db[self.id] = user_doc
def find_word(self, word) -> None:
user_doc = self.user_doc
user_doc['games'][str(date.today())]['found_words'].append(word)
self.db[self.id] = user_doc
def compare_word_list(self, other_user) -> list:
return list(set(self.found_words) - set(other_user.found_words))
# -----------------------------------------------------------
# game mechanics
# -----------------------------------------------------------
class GameState:
def __init__(self) -> None:
self.db = Database()
self._letter_set = None
self._required = None
self._words = None
self._last_updated = None
def update_game(self) -> None:
self._words = self.db.today_game['word_list']
self._required = self.db.today_game['queen_letter']
self._letter_set = set(self.db.today_game['hive_letters'])
self._last_updated = datetime.now()
@property
def letter_set(self) -> set:
if self._letter_set is None or (datetime.now() - self._last_updated).total_seconds() > 3600:
self.update_game()
return self._letter_set
@property
def required(self) -> str:
if self._required is None or (datetime.now() - self._last_updated).total_seconds() > 3600:
self.update_game()
return self._required
@property
def words(self) -> list:
if self._words is None or (datetime.now() - self._last_updated).total_seconds() > 3600:
self.update_game()
return self._words
@property
def yesterday_words(self) -> list:
if self.db.yesterday_words:
return self.db.yesterday_words
else:
return ['No words yesterday.']
@property
def maximum_score(self) -> int:
if 'max_score' in self.db.today_game:
return self.db.today_game['max_score']
max_score = sum(self.score_word(x) for x in self.db.today_game['word_list'])
self.db.today_game['max_score'] = max_score
return max_score
@property
def thresholds(self) -> dict:
return {
'Beginner': 0,
'Good Start': round(0.02 * self.maximum_score),
'Moving Up': round(0.05 * self.maximum_score),
'Good': round(0.08 * self.maximum_score),
'Solid': round(0.15 * self.maximum_score),
'Nice': round(0.25 * self.maximum_score),
'Great': round(0.40 * self.maximum_score),
'Amazing': round(0.50 * self.maximum_score),
'Genius': round(0.70 * self.maximum_score),
'Queen Bee': self.maximum_score
}
def score_word(self, word: str) -> int:
if word not in self.words or len(word) < 4 or self.required not in word:
return 0
if len(word) == 4:
score = 1
else:
score = len(word)
if self.letter_set.intersection(set(word)) == self.letter_set:
score += 7
return score
def work_together(self, user:User, coop_user_id:str) -> dict:
return_dict = {'success': True, 'words': []}
coop_user = self.db.get_user(coop_user_id)
if coop_user is None:
return_dict['success'] = False
else:
user.add_friend(coop_user_id)
return_dict['words'] = list(user.compare_word_list(coop_user))
return return_dict
# -----------------------------------------------------------
# flask app
# -----------------------------------------------------------
app = Flask(
__name__,
static_folder=os.path.join('dist', 'static'),
template_folder='dist')
try:
app.secret_key = os.environ['SECRET_KEY']
except KeyError:
if app.debug:
app.logger.warning('$SECRET_KEY not in environment.')
app.secret_key = 'BAD_SECRET_KEY_FOR_DEVELOPMENT'
else:
app.logger.error('Must include secret key for production mode')
sys.exit(1)
game_state = GameState()
if os.environ.get('BEE_SHOW_WORDS') == 'true':
app.logger.debug(game_state.words)
login_manager = LoginManager()
login_manager.init_app(app)
@login_manager.user_loader
def load_user(user_id):
return game_state.db.get_user(user_id)
@app.route('/', methods = ['GET'])
def index():
return render_template('index.html')
def check_word(word):
score = game_state.score_word(word.lower())
if score != 0 and word not in current_user.found_words:
current_user.find_word(word)
return {'score': score}
@app.route('/api', methods = ['POST'])
def api():
global game_state
rj = request.get_json()
if rj['action'] == 'get_setup':
if current_user.is_authenticated:
to_return = {
'auth': True,
'user_id': current_user.id,
'friend_list': current_user.friends,
'required': game_state.required,
'letters': list(game_state.letter_set),
'thresholds': list(game_state.thresholds.keys()),
'score_levels': list(game_state.thresholds.values()),
'num_words': len(game_state.words),
'yesterday_words': game_state.yesterday_words,
'score': sum(
[game_state.score_word(x.lower()) for x in current_user.found_words]
),
'found_words': current_user.found_words,
'found_yesterday': current_user.yesterday_found
}
else:
to_return = {'auth': False}
return json.dumps(to_return), 200, {'ContentType': 'application/json'}
elif rj['action'] == 'check_word':
return json.dumps(check_word(rj['word'])), 200, {'ContentType': 'application/json'}
elif rj['action'] == 'work_together':
return json.dumps(game_state.work_together(current_user, rj['coop_user'])), 200, {'ContentType': 'application/json'}
@app.route('/login', methods = ['POST'])
def login():
global game_state
rj = request.get_json()
if rj['action'] == 'login':
result = game_state.db.authenticate_user(rj['user_id'], rj['secret_word'])
if result == 'success':
login_user(game_state.db.get_user(rj['user_id']), remember = True)
to_return = {'success': True, 'user_id': rj['user_id']}
else:
to_return = {'success': False, 'reason': result}
return json.dumps(to_return), 200, {'ContentType': 'application/json'}
elif rj['action'] == 'create_user':
result = game_state.db.create_user(rj['user_id'])
if result == 'user exists':
to_return = {'success': False, 'reason': 'user exists'}
else:
to_return = {'success': True, 'user_id': rj['user_id'], 'secret_word': result}
login_user(game_state.db.get_user(rj['user_id']), remember = True)
return json.dumps(to_return), 200, {'ContentType': 'application/json'}
|
[
"os.path.join",
"couchdb.Server",
"random.choice",
"datetime.date.today",
"json.dumps",
"os.environ.get",
"flask_login.current_user.find_word",
"datetime.timedelta",
"flask.render_template",
"flask_login.LoginManager",
"datetime.datetime.now",
"flask.request.get_json",
"sys.exit"
] |
[((9720, 9734), 'flask_login.LoginManager', 'LoginManager', ([], {}), '()\n', (9732, 9734), False, 'from flask_login import LoginManager, UserMixin, login_user, current_user\n'), ((9620, 9652), 'os.environ.get', 'os.environ.get', (['"""BEE_SHOW_WORDS"""'], {}), "('BEE_SHOW_WORDS')\n", (9634, 9652), False, 'import os\n'), ((9918, 9947), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (9933, 9947), False, 'from flask import Flask, render_template, request\n'), ((10227, 10245), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (10243, 10245), False, 'from flask import Flask, render_template, request\n'), ((11632, 11650), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (11648, 11650), False, 'from flask import Flask, render_template, request\n'), ((616, 675), 'couchdb.Server', 'couchdb.Server', (['f"""http://{username}:{password}@{host}:5984"""'], {}), "(f'http://{username}:{password}@{host}:5984')\n", (630, 675), False, 'import couchdb\n'), ((1393, 1420), 'random.choice', 'random.choice', (['pangram_sets'], {}), '(pangram_sets)\n', (1406, 1420), False, 'import random\n'), ((3805, 3829), 'random.choice', 'random.choice', (['all_words'], {}), '(all_words)\n', (3818, 3829), False, 'import random\n'), ((6534, 6548), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6546, 6548), False, 'from datetime import date, datetime, timedelta\n'), ((9222, 9252), 'os.path.join', 'os.path.join', (['"""dist"""', '"""static"""'], {}), "('dist', 'static')\n", (9234, 9252), False, 'import os\n'), ((10087, 10115), 'flask_login.current_user.find_word', 'current_user.find_word', (['word'], {}), '(word)\n', (10109, 10115), False, 'from flask_login import LoginManager, UserMixin, login_user, current_user\n'), ((1105, 1117), 'datetime.date.today', 'date.today', ([], {}), '()\n', (1115, 1117), False, 'from datetime import date, datetime, timedelta\n'), ((9578, 9589), 'sys.exit', 'sys.exit', (['(1)'], {}), 
'(1)\n', (9586, 9589), False, 'import sys\n'), ((11182, 11203), 'json.dumps', 'json.dumps', (['to_return'], {}), '(to_return)\n', (11192, 11203), False, 'import json\n'), ((12038, 12059), 'json.dumps', 'json.dumps', (['to_return'], {}), '(to_return)\n', (12048, 12059), False, 'import json\n'), ((1263, 1303), 'os.path.join', 'os.path.join', (['"""data"""', '"""pangram_sets.txt"""'], {}), "('data', 'pangram_sets.txt')\n", (1275, 1303), False, 'import os\n'), ((1913, 1946), 'os.path.join', 'os.path.join', (['"""data"""', '"""words.txt"""'], {}), "('data', 'words.txt')\n", (1925, 1946), False, 'import os\n'), ((2476, 2488), 'datetime.date.today', 'date.today', ([], {}), '()\n', (2486, 2488), False, 'from datetime import date, datetime, timedelta\n'), ((2491, 2508), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (2500, 2508), False, 'from datetime import date, datetime, timedelta\n'), ((3689, 3722), 'os.path.join', 'os.path.join', (['"""data"""', '"""words.txt"""'], {}), "('data', 'words.txt')\n", (3701, 3722), False, 'import os\n'), ((4496, 4508), 'datetime.date.today', 'date.today', ([], {}), '()\n', (4506, 4508), False, 'from datetime import date, datetime, timedelta\n'), ((12506, 12527), 'json.dumps', 'json.dumps', (['to_return'], {}), '(to_return)\n', (12516, 12527), False, 'import json\n'), ((4603, 4615), 'datetime.date.today', 'date.today', ([], {}), '()\n', (4613, 4615), False, 'from datetime import date, datetime, timedelta\n'), ((4954, 4966), 'datetime.date.today', 'date.today', ([], {}), '()\n', (4964, 4966), False, 'from datetime import date, datetime, timedelta\n'), ((4969, 4986), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (4978, 4986), False, 'from datetime import date, datetime, timedelta\n'), ((5739, 5751), 'datetime.date.today', 'date.today', ([], {}), '()\n', (5749, 5751), False, 'from datetime import date, datetime, timedelta\n'), ((6637, 6651), 'datetime.datetime.now', 'datetime.now', ([], {}), 
'()\n', (6649, 6651), False, 'from datetime import date, datetime, timedelta\n'), ((6846, 6860), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6858, 6860), False, 'from datetime import date, datetime, timedelta\n'), ((7052, 7066), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (7064, 7066), False, 'from datetime import date, datetime, timedelta\n')]
|
import pygame
from settings import Settings
class Player:
def __init__(self, name, order, is_human):
self.name = name
self.order = order
self.is_human = is_human
self.points = 0
def show_score(self, game):
# TODO - Refactor? Don't pass in canvas, maybe create local reference to game instead?
# TODO - Refactor - a lot of code copied from locations.FloorLocation
# TODO - Refactor - rename score to player_id?
if self.order == 0:
x, y = Settings.player_area_location
multiplier = Settings.player_area_multiplier
font = game.large_floor_tile_scores_font
else:
x, y = Settings.opponent_area_location
y += (self.order - 1) * Settings.area_height
multiplier = 1
font = game.small_floor_tile_scores_font
x += 9 * Settings.tile_width * multiplier
y += (Settings.area_height - Settings.tile_height * 1.9) * multiplier
width, height = Settings.tile_width, Settings.tile_height
if self.order == 0:
width *= Settings.player_area_multiplier
height *= Settings.player_area_multiplier
pygame.draw.rect(
game.canvas,
pygame.Color('white'), # TODO: move to Settings?
(x, y, width, height),
Settings.grid_line_width
)
game.canvas.blit(
font.render('{:2}'.format(self.points), True, Settings.floor_tile_scores_colour),
(
x + Settings.floor_tile_scores_spacing * multiplier,
y + Settings.floor_tile_scores_spacing * multiplier
)
)
|
[
"pygame.Color"
] |
[((1261, 1282), 'pygame.Color', 'pygame.Color', (['"""white"""'], {}), "('white')\n", (1273, 1282), False, 'import pygame\n')]
|
import numpy as np
class congestionInference:
def __init__(self, latency_jumps, jitter_analysis):
self.latency_jumps = latency_jumps
self.jitter_analysis = jitter_analysis
self.congestion = False
def fit(self):
self.congestion_inferences = []
for i in range(len(self.latency_jumps)):
if self.congestion and self.latency_jumps[i][2]:
# The link was already congested and it has jumped to a state of larger mean RTT
continue
else:
if self.latency_jumps[i][2] and self.jitter_analysis[i][2]:
self.congestion = True
else:
self.congestion = False
self.congestion_inferences.append((self.latency_jumps[i][0], self.latency_jumps[i][1], self.congestion))
def getInferences(self):
return np.array(self.congestion_inferences)
|
[
"numpy.array"
] |
[((899, 935), 'numpy.array', 'np.array', (['self.congestion_inferences'], {}), '(self.congestion_inferences)\n', (907, 935), True, 'import numpy as np\n')]
|
import json
from scrapy.commands import ScrapyCommand
from scrapy.exceptions import UsageError
from scrapy_redis.scheduler import Scheduler
class Command(ScrapyCommand):
requires_project = True
def syntax(self):
return '<spider>'
def add_options(self, parser):
ScrapyCommand.add_options(self, parser)
parser.add_option('-o', '--output',
help='dump stats into json file (use - for stdout)')
def short_desc(self):
return 'Print short queue stats summary, dump detailed stats to a file'
def run(self, args, opts):
if len(args) != 1:
raise UsageError()
crawler = self.crawler_process.create_crawler(args[0])
scheduler = Scheduler.from_settings(self.settings)
spider = crawler.spidercls.from_crawler(crawler)
scheduler.open(spider)
stats = scheduler.queue.get_stats()
print('\nQueue size: {len}, domains: {n_domains}\n'.format(**stats))
print_top = 10
printed_count = 0
queues = stats['queues']
print('{:<50}\tCount\tScore'.format('Domain'))
for queue, score, count in queues[:print_top]:
printed_count += count
domain = queue.rsplit(':')[-1]
print('{:<50}\t{}\t{:.0f}'.format(domain, count, score))
others_count = sum(count for _, _, count in queues) - printed_count
if others_count:
print('...')
print('{:<50}\t{}'.format(
'other {}:'.format(len(queues) - print_top), others_count))
print()
if opts.output:
with open(opts.output, 'w') as f:
json.dump(stats, f,
ensure_ascii=False, indent=True, sort_keys=True)
print('Stats dumped to {}'.format(opts.output))
|
[
"json.dump",
"scrapy.exceptions.UsageError",
"scrapy_redis.scheduler.Scheduler.from_settings",
"scrapy.commands.ScrapyCommand.add_options"
] |
[((295, 334), 'scrapy.commands.ScrapyCommand.add_options', 'ScrapyCommand.add_options', (['self', 'parser'], {}), '(self, parser)\n', (320, 334), False, 'from scrapy.commands import ScrapyCommand\n'), ((739, 777), 'scrapy_redis.scheduler.Scheduler.from_settings', 'Scheduler.from_settings', (['self.settings'], {}), '(self.settings)\n', (762, 777), False, 'from scrapy_redis.scheduler import Scheduler\n'), ((642, 654), 'scrapy.exceptions.UsageError', 'UsageError', ([], {}), '()\n', (652, 654), False, 'from scrapy.exceptions import UsageError\n'), ((1675, 1743), 'json.dump', 'json.dump', (['stats', 'f'], {'ensure_ascii': '(False)', 'indent': '(True)', 'sort_keys': '(True)'}), '(stats, f, ensure_ascii=False, indent=True, sort_keys=True)\n', (1684, 1743), False, 'import json\n')]
|
"""
Recommender
"""
from __future__ import annotations
from pathlib import Path
from typing import Any, Callable, Optional, Tuple, Union, cast
import numpy as np
import torch
from sklearn.neighbors import NearestNeighbors
from torch.utils.data import DataLoader
from tqdm.auto import tqdm
from wav2rec._utils.validation import check_is_fitted
from wav2rec.core.similarity import cosine_similarity, similarity_calculator
from wav2rec.data.dataset import Wav2RecDataset
from wav2rec.nn.lightening import Wav2RecNet
def _l2_normalize(array: np.ndarray, axis: int = -1) -> np.ndarray:
norm = np.linalg.norm(array, ord=2, axis=axis, keepdims=True)
array_norm: np.ndarray = array / np.maximum(norm, np.finfo(array.dtype).eps)
return array_norm
def _standardize_input(x: Union[torch.Tensor, np.ndarray]) -> torch.Tensor:
x = torch.as_tensor(x)
if x.ndim == 1:
return x.unsqueeze(0)
elif x.ndim == 2:
return x
else:
raise IndexError(f"Input must be 1D or 2D, got {x.ndim}D")
class Wav2Rec:
"""Waveform recommendation & matching engine.
Args:
model_path (Path): path to (training) checkpoint for ``Wav2RecNet``
distance_metric (str): distance metric to use for nearest neighbours search
normalize (bool): if ``True`` perform L2 normalization on all projections
similarity (callable, optional): a callable which accepts two 1D arrays
and returns a float. Must be compiled with ``numba.jit(nopython=True)``.
If ``None`` distances will be returned instead (see ``distance_metric``).
batch_size (int): number of audio files to send to the Wav2Rec neural network
model for projection simultaneously.
num_workers (int): number of subprocesses to use when loading data from the
dataset. See ``torch.utils.data.dataloader.DataLoader``.
pin_memory (bool): copy tensors to CUDA memory before the data loader
returns them.
prefetch_factor (int): Number of samples to load in advance of each worker.
See ``torch.utils.data.dataloader.DataLoader``.
device (torch.device, optional): device to run the model on.
If ``None``, the device will be selected automatically.
verbose (bool): if ``True`` display a progress bar while fitting.
**kwargs (Keyword Arguments): Keyword arguments to pass to ``NearestNeighbors``.
Warnings:
* By default, this class uses ``distance_metric='euclidean'`` and ``normalize=True``.
These settings have been purposefully chosen so that the distances computed
for nearest neighbours search accord with the default similarity metric used:
cosine similarity. (The euclidean distance between L2 normalized vectors is an
effective proxy of cosine similarity, see reference below.)
References:
* https://en.wikipedia.org/wiki/Cosine_similarity
"""
def __init__(
self,
model_path: Path,
distance_metric: str = "euclidean",
normalize: bool = True,
similarity_metric: Optional[
Callable[[np.ndarray, np.ndarray], float]
] = cosine_similarity,
batch_size: int = 1,
num_workers: int = 0,
pin_memory: bool = False,
prefetch_factor: int = 2,
device: Optional[torch.device] = None,
verbose: bool = True,
**kwargs: Any,
) -> None:
self.model_path = Path
self.normalize = normalize
self.similarity_metric = similarity_metric
self.batch_size = batch_size
self.num_workers = num_workers
self.pin_memory = pin_memory
self.prefetch_factor = prefetch_factor
self.device = device or torch.device(
"cuda" if torch.cuda.is_available() else "cpu"
)
self.verbose = verbose
self.net = Wav2RecNet.load_from_checkpoint(model_path).eval().to(self.device)
self._nneighbours = NearestNeighbors(
metric=kwargs.pop("metric", distance_metric),
n_jobs=kwargs.pop("n_jobs", -1),
**kwargs,
)
self.paths: np.ndarray = np.array([], dtype="str")
self.fitted: bool = False
@property
def _X(self) -> np.ndarray:
return cast(np.ndarray, self._nneighbours._fit_X)
def _dataset2loader(self, dataset: Wav2RecDataset) -> DataLoader:
return DataLoader(
dataset,
batch_size=self.batch_size,
shuffle=False,
num_workers=self.num_workers,
pin_memory=self.pin_memory,
prefetch_factor=self.prefetch_factor,
)
def get_projection(self, x: Union[torch.Tensor, np.ndarray]) -> np.ndarray:
"""Get the model's projection of a waveform ``x``.
Args:
x (np.ndarray, torch.Tensor): a 1D array or tensor with shape ``[FEATURES]``
or a 2D array or tensor with shape ``[BATCH, FEATURES]``.
Returns:
proj (np.ndarray): a projection of ``x``.
"""
with torch.inference_mode():
proj: np.ndarray = (
self.net(_standardize_input(x).to(self.device)).cpu().numpy()
)
return _l2_normalize(proj, axis=-1) if self.normalize else proj
def fit(self, dataset: Wav2RecDataset) -> Wav2Rec:
"""Fit the recommender to a dataset.
Fitting is composed of three steps:
1. Iterating over all files in the dataset
2. Computing `Wav2RecNet`` projections for each file
3. Fitting the nearest neighbours algorithm against the projections
Args:
dataset (Wav2RecDataset): a dataset to fit against.
Returns:
Wav2Rec
"""
all_paths, all_projections = list(), list()
with tqdm(desc="Fitting", disable=not self.verbose, total=len(dataset)) as pbar:
for paths, audio in self._dataset2loader(dataset):
all_paths.extend(paths)
all_projections.append(self.get_projection(audio))
pbar.update(len(audio))
self.paths = np.asarray(all_paths)
self._nneighbours.fit(np.concatenate(all_projections))
self.fitted = True
return self
def _get_neighbours(
self,
proj: np.ndarray,
n: int,
return_distance: bool,
) -> Union[np.ndarray, Tuple[np.ndarray, np.ndarray]]:
if n > len(self._X):
raise ValueError("`n` is larger than dataset")
neighbors: np.ndarray = self._nneighbours.kneighbors(
proj,
n_neighbors=n,
return_distance=return_distance,
)
return neighbors
@check_is_fitted
def recommend(
self,
x: Union[torch.Tensor, np.ndarray],
n: int = 5,
) -> Tuple[np.ndarray, np.ndarray]:
"""Recommend waveforms in ``dataset`` similar to ``x`.
Args:
x (np.ndarray, torch.Tensor): a 2D array or tensor
Shape: ``[BATCH, FEATURES]``.
n (int): number of recommendations to generate
Returns:
result (Tuple[np.ndarray, np.ndarray]): a tuple containing:
* ``metrics``: a 2D array of either similarity or distance metrics.
Shape: ``[BATCH, NEIGHBOURS]``.
* ``paths``: a 2D array of recommended file paths.
Shape: ``[BATCH, NEIGHBOURS]``.
"""
proj = self.get_projection(x)
if callable(self.similarity_metric):
ix = self._get_neighbours(proj, n=n, return_distance=False)
metrics = similarity_calculator(
X_query=proj,
X_neighbours=self._X[ix],
metric=self.similarity_metric,
)
else:
metrics, ix = self._get_neighbours(proj, n=n, return_distance=True)
return metrics, self.paths[ix]
|
[
"wav2rec.core.similarity.similarity_calculator",
"torch.inference_mode",
"torch.utils.data.DataLoader",
"wav2rec.nn.lightening.Wav2RecNet.load_from_checkpoint",
"typing.cast",
"numpy.asarray",
"numpy.finfo",
"numpy.array",
"numpy.linalg.norm",
"torch.cuda.is_available",
"torch.as_tensor",
"numpy.concatenate"
] |
[((603, 657), 'numpy.linalg.norm', 'np.linalg.norm', (['array'], {'ord': '(2)', 'axis': 'axis', 'keepdims': '(True)'}), '(array, ord=2, axis=axis, keepdims=True)\n', (617, 657), True, 'import numpy as np\n'), ((847, 865), 'torch.as_tensor', 'torch.as_tensor', (['x'], {}), '(x)\n', (862, 865), False, 'import torch\n'), ((4189, 4214), 'numpy.array', 'np.array', (['[]'], {'dtype': '"""str"""'}), "([], dtype='str')\n", (4197, 4214), True, 'import numpy as np\n'), ((4311, 4353), 'typing.cast', 'cast', (['np.ndarray', 'self._nneighbours._fit_X'], {}), '(np.ndarray, self._nneighbours._fit_X)\n', (4315, 4353), False, 'from typing import Any, Callable, Optional, Tuple, Union, cast\n'), ((4440, 4608), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset'], {'batch_size': 'self.batch_size', 'shuffle': '(False)', 'num_workers': 'self.num_workers', 'pin_memory': 'self.pin_memory', 'prefetch_factor': 'self.prefetch_factor'}), '(dataset, batch_size=self.batch_size, shuffle=False, num_workers=\n self.num_workers, pin_memory=self.pin_memory, prefetch_factor=self.\n prefetch_factor)\n', (4450, 4608), False, 'from torch.utils.data import DataLoader\n'), ((6169, 6190), 'numpy.asarray', 'np.asarray', (['all_paths'], {}), '(all_paths)\n', (6179, 6190), True, 'import numpy as np\n'), ((5098, 5120), 'torch.inference_mode', 'torch.inference_mode', ([], {}), '()\n', (5118, 5120), False, 'import torch\n'), ((6221, 6252), 'numpy.concatenate', 'np.concatenate', (['all_projections'], {}), '(all_projections)\n', (6235, 6252), True, 'import numpy as np\n'), ((7698, 7795), 'wav2rec.core.similarity.similarity_calculator', 'similarity_calculator', ([], {'X_query': 'proj', 'X_neighbours': 'self._X[ix]', 'metric': 'self.similarity_metric'}), '(X_query=proj, X_neighbours=self._X[ix], metric=self.\n similarity_metric)\n', (7719, 7795), False, 'from wav2rec.core.similarity import cosine_similarity, similarity_calculator\n'), ((712, 733), 'numpy.finfo', 'np.finfo', (['array.dtype'], {}), 
'(array.dtype)\n', (720, 733), True, 'import numpy as np\n'), ((3809, 3834), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (3832, 3834), False, 'import torch\n'), ((3907, 3950), 'wav2rec.nn.lightening.Wav2RecNet.load_from_checkpoint', 'Wav2RecNet.load_from_checkpoint', (['model_path'], {}), '(model_path)\n', (3938, 3950), False, 'from wav2rec.nn.lightening import Wav2RecNet\n')]
|
import math
import tensorflow as tf
# Internal
import dataset
# Network hyper-parameters.
EMBED_WIDTH = 16  # width of the character-code embedding
TIMES_WIDTH = [ (16, 0.0) ]  # (units, dropout) per dense layer over timing features
INPUT_DROPOUT = 0.0  # GaussianDropout rate on raw (hold, delta) inputs
POST_RNN_DROPOUT = 0.0  # Dropout rate after the final feature projection
DENSE_L2 = 0.0  # L2 regularization strength for dense layers
RNN_WIDTH = [ 16 ]  # units per stacked LSTM cell
DENSE_POST_WIDTH = [ (128, 0.0) ]  # (units, dropout) per post-RNN dense layer
FEATURE_COUNT = 32  # dimensionality of the output feature vector
# Loss-shaping constants used in Model.get_proxy_loss.
ANNEAL_MAX_STEP = 100.0  # steps over which the radius anneal spline runs
MAX_SPHERE_STRENGTH = 0.9  # cap on the SphereFace double-cos contribution
class Embedding():
  """Trainable lookup table mapping integer codes to dense vectors."""

  def __init__(self, name, max_code, width, regularizer=None):
    """Create a `(max_code, width)` embedding matrix under a fresh scope."""
    self.name = name
    self.width = width

    with tf.variable_scope(None, default_name=self.name):
      self.weights = tf.get_variable('weights', shape=(max_code, width),
                                     trainable=True,
                                     regularizer=regularizer)

  def apply(self, codes):
    """Look up embedding rows for integer `codes`."""
    scope = tf.name_scope(None, values=[ codes ], default_name=self.name)
    with scope:
      return tf.gather(self.weights, codes)
class Model():
  """Keystroke-dynamics network.

  Embeds per-keystroke (character code, hold time, inter-key delay)
  sequences with an LSTM stack into L2-normalised feature vectors, and
  trains them with a proxy-based margin loss (SphereFace / ArcFace
  variants selected by flags set in ``__init__``).
  """
  def __init__(self, training):
    # `training` toggles dropout layers (bool or boolean tensor).
    self.l2 = tf.contrib.layers.l2_regularizer(DENSE_L2)
    self.training = training
    # Exactly one margin formulation is applied in `get_proxy_loss`;
    # arcface takes precedence when sphereface is off.
    self.use_sphereface = False
    self.use_arcface = True
    self.arcface_m1 = 1.35 # cos(m1 * x + m2) - m3
    self.arcface_m2 = 0.0
    self.arcface_m3 = 0.0
    self.anneal_distances = False
    # Scale (inverse temperature) applied to cosine logits before softmax.
    self.radius = 9.2
    # +2 extra codes beyond MAX_CHAR — presumably padding/special tokens;
    # TODO confirm against the dataset module.
    self.embedding = Embedding('embedding', dataset.MAX_CHAR + 2, EMBED_WIDTH)
    self.rnn_cells = [
        tf.contrib.rnn.LSTMBlockCell(name='lstm_cell_{}'.format(i),
            num_units=width)
        for i, width in enumerate(RNN_WIDTH)
    ]
    self.input_dropout = tf.keras.layers.GaussianDropout(name='input_dropout',
        rate=INPUT_DROPOUT)
    self.post_rnn_dropout = tf.keras.layers.Dropout(
        name='post_rnn_dropout',
        rate=POST_RNN_DROPOUT)
    # Small dense stack applied to the (hold, delta) timing features before
    # they are concatenated with the character embedding.
    self.process_times = [
        {
          'dense': tf.layers.Dense(name='process_times_{}'.format(i),
              units=width,
              activation=tf.nn.relu,
              kernel_regularizer=self.l2),
          'dropout': tf.keras.layers.Dropout(
              name='process_times_dropout_{}'.format(i),
              rate=dropout),
        }
        for i, (width, dropout) in enumerate(TIMES_WIDTH)
    ]
    # Dense stack applied to the RNN outputs before the final projection.
    self.post = []
    for i, (width, dropout) in enumerate(DENSE_POST_WIDTH):
      dense = tf.layers.Dense(name='dense_post_{}'.format(i),
          units=width,
          activation=tf.nn.relu,
          kernel_regularizer=self.l2)
      dropout = tf.keras.layers.Dropout(
          name='dropout_post_{}'.format(i),
          rate=dropout)
      self.post.append({ 'dense': dense, 'dropout': dropout })
    # Final linear projection to the FEATURE_COUNT-dim feature space.
    self.features = tf.layers.Dense(name='features',
        units=FEATURE_COUNT,
        kernel_regularizer=self.l2)
  def apply_embedding(self, holds, codes, deltas):
    """Combine timing features with character embeddings per timestep.

    NOTE(review): holds/deltas/codes appear to be [batch, time] tensors —
    confirm with callers.
    """
    embedding = self.embedding.apply(codes)
    holds = tf.expand_dims(holds, axis=-1, name='expanded_holds')
    deltas = tf.expand_dims(deltas, axis=-1, name='expanded_deltas')
    times = tf.concat([ holds, deltas ], axis=-1, name='times')
    times = self.input_dropout(times, training=self.training)
    # Process holds+deltas
    for o in self.process_times:
      times = o['dense'](times)
      times = o['dropout'](times, training=self.training)
    series = tf.concat([ times, embedding ], axis=-1, name='full_input')
    return series, embedding
  def build(self, holds, codes, deltas, sequence_len=None):
    """Forward pass: return one L2-normalised feature vector per sequence."""
    batch_size = tf.shape(codes)[0]
    max_sequence_len = int(codes.shape[1])
    # Default: every sequence is assumed to span the full padded length.
    if sequence_len is None:
      sequence_len = tf.constant(max_sequence_len, dtype=tf.int32,
          shape=(1,))
      sequence_len = tf.tile(sequence_len, [ batch_size ])
    series, embedding = self.apply_embedding(holds, codes, deltas)
    series = tf.unstack(series, axis=1, name='unstacked_series')
    for cell in self.rnn_cells:
      series, _ = tf.nn.static_rnn(
          cell=cell,
          dtype=tf.float32,
          inputs=series)
    x = tf.stack(series, axis=1, name='stacked_outputs')
    for entry in self.post:
      x = entry['dense'](x)
      x = entry['dropout'](x, training=self.training)
    x = self.features(x)
    x = self.post_rnn_dropout(x, training=self.training)
    # Mask out padded timesteps and mean-pool over the valid ones
    # (each mask row is normalised to sum to ~1).
    seq_index = tf.expand_dims(tf.range(max_sequence_len), axis=0,
        name='seq_index')
    mask = seq_index < tf.expand_dims(sequence_len, axis=-1)
    mask = tf.cast(mask, dtype=tf.float32)
    mask /= tf.reduce_sum(mask, axis=-1, keepdims=True, name='mask_sum') + 1e-23
    mask = tf.expand_dims(mask, axis=-1, name='expanded_mask')
    x *= mask
    x = tf.reduce_sum(x, axis=1)
    x = tf.math.l2_normalize(x, axis=-1)
    return x
  def get_proxy_common(self, proxies, output, categories, category_count, \
      category_mask):
    """Cosines from each output to its positive proxy and all negatives.

    NOTE: the returned `*_distances` values are actually cosines; the local
    `*_metrics` hold the 1 - cos distances used for percentile reporting.
    """
    positives = tf.gather(proxies, categories, axis=0,
        name='positive_proxies')
    # For each sample, mask off its own class (and any externally masked
    # classes) to obtain its negative proxies.
    negative_masks = tf.one_hot(categories, category_count, on_value=False,
        off_value=True, name='negative_mask')
    negative_masks = tf.logical_and(negative_masks, \
        tf.expand_dims(category_mask, axis=0))
    def apply_mask(mask):
      # Select this sample's negative proxies.
      negatives = tf.boolean_mask(proxies, mask, axis=0, \
          name='batch_negatives')
      return negatives
    negatives = tf.map_fn(apply_mask, negative_masks, name='negatives',
        dtype=tf.float32)
    def cosine(normed_target, normed_features):
      # Inputs are expected to be L2-normalised, so the dot product is cos.
      cos = tf.reduce_sum(normed_target * normed_features, axis=-1)
      dist = 1.0 - cos
      return cos, dist
    positive_distances, positive_metrics = cosine(positives, output)
    negative_distances, negative_metrics = cosine(negatives, \
        tf.expand_dims(output, axis=1))
    # Percentiles of the positive/negative distance distributions, plus
    # separation ratios (larger ratio = better separated classes).
    metrics = {}
    for percentile in [ 1, 5, 10, 25, 50, 75, 90, 95, 99 ]:
      neg_p = tf.contrib.distributions.percentile(negative_metrics,
          percentile, name='negative_{}'.format(percentile))
      metrics['negative_{}'.format(percentile)] = neg_p
      pos_p = tf.contrib.distributions.percentile(positive_metrics,
          percentile, name='positive_{}'.format(percentile))
      metrics['positive_{}'.format(percentile)] = pos_p
    epsilon = 1e-12
    metrics['ratio_25'] = metrics['negative_25'] / \
        (metrics['positive_75'] + epsilon)
    metrics['ratio_10'] = metrics['negative_10'] / \
        (metrics['positive_90'] + epsilon)
    metrics['ratio_5'] = metrics['negative_5'] / \
        (metrics['positive_95'] + epsilon)
    metrics['ratio_1'] = metrics['negative_1'] / \
        (metrics['positive_99'] + epsilon)
    return positive_distances, negative_distances, metrics
  # As in https://arxiv.org/pdf/1703.07464.pdf
  # More like in: http://openaccess.thecvf.com/content_cvpr_2018/papers/Wang_CosFace_Large_Margin_CVPR_2018_paper.pdf
  # TODO(indutny): try http://ydwen.github.io/papers/WenECCV16.pdf
  # TODO(indutny): try http://openaccess.thecvf.com/content_cvpr_2018/papers/Zheng_Ring_Loss_Convex_CVPR_2018_paper.pdf
  # TODO(indutny): try https://arxiv.org/pdf/1704.08063.pdf
  # TODO(indutny): try https://arxiv.org/pdf/1703.09507.pdf
  # See http://proceedings.mlr.press/v48/liud16.pdf
  # TODO(indutny): https://arxiv.org/pdf/1801.07698.pdf
  def get_proxy_loss(self, output, categories, category_count, \
      category_mask, step):
    """Proxy-NCA style loss with an optional angular margin on positives."""
    with tf.name_scope('proxy_loss',
        values=[ output, categories, category_mask ]):
      # Learnable proxies, re-normalised onto the unit sphere every step.
      proxies_init = tf.initializers.random_uniform(-1.0, 1.0)( \
          (category_count, FEATURE_COUNT,))
      proxies = tf.get_variable('points',
          trainable=True,
          initializer=proxies_init)
      proxies = tf.math.l2_normalize(proxies, axis=-1,
          name='normalized_proxies')
      positive_distances, negative_distances, _ = self.get_proxy_common( \
          proxies, output, categories, category_count, category_mask)
      # NOTE: We use same mean proxies for the metrics as in validation
      mean_proxies = self.mean_proxies(output, categories, category_count)
      _, _, metrics = self.get_proxy_common( \
          mean_proxies, output, categories, category_count, category_mask)
      epsilon = 1e-23
      # Primitive spline with derivatives equal to zero at both start and
      # max step
      anneal_lambda = tf.clip_by_value(
          tf.cast(step, dtype=tf.float32) / ANNEAL_MAX_STEP,
          0.0,
          1.0)
      anneal_lambda = -2.0 * (anneal_lambda ** 3.0) + \
          3.0 * (anneal_lambda ** 2.0)
      # Radius is annealed from 1.0 up to self.radius over ANNEAL_MAX_STEP.
      radius = 1.0 + (self.radius - 1.0) * anneal_lambda
      metrics['anneal_lambda'] = anneal_lambda
      metrics['radius'] = radius
      if self.use_sphereface:
        # cos(2x) = 2.0 * cos^2(x) - 1
        double_cos = 2.0 * (positive_distances ** 2.0) - 1.0
        k = tf.cast(positive_distances <= 0.0, dtype=tf.float32)
        sign = (-1.0) ** k
        psi = sign * double_cos - 2 * k
        # Regardless of annealing, do not cross maximum strength of double cos
        # It leads to gradient collapse, and possibly worse learning than
        # it should be.
        psi *= MAX_SPHERE_STRENGTH
        psi += (1.0 - MAX_SPHERE_STRENGTH) * positive_distances
        # TODO(indutny): try annealing again
        if self.anneal_distances:
          positive_distances *= (1.0 - anneal_lambda)
          positive_distances += anneal_lambda * psi
        else:
          positive_distances = psi
      elif self.use_arcface:
        psi = tf.math.acos(positive_distances)
        # cos(m1 * x + m2) - m3
        psi *= self.arcface_m1
        psi += self.arcface_m2
        psi = tf.math.cos(psi)
        psi -= self.arcface_m3
        # according to the paper - no annealing is necessary
        positive_distances = psi
      else:
        # Just apply margin
        positive_distances -= self.arcface_m3
      # Scale cosine logits and take softmax cross-entropy against the
      # positive proxy.
      positive_distances *= radius
      negative_distances *= radius
      exp_pos = tf.exp(positive_distances, name='exp_pos')
      exp_neg = tf.exp(negative_distances, name='exp_neg')
      total_exp_neg = tf.reduce_sum(exp_neg, axis=-1, name='total_exp_neg')
      ratio = exp_pos / (exp_pos + total_exp_neg + epsilon)
      loss = -tf.log(ratio + epsilon, name='loss_vector')
      loss = tf.reduce_mean(loss, name='loss')
      metrics['loss'] = loss
      return metrics
  def get_proxy_val_metrics(self, output, categories, category_count, \
      category_mask):
    """Validation metrics computed against mean-of-class proxies."""
    with tf.name_scope('proxy_val_metrics', values=[ output, categories, \
        category_mask ]):
      proxies = self.mean_proxies(output, categories, category_count)
      _, _, metrics = self.get_proxy_common(proxies, output, categories, \
          category_count, category_mask)
      return metrics
  def mean_proxies(self, output, categories, category_count):
    # Compute proxies as mean points
    def compute_mean_proxy(category):
      # Mean feature vector of every sample belonging to `category`.
      points = tf.boolean_mask(output, tf.equal(categories, category),
          'category_points')
      return tf.reduce_mean(points, axis=0)
    result = tf.map_fn(compute_mean_proxy, tf.range(category_count),
        dtype=tf.float32)
    result = tf.math.l2_normalize(result, axis=-1)
    return result
|
[
"tensorflow.nn.static_rnn",
"tensorflow.reduce_sum",
"tensorflow.contrib.layers.l2_regularizer",
"tensorflow.get_variable",
"tensorflow.layers.Dense",
"tensorflow.one_hot",
"tensorflow.gather",
"tensorflow.concat",
"tensorflow.variable_scope",
"tensorflow.stack",
"tensorflow.cast",
"tensorflow.keras.layers.GaussianDropout",
"tensorflow.exp",
"tensorflow.map_fn",
"tensorflow.name_scope",
"tensorflow.equal",
"tensorflow.range",
"tensorflow.initializers.random_uniform",
"tensorflow.keras.layers.Dropout",
"tensorflow.math.acos",
"tensorflow.reduce_mean",
"tensorflow.constant",
"tensorflow.tile",
"tensorflow.log",
"tensorflow.math.l2_normalize",
"tensorflow.expand_dims",
"tensorflow.math.cos",
"tensorflow.shape",
"tensorflow.boolean_mask",
"tensorflow.unstack"
] |
[((871, 913), 'tensorflow.contrib.layers.l2_regularizer', 'tf.contrib.layers.l2_regularizer', (['DENSE_L2'], {}), '(DENSE_L2)\n', (903, 913), True, 'import tensorflow as tf\n'), ((1439, 1512), 'tensorflow.keras.layers.GaussianDropout', 'tf.keras.layers.GaussianDropout', ([], {'name': '"""input_dropout"""', 'rate': 'INPUT_DROPOUT'}), "(name='input_dropout', rate=INPUT_DROPOUT)\n", (1470, 1512), True, 'import tensorflow as tf\n'), ((1549, 1620), 'tensorflow.keras.layers.Dropout', 'tf.keras.layers.Dropout', ([], {'name': '"""post_rnn_dropout"""', 'rate': 'POST_RNN_DROPOUT'}), "(name='post_rnn_dropout', rate=POST_RNN_DROPOUT)\n", (1572, 1620), True, 'import tensorflow as tf\n'), ((2611, 2697), 'tensorflow.layers.Dense', 'tf.layers.Dense', ([], {'name': '"""features"""', 'units': 'FEATURE_COUNT', 'kernel_regularizer': 'self.l2'}), "(name='features', units=FEATURE_COUNT, kernel_regularizer=\n self.l2)\n", (2626, 2697), True, 'import tensorflow as tf\n'), ((2873, 2926), 'tensorflow.expand_dims', 'tf.expand_dims', (['holds'], {'axis': '(-1)', 'name': '"""expanded_holds"""'}), "(holds, axis=-1, name='expanded_holds')\n", (2887, 2926), True, 'import tensorflow as tf\n'), ((2940, 2995), 'tensorflow.expand_dims', 'tf.expand_dims', (['deltas'], {'axis': '(-1)', 'name': '"""expanded_deltas"""'}), "(deltas, axis=-1, name='expanded_deltas')\n", (2954, 2995), True, 'import tensorflow as tf\n'), ((3009, 3058), 'tensorflow.concat', 'tf.concat', (['[holds, deltas]'], {'axis': '(-1)', 'name': '"""times"""'}), "([holds, deltas], axis=-1, name='times')\n", (3018, 3058), True, 'import tensorflow as tf\n'), ((3288, 3345), 'tensorflow.concat', 'tf.concat', (['[times, embedding]'], {'axis': '(-1)', 'name': '"""full_input"""'}), "([times, embedding], axis=-1, name='full_input')\n", (3297, 3345), True, 'import tensorflow as tf\n'), ((3776, 3827), 'tensorflow.unstack', 'tf.unstack', (['series'], {'axis': '(1)', 'name': '"""unstacked_series"""'}), "(series, axis=1, name='unstacked_series')\n", 
(3786, 3827), True, 'import tensorflow as tf\n'), ((3986, 4034), 'tensorflow.stack', 'tf.stack', (['series'], {'axis': '(1)', 'name': '"""stacked_outputs"""'}), "(series, axis=1, name='stacked_outputs')\n", (3994, 4034), True, 'import tensorflow as tf\n'), ((4394, 4425), 'tensorflow.cast', 'tf.cast', (['mask'], {'dtype': 'tf.float32'}), '(mask, dtype=tf.float32)\n', (4401, 4425), True, 'import tensorflow as tf\n'), ((4518, 4569), 'tensorflow.expand_dims', 'tf.expand_dims', (['mask'], {'axis': '(-1)', 'name': '"""expanded_mask"""'}), "(mask, axis=-1, name='expanded_mask')\n", (4532, 4569), True, 'import tensorflow as tf\n'), ((4593, 4617), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['x'], {'axis': '(1)'}), '(x, axis=1)\n', (4606, 4617), True, 'import tensorflow as tf\n'), ((4627, 4659), 'tensorflow.math.l2_normalize', 'tf.math.l2_normalize', (['x'], {'axis': '(-1)'}), '(x, axis=-1)\n', (4647, 4659), True, 'import tensorflow as tf\n'), ((4789, 4852), 'tensorflow.gather', 'tf.gather', (['proxies', 'categories'], {'axis': '(0)', 'name': '"""positive_proxies"""'}), "(proxies, categories, axis=0, name='positive_proxies')\n", (4798, 4852), True, 'import tensorflow as tf\n'), ((4883, 4980), 'tensorflow.one_hot', 'tf.one_hot', (['categories', 'category_count'], {'on_value': '(False)', 'off_value': '(True)', 'name': '"""negative_mask"""'}), "(categories, category_count, on_value=False, off_value=True, name\n ='negative_mask')\n", (4893, 4980), True, 'import tensorflow as tf\n'), ((5245, 5318), 'tensorflow.map_fn', 'tf.map_fn', (['apply_mask', 'negative_masks'], {'name': '"""negatives"""', 'dtype': 'tf.float32'}), "(apply_mask, negative_masks, name='negatives', dtype=tf.float32)\n", (5254, 5318), True, 'import tensorflow as tf\n'), ((11012, 11049), 'tensorflow.math.l2_normalize', 'tf.math.l2_normalize', (['result'], {'axis': '(-1)'}), '(result, axis=-1)\n', (11032, 11049), True, 'import tensorflow as tf\n'), ((429, 476), 'tensorflow.variable_scope', 'tf.variable_scope', 
(['None'], {'default_name': 'self.name'}), '(None, default_name=self.name)\n', (446, 476), True, 'import tensorflow as tf\n'), ((499, 595), 'tensorflow.get_variable', 'tf.get_variable', (['"""weights"""'], {'shape': '(max_code, width)', 'trainable': '(True)', 'regularizer': 'regularizer'}), "('weights', shape=(max_code, width), trainable=True,\n regularizer=regularizer)\n", (514, 595), True, 'import tensorflow as tf\n'), ((702, 761), 'tensorflow.name_scope', 'tf.name_scope', (['None'], {'values': '[codes]', 'default_name': 'self.name'}), '(None, values=[codes], default_name=self.name)\n', (715, 761), True, 'import tensorflow as tf\n'), ((778, 808), 'tensorflow.gather', 'tf.gather', (['self.weights', 'codes'], {}), '(self.weights, codes)\n', (787, 808), True, 'import tensorflow as tf\n'), ((3456, 3471), 'tensorflow.shape', 'tf.shape', (['codes'], {}), '(codes)\n', (3464, 3471), True, 'import tensorflow as tf\n'), ((3568, 3625), 'tensorflow.constant', 'tf.constant', (['max_sequence_len'], {'dtype': 'tf.int32', 'shape': '(1,)'}), '(max_sequence_len, dtype=tf.int32, shape=(1,))\n', (3579, 3625), True, 'import tensorflow as tf\n'), ((3657, 3692), 'tensorflow.tile', 'tf.tile', (['sequence_len', '[batch_size]'], {}), '(sequence_len, [batch_size])\n', (3664, 3692), True, 'import tensorflow as tf\n'), ((3879, 3939), 'tensorflow.nn.static_rnn', 'tf.nn.static_rnn', ([], {'cell': 'cell', 'dtype': 'tf.float32', 'inputs': 'series'}), '(cell=cell, dtype=tf.float32, inputs=series)\n', (3895, 3939), True, 'import tensorflow as tf\n'), ((4260, 4286), 'tensorflow.range', 'tf.range', (['max_sequence_len'], {}), '(max_sequence_len)\n', (4268, 4286), True, 'import tensorflow as tf\n'), ((4345, 4382), 'tensorflow.expand_dims', 'tf.expand_dims', (['sequence_len'], {'axis': '(-1)'}), '(sequence_len, axis=-1)\n', (4359, 4382), True, 'import tensorflow as tf\n'), ((4438, 4498), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['mask'], {'axis': '(-1)', 'keepdims': '(True)', 'name': 
'"""mask_sum"""'}), "(mask, axis=-1, keepdims=True, name='mask_sum')\n", (4451, 4498), True, 'import tensorflow as tf\n'), ((5046, 5083), 'tensorflow.expand_dims', 'tf.expand_dims', (['category_mask'], {'axis': '(0)'}), '(category_mask, axis=0)\n', (5060, 5083), True, 'import tensorflow as tf\n'), ((5130, 5192), 'tensorflow.boolean_mask', 'tf.boolean_mask', (['proxies', 'mask'], {'axis': '(0)', 'name': '"""batch_negatives"""'}), "(proxies, mask, axis=0, name='batch_negatives')\n", (5145, 5192), True, 'import tensorflow as tf\n'), ((5388, 5443), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['(normed_target * normed_features)'], {'axis': '(-1)'}), '(normed_target * normed_features, axis=-1)\n', (5401, 5443), True, 'import tensorflow as tf\n'), ((5631, 5661), 'tensorflow.expand_dims', 'tf.expand_dims', (['output'], {'axis': '(1)'}), '(output, axis=1)\n', (5645, 5661), True, 'import tensorflow as tf\n'), ((7257, 7328), 'tensorflow.name_scope', 'tf.name_scope', (['"""proxy_loss"""'], {'values': '[output, categories, category_mask]'}), "('proxy_loss', values=[output, categories, category_mask])\n", (7270, 7328), True, 'import tensorflow as tf\n'), ((7466, 7533), 'tensorflow.get_variable', 'tf.get_variable', (['"""points"""'], {'trainable': '(True)', 'initializer': 'proxies_init'}), "('points', trainable=True, initializer=proxies_init)\n", (7481, 7533), True, 'import tensorflow as tf\n'), ((7570, 7635), 'tensorflow.math.l2_normalize', 'tf.math.l2_normalize', (['proxies'], {'axis': '(-1)', 'name': '"""normalized_proxies"""'}), "(proxies, axis=-1, name='normalized_proxies')\n", (7590, 7635), True, 'import tensorflow as tf\n'), ((9819, 9861), 'tensorflow.exp', 'tf.exp', (['positive_distances'], {'name': '"""exp_pos"""'}), "(positive_distances, name='exp_pos')\n", (9825, 9861), True, 'import tensorflow as tf\n'), ((9878, 9920), 'tensorflow.exp', 'tf.exp', (['negative_distances'], {'name': '"""exp_neg"""'}), "(negative_distances, name='exp_neg')\n", (9884, 9920), True, 'import 
tensorflow as tf\n'), ((9944, 9997), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['exp_neg'], {'axis': '(-1)', 'name': '"""total_exp_neg"""'}), "(exp_neg, axis=-1, name='total_exp_neg')\n", (9957, 9997), True, 'import tensorflow as tf\n'), ((10131, 10164), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['loss'], {'name': '"""loss"""'}), "(loss, name='loss')\n", (10145, 10164), True, 'import tensorflow as tf\n'), ((10321, 10399), 'tensorflow.name_scope', 'tf.name_scope', (['"""proxy_val_metrics"""'], {'values': '[output, categories, category_mask]'}), "('proxy_val_metrics', values=[output, categories, category_mask])\n", (10334, 10399), True, 'import tensorflow as tf\n'), ((10872, 10902), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['points'], {'axis': '(0)'}), '(points, axis=0)\n', (10886, 10902), True, 'import tensorflow as tf\n'), ((10947, 10971), 'tensorflow.range', 'tf.range', (['category_count'], {}), '(category_count)\n', (10955, 10971), True, 'import tensorflow as tf\n'), ((7361, 7402), 'tensorflow.initializers.random_uniform', 'tf.initializers.random_uniform', (['(-1.0)', '(1.0)'], {}), '(-1.0, 1.0)\n', (7391, 7402), True, 'import tensorflow as tf\n'), ((8686, 8738), 'tensorflow.cast', 'tf.cast', (['(positive_distances <= 0.0)'], {'dtype': 'tf.float32'}), '(positive_distances <= 0.0, dtype=tf.float32)\n', (8693, 8738), True, 'import tensorflow as tf\n'), ((10074, 10117), 'tensorflow.log', 'tf.log', (['(ratio + epsilon)'], {'name': '"""loss_vector"""'}), "(ratio + epsilon, name='loss_vector')\n", (10080, 10117), True, 'import tensorflow as tf\n'), ((10798, 10828), 'tensorflow.equal', 'tf.equal', (['categories', 'category'], {}), '(categories, category)\n', (10806, 10828), True, 'import tensorflow as tf\n'), ((8228, 8259), 'tensorflow.cast', 'tf.cast', (['step'], {'dtype': 'tf.float32'}), '(step, dtype=tf.float32)\n', (8235, 8259), True, 'import tensorflow as tf\n'), ((9361, 9393), 'tensorflow.math.acos', 'tf.math.acos', (['positive_distances'], {}), 
'(positive_distances)\n', (9373, 9393), True, 'import tensorflow as tf\n'), ((9502, 9518), 'tensorflow.math.cos', 'tf.math.cos', (['psi'], {}), '(psi)\n', (9513, 9518), True, 'import tensorflow as tf\n')]
|
import os
import pandas
import uuid
import math
import plotly.graph_objects as go
from scipy.stats import sem, norm
import numpy
from plotly.subplots import make_subplots
import glob
from models import Gillespie, CellDivision, DeterministicCellDivision
# Physical constants for converting molecule counts <-> concentrations.
n_A = 6.023E23 # Avogadro's Number
e_coli_vol = 6.5E-16 # Liters
def division(df):
    """Build vertical dash-dot line shapes marking cell-division events.

    A division event is any row whose ``Counter`` equals 0; a vertical
    line from y=0 to y=25 is produced at that row's ``Time``.

    Args:
        df: frame with "Time" and "Counter" columns.

    Returns:
        list: plotly-style shape ``dict``s, one per division event.
    """
    return [
        dict(
            type="line",
            x0=t,
            y0=0,
            x1=t,
            y1=25,
            line=dict(
                color="Black",
                width=1,
                dash="dashdot"
            ))
        for t, count in zip(df["Time"], df["Counter"])
        if count == 0
    ]
def gaussian(u, s, x):
    """Normal probability density at ``x`` for mean ``u`` and std-dev ``s``."""
    exponent = math.exp(-0.5 * ((x - u) / s) ** 2)
    normaliser = s * math.sqrt(2 * math.pi)
    return exponent / normaliser
def average_cycle_times(sim, save, cell_cycle):
    """Plot histograms (+ fitted Gaussians) of division / two-gene times.

    Reads every ``*.csv`` in directory ``sim`` (tab-separated, with "Clock"
    and "Counter" columns), collects the clock times at which the counter
    hits ``cell_cycle`` (division) and ``cell_cycle / 2`` (gene duplication),
    converts them to minutes, prints summary stats, and renders a two-row
    histogram figure. When ``save`` is truthy the figure is also written to
    ``Average_times.html`` / ``Average_times.png`` inside ``sim``.

    NOTE(review): ``time_at_division`` / ``time_at_two_genes`` are
    re-initialised on every iteration of the file loop, so only the LAST
    csv's data survives to the plotting stage — suspected bug if the intent
    was to aggregate over all files; confirm before changing.
    """
    path = os.path.join(sim, "*.csv")
    for fname in glob.glob(path):
        df = pandas.read_csv(fname, sep='\t')
        time_at_division = []
        time_at_two_genes = []
        result = zip(df["Clock"].tolist(), df["Counter"].tolist())
        for i in result:
            if i[1] == cell_cycle:
                time_at_division.append(i[0])
            elif i[1] == cell_cycle/2:
                time_at_two_genes.append(i[0])
    # Convert seconds -> minutes.
    time_at_division = [x / 60 for x in time_at_division]
    time_at_two_genes =[x / 60 for x in time_at_two_genes]
    print("Max time division: ", max(time_at_division))
    print("Min time division: ", min(time_at_division))
    print("Division mean: ", numpy.array(time_at_division).mean())
    print("Division std: ", numpy.array(time_at_division).std())
    print("Max time at 2 genes: ", max(time_at_two_genes))
    print("Min time at 2 genes: ", min(time_at_two_genes))
    print("Two genes mean: ", numpy.array(time_at_two_genes).mean())
    print("Two genes std: ", numpy.array(time_at_two_genes).std())
    genes_mean = numpy.array(time_at_two_genes).mean()
    genes_std = numpy.array(time_at_two_genes).std()
    divide_mean = numpy.array(time_at_division).mean()
    divide_std = numpy.array(time_at_division).std()
    # Gaussian pdf curves over the observed ranges, for overlay on the
    # probability-density histograms.
    gauss_divide_x = numpy.linspace(min(time_at_division), max(time_at_division), len(time_at_division))
    gauss_divide_y = norm.pdf(gauss_divide_x, divide_mean, divide_std)
    gauss_genes_x = numpy.linspace(min(time_at_two_genes), max(time_at_two_genes), len(time_at_two_genes))
    gauss_genes_y = norm.pdf(gauss_genes_x, genes_mean, genes_std)
    fig = make_subplots(rows=2, cols=1)
    fig.update_yaxes(title_text="Probability of <b>Two Genes</b> (minutes)", row=2, col=1)
    fig.update_xaxes(title_text="Time (Min)")
    fig.update_layout(
        title="Probability of time of division and two genes in cell ({n} cell cycles)".format(n=len(time_at_division)),
        yaxis_title="Probability of <b>division</b> (minutes)",
        legend_title="Legend",
        font=dict(
            family="Courier New, monospace",
            size=12,
            color="Black"))
    division_trace = go.Histogram(x=time_at_division, histnorm='probability density', name="Division Histogram")
    division_gauss_trace = go.Scatter(x=gauss_divide_x, y=gauss_divide_y, name="Gaussian")
    two_gene_trace = go.Histogram(x=time_at_two_genes, histnorm='probability density', name="2 Genes Histogram")
    two_gene_gauss_trace = go.Scatter(x=gauss_genes_x, y=gauss_genes_y, name="Gaussian")
    fig.add_trace(division_trace, row=1, col=1)
    fig.add_trace(division_gauss_trace, row=1, col=1,)
    fig.add_trace(two_gene_trace, row=2, col=1)
    fig.add_trace(two_gene_gauss_trace, row=2, col=1)
    fig.show()
    if save:
        html = os.path.join(sim, "Average_times.html")
        image = os.path.join(sim, "Average_times.png")
        fig.write_html(html)
        fig.write_image(image)
    else:
        pass
def combine_cell_cycles(sim, save, const):
    """Overlay the average stochastic cell cycle on the deterministic model.

    Reads every ``*.csv`` in directory ``sim`` (tab-separated; columns
    "Counter", "mRNA", "Divide", "Proteins"), splits the trace into
    per-cycle columns at each ``Divide == "Yes"`` row, averages across
    cycles, and plots mean +/- std for mRNA and protein against a
    ``DeterministicCellDivision`` numerical solution (built with ``const``).
    When ``save`` is truthy, writes ``Cell_cycle_average.html``/``.png``
    inside ``sim``.

    NOTE(review): the per-cycle accumulators (``ml``/``tl``/``pl`` and the
    DataFrames) are re-initialised on every iteration of the file loop, so
    only the LAST csv contributes to the plot — suspected bug if the intent
    was to aggregate over all files; confirm before changing.
    """
    path = os.path.join(sim, "*.csv")
    for fname in glob.glob(path):
        df = pandas.read_csv(fname, sep='\t')
        # df = df.iloc[25200:]
        ml = []
        tl = []
        pl = []
        number_of_cycles = []
        mrna_cell_cycles = pandas.DataFrame()
        protein_cell_cycles = pandas.DataFrame()
        counter = pandas.DataFrame()
        result = zip(df["Counter"].tolist(), df["mRNA"].tolist(), df["Divide"].tolist(), df["Proteins"].tolist())
        for i in result:
            if i[2] == "Yes":
                # Cycle boundary: flush the accumulated cycle into uniquely
                # named columns, then start the next cycle with this row.
                x = str(uuid.uuid4())
                mrna_cell_cycles["Cycle_{}".format(x)] = ml
                protein_cell_cycles["Cycle_{}".format(x)] = pl
                counter["Cycle_{}".format(x)] = tl
                number_of_cycles.append(i[0])
                ml.clear()
                tl.clear()
                pl.clear()
                ml.append(i[1])
                tl.append(i[0])
                pl.append(i[3])
            else:
                ml.append(i[1])
                tl.append(i[0])
                pl.append(i[3])
    # Mean and spread across cycles, indexed by normalised cycle progress
    # in [0, 1].
    mrna_cell_cycles["Average"] = mrna_cell_cycles.mean(axis=1)
    mrna_cell_cycles["std"] = mrna_cell_cycles.std(axis=1)
    mrna_cell_cycles["Counter"] = numpy.linspace(0, 1, len(mrna_cell_cycles["Average"].tolist()))
    protein_cell_cycles["Average"] = protein_cell_cycles.mean(axis=1)
    protein_cell_cycles["std"] = protein_cell_cycles.std(axis=1)
    protein_cell_cycles["Counter"] = numpy.linspace(0, 1, len(protein_cell_cycles["Average"].tolist()))
    # Mean +/- std envelopes for the shaded bands.
    mrna_std_minus = []
    mrna_std_plus = []
    prot_std_minus = []
    prot_std_plus = []
    mrna_stats = zip(mrna_cell_cycles["Average"].tolist(), mrna_cell_cycles["std"].tolist())
    prot_stats = zip(protein_cell_cycles["Average"].tolist(), protein_cell_cycles["std"].tolist())
    for i, j in mrna_stats:
        mrna_std_minus.append(i-j)
        mrna_std_plus.append(i+j)
    for i, j in prot_stats:
        prot_std_minus.append(i-j)
        prot_std_plus.append(i+j)
    # Deterministic reference solution; the tail slice presumably drops the
    # transient so one steady-state cycle remains — TODO confirm.
    numerical = DeterministicCellDivision(tmax=36000,
                                          num_of_datapoints=36000,
                                          m0=7.59,
                                          p0=1014.145,
                                          const=const)
    numerical_run = numerical.numerical_sim()
    numerical_run = numerical_run.iloc[32401:]
    numerical_x = numpy.linspace(0, 1, 3600)
    """Just graphing things"""
    fig = make_subplots(rows=2, cols=1, vertical_spacing=0.02, shared_xaxes=True)
    # Making traces for plot
    mrna_std_minus_trace = go.Scatter(x=mrna_cell_cycles["Counter"],
                                      y=mrna_std_minus,
                                      name="Gillespie mRNA STD",
                                      line=dict(color='darkgrey'),
                                      fill=None,
                                      showlegend=False)
    mrna_std_plus_trace = go.Scatter(x=mrna_cell_cycles["Counter"],
                                     y=mrna_std_plus,
                                     name="Gillespie mRNA STD",
                                     line=dict(color='darkgrey'),
                                     fill='tonexty',
                                     showlegend=False)
    mrna_trace = go.Scatter(x=mrna_cell_cycles["Counter"],
                            y=mrna_cell_cycles["Average"],
                            name="Gillespie mRNA",
                            line=dict(color='royalblue'))
    numerical_mrna_trace = go.Scatter(x=numerical_x,
                                      y=numerical_run["mRNA"],
                                      name="Numerical mRNA",
                                      line=dict(color='royalblue',
                                                dash='dash'))
    prot_std_minus_trace = go.Scatter(x=protein_cell_cycles["Counter"],
                                      y=prot_std_minus,
                                      name="Gillespie Protein STD",
                                      line=dict(color='darkgrey'),
                                      fill=None,
                                      showlegend=False)
    prot_std_plus_trace = go.Scatter(x=protein_cell_cycles["Counter"],
                                     y=prot_std_plus,
                                     name="Gillespie Protein STD",
                                     line=dict(color='darkgrey'),
                                     fill='tonexty',
                                     showlegend=False)
    prot_trace = go.Scatter(x=protein_cell_cycles["Counter"],
                            y=protein_cell_cycles["Average"],
                            name="Gillespie Protein",
                            line=dict(color='firebrick'))
    numerical_prot_trace = go.Scatter(x=numerical_x,
                                      y=numerical_run["Proteins"],
                                      name="Numerical Proteins",
                                      line=dict(color='firebrick',
                                                dash='dash'))
    fig.add_trace(prot_trace, row=1, col=1)
    fig.add_trace(numerical_prot_trace, row=1, col=1)
    fig.add_trace(prot_std_minus_trace, row=1, col=1)
    fig.add_trace(prot_std_plus_trace, row=1, col=1,)
    fig.add_trace(numerical_mrna_trace, row=2, col=1)
    fig.add_trace(mrna_trace, row=2, col=1)
    fig.add_trace(mrna_std_minus_trace, row=2, col=1)
    fig.add_trace(mrna_std_plus_trace, row=2, col=1)
    fig.update_yaxes(title_text="Number of <b>mRNA</b> Molecules", row=2, col=1)
    fig.update_xaxes(title_text="Time (Hours)", row=2, col=1)
    fig.update_layout(
        title="Cell cycle average of Protein and mRNA over time for {n} cell cycles".format(n=len(number_of_cycles)),
        yaxis_title="Number of <b>Protein</b> Molecules",
        legend_title="Legend",
        font=dict(
            family="Courier New, monospace",
            size=12,
            color="Black"
        )
    )
    fig.show()
    if save:
        html = os.path.join(sim, "Cell_cycle_average.html")
        image = os.path.join(sim, "Cell_cycle_average.png")
        fig.write_html(html)
        fig.write_image(image)
    else:
        pass
def numerical_plot(run, save):
    """Plot the deterministic (analytical) model: mRNA and protein counts over time.

    :param run: model object exposing ``numerical_sim()`` which returns a frame
        with "Time", "mRNA" and "Proteins" columns.
    :param save: if True, also write ``numerical.html`` / ``numerical.png``.
    """
    frame = run.numerical_sim()
    fig = make_subplots(rows=2, cols=1, shared_xaxes=True, vertical_spacing=0.02)
    # Proteins go on the top panel, mRNA on the bottom; the mRNA trace is
    # added first so the legend order matches the original figure.
    fig.add_trace(go.Scatter(x=frame["Time"], y=frame["mRNA"], name="mRNA"),
                  row=2, col=1)
    fig.add_trace(go.Scatter(x=frame["Time"], y=frame["Proteins"], name="Proteins"),
                  row=1, col=1)
    fig.update_layout(
        title="Numerical Cell division comparison of mRNA and Protein molecules over time",
        yaxis_title="Number of <b>Protein</b> Molecules",
        legend_title="Legend",
        barmode="group",
        font=dict(family="Courier New, monospace", size=12, color="Black"),
    )
    fig.update_yaxes(title_text="Number of <b>mRNA</b> Molecules", row=2, col=1)
    fig.update_xaxes(title_text="Time (Hours)", row=2, col=1)
    fig.show()
    if save:
        fig.write_html("numerical.html")
        fig.write_image("numerical.png")
def histogram_plot(sim, save):
    """Histogram of mRNA and protein counts pooled over every simulation CSV in ``sim``.

    :param sim: directory holding one tab-separated CSV per Gillespie run.
    :param save: if True, also write ``hist.html`` / ``hist.png`` into ``sim``.
    """
    fig = make_subplots(rows=2, cols=1, vertical_spacing=0.02)
    fig.update_layout(
        title="Probability distribution of mRNA and Protein",
        yaxis_title="Probability of <b>mRNA</b> Molecules",
        legend_title="Legend",
        font=dict(family="Courier New, monospace", size=12, color="Black"))
    fig.update_yaxes(title_text="Probability of <b>Protein</b>", row=2, col=1)
    fig.update_xaxes(title_text="Number of <b>Molecules</b>", row=2, col=1)
    # Pool the molecule counts from every run's CSV file.
    mrna_counts = []
    prot_counts = []
    for csv_file in glob.glob(os.path.join(sim, "*.csv")):
        frame = pandas.read_csv(csv_file, sep='\t')
        mrna_counts += frame["mRNA"].tolist()
        prot_counts += frame["Proteins"].tolist()
    fig.add_trace(go.Histogram(x=mrna_counts, name="mRNA Histogram"), row=1, col=1)
    fig.add_trace(go.Histogram(x=prot_counts, name="Protein Histogram"), row=2, col=1)
    fig.show()
    if save:
        fig.write_html(os.path.join(sim, "hist.html"))
        fig.write_image(os.path.join(sim, "hist.png"))
def plot_statistics(sim, save, const):
    """Bar charts comparing Gillespie statistics against the deterministic model.

    2x2 grid: means on the top row, variances on the bottom row; proteins in
    the left column, mRNA in the right.  Gillespie bars average the per-cycle
    statistics and carry a standard-error error bar; numerical bars come from
    the steady-state tail of the deterministic simulation.

    :param sim: directory holding one tab-separated CSV per Gillespie run.
    :param save: if True, also write ``stats.html``/``stats.png`` into ``sim``.
    :param const: rate constants forwarded to the deterministic model.
    """
    stat_fig = make_subplots(rows=2, cols=2, vertical_spacing=0.02)
    path = os.path.join(sim, "*.csv")
    # One DataFrame column per completed cell cycle; rows are the samples
    # recorded within a cycle.
    mrna_cell_cycles = pandas.DataFrame()
    protein_cell_cycles = pandas.DataFrame()
    # Accumulators for the cycle currently being read.
    ml = []
    pl = []
    for fname in glob.glob(path):
        df = pandas.read_csv(fname, sep='\t')
        # Each i is (Counter, mRNA, Divide, Proteins) for one time point.
        result = zip(df["Counter"].tolist(), df["mRNA"].tolist(), df["Divide"].tolist(), df["Proteins"].tolist())
        for i in result:
            if i[2] == "Yes":
                # Division event: flush the finished cycle into a fresh,
                # uniquely named column, then start the next cycle with the
                # current sample.
                # NOTE(review): assigning a list as a DataFrame column assumes
                # every cycle has the same number of samples -- TODO confirm.
                x = str(uuid.uuid4())
                mrna_cell_cycles["Cycle_{}".format(x)] = ml
                protein_cell_cycles["Cycle_{}".format(x)] = pl
                ml.clear()
                pl.clear()
                ml.append(i[1])
                pl.append(i[3])
            else:
                ml.append(i[1])
                pl.append(i[3])
    # Per-cycle statistics: one entry per cycle column.
    mrna_mean = mrna_cell_cycles.mean(axis=0).tolist()
    mrna_var = mrna_cell_cycles.var(axis=0).tolist()
    prot_mean = protein_cell_cycles.mean(axis=0).tolist()
    prot_var = protein_cell_cycles.var(axis=0).tolist()
    # Standard error of the per-cycle means -- used for the error bars below.
    mrna_sem = sem(mrna_mean)
    prot_sem = sem(prot_mean)
    # Deterministic baseline; the first 32400 points are dropped below,
    # presumably to discard the transient and keep the steady state -- TODO confirm.
    numerical = DeterministicCellDivision(tmax=36000,
                                          num_of_datapoints=36000,
                                          m0=7.59,
                                          p0=1014.145,
                                          const=const)
    numerical_run = numerical.numerical_sim()
    numerical_run = numerical_run.iloc[32400:]
    # Numerical-model bars (grey).
    num_protein_mean = go.Bar(x=["Numerical Protein Mean"],
                           y=[numerical_run["Proteins"].mean()],
                           name="Numerical Protein",
                           marker=dict(color=["darkgrey"]))
    num_protein_var = go.Bar(x=["Numerical Protein Variance"],
                          y=[numerical_run["Proteins"].var()],
                          name="Numerical Protein",
                          marker=dict(color=["darkgrey"]))
    num_mrna_mean = go.Bar(x=["Numerical mRNA Mean"],
                        y=[numerical_run["mRNA"].mean()],
                        name="Numerical mRNA",
                        marker=dict(color=["darkgrey"]))
    num_mrna_var = go.Bar(x=["Numerical mRNA Variance"],
                       y=[numerical_run["mRNA"].var()],
                       name="Numerical mRNA",
                       marker=dict(color=["darkgrey"]))
    # Gillespie bars: mean of the per-cycle statistics with SEM error bars.
    gill_protein_mean = go.Bar(x=["Gillespie Protein Mean"],
                               y=[numpy.array(prot_mean).mean()],
                               text=prot_mean,
                               name="Gillespie Protein",
                               marker=dict(color=["firebrick"]),
                               error_y=dict(type='data', array=[prot_sem])
                               )
    gill_mrna_mean = go.Bar(x=["Gillespie mRNA Mean"],
                            y=[numpy.array(mrna_mean).mean()],
                            text=mrna_mean,
                            name="Gillespie mRNA",
                            marker=dict(color=["royalblue"]),
                            error_y=dict(type='data', array=[mrna_sem])
                            )
    gill_protein_var = go.Bar(x=["Gillespie Protein Variance"],
                              y=[numpy.array(prot_var).mean()],
                              text=prot_var,
                              name="Gillespie Protein",
                              marker=dict(color=["firebrick"]),
                              error_y=dict(type='data', array=[sem(prot_var)]))
    gill_mrna_var = go.Bar(x=["Gillespie mRNA Variance"],
                           y=[numpy.array(mrna_var).mean()],
                           text=mrna_var,
                           name="Gillespie mRNA",
                           marker=dict(color=["royalblue"]),
                           error_y=dict(type='data', array=[sem(mrna_var)]))
    # Layout: row 1 = means, row 2 = variances; col 1 = protein, col 2 = mRNA.
    stat_fig.add_trace(gill_protein_mean, row=1, col=1)
    stat_fig.add_trace(num_protein_mean, row=1, col=1)
    stat_fig.add_trace(gill_mrna_mean, row=1, col=2)
    stat_fig.add_trace(num_mrna_mean, row=1, col=2)
    stat_fig.add_trace(gill_protein_var, row=2, col=1)
    stat_fig.add_trace(num_protein_var, row=2, col=1)
    stat_fig.add_trace(gill_mrna_var, row=2, col=2)
    stat_fig.add_trace(num_mrna_var, row=2, col=2)
    stat_fig.update_layout(
        title="Mean and Variance for dividing for {n} cell cycles".format(n=len(mrna_mean)),
        yaxis_title="Number of Molecules",
        showlegend=False,
        font=dict(
            family="Courier New, monospace",
            size=12,
            color="Black"
        )
    )
    stat_fig.show()
    if save:
        html_file = os.path.join(sim, "stats.html")
        png_file = os.path.join(sim, "stats.png")
        stat_fig.write_html(html_file)
        stat_fig.write_image(png_file)
    else:
        pass
def plot_gillespie(number_of_runs, sim, save):
    """Plot a single Gillespie run: molecule counts plus gene copy number.

    Top panel: protein (primary y-axis) and mRNA (secondary y-axis) traces,
    with one shape drawn per division event.  Bottom panel: gene count.
    Traces are only drawn when exactly one simulation was run.

    :param number_of_runs: how many Gillespie simulations were run.
    :param sim: directory holding one tab-separated CSV per run.
    :param save: if True, also write ``Gillespie.html``/``Gillespie.png`` into ``sim``.
    """
    cell_div_fig = make_subplots(specs=[[{"secondary_y": True}],
                                         [{"secondary_y": False}]],
                                  rows=2,
                                  cols=1,
                                  row_heights=[0.8, 0.2],
                                  shared_xaxes=True,
                                  vertical_spacing=0.02)
    cell_div_fig.update_layout(
        title="Cell division comparison of mRNA and Protein molecules over time",
        yaxis_title="Number of <b>Protein</b> Molecules",
        legend_title="Legend",
        font=dict(
            family="Courier New, monospace",
            size=12,
            color="Black"
        )
    )
    cell_div_fig.update_yaxes(title_text="Number of <b>mRNA</b> Molecules", secondary_y=True)
    cell_div_fig.update_xaxes(title_text="Time (Hours)", row=2, col=1)
    cell_div_fig.update_yaxes(title_text="Number of <b>Genes</b>", row=2, col=1)
    path = os.path.join(sim, "*.csv")
    if number_of_runs == 1:
        for fname in glob.glob(path):
            df = pandas.read_csv(fname, sep='\t')
            """Gillespie model of cell division"""
            mrna_trace = go.Scatter(x=df["Time"],
                                    y=df["mRNA"],
                                    name="mRNA",
                                    line=dict(color='royalblue', )
                                    )
            protein_trace = go.Scatter(x=df["Time"],
                                       y=df["Proteins"],
                                       name="Protein",
                                       line=dict(color='firebrick', )
                                       )
            genes_trace = go.Scatter(x=df["Time"],
                                     y=df["Gene Number"],
                                     name="Number of genes")
            cell_div_fig.add_trace(mrna_trace, secondary_y=True, row=1, col=1)
            cell_div_fig.add_trace(protein_trace, secondary_y=False, row=1, col=1)
            cell_div_fig.add_trace(genes_trace, row=2, col=1)
            # division() is defined elsewhere in this file; presumably it
            # yields one plotly shape per division event -- TODO confirm.
            for i in division(df):
                cell_div_fig.add_shape(i, row=1, col=1, secondary_y=True)
        cell_div_fig.show()
    else:
        pass
    # NOTE(review): when number_of_runs != 1 nothing is drawn, yet the
    # (empty) figure is still written below when save is True.
    if save:
        html_path = os.path.join(sim, "Gillespie.html")
        image_path = os.path.join(sim, "Gillespie.png")
        cell_div_fig.write_image(image_path)
        cell_div_fig.write_html(html_path)
    else:
        pass
def main():
    """Configure and run the Gillespie cell-division simulation and all plots.

    Constants to be changed by user.
    """
    # seconds for sim
    tmax = 360000
    number_of_datapoints = 43200
    # k0 (mRNA), k1 (protein), dm, dp
    const = [0.0167, 0.167, 0.0022, 0.0]
    # m0, p0 [0, 0]
    initial_conditions = [7, 1014]
    number_of_simulations = 1
    # cell-cycle length in time points (in units of dt)
    cell_cycle = 6300
    save = True
    # """Initiate Numerical sim"""
    # numerical = DeterministicCellDivision(tmax=tmax,
    #                                       num_of_datapoints=number_of_datapoints,
    #                                       m0=initial_conditions[0],
    #                                       p0=initial_conditions[1],
    #                                       const=const)
    # numerical_plot(numerical, save)
    """Begin Gillespie Simulation"""
    gillespie_cell_model = CellDivision(tmax=tmax, m0=initial_conditions[0], p0=initial_conditions[1], const=const,
                                        number_of_sims=number_of_simulations, cell_cycle=cell_cycle)
    # multiple_cells() writes one CSV per simulated cell and returns the
    # output directory, which every plotting helper below globs for "*.csv".
    run = gillespie_cell_model.multiple_cells()
    combine_cell_cycles(sim=run, save=save, const=const)
    average_cycle_times(sim=run, save=save, cell_cycle=cell_cycle)
    # """Different plots for the Gillespie data"""
    histogram_plot(sim=run, save=save)
    plot_statistics(sim=run, save=save, const=const)
    plot_gillespie(number_of_runs=number_of_simulations, sim=run, save=save)


if __name__ == '__main__':
    main()
|
[
"pandas.DataFrame",
"plotly.graph_objects.Scatter",
"math.exp",
"uuid.uuid4",
"plotly.graph_objects.Histogram",
"math.sqrt",
"models.DeterministicCellDivision",
"models.CellDivision",
"pandas.read_csv",
"scipy.stats.norm.pdf",
"numpy.array",
"scipy.stats.sem",
"numpy.linspace",
"glob.glob",
"plotly.subplots.make_subplots",
"os.path.join"
] |
[((1001, 1027), 'os.path.join', 'os.path.join', (['sim', '"""*.csv"""'], {}), "(sim, '*.csv')\n", (1013, 1027), False, 'import os\n'), ((1045, 1060), 'glob.glob', 'glob.glob', (['path'], {}), '(path)\n', (1054, 1060), False, 'import glob\n'), ((2356, 2405), 'scipy.stats.norm.pdf', 'norm.pdf', (['gauss_divide_x', 'divide_mean', 'divide_std'], {}), '(gauss_divide_x, divide_mean, divide_std)\n', (2364, 2405), False, 'from scipy.stats import sem, norm\n'), ((2533, 2579), 'scipy.stats.norm.pdf', 'norm.pdf', (['gauss_genes_x', 'genes_mean', 'genes_std'], {}), '(gauss_genes_x, genes_mean, genes_std)\n', (2541, 2579), False, 'from scipy.stats import sem, norm\n'), ((2591, 2620), 'plotly.subplots.make_subplots', 'make_subplots', ([], {'rows': '(2)', 'cols': '(1)'}), '(rows=2, cols=1)\n', (2604, 2620), False, 'from plotly.subplots import make_subplots\n'), ((3132, 3228), 'plotly.graph_objects.Histogram', 'go.Histogram', ([], {'x': 'time_at_division', 'histnorm': '"""probability density"""', 'name': '"""Division Histogram"""'}), "(x=time_at_division, histnorm='probability density', name=\n 'Division Histogram')\n", (3144, 3228), True, 'import plotly.graph_objects as go\n'), ((3251, 3314), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': 'gauss_divide_x', 'y': 'gauss_divide_y', 'name': '"""Gaussian"""'}), "(x=gauss_divide_x, y=gauss_divide_y, name='Gaussian')\n", (3261, 3314), True, 'import plotly.graph_objects as go\n'), ((3337, 3433), 'plotly.graph_objects.Histogram', 'go.Histogram', ([], {'x': 'time_at_two_genes', 'histnorm': '"""probability density"""', 'name': '"""2 Genes Histogram"""'}), "(x=time_at_two_genes, histnorm='probability density', name=\n '2 Genes Histogram')\n", (3349, 3433), True, 'import plotly.graph_objects as go\n'), ((3456, 3517), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': 'gauss_genes_x', 'y': 'gauss_genes_y', 'name': '"""Gaussian"""'}), "(x=gauss_genes_x, y=gauss_genes_y, name='Gaussian')\n", (3466, 3517), True, 'import 
plotly.graph_objects as go\n'), ((4001, 4027), 'os.path.join', 'os.path.join', (['sim', '"""*.csv"""'], {}), "(sim, '*.csv')\n", (4013, 4027), False, 'import os\n'), ((4045, 4060), 'glob.glob', 'glob.glob', (['path'], {}), '(path)\n', (4054, 4060), False, 'import glob\n'), ((4220, 4238), 'pandas.DataFrame', 'pandas.DataFrame', ([], {}), '()\n', (4236, 4238), False, 'import pandas\n'), ((4265, 4283), 'pandas.DataFrame', 'pandas.DataFrame', ([], {}), '()\n', (4281, 4283), False, 'import pandas\n'), ((4298, 4316), 'pandas.DataFrame', 'pandas.DataFrame', ([], {}), '()\n', (4314, 4316), False, 'import pandas\n'), ((5925, 6027), 'models.DeterministicCellDivision', 'DeterministicCellDivision', ([], {'tmax': '(36000)', 'num_of_datapoints': '(36000)', 'm0': '(7.59)', 'p0': '(1014.145)', 'const': 'const'}), '(tmax=36000, num_of_datapoints=36000, m0=7.59, p0=\n 1014.145, const=const)\n', (5950, 6027), False, 'from models import Gillespie, CellDivision, DeterministicCellDivision\n'), ((6302, 6328), 'numpy.linspace', 'numpy.linspace', (['(0)', '(1)', '(3600)'], {}), '(0, 1, 3600)\n', (6316, 6328), False, 'import numpy\n'), ((6370, 6441), 'plotly.subplots.make_subplots', 'make_subplots', ([], {'rows': '(2)', 'cols': '(1)', 'vertical_spacing': '(0.02)', 'shared_xaxes': '(True)'}), '(rows=2, cols=1, vertical_spacing=0.02, shared_xaxes=True)\n', (6383, 6441), False, 'from plotly.subplots import make_subplots\n'), ((10280, 10351), 'plotly.subplots.make_subplots', 'make_subplots', ([], {'rows': '(2)', 'cols': '(1)', 'shared_xaxes': '(True)', 'vertical_spacing': '(0.02)'}), '(rows=2, cols=1, shared_xaxes=True, vertical_spacing=0.02)\n', (10293, 10351), False, 'from plotly.subplots import make_subplots\n'), ((10369, 10442), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': "numerical_run['Time']", 'y': "numerical_run['mRNA']", 'name': '"""mRNA"""'}), "(x=numerical_run['Time'], y=numerical_run['mRNA'], name='mRNA')\n", (10379, 10442), True, 'import plotly.graph_objects as go\n'), 
((10517, 10603), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': "numerical_run['Time']", 'y': "numerical_run['Proteins']", 'name': '"""Proteins"""'}), "(x=numerical_run['Time'], y=numerical_run['Proteins'], name=\n 'Proteins')\n", (10527, 10603), True, 'import plotly.graph_objects as go\n'), ((11505, 11557), 'plotly.subplots.make_subplots', 'make_subplots', ([], {'rows': '(2)', 'cols': '(1)', 'vertical_spacing': '(0.02)'}), '(rows=2, cols=1, vertical_spacing=0.02)\n', (11518, 11557), False, 'from plotly.subplots import make_subplots\n'), ((12088, 12114), 'os.path.join', 'os.path.join', (['sim', '"""*.csv"""'], {}), "(sim, '*.csv')\n", (12100, 12114), False, 'import os\n'), ((12160, 12175), 'glob.glob', 'glob.glob', (['path'], {}), '(path)\n', (12169, 12175), False, 'import glob\n'), ((12325, 12368), 'plotly.graph_objects.Histogram', 'go.Histogram', ([], {'x': 'mrna', 'name': '"""mRNA Histogram"""'}), "(x=mrna, name='mRNA Histogram')\n", (12337, 12368), True, 'import plotly.graph_objects as go\n'), ((12385, 12431), 'plotly.graph_objects.Histogram', 'go.Histogram', ([], {'x': 'prot', 'name': '"""Protein Histogram"""'}), "(x=prot, name='Protein Histogram')\n", (12397, 12431), True, 'import plotly.graph_objects as go\n'), ((12823, 12875), 'plotly.subplots.make_subplots', 'make_subplots', ([], {'rows': '(2)', 'cols': '(2)', 'vertical_spacing': '(0.02)'}), '(rows=2, cols=2, vertical_spacing=0.02)\n', (12836, 12875), False, 'from plotly.subplots import make_subplots\n'), ((12887, 12913), 'os.path.join', 'os.path.join', (['sim', '"""*.csv"""'], {}), "(sim, '*.csv')\n", (12899, 12913), False, 'import os\n'), ((12937, 12955), 'pandas.DataFrame', 'pandas.DataFrame', ([], {}), '()\n', (12953, 12955), False, 'import pandas\n'), ((12982, 13000), 'pandas.DataFrame', 'pandas.DataFrame', ([], {}), '()\n', (12998, 13000), False, 'import pandas\n'), ((13042, 13057), 'glob.glob', 'glob.glob', (['path'], {}), '(path)\n', (13051, 13057), False, 'import glob\n'), ((13872, 
13886), 'scipy.stats.sem', 'sem', (['mrna_mean'], {}), '(mrna_mean)\n', (13875, 13886), False, 'from scipy.stats import sem, norm\n'), ((13902, 13916), 'scipy.stats.sem', 'sem', (['prot_mean'], {}), '(prot_mean)\n', (13905, 13916), False, 'from scipy.stats import sem, norm\n'), ((13934, 14036), 'models.DeterministicCellDivision', 'DeterministicCellDivision', ([], {'tmax': '(36000)', 'num_of_datapoints': '(36000)', 'm0': '(7.59)', 'p0': '(1014.145)', 'const': 'const'}), '(tmax=36000, num_of_datapoints=36000, m0=7.59, p0=\n 1014.145, const=const)\n', (13959, 14036), False, 'from models import Gillespie, CellDivision, DeterministicCellDivision\n'), ((17797, 17959), 'plotly.subplots.make_subplots', 'make_subplots', ([], {'specs': "[[{'secondary_y': True}], [{'secondary_y': False}]]", 'rows': '(2)', 'cols': '(1)', 'row_heights': '[0.8, 0.2]', 'shared_xaxes': '(True)', 'vertical_spacing': '(0.02)'}), "(specs=[[{'secondary_y': True}], [{'secondary_y': False}]],\n rows=2, cols=1, row_heights=[0.8, 0.2], shared_xaxes=True,\n vertical_spacing=0.02)\n", (17810, 17959), False, 'from plotly.subplots import make_subplots\n'), ((18745, 18771), 'os.path.join', 'os.path.join', (['sim', '"""*.csv"""'], {}), "(sim, '*.csv')\n", (18757, 18771), False, 'import os\n'), ((21028, 21181), 'models.CellDivision', 'CellDivision', ([], {'tmax': 'tmax', 'm0': 'initial_conditions[0]', 'p0': 'initial_conditions[1]', 'const': 'const', 'number_of_sims': 'number_of_simulations', 'cell_cycle': 'cell_cycle'}), '(tmax=tmax, m0=initial_conditions[0], p0=initial_conditions[1],\n const=const, number_of_sims=number_of_simulations, cell_cycle=cell_cycle)\n', (21040, 21181), False, 'from models import Gillespie, CellDivision, DeterministicCellDivision\n'), ((893, 928), 'math.exp', 'math.exp', (['(-0.5 * ((x - u) / s) ** 2)'], {}), '(-0.5 * ((x - u) / s) ** 2)\n', (901, 928), False, 'import math\n'), ((1075, 1107), 'pandas.read_csv', 'pandas.read_csv', (['fname'], {'sep': '"""\t"""'}), "(fname, sep='\\t')\n", 
(1090, 1107), False, 'import pandas\n'), ((3767, 3806), 'os.path.join', 'os.path.join', (['sim', '"""Average_times.html"""'], {}), "(sim, 'Average_times.html')\n", (3779, 3806), False, 'import os\n'), ((3823, 3861), 'os.path.join', 'os.path.join', (['sim', '"""Average_times.png"""'], {}), "(sim, 'Average_times.png')\n", (3835, 3861), False, 'import os\n'), ((4075, 4107), 'pandas.read_csv', 'pandas.read_csv', (['fname'], {'sep': '"""\t"""'}), "(fname, sep='\\t')\n", (4090, 4107), False, 'import pandas\n'), ((9967, 10011), 'os.path.join', 'os.path.join', (['sim', '"""Cell_cycle_average.html"""'], {}), "(sim, 'Cell_cycle_average.html')\n", (9979, 10011), False, 'import os\n'), ((10028, 10071), 'os.path.join', 'os.path.join', (['sim', '"""Cell_cycle_average.png"""'], {}), "(sim, 'Cell_cycle_average.png')\n", (10040, 10071), False, 'import os\n'), ((12190, 12222), 'pandas.read_csv', 'pandas.read_csv', (['fname'], {'sep': '"""\t"""'}), "(fname, sep='\\t')\n", (12205, 12222), False, 'import pandas\n'), ((12584, 12614), 'os.path.join', 'os.path.join', (['sim', '"""hist.html"""'], {}), "(sim, 'hist.html')\n", (12596, 12614), False, 'import os\n'), ((12634, 12663), 'os.path.join', 'os.path.join', (['sim', '"""hist.png"""'], {}), "(sim, 'hist.png')\n", (12646, 12663), False, 'import os\n'), ((13072, 13104), 'pandas.read_csv', 'pandas.read_csv', (['fname'], {'sep': '"""\t"""'}), "(fname, sep='\\t')\n", (13087, 13104), False, 'import pandas\n'), ((17546, 17577), 'os.path.join', 'os.path.join', (['sim', '"""stats.html"""'], {}), "(sim, 'stats.html')\n", (17558, 17577), False, 'import os\n'), ((17597, 17627), 'os.path.join', 'os.path.join', (['sim', '"""stats.png"""'], {}), "(sim, 'stats.png')\n", (17609, 17627), False, 'import os\n'), ((18821, 18836), 'glob.glob', 'glob.glob', (['path'], {}), '(path)\n', (18830, 18836), False, 'import glob\n'), ((19448, 19517), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': "df['Time']", 'y': "df['Gene Number']", 'name': '"""Number of 
genes"""'}), "(x=df['Time'], y=df['Gene Number'], name='Number of genes')\n", (19458, 19517), True, 'import plotly.graph_objects as go\n'), ((19982, 20017), 'os.path.join', 'os.path.join', (['sim', '"""Gillespie.html"""'], {}), "(sim, 'Gillespie.html')\n", (19994, 20017), False, 'import os\n'), ((20039, 20073), 'os.path.join', 'os.path.join', (['sim', '"""Gillespie.png"""'], {}), "(sim, 'Gillespie.png')\n", (20051, 20073), False, 'import os\n'), ((2030, 2060), 'numpy.array', 'numpy.array', (['time_at_two_genes'], {}), '(time_at_two_genes)\n', (2041, 2060), False, 'import numpy\n'), ((2084, 2114), 'numpy.array', 'numpy.array', (['time_at_two_genes'], {}), '(time_at_two_genes)\n', (2095, 2114), False, 'import numpy\n'), ((2139, 2168), 'numpy.array', 'numpy.array', (['time_at_division'], {}), '(time_at_division)\n', (2150, 2168), False, 'import numpy\n'), ((2193, 2222), 'numpy.array', 'numpy.array', (['time_at_division'], {}), '(time_at_division)\n', (2204, 2222), False, 'import numpy\n'), ((18855, 18887), 'pandas.read_csv', 'pandas.read_csv', (['fname'], {'sep': '"""\t"""'}), "(fname, sep='\\t')\n", (18870, 18887), False, 'import pandas\n'), ((866, 888), 'math.sqrt', 'math.sqrt', (['(2 * math.pi)'], {}), '(2 * math.pi)\n', (875, 888), False, 'import math\n'), ((1655, 1684), 'numpy.array', 'numpy.array', (['time_at_division'], {}), '(time_at_division)\n', (1666, 1684), False, 'import numpy\n'), ((1721, 1750), 'numpy.array', 'numpy.array', (['time_at_division'], {}), '(time_at_division)\n', (1732, 1750), False, 'import numpy\n'), ((1907, 1937), 'numpy.array', 'numpy.array', (['time_at_two_genes'], {}), '(time_at_two_genes)\n', (1918, 1937), False, 'import numpy\n'), ((1975, 2005), 'numpy.array', 'numpy.array', (['time_at_two_genes'], {}), '(time_at_two_genes)\n', (1986, 2005), False, 'import numpy\n'), ((4494, 4506), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (4504, 4506), False, 'import uuid\n'), ((13298, 13310), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', 
(13308, 13310), False, 'import uuid\n'), ((15333, 15355), 'numpy.array', 'numpy.array', (['prot_mean'], {}), '(prot_mean)\n', (15344, 15355), False, 'import numpy\n'), ((15729, 15751), 'numpy.array', 'numpy.array', (['mrna_mean'], {}), '(mrna_mean)\n', (15740, 15751), False, 'import numpy\n'), ((16118, 16139), 'numpy.array', 'numpy.array', (['prot_var'], {}), '(prot_var)\n', (16129, 16139), False, 'import numpy\n'), ((16377, 16390), 'scipy.stats.sem', 'sem', (['prot_var'], {}), '(prot_var)\n', (16380, 16390), False, 'from scipy.stats import sem, norm\n'), ((16483, 16504), 'numpy.array', 'numpy.array', (['mrna_var'], {}), '(mrna_var)\n', (16494, 16504), False, 'import numpy\n'), ((16727, 16740), 'scipy.stats.sem', 'sem', (['mrna_var'], {}), '(mrna_var)\n', (16730, 16740), False, 'from scipy.stats import sem, norm\n')]
|
import json
from dataclasses import dataclass, field
from typing import Optional

from classes.commands import CommandType
@dataclass
class ServerInfo:
    """Snapshot of a running game server's state."""

    # Number of players currently connected (defaults to 0).
    PlayerCount: int = 0
    # Name of the map currently loaded (empty by default).
    Map: str = ''
@dataclass
class LaunchOptions:
    """Options passed to the game server at launch, deserializable from JSON.

    Field names match the keys of the ``LaunchOptions`` object in the JSON
    configuration file, so an instance can be built with ``LaunchOptions(**d)``.
    """
    Map: str = ''
    Servername: str = 'Cow Server'
    Gamemode: str = ''
    Port: int = 7778
    BotCount: int = 0
    MaxPlayers: int = 16
    Playlist: str = 'DM'
    SCP: int = 0
    # None means no time limit is configured (was annotated `int` despite the
    # None default).
    TimeLimit: Optional[int] = None

    # @staticmethod added: this factory takes a dict, not an instance, and is
    # only ever called on the class (e.g. LaunchOptions.LoadFromJson(data)).
    @staticmethod
    def LoadFromJson(jsonStr):
        """Build a LaunchOptions from a dict of field values (parsed JSON)."""
        return LaunchOptions(**jsonStr)
@dataclass
class ServerOptions:
    """Full server configuration as stored in a JSON configuration file."""

    # default_factory gives every ServerOptions its own LaunchOptions; the
    # original default `LaunchOptions()` was created once at class-definition
    # time and shared between all instances.
    LaunchOptions: LaunchOptions = field(default_factory=LaunchOptions)
    AutoRestartInLobby: bool = False
    # Commands permitted on this server; populated from JSON -- presumably
    # None means "not configured", TODO confirm against callers.
    AllowedCommands: 'list[CommandType]' = None

    @staticmethod
    def LoadFromFile(fileName: str):
        """Read a JSON configuration file.

        :param fileName: path to the configuration file.
        :return: a ServerOptions instance, or None if the file cannot be
            read or parsed (an error message is printed).
        """
        try:
            # `with` guarantees the file handle is closed (the original
            # leaked it), and `except Exception` replaces a bare `except:`
            # that also swallowed KeyboardInterrupt/SystemExit.
            with open(fileName) as file:
                data = json.loads(file.read())
            return ServerOptions(
                LaunchOptions.LoadFromJson(data['LaunchOptions']),
                data['AutoRestartInLobby'],
                [CommandType(x) for x in data['AllowedCommands']])
        except Exception:
            print('Failed to read configuration file: {}'.format(fileName))
            return None
|
[
"classes.commands.CommandType"
] |
[((908, 922), 'classes.commands.CommandType', 'CommandType', (['x'], {}), '(x)\n', (919, 922), False, 'from classes.commands import CommandType\n')]
|
import sys
import higu.model
import higu.web_session
# Maps the command-line access-level names to the corresponding
# higu.model ACCESS_LEVEL_* constants.
LEVEL_MAP = {
    'none' : higu.model.ACCESS_LEVEL_NONE,
    'read' : higu.model.ACCESS_LEVEL_READ,
    'edit' : higu.model.ACCESS_LEVEL_EDIT,
    'admin' : higu.model.ACCESS_LEVEL_ADMIN,
}

# User-administration CLI: add/remove users, set passwords and access levels.
if( __name__ == '__main__' ):

    import optparse

    parser = optparse.OptionParser( usage = 'Usage: %prog [options] user' )
    parser.add_option( '-c', '--config',
            dest = 'config',
            help = 'Configuration File' )
    parser.add_option( '-r', '--remove',
            dest = 'remove', action = 'store_true', default = False,
            help = 'Remove user' )
    parser.add_option( '-a', '--add',
            dest = 'add', action = 'store_true', default = False,
            help = 'Add user' )
    parser.add_option( '-p', '--password',
            dest = 'password', default = None,
            help = 'Set / Change user password' )
    parser.add_option( '-l', '--level',
            dest = 'level', default = None,
            help = 'Set / Change user access level (none, read, edit, admin)' )

    opts, args = parser.parse_args()

    # The positional argument (user name) is required.
    if( len( args ) < 1 ):
        parser.print_help()
        sys.exit( 0 )

    user = args[0]

    # NOTE(review): this tests sys.argv rather than opts.config, so *any*
    # argument routes through the explicit-config branch even when -c was
    # not given -- presumably higu.config.init(None) behaves like init();
    # TODO confirm.  Also, higu.config is used although only higu.model and
    # higu.web_session are imported -- relies on the package exposing it.
    if( len( sys.argv ) > 1 ):
        cfg = higu.config.init( opts.config )
    else:
        cfg = higu.config.init()

    higu.web_session.init( cfg.get_path( 'library' ) )
    access = higu.web_session.WebSessionAccess()

    if( opts.remove ):
        access.drop_user( user )
        sys.exit( 0 )
    elif( opts.add ):
        # Adding a user requires an explicit password.
        if( opts.password is None ):
            parser.print_help()
            sys.exit( 0 )
        if( opts.level is None ):
            access_level = higu.model.ACCESS_LEVEL_NONE
        else:
            access_level = LEVEL_MAP[opts.level]
        access.create_user( user, opts.password, access_level )
    else:
        # No add/remove flag: update the password and/or level of an
        # existing user.
        if( opts.password is not None ):
            access.set_password( user, opts.password )
        if( opts.level is not None ):
            access_level = LEVEL_MAP[opts.level]
            access.promote( user, access_level )
[
"sys.exit",
"optparse.OptionParser"
] |
[((311, 369), 'optparse.OptionParser', 'optparse.OptionParser', ([], {'usage': '"""Usage: %prog [options] user"""'}), "(usage='Usage: %prog [options] user')\n", (332, 369), False, 'import optparse\n'), ((1134, 1145), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1142, 1145), False, 'import sys\n'), ((1459, 1470), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1467, 1470), False, 'import sys\n'), ((1577, 1588), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1585, 1588), False, 'import sys\n')]
|
# The following comments couldn't be translated into the new config version:
#,
import FWCore.ParameterSet.Config as cms
#------------------
# clustering:
#------------------
# BasicCluster producer
from RecoEcal.EgammaClusterProducers.cosmicBasicClusters_cfi import *
# SuperCluster producer
from RecoEcal.EgammaClusterProducers.cosmicSuperClusters_cfi import *
# SuperCluster with Preshower producer
#include "RecoEcal/EgammaClusterProducers/data/SuperClustersWithPreshower.cfi"
# create sequence for clustering: run the BasicCluster producer, then build
# SuperClusters from its output ('*' denotes ordering in a cms.Sequence)
cosmicClusteringSequence = cms.Sequence(cosmicBasicClusters * cosmicSuperClusters)
|
[
"FWCore.ParameterSet.Config.Sequence"
] |
[((547, 602), 'FWCore.ParameterSet.Config.Sequence', 'cms.Sequence', (['(cosmicBasicClusters * cosmicSuperClusters)'], {}), '(cosmicBasicClusters * cosmicSuperClusters)\n', (559, 602), True, 'import FWCore.ParameterSet.Config as cms\n')]
|
# -*- coding: utf-8 -*-
import json
import re
import scrapy
from locations.items import GeojsonPointItem
from locations.hours import OpeningHours
# Maps full weekday names (as used in the site's Hours JSON keys) to the
# two-letter day codes expected by OpeningHours.add_range.
DAY_MAPPING = {
    'Monday': 'Mo',
    'Tuesday': 'Tu',
    'Wednesday': 'We',
    'Thursday': 'Th',
    'Friday': 'Fr',
    'Saturday': 'Sa',
    'Sunday': 'Su'
}
class BoneFishGrillSpider(scrapy.Spider):
    """Scrape Bonefish Grill restaurant locations from the chain's all-locations page."""
    download_delay = 0.2
    name = "bonefishgrill"
    allowed_domains = ["bonefishgrill.com"]
    start_urls = (
        'https://www.bonefishgrill.com/locations/all',
    )

    def parse(self, response):
        """Follow every per-location link found on the index page."""
        urls = response.xpath('//li[@class="location-row"]/a/@href').extract()
        for url in urls:
            yield scrapy.Request(response.urljoin(url), callback=self.parse_location)

    def parse_location(self, response):
        """Extract one location from the inline "initLocationDetail" script blob."""
        data = response.xpath('//script[contains(text(), "initLocationDetail")][1]/text()').extract_first()
        try:
            properties = {
                'ref': re.search(r'"UnitId":"(.*?)"', data).group(1),
                'name': re.search(r'"City":"(.*?)"', data).group(1),
                'addr_full': re.search(r'"Address":"(.*?)"', data).group(1),
                'city': re.search(r'"City":"(.*?)"', data).group(1),
                'state': re.search(r'"State":"(.*?)"', data).group(1),
                'postcode': re.search(r'"Zip":"(.*?)"', data).group(1),
                'phone': re.search(r'"Phone":"(.*?)"', data).group(1),
                'lat': re.search(r'"Latitude":"(.*?)"', data).group(1),
                'lon': re.search(r'"Longitude":"(.*?)"', data).group(1),
                'website': response.url
            }

            hours = self.parse_hours(re.search(r'"Hours":(.*?})', data).group(1))
            if hours:
                properties['opening_hours'] = hours

            yield GeojsonPointItem(**properties)
        except Exception:
            # Best effort: a malformed/missing detail blob (re.search returning
            # None, bad JSON, ...) skips the location.  Was a bare `except:`,
            # which also swallowed KeyboardInterrupt/SystemExit.
            pass

    def parse_hours(self, response):
        """Convert the scraped Hours JSON string into an opening-hours spec.

        :param response: JSON object string with <Day>Open / <Day>Close keys.
        """
        opening_hours = OpeningHours()
        hrs = json.loads(response)

        WEEKDAYS = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday']
        for day in WEEKDAYS:
            # renamed from `open`/`close`, which shadowed the builtins
            open_time = hrs.get(day + 'Open')
            close_time = hrs.get(day + 'Close')
            opening_hours.add_range(day=DAY_MAPPING[day],
                                    open_time=open_time,
                                    close_time=close_time,
                                    time_format='%H:%M %p')

        return opening_hours.as_opening_hours()
|
[
"locations.items.GeojsonPointItem",
"re.search",
"locations.hours.OpeningHours",
"json.loads"
] |
[((1946, 1960), 'locations.hours.OpeningHours', 'OpeningHours', ([], {}), '()\n', (1958, 1960), False, 'from locations.hours import OpeningHours\n'), ((2003, 2023), 'json.loads', 'json.loads', (['weekdays'], {}), '(weekdays)\n', (2013, 2023), False, 'import json\n'), ((1820, 1850), 'locations.items.GeojsonPointItem', 'GeojsonPointItem', ([], {}), '(**properties)\n', (1836, 1850), False, 'from locations.items import GeojsonPointItem\n'), ((969, 1004), 're.search', 're.search', (['""""UnitId":"(.*?)\\""""', 'data'], {}), '(\'"UnitId":"(.*?)"\', data)\n', (978, 1004), False, 'import re\n'), ((1040, 1073), 're.search', 're.search', (['""""City":"(.*?)\\""""', 'data'], {}), '(\'"City":"(.*?)"\', data)\n', (1049, 1073), False, 'import re\n'), ((1114, 1150), 're.search', 're.search', (['""""Address":"(.*?)\\""""', 'data'], {}), '(\'"Address":"(.*?)"\', data)\n', (1123, 1150), False, 'import re\n'), ((1186, 1219), 're.search', 're.search', (['""""City":"(.*?)\\""""', 'data'], {}), '(\'"City":"(.*?)"\', data)\n', (1195, 1219), False, 'import re\n'), ((1256, 1290), 're.search', 're.search', (['""""State":"(.*?)\\""""', 'data'], {}), '(\'"State":"(.*?)"\', data)\n', (1265, 1290), False, 'import re\n'), ((1330, 1362), 're.search', 're.search', (['""""Zip":"(.*?)\\""""', 'data'], {}), '(\'"Zip":"(.*?)"\', data)\n', (1339, 1362), False, 'import re\n'), ((1399, 1433), 're.search', 're.search', (['""""Phone":"(.*?)\\""""', 'data'], {}), '(\'"Phone":"(.*?)"\', data)\n', (1408, 1433), False, 'import re\n'), ((1468, 1505), 're.search', 're.search', (['""""Latitude":"(.*?)\\""""', 'data'], {}), '(\'"Latitude":"(.*?)"\', data)\n', (1477, 1505), False, 'import re\n'), ((1540, 1578), 're.search', 're.search', (['""""Longitude":"(.*?)\\""""', 'data'], {}), '(\'"Longitude":"(.*?)"\', data)\n', (1549, 1578), False, 'import re\n'), ((1682, 1715), 're.search', 're.search', (['""""Hours":(.*?})"""', 'data'], {}), '(\'"Hours":(.*?})\', data)\n', (1691, 1715), False, 'import re\n')]
|
# ------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
# ------------------------------------------------------------------------------------------
from typing import Any, List, Optional
import numpy as np
import pytest
import torch
from torch.cuda.amp import GradScaler
from torch.nn import Identity
from InnerEye.Common import common_util
from InnerEye.Common.common_util import MetricsDataframeLoggers
from InnerEye.Common.output_directories import OutputFolderForTests
from InnerEye.ML.common import ModelExecutionMode
from InnerEye.ML.config import SegmentationModelBase
from InnerEye.ML.configs.classification.DummyClassification import DummyClassification
from InnerEye.ML.deep_learning_config import DeepLearningConfig
from InnerEye.ML.model_training import model_train
from InnerEye.ML.model_training_steps import ModelTrainingStepsForScalarModel, TrainValidateParameters, \
get_scalar_model_inputs_and_labels
from InnerEye.ML.models.architectures.base_model import BaseModel, CropSizeConstraints
from InnerEye.ML.models.parallel.data_parallel import DataParallelModel
from InnerEye.ML.pipelines.forward_pass import SegmentationForwardPass
from InnerEye.ML.utils import ml_util
from InnerEye.ML.utils.device_aware_module import DeviceAwareModule
from InnerEye.ML.utils.io_util import ImageDataType
from InnerEye.ML.utils.metrics_util import SummaryWriters
from InnerEye.ML.utils.model_util import ModelAndInfo
from Tests.ML.configs.ClassificationModelForTesting import ClassificationModelForTesting
from Tests.ML.models.architectures.DummyScalarModel import DummyScalarModel
from Tests.ML.util import machine_has_gpu, no_gpu_available
from Tests.ML.util import get_default_checkpoint_handler
class SimpleModel(BaseModel):
    """A minimal conv/deconv 3D model for exercising the training pipeline.

    When ``insert_value_in_output`` is truthy, that value is written into the
    first element of the output's last dimension -- the anomaly-detection
    tests use this to inject NaN/Inf into the forward pass.
    """

    def __init__(self, input_channels: int, channels: list, n_classes: int, kernel_size: int,
                 insert_value_in_output: Optional[float] = None,
                 crop_size_constraints: CropSizeConstraints = None):
        super().__init__(input_channels=input_channels, name="SimpleModel",
                         crop_size_constraints=crop_size_constraints)
        self.channels = channels
        self.n_classes = n_classes
        self.kernel_size = kernel_size
        self.insert_value_in_output = insert_value_in_output
        # One downsampling convolution followed by one transposed convolution
        # back up to the number of classes.
        downsample = torch.nn.Conv3d(input_channels, channels[0], kernel_size=kernel_size)
        upsample = torch.nn.ConvTranspose3d(channels[0], n_classes, kernel_size=kernel_size)
        self._model = torch.nn.Sequential(downsample, upsample)

    def forward(self, x: Any) -> Any:  # type: ignore
        output = self._model(x)
        if self.insert_value_in_output:
            output[..., 0] = self.insert_value_in_output
        return output

    def get_all_child_layers(self) -> List[torch.nn.Module]:
        return [layer for layer in self._model.children()]
@pytest.mark.parametrize("value_to_insert", [1.0, np.NaN, np.Inf])
@pytest.mark.parametrize("in_training_mode", [True, False])
def test_anomaly_detection(value_to_insert: float, in_training_mode: bool) -> None:
    """
    Test anomaly detection for the segmentation forward pass.
    :param value_to_insert: The value to insert in the image (nan, inf, or a valid float)
    :param in_training_mode: If true, run the segmentation forward pass in training mode, otherwise use the
    settings for running on the validation set.
    :return:
    """
    image_size = [1, 1, 4, 4, 4]
    labels_size = [1, 2, 4, 4, 4]
    mask_size = [1, 4, 4, 4]
    crop_size = (4, 4, 4)
    inference_stride_size = (2, 2, 2)
    ground_truth_ids = ["Lung"]
    # image to run inference on
    image = torch.from_numpy(np.random.uniform(size=image_size).astype(ImageDataType.IMAGE.value))
    # labels for criterion
    labels = torch.from_numpy(np.random.uniform(size=labels_size).astype(ImageDataType.SEGMENTATION.value))
    # create a random mask if required
    mask = torch.from_numpy((np.round(np.random.uniform(size=mask_size)).astype(dtype=ImageDataType.MASK.value)))
    # Minimal segmentation config; detect_anomaly=True is the flag under test.
    config = SegmentationModelBase(
        crop_size=crop_size,
        inference_stride_size=inference_stride_size,
        image_channels=["ct"],
        ground_truth_ids=ground_truth_ids,
        should_validate=False,
        detect_anomaly=True
    )
    model_and_info = ModelAndInfo(config=config, model_execution_mode=ModelExecutionMode.TRAIN,
                                 checkpoint_path=None)
    # Swap in a small test model so the forward pass is cheap.
    model_and_info._model: BaseModel = SimpleModel(1, [1], 2, 2)  # type: ignore
    model_and_info.create_summary_and_adjust_model_for_gpus()
    model_and_info.try_create_optimizer_and_load_from_checkpoint()
    config.use_gpu = False
    model = model_and_info.model
    optimizer = model_and_info.optimizer
    # Create the loss criterion: it ignores its inputs and always returns value_to_insert,
    # so the loss is non-finite exactly when a NaN/Inf value is parametrized.
    criterion = lambda x, y: torch.tensor(value_to_insert, requires_grad=True)
    pipeline = SegmentationForwardPass(model,
                                      config,
                                      batch_size=1,
                                      optimizer=optimizer,
                                      in_training_mode=in_training_mode,
                                      criterion=criterion)
    image[0, 0, 0, 0, 0] = value_to_insert
    if np.isnan(value_to_insert) or np.isinf(value_to_insert):
        # Anomaly detection must surface a non-finite loss as a RuntimeError.
        with pytest.raises(RuntimeError) as ex:
            pipeline.forward_pass_patches(patches=image, mask=mask, labels=labels)
        assert f"loss computation returned {value_to_insert}" in str(ex)
    else:
        # With a finite loss, the forward pass must complete without raising.
        pipeline.forward_pass_patches(patches=image, mask=mask, labels=labels)
@pytest.mark.gpu
@pytest.mark.skipif(no_gpu_available, reason="Testing AMP requires a GPU")
@pytest.mark.parametrize("use_model_parallel", [False, True])
@pytest.mark.parametrize("use_mixed_precision", [False, True])
@pytest.mark.parametrize("execution_mode", [ModelExecutionMode.TRAIN, ModelExecutionMode.TEST])
def test_amp_activated(use_model_parallel: bool,
                       execution_mode: ModelExecutionMode,
                       use_mixed_precision: bool) -> None:
    """
    Tests the mix precision flag and the model parallel flag.
    :param use_model_parallel: If true, request model-parallel execution (SimpleModel does not implement it,
    so the test expects a NotImplementedError in that case).
    :param execution_mode: Execution mode (TRAIN or TEST) for the forward pass.
    :param use_mixed_precision: If true, outputs of the forward pass are expected to be float16.
    """
    assert machine_has_gpu, "This test must be executed on a GPU machine."
    assert torch.cuda.device_count() > 1, "This test must be executed on a multi-GPU machine"
    # image, labels, and mask to run forward and backward passes
    image = torch.from_numpy(np.random.uniform(size=[1, 1, 4, 4, 4]).astype(ImageDataType.IMAGE.value))
    labels = torch.from_numpy(np.random.uniform(size=[1, 2, 4, 4, 4]).astype(ImageDataType.SEGMENTATION.value))
    mask = torch.from_numpy((np.round(np.random.uniform(size=[1, 4, 4, 4])).astype(dtype=ImageDataType.MASK.value)))
    crop_size = (4, 4, 4)
    model_config = SegmentationModelBase(crop_size=crop_size,
                                          image_channels=["ct"],
                                          ground_truth_ids=["Lung"],
                                          use_mixed_precision=use_mixed_precision,
                                          use_model_parallel=use_model_parallel,
                                          should_validate=False)
    assert model_config.use_gpu
    model_and_info = ModelAndInfo(config=model_config, model_execution_mode=execution_mode,
                                 checkpoint_path=None)
    model_and_info._model = SimpleModel(1, [1], 2, 2)  # type: ignore
    # Move the model to the GPU. This is mostly to avoid issues with AMP, which has trouble
    # with first using a GPU model and later using a CPU-based one.
    try:
        model_and_info.create_summary_and_adjust_model_for_gpus()
    except NotImplementedError as ex:
        if use_model_parallel:
            # The SimpleModel does not implement model partitioning, and should hence fail at this step.
            assert "Model partitioning is not implemented" in str(ex)
            return
        else:
            raise ValueError(f"Expected this call to succeed, but got: {ex}")
    model_and_info.try_create_optimizer_and_load_from_checkpoint()
    model = model_and_info.model
    optimizer = model_and_info.optimizer
    # This is the same logic spelt out in adjust_model_for_gpus
    use_data_parallel = (execution_mode == ModelExecutionMode.TRAIN) or (not use_model_parallel)
    if use_data_parallel:
        assert isinstance(model, DataParallelModel)
    # AMP needs a GradScaler for the backward pass; None disables loss scaling.
    gradient_scaler = GradScaler() if use_mixed_precision else None
    # Constant loss: this test only checks output dtypes and that the passes run without error.
    criterion = lambda x, y: torch.tensor([0.0], requires_grad=True).cuda()
    pipeline = SegmentationForwardPass(model,
                                      model_config,
                                      batch_size=1,
                                      optimizer=optimizer,
                                      gradient_scaler=gradient_scaler,
                                      criterion=criterion)
    logits, _ = pipeline._compute_loss(image, labels)
    # When using DataParallel, we expect to get a list of tensors back, one per GPU.
    if use_data_parallel:
        assert isinstance(logits, list)
        first_logit = logits[0]
    else:
        first_logit = logits
    # With mixed precision enabled, the forward pass is expected to produce float16 outputs.
    if use_mixed_precision:
        assert first_logit.dtype == torch.float16
    else:
        assert first_logit.dtype == torch.float32
    # Verify that forward and backward passes do not throw an exception
    pipeline._forward_pass(patches=image, mask=mask, labels=labels)
@pytest.mark.skipif(common_util.is_windows(), reason="Has issues on windows build")
@pytest.mark.cpu_and_gpu
@pytest.mark.parametrize("use_gpu_override", [False, True])
def test_use_gpu_flag(use_gpu_override: bool) -> None:
    """
    Checks the semantics of the use_gpu flag: it defaults to actual GPU availability,
    can be freely overridden on a GPU machine, and rejects a True override on a
    CPU-only machine with a ValueError.
    """
    config = DeepLearningConfig(should_validate=False)
    # Without an override, use_gpu must mirror whether a CUDA-capable GPU is actually present.
    assert config.use_gpu == machine_has_gpu
    if not machine_has_gpu and use_gpu_override:
        # Requesting GPU mode on a machine without a GPU must be rejected.
        with pytest.raises(ValueError) as ex:
            config.use_gpu = use_gpu_override
        assert "use_gpu to True if there is not CUDA capable GPU present" in str(ex)
    else:
        # All other combinations are legal: the override is simply applied
        # (CPU mode is always allowed, even on a GPU machine).
        config.use_gpu = use_gpu_override
        assert config.use_gpu == use_gpu_override
@pytest.mark.azureml
def test_mean_teacher_model(test_output_dirs: OutputFolderForTests) -> None:
    """
    Test training and weight updates of the mean teacher model computation.
    The mean teacher weights are expected to be an exponential moving average of the
    student weights: alpha * teacher + (1 - alpha) * student (see final assertion).
    :param test_output_dirs: Test fixture providing a fresh output folder.
    """

    def _get_parameters_of_model(model: DeviceAwareModule) -> Any:
        """
        Returns the iterator of model parameters
        """
        if isinstance(model, DataParallelModel):
            # DataParallel wraps the real model in .module
            return model.module.parameters()
        else:
            return model.parameters()

    config = DummyClassification()
    config.set_output_to(test_output_dirs.root_dir)
    checkpoint_handler = get_default_checkpoint_handler(model_config=config,
                                                        project_root=test_output_dirs.root_dir)
    config.num_epochs = 1
    # Set train batch size to be arbitrary big to ensure we have only one training step
    # i.e. one mean teacher update.
    config.train_batch_size = 100
    # Train without mean teacher
    model_train(config, checkpoint_handler=checkpoint_handler)
    # Retrieve the weight after one epoch
    model_and_info = ModelAndInfo(config=config, model_execution_mode=ModelExecutionMode.TEST,
                                 checkpoint_path=config.get_path_to_checkpoint(epoch=1))
    model_and_info.try_create_model_and_load_from_checkpoint()
    model = model_and_info.model
    model_weight = next(_get_parameters_of_model(model))
    # Get the starting weight of the mean teacher model
    # NOTE(review): re-seeding here presumably makes the freshly created mean teacher model
    # start from the same initial weights as the earlier run — confirm against ml_util.
    ml_util.set_random_seed(config.get_effective_random_seed())
    model_and_info_mean_teacher = ModelAndInfo(config=config,
                                               model_execution_mode=ModelExecutionMode.TEST,
                                               checkpoint_path=None)
    model_and_info_mean_teacher.try_create_model_and_load_from_checkpoint()
    model_and_info_mean_teacher.try_create_mean_teacher_model_and_load_from_checkpoint()
    mean_teach_model = model_and_info_mean_teacher.mean_teacher_model
    assert mean_teach_model is not None  # for mypy
    initial_weight_mean_teacher_model = next(_get_parameters_of_model(mean_teach_model))
    # Now train with mean teacher and check the update of the weight
    alpha = 0.999
    config.mean_teacher_alpha = alpha
    model_train(config, checkpoint_handler=checkpoint_handler)
    # Retrieve weight of mean teacher model saved in the checkpoint
    model_and_info_mean_teacher = ModelAndInfo(config=config, model_execution_mode=ModelExecutionMode.TEST,
                                               checkpoint_path=config.get_path_to_checkpoint(1))
    model_and_info_mean_teacher.try_create_mean_teacher_model_and_load_from_checkpoint()
    mean_teacher_model = model_and_info_mean_teacher.mean_teacher_model
    assert mean_teacher_model is not None  # for mypy
    result_weight = next(_get_parameters_of_model(mean_teacher_model))
    # Retrieve the associated student weight
    model_and_info_mean_teacher.try_create_model_and_load_from_checkpoint()
    student_model = model_and_info_mean_teacher.model
    student_model_weight = next(_get_parameters_of_model(student_model))
    # Assert that the student weight corresponds to the weight of a simple training without mean teacher
    # computation
    assert student_model_weight.allclose(model_weight)
    # Check the update of the parameters: exactly one EMA step with the given alpha.
    assert torch.all(alpha * initial_weight_mean_teacher_model + (1 - alpha) * student_model_weight == result_weight)
@pytest.mark.gpu
@pytest.mark.skipif(no_gpu_available, reason="Testing AMP requires a GPU")
@pytest.mark.parametrize("use_mixed_precision", [False, True])
@pytest.mark.parametrize("execution_mode", [ModelExecutionMode.TRAIN, ModelExecutionMode.VAL])
def test_amp_and_parallel_for_scalar_models(test_output_dirs: OutputFolderForTests,
                                            execution_mode: ModelExecutionMode,
                                            use_mixed_precision: bool) -> None:
    """
    Tests the mix precision flag and data parallel for scalar models.
    :param test_output_dirs: Test fixture providing a fresh output folder.
    :param execution_mode: Execution mode (TRAIN or VAL) for the training step.
    :param use_mixed_precision: If true, logits and posteriors are expected in float16.
    """

    class ClassificationModelWithIdentity(ClassificationModelForTesting):
        # NOTE(review): create_model closes over the outer `config` variable, which is only
        # bound (to an instance of this very class) after the class definition — this works
        # because create_model is called later, but it is worth being aware of.
        def create_model(self) -> Any:
            return DummyScalarModel(expected_image_size_zyx=config.expected_image_size_zyx,
                                    activation=Identity(),
                                    use_mixed_precision=use_mixed_precision)

    assert machine_has_gpu, "This test must be executed on a GPU machine."
    assert torch.cuda.device_count() > 1, "This test must be executed on a multi-GPU machine"
    config = ClassificationModelWithIdentity()
    config.use_mixed_precision = use_mixed_precision
    model_and_info = ModelAndInfo(config=config, model_execution_mode=execution_mode,
                                 checkpoint_path=None)
    model_and_info.try_create_model_load_from_checkpoint_and_adjust()
    model = model_and_info.model
    # This is the same logic spelt out in update_model_for_multiple_gpu
    # execution_mode == ModelExecutionMode.TRAIN or (not use_model_parallel), which is always True in our case
    use_data_parallel = True
    if use_data_parallel:
        assert isinstance(model, DataParallelModel)
    data_loaders = config.create_data_loaders()
    # AMP needs a GradScaler; None disables loss scaling.
    gradient_scaler = GradScaler() if use_mixed_precision else None
    train_val_parameters: TrainValidateParameters = TrainValidateParameters(
        model=model,
        data_loader=data_loaders[execution_mode],
        in_training_mode=execution_mode == ModelExecutionMode.TRAIN,
        gradient_scaler=gradient_scaler,
        dataframe_loggers=MetricsDataframeLoggers(test_output_dirs.root_dir),
        summary_writers=SummaryWriters(train=None, val=None)  # type: ignore
    )
    training_steps = ModelTrainingStepsForScalarModel(config, train_val_parameters)
    # Use the first batch of the selected data loader as the model input.
    sample = list(data_loaders[execution_mode])[0]
    model_input = get_scalar_model_inputs_and_labels(config, model, sample)
    logits, posteriors, loss = training_steps._compute_model_output_and_loss(model_input)
    # When using DataParallel, we expect to get a list of tensors back, one per GPU.
    if use_data_parallel:
        assert isinstance(logits, list)
        first_logit = logits[0]
    else:
        first_logit = logits
    if use_mixed_precision:
        assert first_logit.dtype == torch.float16
        assert posteriors.dtype == torch.float16
        # BCEWithLogitsLoss outputs float32, even with float16 args
        assert loss.dtype == torch.float32
    else:
        assert first_logit.dtype == torch.float32
        assert posteriors.dtype == torch.float32
        assert loss.dtype == torch.float32
    # Verify that forward pass does not throw. It would for example if it fails to gather tensors or not convert
    # float16 to float32
    _, _, _ = training_steps._compute_model_output_and_loss(model_input)
|
[
"InnerEye.Common.common_util.MetricsDataframeLoggers",
"torch.nn.ConvTranspose3d",
"InnerEye.ML.config.SegmentationModelBase",
"numpy.isnan",
"torch.cuda.device_count",
"pytest.mark.skipif",
"pytest.mark.parametrize",
"InnerEye.ML.utils.model_util.ModelAndInfo",
"torch.nn.Conv3d",
"InnerEye.ML.deep_learning_config.DeepLearningConfig",
"pytest.raises",
"InnerEye.ML.utils.metrics_util.SummaryWriters",
"InnerEye.ML.configs.classification.DummyClassification.DummyClassification",
"InnerEye.ML.model_training_steps.get_scalar_model_inputs_and_labels",
"InnerEye.ML.pipelines.forward_pass.SegmentationForwardPass",
"numpy.isinf",
"torch.cuda.amp.GradScaler",
"torch.nn.Identity",
"InnerEye.ML.model_training.model_train",
"numpy.random.uniform",
"InnerEye.Common.common_util.is_windows",
"InnerEye.ML.model_training_steps.ModelTrainingStepsForScalarModel",
"Tests.ML.util.get_default_checkpoint_handler",
"torch.tensor",
"torch.all"
] |
[((2985, 3050), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""value_to_insert"""', '[1.0, np.NaN, np.Inf]'], {}), "('value_to_insert', [1.0, np.NaN, np.Inf])\n", (3008, 3050), False, 'import pytest\n'), ((3052, 3110), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""in_training_mode"""', '[True, False]'], {}), "('in_training_mode', [True, False])\n", (3075, 3110), False, 'import pytest\n'), ((5744, 5817), 'pytest.mark.skipif', 'pytest.mark.skipif', (['no_gpu_available'], {'reason': '"""Testing AMP requires a GPU"""'}), "(no_gpu_available, reason='Testing AMP requires a GPU')\n", (5762, 5817), False, 'import pytest\n'), ((5819, 5879), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""use_model_parallel"""', '[False, True]'], {}), "('use_model_parallel', [False, True])\n", (5842, 5879), False, 'import pytest\n'), ((5881, 5942), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""use_mixed_precision"""', '[False, True]'], {}), "('use_mixed_precision', [False, True])\n", (5904, 5942), False, 'import pytest\n'), ((5944, 6042), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""execution_mode"""', '[ModelExecutionMode.TRAIN, ModelExecutionMode.TEST]'], {}), "('execution_mode', [ModelExecutionMode.TRAIN,\n ModelExecutionMode.TEST])\n", (5967, 6042), False, 'import pytest\n'), ((9677, 9735), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""use_gpu_override"""', '[False, True]'], {}), "('use_gpu_override', [False, True])\n", (9700, 9735), False, 'import pytest\n'), ((14214, 14287), 'pytest.mark.skipif', 'pytest.mark.skipif', (['no_gpu_available'], {'reason': '"""Testing AMP requires a GPU"""'}), "(no_gpu_available, reason='Testing AMP requires a GPU')\n", (14232, 14287), False, 'import pytest\n'), ((14289, 14350), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""use_mixed_precision"""', '[False, True]'], {}), "('use_mixed_precision', [False, True])\n", (14312, 14350), False, 'import pytest\n'), 
((14352, 14449), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""execution_mode"""', '[ModelExecutionMode.TRAIN, ModelExecutionMode.VAL]'], {}), "('execution_mode', [ModelExecutionMode.TRAIN,\n ModelExecutionMode.VAL])\n", (14375, 14449), False, 'import pytest\n'), ((4164, 4363), 'InnerEye.ML.config.SegmentationModelBase', 'SegmentationModelBase', ([], {'crop_size': 'crop_size', 'inference_stride_size': 'inference_stride_size', 'image_channels': "['ct']", 'ground_truth_ids': 'ground_truth_ids', 'should_validate': '(False)', 'detect_anomaly': '(True)'}), "(crop_size=crop_size, inference_stride_size=\n inference_stride_size, image_channels=['ct'], ground_truth_ids=\n ground_truth_ids, should_validate=False, detect_anomaly=True)\n", (4185, 4363), False, 'from InnerEye.ML.config import SegmentationModelBase\n'), ((4430, 4530), 'InnerEye.ML.utils.model_util.ModelAndInfo', 'ModelAndInfo', ([], {'config': 'config', 'model_execution_mode': 'ModelExecutionMode.TRAIN', 'checkpoint_path': 'None'}), '(config=config, model_execution_mode=ModelExecutionMode.TRAIN,\n checkpoint_path=None)\n', (4442, 4530), False, 'from InnerEye.ML.utils.model_util import ModelAndInfo\n'), ((5000, 5133), 'InnerEye.ML.pipelines.forward_pass.SegmentationForwardPass', 'SegmentationForwardPass', (['model', 'config'], {'batch_size': '(1)', 'optimizer': 'optimizer', 'in_training_mode': 'in_training_mode', 'criterion': 'criterion'}), '(model, config, batch_size=1, optimizer=optimizer,\n in_training_mode=in_training_mode, criterion=criterion)\n', (5023, 5133), False, 'from InnerEye.ML.pipelines.forward_pass import SegmentationForwardPass\n'), ((6898, 7101), 'InnerEye.ML.config.SegmentationModelBase', 'SegmentationModelBase', ([], {'crop_size': 'crop_size', 'image_channels': "['ct']", 'ground_truth_ids': "['Lung']", 'use_mixed_precision': 'use_mixed_precision', 'use_model_parallel': 'use_model_parallel', 'should_validate': '(False)'}), "(crop_size=crop_size, image_channels=['ct'],\n 
ground_truth_ids=['Lung'], use_mixed_precision=use_mixed_precision,\n use_model_parallel=use_model_parallel, should_validate=False)\n", (6919, 7101), False, 'from InnerEye.ML.config import SegmentationModelBase\n'), ((7352, 7448), 'InnerEye.ML.utils.model_util.ModelAndInfo', 'ModelAndInfo', ([], {'config': 'model_config', 'model_execution_mode': 'execution_mode', 'checkpoint_path': 'None'}), '(config=model_config, model_execution_mode=execution_mode,\n checkpoint_path=None)\n', (7364, 7448), False, 'from InnerEye.ML.utils.model_util import ModelAndInfo\n'), ((8682, 8820), 'InnerEye.ML.pipelines.forward_pass.SegmentationForwardPass', 'SegmentationForwardPass', (['model', 'model_config'], {'batch_size': '(1)', 'optimizer': 'optimizer', 'gradient_scaler': 'gradient_scaler', 'criterion': 'criterion'}), '(model, model_config, batch_size=1, optimizer=\n optimizer, gradient_scaler=gradient_scaler, criterion=criterion)\n', (8705, 8820), False, 'from InnerEye.ML.pipelines.forward_pass import SegmentationForwardPass\n'), ((9804, 9845), 'InnerEye.ML.deep_learning_config.DeepLearningConfig', 'DeepLearningConfig', ([], {'should_validate': '(False)'}), '(should_validate=False)\n', (9822, 9845), False, 'from InnerEye.ML.deep_learning_config import DeepLearningConfig\n'), ((9587, 9611), 'InnerEye.Common.common_util.is_windows', 'common_util.is_windows', ([], {}), '()\n', (9609, 9611), False, 'from InnerEye.Common import common_util\n'), ((11226, 11247), 'InnerEye.ML.configs.classification.DummyClassification.DummyClassification', 'DummyClassification', ([], {}), '()\n', (11245, 11247), False, 'from InnerEye.ML.configs.classification.DummyClassification import DummyClassification\n'), ((11325, 11421), 'Tests.ML.util.get_default_checkpoint_handler', 'get_default_checkpoint_handler', ([], {'model_config': 'config', 'project_root': 'test_output_dirs.root_dir'}), '(model_config=config, project_root=\n test_output_dirs.root_dir)\n', (11355, 11421), False, 'from Tests.ML.util import 
get_default_checkpoint_handler\n'), ((11695, 11753), 'InnerEye.ML.model_training.model_train', 'model_train', (['config'], {'checkpoint_handler': 'checkpoint_handler'}), '(config, checkpoint_handler=checkpoint_handler)\n', (11706, 11753), False, 'from InnerEye.ML.model_training import model_train\n'), ((12291, 12390), 'InnerEye.ML.utils.model_util.ModelAndInfo', 'ModelAndInfo', ([], {'config': 'config', 'model_execution_mode': 'ModelExecutionMode.TEST', 'checkpoint_path': 'None'}), '(config=config, model_execution_mode=ModelExecutionMode.TEST,\n checkpoint_path=None)\n', (12303, 12390), False, 'from InnerEye.ML.utils.model_util import ModelAndInfo\n'), ((12988, 13046), 'InnerEye.ML.model_training.model_train', 'model_train', (['config'], {'checkpoint_handler': 'checkpoint_handler'}), '(config, checkpoint_handler=checkpoint_handler)\n', (12999, 13046), False, 'from InnerEye.ML.model_training import model_train\n'), ((14087, 14197), 'torch.all', 'torch.all', (['(alpha * initial_weight_mean_teacher_model + (1 - alpha) *\n student_model_weight == result_weight)'], {}), '(alpha * initial_weight_mean_teacher_model + (1 - alpha) *\n student_model_weight == result_weight)\n', (14096, 14197), False, 'import torch\n'), ((15410, 15500), 'InnerEye.ML.utils.model_util.ModelAndInfo', 'ModelAndInfo', ([], {'config': 'config', 'model_execution_mode': 'execution_mode', 'checkpoint_path': 'None'}), '(config=config, model_execution_mode=execution_mode,\n checkpoint_path=None)\n', (15422, 15500), False, 'from InnerEye.ML.utils.model_util import ModelAndInfo\n'), ((16481, 16543), 'InnerEye.ML.model_training_steps.ModelTrainingStepsForScalarModel', 'ModelTrainingStepsForScalarModel', (['config', 'train_val_parameters'], {}), '(config, train_val_parameters)\n', (16513, 16543), False, 'from InnerEye.ML.model_training_steps import ModelTrainingStepsForScalarModel, TrainValidateParameters, get_scalar_model_inputs_and_labels\n'), ((16613, 16670), 
'InnerEye.ML.model_training_steps.get_scalar_model_inputs_and_labels', 'get_scalar_model_inputs_and_labels', (['config', 'model', 'sample'], {}), '(config, model, sample)\n', (16647, 16670), False, 'from InnerEye.ML.model_training_steps import ModelTrainingStepsForScalarModel, TrainValidateParameters, get_scalar_model_inputs_and_labels\n'), ((4935, 4984), 'torch.tensor', 'torch.tensor', (['value_to_insert'], {'requires_grad': '(True)'}), '(value_to_insert, requires_grad=True)\n', (4947, 4984), False, 'import torch\n'), ((5375, 5400), 'numpy.isnan', 'np.isnan', (['value_to_insert'], {}), '(value_to_insert)\n', (5383, 5400), True, 'import numpy as np\n'), ((5404, 5429), 'numpy.isinf', 'np.isinf', (['value_to_insert'], {}), '(value_to_insert)\n', (5412, 5429), True, 'import numpy as np\n'), ((6370, 6395), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (6393, 6395), False, 'import torch\n'), ((8545, 8557), 'torch.cuda.amp.GradScaler', 'GradScaler', ([], {}), '()\n', (8555, 8557), False, 'from torch.cuda.amp import GradScaler\n'), ((15205, 15230), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (15228, 15230), False, 'import torch\n'), ((15995, 16007), 'torch.cuda.amp.GradScaler', 'GradScaler', ([], {}), '()\n', (16005, 16007), False, 'from torch.cuda.amp import GradScaler\n'), ((2508, 2582), 'torch.nn.Conv3d', 'torch.nn.Conv3d', (['input_channels', 'channels[0]'], {'kernel_size': 'self.kernel_size'}), '(input_channels, channels[0], kernel_size=self.kernel_size)\n', (2523, 2582), False, 'import torch\n'), ((2596, 2674), 'torch.nn.ConvTranspose3d', 'torch.nn.ConvTranspose3d', (['channels[0]', 'n_classes'], {'kernel_size': 'self.kernel_size'}), '(channels[0], n_classes, kernel_size=self.kernel_size)\n', (2620, 2674), False, 'import torch\n'), ((5444, 5471), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (5457, 5471), False, 'import pytest\n'), ((16325, 16375), 
'InnerEye.Common.common_util.MetricsDataframeLoggers', 'MetricsDataframeLoggers', (['test_output_dirs.root_dir'], {}), '(test_output_dirs.root_dir)\n', (16348, 16375), False, 'from InnerEye.Common.common_util import MetricsDataframeLoggers\n'), ((16401, 16437), 'InnerEye.ML.utils.metrics_util.SummaryWriters', 'SummaryWriters', ([], {'train': 'None', 'val': 'None'}), '(train=None, val=None)\n', (16415, 16437), False, 'from InnerEye.ML.utils.metrics_util import SummaryWriters\n'), ((3792, 3826), 'numpy.random.uniform', 'np.random.uniform', ([], {'size': 'image_size'}), '(size=image_size)\n', (3809, 3826), True, 'import numpy as np\n'), ((3919, 3954), 'numpy.random.uniform', 'np.random.uniform', ([], {'size': 'labels_size'}), '(size=labels_size)\n', (3936, 3954), True, 'import numpy as np\n'), ((6547, 6586), 'numpy.random.uniform', 'np.random.uniform', ([], {'size': '[1, 1, 4, 4, 4]'}), '(size=[1, 1, 4, 4, 4])\n', (6564, 6586), True, 'import numpy as np\n'), ((6652, 6691), 'numpy.random.uniform', 'np.random.uniform', ([], {'size': '[1, 2, 4, 4, 4]'}), '(size=[1, 2, 4, 4, 4])\n', (6669, 6691), True, 'import numpy as np\n'), ((8620, 8659), 'torch.tensor', 'torch.tensor', (['[0.0]'], {'requires_grad': '(True)'}), '([0.0], requires_grad=True)\n', (8632, 8659), False, 'import torch\n'), ((10447, 10472), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (10460, 10472), False, 'import pytest\n'), ((4074, 4107), 'numpy.random.uniform', 'np.random.uniform', ([], {'size': 'mask_size'}), '(size=mask_size)\n', (4091, 4107), True, 'import numpy as np\n'), ((6772, 6808), 'numpy.random.uniform', 'np.random.uniform', ([], {'size': '[1, 4, 4, 4]'}), '(size=[1, 4, 4, 4])\n', (6789, 6808), True, 'import numpy as np\n'), ((15029, 15039), 'torch.nn.Identity', 'Identity', ([], {}), '()\n', (15037, 15039), False, 'from torch.nn import Identity\n')]
|
import os
import functools
import torch
import torch.nn as nn
import torch.optim as optim
from torch.cuda.amp import autocast, GradScaler
from torchvision.utils import save_image
from ..StyleGAN2.model import Generator, Discriminator, init_weight_N01
from ..general.dataset_base import Image, make_default_transform
from ..general import to_loader
from ..general import get_device, Status, save_args
from ..gan_utils import sample_nnoise, AdaBelief, update_ema, DiffAugment
from ..gan_utils.losses import GANLoss, GradPenalty
'''dataset classes with Image + Blur'''
import glob
import random
from PIL import Image as pilImage
class ImageBlur(Image):
    """Dataset base class that yields (sharp image, blurred image) pairs.

    Concrete subclasses must implement _load_blur() to supply the list of
    blurred-image paths, index-aligned with self.images.
    """

    def __init__(self, image_size, resize_ratio=1.):
        super().__init__(make_default_transform(image_size, resize_ratio))
        self.blurs = self._load_blur()

    def _load_blur(self):
        # Must be provided by concrete dataset classes.
        raise NotImplementedError()

    def _open_rgb(self, path):
        # Read a file from disk as an RGB PIL image.
        return pilImage.open(path).convert('RGB')

    def __getitem__(self, index):
        sharp = self.transform(self._open_rgb(self.images[index]))
        blurred = self.transform(self._open_rgb(self.blurs[index]))
        return sharp, blurred

    def shuffle_blur(self):
        # Re-randomize which blurred image is paired with which sharp image.
        random.shuffle(self.blurs)
class AnimeFaceBlur(ImageBlur):
    """AnimeFace dataset paired with pre-computed blurred images.

    Images whose filename year precedes ``min_year`` are discarded.
    """

    def __init__(self, image_size, min_year=2005, resize_ratio=1.):
        super().__init__(image_size, resize_ratio)
        # Filter out images older than the cutoff year, then rebuild the blur
        # list (the one built in super().__init__ used the unfiltered images).
        self.images = [p for p in self.images if self._year_from_path(p) >= min_year]
        self.blurs = self._load_blur()
        self.length = len(self.images)

    def _load(self):
        return glob.glob('/usr/src/data/animefacedataset/images/*')

    def _load_blur(self):
        # Blurred counterparts live in a sibling 'blur' folder; shuffling
        # decouples each blurred image from its own sharp version.
        shuffled = [p.replace('images', 'blur') for p in self.images]
        random.shuffle(shuffled)
        return shuffled

    def _year_from_path(self, path):
        # Filenames end in '_<year>'; parse it out of the file stem.
        stem = os.path.splitext(os.path.basename(path))[0]
        return int(stem.rsplit('_', 1)[-1])
class DanbooruPortraitBlur(ImageBlur):
    """Danbooru portrait dataset paired with pre-computed blurred images.

    Optionally restricts the dataset to a random subset of ``num_images`` files.
    """

    def __init__(self, image_size, num_images=None, resize_ratio=1.2):
        super().__init__(image_size, resize_ratio)
        if num_images:
            # Keep a random subset of the portraits, then rebuild the blur
            # list so it matches the reduced image set.
            random.shuffle(self.images)
            self.images = self.images[:num_images]
            self.length = len(self.images)
            self.blurs = self._load_blur()

    def _load(self):
        return glob.glob('/usr/src/data/danbooru/portraits/portraits/*')

    def _load_blur(self):
        # Blurred counterparts live in a sibling 'blur' folder; shuffling
        # decouples each blurred image from its own sharp version.
        shuffled = [p.replace('portraits/portraits', 'portraits/blur') for p in self.images]
        random.shuffle(shuffled)
        return shuffled
# Shared softplus module used by the edge adversarial loss below.
softplus = nn.Softplus()

def edge_adv_loss(edge_prob):
    """Adversarial loss term that trains blurred-edge images as 'fake'.

    Equivalent to the non-saturating GAN fake loss: mean(softplus(D(blur))).
    """
    return softplus(edge_prob).mean()
def train(
    max_iter, dataset, sampler, const_z,
    G, G_ema, D, optimizer_G, optimizer_D,
    r1_lambda, d_k, policy,
    edge_loss_from,
    device, amp,
    save=1000
):
    """Adversarial training loop for StyleGAN2 with an extra "blurred edge" fake class.

    :param max_iter: total number of iterations (batches) to train for.
    :param dataset: DataLoader yielding (real image, blurred image) batches.
    :param sampler: callable returning a batch of latent vectors.
    :param const_z: fixed latent batch used for periodic evaluation images.
    :param G: generator. :param G_ema: EMA copy of the generator (may be None).
    :param D: discriminator.
    :param optimizer_G: generator optimizer. :param optimizer_D: discriminator optimizer.
    :param r1_lambda: weight of the R1 gradient penalty; <= 0 disables it.
    :param d_k: apply the R1 penalty every d_k iterations (lazy regularization).
    :param policy: DiffAugment policy string.
    :param edge_loss_from: iteration threshold controlling the blurred-edge adversarial term.
    :param device: torch device to train on.
    :param amp: if true, run with mixed precision (autocast + GradScaler).
    :param save: save evaluation images and weights every `save` iterations.
    """
    status = Status(max_iter)
    loss = GANLoss()
    gp = GradPenalty()
    scaler = GradScaler() if amp else None
    augment = functools.partial(DiffAugment, policy=policy)

    if G_ema is not None:
        G_ema.eval()

    while status.batches_done < max_iter:
        for index, (real, blur) in enumerate(dataset):
            optimizer_G.zero_grad()
            optimizer_D.zero_grad()

            real = real.to(device)
            blur = blur.to(device)

            '''discriminator'''
            z = sampler()
            with autocast(amp):
                # D(real)
                real_aug = augment(real)
                real_prob = D(real_aug)
                # D(blured)
                blur = augment(blur)
                blur_prob = D(blur)
                # D(G(z))
                fake, _ = G(z)
                fake_aug = augment(fake)
                fake_prob = D(fake_aug.detach())
                # Lazy regularization: every d_k iterations the discriminator step
                # computes only the R1 penalty (scaled by d_k), otherwise the GAN loss.
                # Fixed: was "status.batches_done is not 0", an identity comparison with
                # an int literal that only works via CPython small-int caching and raises
                # a SyntaxWarning on Python >= 3.8.
                if status.batches_done % d_k == 0 \
                    and r1_lambda > 0 \
                    and status.batches_done != 0:
                    r1 = gp.r1_regularizer(real, D, scaler)
                    D_loss = r1 * r1_lambda * d_k
                else:
                    # gan loss on other iter
                    D_loss = loss.d_loss(real_prob, fake_prob)
                # Add adversarial loss that pushes blurred edges toward "fake".
                # NOTE(review): this term is active only while batches_done < edge_loss_from,
                # but the CLI flag is described as "epochs to wait BEFORE adding edge adv loss"
                # (and with the default of 0 the term never fires). Confirm whether the
                # comparison should be inverted.
                if edge_loss_from > status.batches_done:
                    D_loss = D_loss + edge_adv_loss(blur_prob)

            if scaler is not None:
                scaler.scale(D_loss).backward()
                scaler.step(optimizer_D)
            else:
                D_loss.backward()
                optimizer_D.step()

            '''generator'''
            with autocast(amp):
                # D(G(z)) on the non-detached fake: gradients flow into G.
                fake_prob = D(fake_aug)
                G_loss = loss.g_loss(fake_prob)
            if scaler is not None:
                scaler.scale(G_loss).backward()
                scaler.step(optimizer_G)
            else:
                G_loss.backward()
                optimizer_G.step()
            if G_ema is not None:
                update_ema(G, G_ema)

            # Periodically save EMA evaluation images and generator weights.
            if status.batches_done % save == 0:
                with torch.no_grad():
                    images, _ = G_ema(const_z)
                save_image(images, f'implementations/edge/result/{status.batches_done}.jpg', nrow=4, normalize=True, range=(-1, 1))
                torch.save(G_ema.state_dict(), f'implementations/edge/result/G_{status.batches_done}.pt')
            # Snapshot of the current batch's fakes (overwritten every iteration).
            save_image(fake, 'running.jpg', nrow=4, normalize=True, range=(-1, 1))

            # Log losses; guard against NaN so a single bad batch does not poison the plot.
            loss_dict = dict(
                G=G_loss.item() if not torch.isnan(G_loss).any() else 0,
                D=D_loss.item() if not torch.isnan(D_loss).any() else 0
            )
            status.update(loss_dict)
            if scaler is not None:
                scaler.update()

            if status.batches_done == max_iter:
                break

        # Re-pair blurred images with different originals each epoch.
        dataset.dataset.shuffle_blur()

    status.plot()
def add_argument(parser):
    """Register all StyleGAN2 model/training options plus the edge-specific
    options on ``parser`` and return it.
    """
    # StyleGAN2: model architecture
    model_options = [
        ('--image-channels', dict(default=3, type=int, help='number of channels for the generated image')),
        ('--style-dim', dict(default=512, type=int, help='style feature dimension')),
        ('--channels', dict(default=32, type=int, help='channel width multiplier')),
        ('--max-channels', dict(default=512, type=int, help='maximum channels')),
        ('--block-num-conv', dict(default=2, type=int, help='number of convolution layers in residual block')),
        ('--map-num-layers', dict(default=8, type=int, help='number of layers in mapping network')),
        ('--map-lr', dict(default=0.01, type=float, help='learning rate for mapping network')),
        ('--disable-map-norm', dict(default=False, action='store_true',
                                    help='disable pixel normalization in mapping network')),
        ('--mbsd-groups', dict(default=4, type=int, help='number of groups in mini-batch standard deviation')),
    ]
    # StyleGAN2: training
    # (--g-k / --pl-lambda for the perceptual path length loss are intentionally not exposed)
    training_options = [
        ('--lr', dict(default=0.001, type=float, help='learning rate')),
        ('--beta1', dict(default=0., type=float, help='beta1')),
        ('--beta2', dict(default=0.99, type=float, help='beta2')),
        ('--d-k', dict(default=16, type=int,
                       help='for lazy regularization. calculate gradient penalty each d_k iters')),
        ('--r1-lambda', dict(default=10, type=float, help='lambda for r1')),
        ('--policy', dict(default='color,translation', type=str, help='policy for DiffAugment')),
    ]
    # Edge-training specific
    edge_options = [
        ('--wait-edge-epoch', dict(default=0, type=int,
                                   help='epochs to wait before adding edge adv loss')),
    ]
    for flag, kwargs in model_options + training_options + edge_options:
        parser.add_argument(flag, **kwargs)
    return parser
def main(parser):
    """Entry point: parse CLI args, build dataset/models/optimizers and train.

    Relies on many helpers defined elsewhere in this project
    (save_args, get_device, AnimeFaceBlur, DanbooruPortraitBlur, to_loader,
    sample_nnoise, Generator, Discriminator, init_weight_N01, update_ema,
    train) — see their own modules for details.
    """
    parser = add_argument(parser)
    args = parser.parse_args()
    save_args(args)
    # --disable-map-norm / --disable-amp / --disable-gpu are negative flags,
    # so invert them into positive booleans here.
    normalize = not args.disable_map_norm
    betas = (args.beta1, args.beta2)
    amp = not args.disable_amp
    device = get_device(not args.disable_gpu)
    # dataset
    # NOTE(review): if args.dataset is neither value, `dataset` is unbound
    # and the to_loader call below raises NameError — assumes the arg
    # parser restricts the choices upstream.
    if args.dataset == 'animeface':
        dataset = AnimeFaceBlur(args.image_size, args.min_year)
    elif args.dataset == 'danbooru':
        dataset = DanbooruPortraitBlur(args.image_size, args.num_images)
    dataset = to_loader(
        dataset, args.batch_size, shuffle=True,
        num_workers=os.cpu_count(), use_gpu=torch.cuda.is_available())
    # random noise sampler
    sampler = functools.partial(sample_nnoise, (args.batch_size, args.style_dim), device=device)
    # const input for eval: a fixed latent batch of 16 so progress samples
    # are comparable across iterations.
    const_z = sample_nnoise((16, args.style_dim), device=device)
    # models
    G = Generator(
        args.image_size, args.image_channels, args.style_dim, args.channels, args.max_channels,
        args.block_num_conv, args.map_num_layers, normalize, args.map_lr)
    G_ema = Generator(
        args.image_size, args.image_channels, args.style_dim, args.channels, args.max_channels,
        args.block_num_conv, args.map_num_layers, normalize, args.map_lr)
    D = Discriminator(
        args.image_size, args.image_channels, args.channels, args.max_channels,
        args.block_num_conv, args.mbsd_groups)
    ## init
    G.init_weight(
        map_init_func=functools.partial(init_weight_N01, lr=args.map_lr),
        syn_init_func=init_weight_N01)
    G_ema.eval()
    # decay=0 copies G's weights into G_ema wholesale before training starts.
    update_ema(G, G_ema, decay=0)
    D.apply(init_weight_N01)
    G.to(device)
    G_ema.to(device)
    D.to(device)
    # optimizer
    g_lr, g_betas = args.lr, betas
    if args.r1_lambda > 0:
        # Lazy R1 regularization (StyleGAN2): D's lr and betas are rescaled
        # by k/(k+1) because the penalty is only applied every d_k steps.
        d_ratio = args.d_k / (args.d_k + 1)
        d_lr = args.lr * d_ratio
        d_betas = (betas[0]**d_ratio, betas[1]**d_ratio)
    else: d_lr, d_betas = args.lr, betas
    optimizer_G = optim.Adam(G.parameters(), lr=g_lr, betas=g_betas)
    optimizer_D = optim.Adam(D.parameters(), lr=d_lr, betas=d_betas)
    # A negative --max-iters means "train for the default number of epochs".
    if args.max_iters < 0:
        args.max_iters = len(dataset) * args.default_epochs
    edge_loss_from = len(dataset) * args.wait_edge_epoch
    train(
        args.max_iters, dataset, sampler, const_z,
        G, G_ema, D, optimizer_G, optimizer_D,
        args.r1_lambda, args.d_k,
        args.policy, edge_loss_from, device, amp, 1000
    )
|
[
"functools.partial",
"torch.cuda.amp.autocast",
"os.path.basename",
"random.shuffle",
"torch.nn.Softplus",
"PIL.Image.open",
"os.cpu_count",
"torchvision.utils.save_image",
"torch.cuda.is_available",
"glob.glob",
"torch.cuda.amp.GradScaler",
"torch.no_grad",
"torch.isnan"
] |
[((2653, 2666), 'torch.nn.Softplus', 'nn.Softplus', ([], {}), '()\n', (2664, 2666), True, 'import torch.nn as nn\n'), ((3141, 3186), 'functools.partial', 'functools.partial', (['DiffAugment'], {'policy': 'policy'}), '(DiffAugment, policy=policy)\n', (3158, 3186), False, 'import functools\n'), ((8909, 8996), 'functools.partial', 'functools.partial', (['sample_nnoise', '(args.batch_size, args.style_dim)'], {'device': 'device'}), '(sample_nnoise, (args.batch_size, args.style_dim), device=\n device)\n', (8926, 8996), False, 'import functools\n'), ((1260, 1286), 'random.shuffle', 'random.shuffle', (['self.blurs'], {}), '(self.blurs)\n', (1274, 1286), False, 'import random\n'), ((1652, 1704), 'glob.glob', 'glob.glob', (['"""/usr/src/data/animefacedataset/images/*"""'], {}), "('/usr/src/data/animefacedataset/images/*')\n", (1661, 1704), False, 'import glob\n'), ((1812, 1833), 'random.shuffle', 'random.shuffle', (['blurs'], {}), '(blurs)\n', (1826, 1833), False, 'import random\n'), ((2410, 2467), 'glob.glob', 'glob.glob', (['"""/usr/src/data/danbooru/portraits/portraits/*"""'], {}), "('/usr/src/data/danbooru/portraits/portraits/*')\n", (2419, 2467), False, 'import glob\n'), ((2598, 2619), 'random.shuffle', 'random.shuffle', (['blurs'], {}), '(blurs)\n', (2612, 2619), False, 'import random\n'), ((3097, 3109), 'torch.cuda.amp.GradScaler', 'GradScaler', ([], {}), '()\n', (3107, 3109), False, 'from torch.cuda.amp import autocast, GradScaler\n'), ((1927, 1949), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (1943, 1949), False, 'import os\n'), ((2208, 2235), 'random.shuffle', 'random.shuffle', (['self.images'], {}), '(self.images)\n', (2222, 2235), False, 'import random\n'), ((5675, 5746), 'torchvision.utils.save_image', 'save_image', (['fake', 'f"""running.jpg"""'], {'nrow': '(4)', 'normalize': '(True)', 'range': '(-1, 1)'}), "(fake, f'running.jpg', nrow=4, normalize=True, range=(-1, 1))\n", (5685, 5746), False, 'from torchvision.utils import 
save_image\n'), ((8816, 8830), 'os.cpu_count', 'os.cpu_count', ([], {}), '()\n', (8828, 8830), False, 'import os\n'), ((8840, 8865), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (8863, 8865), False, 'import torch\n'), ((9708, 9758), 'functools.partial', 'functools.partial', (['init_weight_N01'], {'lr': 'args.map_lr'}), '(init_weight_N01, lr=args.map_lr)\n', (9725, 9758), False, 'import functools\n'), ((1034, 1054), 'PIL.Image.open', 'pilImage.open', (['image'], {}), '(image)\n', (1047, 1054), True, 'from PIL import Image as pilImage\n'), ((1086, 1105), 'PIL.Image.open', 'pilImage.open', (['blur'], {}), '(blur)\n', (1099, 1105), True, 'from PIL import Image as pilImage\n'), ((3553, 3566), 'torch.cuda.amp.autocast', 'autocast', (['amp'], {}), '(amp)\n', (3561, 3566), False, 'from torch.cuda.amp import autocast, GradScaler\n'), ((4824, 4837), 'torch.cuda.amp.autocast', 'autocast', (['amp'], {}), '(amp)\n', (4832, 4837), False, 'from torch.cuda.amp import autocast, GradScaler\n'), ((5441, 5560), 'torchvision.utils.save_image', 'save_image', (['images', 'f"""implementations/edge/result/{status.batches_done}.jpg"""'], {'nrow': '(4)', 'normalize': '(True)', 'range': '(-1, 1)'}), "(images, f'implementations/edge/result/{status.batches_done}.jpg',\n nrow=4, normalize=True, range=(-1, 1))\n", (5451, 5560), False, 'from torchvision.utils import save_image\n'), ((5361, 5376), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (5374, 5376), False, 'import torch\n'), ((5839, 5858), 'torch.isnan', 'torch.isnan', (['G_loss'], {}), '(G_loss)\n', (5850, 5858), False, 'import torch\n'), ((5912, 5931), 'torch.isnan', 'torch.isnan', (['D_loss'], {}), '(D_loss)\n', (5923, 5931), False, 'import torch\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import tensorflow as tf
class CGAN(tf.keras.Model):
    """A small convolutional GAN (generator + discriminator) for square
    single-channel images, trained with alternating Adam steps.

    Layer names matter here: generator layers are named "g1".."g4" and
    discriminator layers "d1".."d4"; compute_gradients() partitions
    trainable variables by substring-matching those names.
    """
    def __init__(self, latent_dim, epochs = 50, batch_size = 100, learning_rate = 0.001,
                 im_dim = 28, n_filters = 32, g_factor = 1, drop_rate = 0.5):
        """ initialize model layers and parameters

        Args:
            latent_dim: size of the generator's input noise vector.
            epochs: number of passes over the training data.
            batch_size: samples per gradient step.
            learning_rate: Adam learning rate for the discriminator.
            im_dim: height/width of the (square) images.
            n_filters: base convolutional filter count.
            g_factor: multiplier applied to the generator's learning rate.
            drop_rate: dropout probability used in both networks.
        """
        super(CGAN, self).__init__()
        self.epochs = epochs
        self.batch_size = batch_size
        self.learning_rate = learning_rate
        self.g_factor = g_factor
        # Separate optimizers so the generator can run at a scaled lr.
        self.optimizer_d = tf.keras.optimizers.Adam(self.learning_rate)
        self.optimizer_g = tf.keras.optimizers.Adam(self.learning_rate*self.g_factor)
        self.latent_dim = latent_dim
        self.im_dim = im_dim
        self.n_filters = n_filters
        self.drop_rate = drop_rate
        # Generator: latent vector -> (im_dim/2)^2*n_filters dense map,
        # reshaped and upsampled x2 back to im_dim, sigmoid output in [0, 1].
        self.generative_net = tf.keras.Sequential(
        [
            tf.keras.layers.InputLayer(input_shape=(latent_dim,)),
            tf.keras.layers.Dropout(self.drop_rate),
            tf.keras.layers.Dense(units=(int((self.im_dim/2)**2)*self.n_filters), activation="relu", name = "g1"),
            tf.keras.layers.Reshape(target_shape=(int(self.im_dim/2), int((self.im_dim/2)), self.n_filters)),
            tf.keras.layers.Conv2DTranspose(
                filters=int(self.n_filters),
                kernel_size=3,
                strides=(2, 2),
                padding="SAME",
                activation = "relu",
                name = "g2"),
            tf.keras.layers.Conv2DTranspose(
                filters=1, kernel_size=3, strides=(1, 1), padding="SAME", activation = "relu", name = "g3"),
            tf.keras.layers.Conv2DTranspose(
                filters=1, kernel_size=1, strides=(1, 1), padding="SAME", activation="sigmoid", name = "g4"),
        ])
        # Discriminator: two strided convs, then dense layers down to a
        # single unbounded logit (no final sigmoid — see discriminate()).
        self.discriminator_net = tf.keras.Sequential(
        [
            tf.keras.layers.InputLayer(input_shape=(int(self.im_dim), int(self.im_dim), 1)),
            tf.keras.layers.Conv2D(
                filters=int(self.n_filters), kernel_size=3, strides=(2, 2), activation = tf.nn.leaky_relu, name = "d1"),
            tf.keras.layers.Conv2D(
                filters=int(self.n_filters), kernel_size=3, strides=(2, 2), activation = tf.nn.leaky_relu, name = "d2"),
            tf.keras.layers.Flatten(),
            tf.keras.layers.Dropout(self.drop_rate),
            tf.keras.layers.Dense(units = int((self.im_dim/4)**2), activation = tf.nn.leaky_relu, name = "d3"),
            tf.keras.layers.Dropout(self.drop_rate),
            tf.keras.layers.Dense(units = 1, name = "d4"),
        ])
    # @tf.function
    def generate(self, eps=None, num = None):
        """ generate fake sample using generative net

        Args:
            eps: optional latent batch; if None, sampled from N(0, 1).
            num: batch size to sample when eps is None (defaults to
                self.batch_size).
        """
        if num is None:
            num = self.batch_size
        if eps is None:
            eps = tf.random.normal(shape=(num, self.latent_dim))
        return self.generative_net(eps)
    # @tf.function
    def discriminate(self, x, apply_sigmoid=False):
        """ discriminate between fake and real samples

        Returns raw logits by default; pass apply_sigmoid=True to get
        probabilities instead.
        """
        logits = self.discriminator_net(x)
        if apply_sigmoid:
            probs = tf.sigmoid(logits)
            return probs
        else:
            return logits
    def compute_loss_discriminator(self, x):
        """ compute log-loss for discriminator

        Real samples are scored against label 1. For fakes, negating the
        logits with label 1 is mathematically identical to label 0 on the
        raw logits, i.e. -log(1 - sigmoid(gen_logit)).
        """
        x_logit = self.discriminate(x)
        gen = self.generate()
        gen_logit = self.discriminate(gen)
        cross_ent_x = tf.nn.sigmoid_cross_entropy_with_logits(logits=x_logit, labels=tf.ones(shape=x_logit.shape))
        cross_ent_gen = tf.nn.sigmoid_cross_entropy_with_logits(logits=-gen_logit, labels=tf.ones(shape=gen_logit.shape))
        return tf.reduce_mean(cross_ent_gen) + tf.reduce_mean(cross_ent_x)
    def compute_loss_generator(self):
        """ compute log-loss for generator

        Non-saturating GAN loss: the generator maximizes
        log(sigmoid(D(G(z)))), expressed as cross-entropy against ones.
        """
        gen = self.generate()
        gen_logit = self.discriminate(gen)
        cross_ent_gen = tf.nn.sigmoid_cross_entropy_with_logits(logits=gen_logit, labels=tf.ones(shape=gen_logit.shape))
        return tf.reduce_mean(cross_ent_gen)
    def compute_gradients(self, x, sub):
        """
        compute dynamic gradients with separate optimizers which could theoretically undergo dynamic adjustment during training

        Args:
            x: a batch of real images (only used for the discriminator).
            sub: "discriminator" or "generator" — selects which loss to
                differentiate and which variable subset to return.

        NOTE(review): variables are selected by substring-matching "d"/"g"
        in their names; this only works because the layers are explicitly
        named g1..g4 / d1..d4 above — fragile if layers are renamed.
        Returns None for any other `sub` value.
        """
        with tf.GradientTape() as tape:
            if sub == "discriminator":
                loss = self.compute_loss_discriminator(x)
                t_v = [el for el in self.trainable_variables if "d" in el.name]
                return tape.gradient(loss, t_v), t_v, loss
            elif sub == "generator":
                loss = self.compute_loss_generator()
                t_v = [el for el in self.trainable_variables if "g" in el.name]
                return tape.gradient(loss, t_v), t_v, loss
    def apply_gradients(self, gradients, t_v, sub):
        """ apply adam gradient descent optimizer for learning process """
        if sub == "discriminator":
            self.optimizer_d.apply_gradients(zip(gradients, t_v))
        elif sub == "generator":
            self.optimizer_g.apply_gradients(zip(gradients, t_v))
    def train(self, train_dataset):
        """ main training call for CGAN

        Shuffles/batches *train_dataset* (an array-like with shape
        (N, im_dim, im_dim, 1) — inferred from the discriminator's input
        layer; confirm against callers) and alternates one discriminator
        step and one generator step per batch, logging running mean loss
        and gradient norms every 20 batches.
        """
        num_samples = int(train_dataset.shape[0]/self.batch_size)
        train_dataset = tf.data.Dataset.from_tensor_slices(train_dataset).shuffle(train_dataset.shape[0]).batch(self.batch_size)
        for i in range(self.epochs):
            j = 1
            norm_d = 0
            Loss_d = 0
            norm_g = 0
            Loss_g = 0
            print("Epoch: %s" % str(i+1))
            for train_x in train_dataset:
                # disciminator step
                gradients, t_v, loss = self.compute_gradients(train_x, sub = "discriminator")
                Loss_d += loss
                norm_d += tf.reduce_mean([tf.norm(g) for g in gradients])
                self.apply_gradients(gradients, t_v, sub = "discriminator")
                # generator step
                gradients, t_v, loss = self.compute_gradients(train_x, sub = "generator")
                Loss_g += loss
                norm_g += tf.reduce_mean([tf.norm(g) for g in gradients])
                self.apply_gradients(gradients, t_v, sub = "generator")
                if j != 1 and j % 20 == 0:
                    # good to print out euclidean norm of gradients
                    tf.print("Epoch: %s, Batch: %s/%s" % (i+1,j,num_samples))
                    tf.print("Mean discriminator loss: ", Loss_d/j, ", Mean discriminator gradient norm: ", norm_d/j)
                    tf.print("Mean generator loss: ", Loss_g/j, ", Mean generator gradient norm: ", norm_g/j)
                j += 1
|
[
"tensorflow.ones",
"tensorflow.keras.layers.Dropout",
"tensorflow.random.normal",
"tensorflow.keras.layers.Dense",
"tensorflow.print",
"tensorflow.reduce_mean",
"tensorflow.keras.layers.InputLayer",
"tensorflow.data.Dataset.from_tensor_slices",
"tensorflow.sigmoid",
"tensorflow.keras.optimizers.Adam",
"tensorflow.keras.layers.Conv2DTranspose",
"tensorflow.norm",
"tensorflow.GradientTape",
"tensorflow.keras.layers.Flatten"
] |
[((612, 656), 'tensorflow.keras.optimizers.Adam', 'tf.keras.optimizers.Adam', (['self.learning_rate'], {}), '(self.learning_rate)\n', (636, 656), True, 'import tensorflow as tf\n'), ((684, 744), 'tensorflow.keras.optimizers.Adam', 'tf.keras.optimizers.Adam', (['(self.learning_rate * self.g_factor)'], {}), '(self.learning_rate * self.g_factor)\n', (708, 744), True, 'import tensorflow as tf\n'), ((4038, 4067), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['cross_ent_gen'], {}), '(cross_ent_gen)\n', (4052, 4067), True, 'import tensorflow as tf\n'), ((2831, 2877), 'tensorflow.random.normal', 'tf.random.normal', ([], {'shape': '(num, self.latent_dim)'}), '(shape=(num, self.latent_dim))\n', (2847, 2877), True, 'import tensorflow as tf\n'), ((3138, 3156), 'tensorflow.sigmoid', 'tf.sigmoid', (['logits'], {}), '(logits)\n', (3148, 3156), True, 'import tensorflow as tf\n'), ((3683, 3712), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['cross_ent_gen'], {}), '(cross_ent_gen)\n', (3697, 3712), True, 'import tensorflow as tf\n'), ((3715, 3742), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['cross_ent_x'], {}), '(cross_ent_x)\n', (3729, 3742), True, 'import tensorflow as tf\n'), ((4275, 4292), 'tensorflow.GradientTape', 'tf.GradientTape', ([], {}), '()\n', (4290, 4292), True, 'import tensorflow as tf\n'), ((950, 1003), 'tensorflow.keras.layers.InputLayer', 'tf.keras.layers.InputLayer', ([], {'input_shape': '(latent_dim,)'}), '(input_shape=(latent_dim,))\n', (976, 1003), True, 'import tensorflow as tf\n'), ((1015, 1054), 'tensorflow.keras.layers.Dropout', 'tf.keras.layers.Dropout', (['self.drop_rate'], {}), '(self.drop_rate)\n', (1038, 1054), True, 'import tensorflow as tf\n'), ((1525, 1648), 'tensorflow.keras.layers.Conv2DTranspose', 'tf.keras.layers.Conv2DTranspose', ([], {'filters': '(1)', 'kernel_size': '(3)', 'strides': '(1, 1)', 'padding': '"""SAME"""', 'activation': '"""relu"""', 'name': '"""g3"""'}), "(filters=1, kernel_size=3, strides=(1, 1),\n padding='SAME', 
activation='relu', name='g3')\n", (1556, 1648), True, 'import tensorflow as tf\n'), ((1675, 1801), 'tensorflow.keras.layers.Conv2DTranspose', 'tf.keras.layers.Conv2DTranspose', ([], {'filters': '(1)', 'kernel_size': '(1)', 'strides': '(1, 1)', 'padding': '"""SAME"""', 'activation': '"""sigmoid"""', 'name': '"""g4"""'}), "(filters=1, kernel_size=1, strides=(1, 1),\n padding='SAME', activation='sigmoid', name='g4')\n", (1706, 1801), True, 'import tensorflow as tf\n'), ((2300, 2325), 'tensorflow.keras.layers.Flatten', 'tf.keras.layers.Flatten', ([], {}), '()\n', (2323, 2325), True, 'import tensorflow as tf\n'), ((2337, 2376), 'tensorflow.keras.layers.Dropout', 'tf.keras.layers.Dropout', (['self.drop_rate'], {}), '(self.drop_rate)\n', (2360, 2376), True, 'import tensorflow as tf\n'), ((2498, 2537), 'tensorflow.keras.layers.Dropout', 'tf.keras.layers.Dropout', (['self.drop_rate'], {}), '(self.drop_rate)\n', (2521, 2537), True, 'import tensorflow as tf\n'), ((2549, 2590), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', ([], {'units': '(1)', 'name': '"""d4"""'}), "(units=1, name='d4')\n", (2570, 2590), True, 'import tensorflow as tf\n'), ((3516, 3544), 'tensorflow.ones', 'tf.ones', ([], {'shape': 'x_logit.shape'}), '(shape=x_logit.shape)\n', (3523, 3544), True, 'import tensorflow as tf\n'), ((3636, 3666), 'tensorflow.ones', 'tf.ones', ([], {'shape': 'gen_logit.shape'}), '(shape=gen_logit.shape)\n', (3643, 3666), True, 'import tensorflow as tf\n'), ((3991, 4021), 'tensorflow.ones', 'tf.ones', ([], {'shape': 'gen_logit.shape'}), '(shape=gen_logit.shape)\n', (3998, 4021), True, 'import tensorflow as tf\n'), ((6340, 6401), 'tensorflow.print', 'tf.print', (["('Epoch: %s, Batch: %s/%s' % (i + 1, j, num_samples))"], {}), "('Epoch: %s, Batch: %s/%s' % (i + 1, j, num_samples))\n", (6348, 6401), True, 'import tensorflow as tf\n'), ((6418, 6523), 'tensorflow.print', 'tf.print', (['"""Mean discriminator loss: """', '(Loss_d / j)', '""", Mean discriminator gradient norm: 
"""', '(norm_d / j)'], {}), "('Mean discriminator loss: ', Loss_d / j,\n ', Mean discriminator gradient norm: ', norm_d / j)\n", (6426, 6523), True, 'import tensorflow as tf\n'), ((6536, 6633), 'tensorflow.print', 'tf.print', (['"""Mean generator loss: """', '(Loss_g / j)', '""", Mean generator gradient norm: """', '(norm_g / j)'], {}), "('Mean generator loss: ', Loss_g / j,\n ', Mean generator gradient norm: ', norm_g / j)\n", (6544, 6633), True, 'import tensorflow as tf\n'), ((5262, 5311), 'tensorflow.data.Dataset.from_tensor_slices', 'tf.data.Dataset.from_tensor_slices', (['train_dataset'], {}), '(train_dataset)\n', (5296, 5311), True, 'import tensorflow as tf\n'), ((5801, 5811), 'tensorflow.norm', 'tf.norm', (['g'], {}), '(g)\n', (5808, 5811), True, 'import tensorflow as tf\n'), ((6105, 6115), 'tensorflow.norm', 'tf.norm', (['g'], {}), '(g)\n', (6112, 6115), True, 'import tensorflow as tf\n')]
|
#!/usr/bin/env python3
"""Test the configuration module."""
import multiprocessing
import os
import sys
import os.path
import unittest
import shutil
import random
import string
import tempfile
import yaml
# Try to create a working PYTHONPATH so `from switchmap...` imports resolve
# when this test file is executed directly from the source tree. The repo
# root is two directories above this test directory.
TEST_DIRECTORY = os.path.dirname(os.path.realpath(__file__))
SWITCHMAP_DIRECTORY = os.path.abspath(os.path.join(TEST_DIRECTORY, os.pardir))
ROOT_DIRECTORY = os.path.abspath(os.path.join(SWITCHMAP_DIRECTORY, os.pardir))
if TEST_DIRECTORY.endswith('/switchmap-ng/switchmap/test') is True:
    sys.path.append(ROOT_DIRECTORY)
else:
    # Refuse to run from an unexpected location rather than import the
    # wrong copy of the package.
    print(
        'This script is not installed in the "switchmap-ng/bin" directory. '
        'Please fix.')
    sys.exit(2)
from switchmap.utils import configuration
class TestConfig(unittest.TestCase):
    """Checks all functions and methods.

    NOTE: the setup below runs at class-definition (import) time rather
    than in setUpClass: temp directories are created, a known-good YAML
    config is written to disk, SWITCHMAP_CONFIGDIR is pointed at it, and a
    shared configuration.Config() is built from it for all tests.
    """
    #########################################################################
    # General object setup
    #########################################################################
    # Random 9-character token, used as a fake device name in
    # test_topology_device_file.
    random_string = ''.join([random.choice(
        string.ascii_letters + string.digits) for n in range(9)])
    log_directory = tempfile.mkdtemp()
    cache_directory = tempfile.mkdtemp()
    good_config = ("""\
main:
    log_directory: {}
    cache_directory: {}
    agent_threads: 25
    bind_port: 3000
    hostnames:
      - 192.168.1.1
      - 192.168.1.2
      - 192.168.1.3
      - 192.168.1.4
    listen_address: 0.0.0.0
    log_level: debug
    polling_interval: 20
""".format(log_directory, cache_directory))
    # Convert good_config to dictionary (safe_load accepts bytes input)
    good_dict = yaml.safe_load(bytes(good_config, 'utf-8'))
    # Set the environmental variable for the configuration directory
    directory = tempfile.mkdtemp()
    os.environ['SWITCHMAP_CONFIGDIR'] = directory
    config_file = '{}/test_config.yaml'.format(directory)
    # Write good_config to file
    with open(config_file, 'w') as f_handle:
        yaml.dump(good_dict, f_handle, default_flow_style=True)
    # Create configuration object
    config = configuration.Config()
    @classmethod
    def tearDownClass(cls):
        """Post test cleanup."""
        # os.rmdir requires each of these directories to be empty by now.
        os.rmdir(cls.log_directory)
        os.rmdir(cls.config.topology_directory())
        os.rmdir(cls.config.idle_directory())
        os.rmdir(cls.cache_directory)
        os.remove(cls.config_file)
        os.rmdir(cls.directory)
    def test_init(self):
        """Testing method init."""
        # Testing with non-existant directory
        directory = 'bogus'
        os.environ['SWITCHMAP_CONFIGDIR'] = directory
        with self.assertRaises(SystemExit):
            configuration.Config()
        # Testing with an empty directory
        empty_directory = tempfile.mkdtemp()
        os.environ['SWITCHMAP_CONFIGDIR'] = empty_directory
        with self.assertRaises(SystemExit):
            configuration.Config()
        # Write bad_config to file (an empty config file parses but lacks
        # required keys, so log_file() must exit)
        empty_config_file = '{}/test_config.yaml'.format(empty_directory)
        with open(empty_config_file, 'w') as f_handle:
            f_handle.write('')
        # Create configuration object
        config = configuration.Config()
        with self.assertRaises(SystemExit):
            config.log_file()
        # Cleanup files in temp directories
        _delete_files(directory)
    def test_log_file(self):
        """Testing method log_file."""
        # Test the log_file with a good_dict
        # good key and key_value
        result = self.config.log_file()
        self.assertEqual(
            result, '{}/switchmap-ng.log'.format(self.log_directory))
    def test_web_log_file(self):
        """Testing method web_log_file ."""
        # Testing web_log_file with a good dictionary.
        result = self.config.web_log_file()
        self.assertEqual(
            result, '{}/switchmap-ng-api.log'.format(self.log_directory))
    def test_log_level(self):
        """Testing method log_level."""
        # Testing with a good_dictionary
        # good key and good key_value
        result = self.config.log_level()
        self.assertEqual(result, 'debug')
        self.assertEqual(result, self.good_dict['main']['log_level'])
        # Set the environmental variable for the configuration directory
        directory = tempfile.mkdtemp()
        os.environ['SWITCHMAP_CONFIGDIR'] = directory
        config_file = '{}/test_config.yaml'.format(directory)
        # Testing log_level with blank key and blank key_value
        key = ''
        key_value = ''
        bad_config = ("""\
main:
    {} {}
""".format(key, key_value))
        bad_dict = yaml.safe_load(bytes(bad_config, 'utf-8'))
        # Write bad_config to file
        with open(config_file, 'w') as f_handle:
            yaml.dump(bad_dict, f_handle, default_flow_style=True)
        # Create configuration object
        config = configuration.Config()
        with self.assertRaises(SystemExit):
            config.log_level()
        # Testing log_level with good key and blank key_value
        key = 'log_level:'
        key_value = ''
        bad_config = ("""\
main:
    {} {}
""".format(key, key_value))
        bad_dict = yaml.safe_load(bytes(bad_config, 'utf-8'))
        # Write bad_config to file
        with open(config_file, 'w') as f_handle:
            yaml.dump(bad_dict, f_handle, default_flow_style=True)
        # Create configuration object
        config = configuration.Config()
        with self.assertRaises(SystemExit):
            config.log_level()
        # Cleanup files in temp directories
        _delete_files(directory)
    def test_cache_directory(self):
        """Testing method cache_directory."""
        # Testing cache_directory with temp directory
        # Set the environmental variable for the configuration directory
        directory = tempfile.mkdtemp()
        os.environ['SWITCHMAP_CONFIGDIR'] = directory
        config_file = '{}/test_config.yaml'.format(directory)
        # Testing cache_directory with blank key_value(filepath)
        key = ''
        key_value = ''
        bad_config = ("""\
main:
    {} {}
""".format(key, key_value))
        bad_dict = yaml.safe_load(bytes(bad_config, 'utf-8'))
        with open(config_file, 'w') as f_handle:
            yaml.dump(bad_dict, f_handle, default_flow_style=True)
        # Create configuration object
        config = configuration.Config()
        with self.assertRaises(SystemExit):
            config.cache_directory()
        # Cleanup files in temp directories
        _delete_files(directory)
    def test_agent_threads(self):
        """Testing method agent_threads."""
        # Testing agent_threads with good_dict
        # good key and key_value
        result = self.config.agent_threads()
        # Get CPU cores
        cores = multiprocessing.cpu_count()
        desired_max_threads = max(1, cores - 1)
        # We don't want a value that's too big that the CPU cannot cope
        # NOTE(review): asserting result == min(result, cap) only verifies
        # result <= cap, not the configured value of 25.
        expected = min(result, desired_max_threads)
        self.assertEqual(result, expected)
    def test_polling_interval(self):
        """Testing method polling_interval."""
        # Testing polling_interval with good_dictionary
        # good key and key_value
        result = self.config.polling_interval()
        self.assertEqual(result, 20)
        self.assertEqual(result, self.good_dict['main']['polling_interval'])
        # Set the environmental variable for the configuration directory
        directory = tempfile.mkdtemp()
        os.environ['SWITCHMAP_CONFIGDIR'] = directory
        config_file = '{}/test_config.yaml'.format(directory)
        # Testing polling_interval with blank key and blank key_value
        key = ''
        key_value = ''
        bad_config = ("""\
main:
    {} {}
""".format(key, key_value))
        bad_dict = yaml.safe_load(bytes(bad_config, 'utf-8'))
        # Write bad_config to file
        with open(config_file, 'w') as f_handle:
            yaml.dump(bad_dict, f_handle, default_flow_style=True)
        # Create configuration object
        config = configuration.Config()
        with self.assertRaises(SystemExit):
            config.polling_interval()
        # Testing polling_interval with good key and blank key_value
        key = 'polling_interval:'
        key_value = ''
        bad_config = ("""\
main:
    {} {}
""".format(key, key_value))
        bad_dict = yaml.safe_load(bytes(bad_config, 'utf-8'))
        # Write bad_config to file
        with open(config_file, 'w') as f_handle:
            yaml.dump(bad_dict, f_handle, default_flow_style=True)
        # Create configuration object
        config = configuration.Config()
        # A present-but-blank key falls back to the 86400-second default.
        result = config.polling_interval()
        self.assertEqual(result, 86400)
        # Cleanup files in temp directories
        _delete_files(directory)
    def test_bind_port(self):
        """Testing method bind_port."""
        # Testing bind_port with good_dictionary
        # good key and key_value
        result = self.config.bind_port()
        self.assertEqual(result, 3000)
        self.assertEqual(result, self.good_dict['main']['bind_port'])
        # Set the environmental variable for the configuration directory
        directory = tempfile.mkdtemp()
        os.environ['SWITCHMAP_CONFIGDIR'] = directory
        config_file = '{}/test_config.yaml'.format(directory)
        # Testing bind_port with blank key and blank key_value
        key = ''
        key_value = ''
        bad_config = ("""\
main:
    {} {}
""".format(key, key_value))
        bad_dict = yaml.safe_load(bytes(bad_config, 'utf-8'))
        # Write bad_config to file
        with open(config_file, 'w') as f_handle:
            yaml.dump(bad_dict, f_handle, default_flow_style=True)
        # Create configuration object
        config = configuration.Config()
        with self.assertRaises(SystemExit):
            config.bind_port()
        # Testing bind_port with good key and blank key_value
        key = 'bind_port:'
        key_value = ''
        bad_config = ("""\
main:
    {} {}
""".format(key, key_value))
        bad_dict = yaml.safe_load(bytes(bad_config, 'utf-8'))
        # Write bad_config to file
        with open(config_file, 'w') as f_handle:
            yaml.dump(bad_dict, f_handle, default_flow_style=True)
        # Create configuration object
        config = configuration.Config()
        # A present-but-blank key falls back to the default port 7000.
        result = config.bind_port()
        self.assertEqual(result, 7000)
        # Cleanup files in temp directories
        _delete_files(directory)
    def test_idle_directory(self):
        """Testing function idle_directory."""
        # Verify that directory exists
        result = self.config.idle_directory()
        self.assertEqual(os.path.exists(result), True)
        self.assertEqual(os.path.isdir(result), True)
        # Doesn't fail because directory now exists
        result = self.config.idle_directory()
        expected = '{}/idle'.format(
            self.good_dict['main']['cache_directory'])
        self.assertEqual(result, expected)
    def test_topology_directory(self):
        """Testing function topology_directory."""
        # Verify that directory exists
        result = self.config.topology_directory()
        self.assertEqual(os.path.exists(result), True)
        self.assertEqual(os.path.isdir(result), True)
        # Doesn't fail because directory now exists
        result = self.config.topology_directory()
        expected = '{}/topology'.format(
            self.good_dict['main']['cache_directory'])
        self.assertEqual(result, expected)
    def test_topology_device_file(self):
        """Testing function topology_device_file."""
        # Recreate the path to the device file
        result = self.config.topology_device_file(self.random_string)
        expected = '{}/{}.yaml'.format(
            self.config.topology_directory(), self.random_string)
        self.assertEqual(result, expected)
    def test_hostnames(self):
        """Testing function hostnames."""
        # Test expected versus returned values (hostnames() returns them
        # sorted, so compare against the sorted config list)
        result = self.config.hostnames()
        expected = sorted(self.good_dict['main']['hostnames'])
        self.assertEqual(result, expected)
    def test_log_directory(self):
        """Testing method log_directory."""
        # Testing log_directory with temp directory
        # Set the environmental variable for the configuration directory
        directory = tempfile.mkdtemp()
        os.environ['SWITCHMAP_CONFIGDIR'] = directory
        config_file = '{}/test_config.yaml'.format(directory)
        # Testing log_directory with blank key_value(filepath)
        key = ''
        key_value = ''
        bad_config = ("""\
main:
    {} {}
""".format(key, key_value))
        bad_dict = yaml.safe_load(bytes(bad_config, 'utf-8'))
        with open(config_file, 'w') as f_handle:
            yaml.dump(bad_dict, f_handle, default_flow_style=True)
        # Create configuration object
        config = configuration.Config()
        with self.assertRaises(SystemExit):
            config.log_directory()
        # Cleanup files in temp directories
        _delete_files(directory)
class TestConfigSNMP(unittest.TestCase):
    """Checks all functions and methods.

    Writes a two-group SNMP YAML configuration to a temp directory in
    setUpClass, points SWITCHMAP_CONFIGDIR at it and exercises
    configuration.ConfigSNMP against it.
    """
    # ---------------------------------------------------------------------- #
    # General object setup
    # ---------------------------------------------------------------------- #
    # Required so assertion-failure diffs on the dicts are never truncated
    maxDiff = None
    @classmethod
    def setUpClass(cls):
        """Setup the environmental before testing begins."""
        # Define agent name (random 9-character group name)
        cls.group_name = ''.join([random.choice(
            string.ascii_letters + string.digits) for n in range(9)])
        # Create logfile
        cls.log_file = tempfile.NamedTemporaryFile(delete=False).name
        # Create temporary configuration directory
        cls.test_config_dir = tempfile.mkdtemp()
        # Initializing key variables
        text_configuration = ("""
snmp_groups:
    - group_name: {}
      snmp_version: 3
      snmp_secname: woohoo
      snmp_community:
      snmp_port: 161
      snmp_authprotocol: sha
      snmp_authpassword: <PASSWORD>
      snmp_privprotocol: des
      snmp_privpassword: <PASSWORD>
    - group_name: Remote Sites
      snmp_version: 3
      snmp_secname: foobar
      snmp_community:
      snmp_port: 161
      snmp_authprotocol: sha
      snmp_authpassword: <PASSWORD>
      snmp_privprotocol: aes
      snmp_privpassword: <PASSWORD>
""".format(cls.group_name))
        cls.configuration_dict = yaml.safe_load(text_configuration)
        # Create the configuration file on disk
        test_config_file = '{}/config.yaml'.format(cls.test_config_dir)
        with open(test_config_file, 'w') as f_handle:
            f_handle.write(text_configuration)
        # Instantiate object to test
        os.environ['SWITCHMAP_CONFIGDIR'] = cls.test_config_dir
        cls.testobj = configuration.ConfigSNMP()
    @classmethod
    def tearDownClass(cls):
        """Cleanup the environmental after testing ends."""
        # Cleanup temporary files when done
        shutil.rmtree(cls.test_config_dir)
        os.remove(cls.log_file)
    def test_snmp_auth(self):
        """Testing method / function snmp_auth."""
        # Initializing key variables: the two groups written in setUpClass,
        # as snmp_auth() is expected to return them.
        expected_list = [
            {
                'group_name': 'Remote Sites',
                'snmp_version': 3,
                'snmp_secname': 'foobar',
                'snmp_community': None,
                'snmp_port': 161,
                'snmp_authprotocol': 'sha',
                'snmp_authpassword': '<PASSWORD>',
                'snmp_privprotocol': 'aes',
                'snmp_privpassword': '<PASSWORD>'
            },
            {
                'group_name': self.group_name,
                'snmp_version': 3,
                'snmp_secname': 'woohoo',
                'snmp_community': None,
                'snmp_port': 161,
                'snmp_authprotocol': 'sha',
                'snmp_authpassword': '<PASSWORD>',
                'snmp_privprotocol': 'des',
                'snmp_privpassword': '<PASSWORD>'
            }
        ]
        # Get results from configuration file
        groups = self.testobj.snmp_auth()
        # Iterate through each item in the snmp parameters list received,
        # matching result groups to expectations by group_name.
        # NOTE(review): a group missing from `groups` entirely would pass
        # silently — the loop only compares names that match.
        for group in groups:
            for expected_dict in expected_list:
                if expected_dict['group_name'] == group['group_name']:
                    for key in expected_dict.keys():
                        self.assertEqual(
                            group[key], expected_dict[key])
def _delete_files(directory):
"""Delete all files in directory."""
# Verify that directory exists
if os.path.isdir(directory) is False:
return
# Cleanup files in temp directories
filenames = [filename for filename in os.listdir(
directory) if os.path.isfile(
os.path.join(directory, filename))]
# Get the full filepath for the cache file and remove filepath
for filename in filenames:
filepath = os.path.join(directory, filename)
os.remove(filepath)
# Remove directory after files are deleted.
os.rmdir(directory)
if __name__ == '__main__':
    # Run the whole unittest suite when this file is executed directly
    unittest.main()
|
[
"os.remove",
"yaml.dump",
"yaml.safe_load",
"shutil.rmtree",
"os.path.join",
"multiprocessing.cpu_count",
"sys.path.append",
"unittest.main",
"os.path.exists",
"tempfile.mkdtemp",
"switchmap.utils.configuration.Config",
"os.path.realpath",
"os.rmdir",
"os.listdir",
"sys.exit",
"tempfile.NamedTemporaryFile",
"os.path.isdir",
"random.choice",
"switchmap.utils.configuration.ConfigSNMP"
] |
[((277, 303), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (293, 303), False, 'import os\n'), ((343, 382), 'os.path.join', 'os.path.join', (['TEST_DIRECTORY', 'os.pardir'], {}), '(TEST_DIRECTORY, os.pardir)\n', (355, 382), False, 'import os\n'), ((417, 461), 'os.path.join', 'os.path.join', (['SWITCHMAP_DIRECTORY', 'os.pardir'], {}), '(SWITCHMAP_DIRECTORY, os.pardir)\n', (429, 461), False, 'import os\n'), ((535, 566), 'sys.path.append', 'sys.path.append', (['ROOT_DIRECTORY'], {}), '(ROOT_DIRECTORY)\n', (550, 566), False, 'import sys\n'), ((688, 699), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (696, 699), False, 'import sys\n'), ((1142, 1160), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (1158, 1160), False, 'import tempfile\n'), ((1183, 1201), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (1199, 1201), False, 'import tempfile\n'), ((1712, 1730), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (1728, 1730), False, 'import tempfile\n'), ((2029, 2051), 'switchmap.utils.configuration.Config', 'configuration.Config', ([], {}), '()\n', (2049, 2051), False, 'from switchmap.utils import configuration\n'), ((17407, 17426), 'os.rmdir', 'os.rmdir', (['directory'], {}), '(directory)\n', (17415, 17426), False, 'import os\n'), ((17484, 17499), 'unittest.main', 'unittest.main', ([], {}), '()\n', (17497, 17499), False, 'import unittest\n'), ((1925, 1980), 'yaml.dump', 'yaml.dump', (['good_dict', 'f_handle'], {'default_flow_style': '(True)'}), '(good_dict, f_handle, default_flow_style=True)\n', (1934, 1980), False, 'import yaml\n'), ((2139, 2166), 'os.rmdir', 'os.rmdir', (['cls.log_directory'], {}), '(cls.log_directory)\n', (2147, 2166), False, 'import os\n'), ((2271, 2300), 'os.rmdir', 'os.rmdir', (['cls.cache_directory'], {}), '(cls.cache_directory)\n', (2279, 2300), False, 'import os\n'), ((2309, 2335), 'os.remove', 'os.remove', (['cls.config_file'], {}), '(cls.config_file)\n', (2318, 2335), False, 'import 
os\n'), ((2344, 2367), 'os.rmdir', 'os.rmdir', (['cls.directory'], {}), '(cls.directory)\n', (2352, 2367), False, 'import os\n'), ((2705, 2723), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (2721, 2723), False, 'import tempfile\n'), ((3115, 3137), 'switchmap.utils.configuration.Config', 'configuration.Config', ([], {}), '()\n', (3135, 3137), False, 'from switchmap.utils import configuration\n'), ((4246, 4264), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (4262, 4264), False, 'import tempfile\n'), ((4826, 4848), 'switchmap.utils.configuration.Config', 'configuration.Config', ([], {}), '()\n', (4846, 4848), False, 'from switchmap.utils import configuration\n'), ((5378, 5400), 'switchmap.utils.configuration.Config', 'configuration.Config', ([], {}), '()\n', (5398, 5400), False, 'from switchmap.utils import configuration\n'), ((5784, 5802), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (5800, 5802), False, 'import tempfile\n'), ((6331, 6353), 'switchmap.utils.configuration.Config', 'configuration.Config', ([], {}), '()\n', (6351, 6353), False, 'from switchmap.utils import configuration\n'), ((6758, 6785), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (6783, 6785), False, 'import multiprocessing\n'), ((7433, 7451), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (7449, 7451), False, 'import tempfile\n'), ((8020, 8042), 'switchmap.utils.configuration.Config', 'configuration.Config', ([], {}), '()\n', (8040, 8042), False, 'from switchmap.utils import configuration\n'), ((8593, 8615), 'switchmap.utils.configuration.Config', 'configuration.Config', ([], {}), '()\n', (8613, 8615), False, 'from switchmap.utils import configuration\n'), ((9174, 9192), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (9190, 9192), False, 'import tempfile\n'), ((9754, 9776), 'switchmap.utils.configuration.Config', 'configuration.Config', ([], {}), '()\n', (9774, 9776), False, 'from switchmap.utils import 
configuration\n'), ((10306, 10328), 'switchmap.utils.configuration.Config', 'configuration.Config', ([], {}), '()\n', (10326, 10328), False, 'from switchmap.utils import configuration\n'), ((12376, 12394), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (12392, 12394), False, 'import tempfile\n'), ((12921, 12943), 'switchmap.utils.configuration.Config', 'configuration.Config', ([], {}), '()\n', (12941, 12943), False, 'from switchmap.utils import configuration\n'), ((13838, 13856), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (13854, 13856), False, 'import tempfile\n'), ((14740, 14774), 'yaml.safe_load', 'yaml.safe_load', (['text_configuration'], {}), '(text_configuration)\n', (14754, 14774), False, 'import yaml\n'), ((15121, 15147), 'switchmap.utils.configuration.ConfigSNMP', 'configuration.ConfigSNMP', ([], {}), '()\n', (15145, 15147), False, 'from switchmap.utils import configuration\n'), ((15306, 15340), 'shutil.rmtree', 'shutil.rmtree', (['cls.test_config_dir'], {}), '(cls.test_config_dir)\n', (15319, 15340), False, 'import shutil\n'), ((15349, 15372), 'os.remove', 'os.remove', (['cls.log_file'], {}), '(cls.log_file)\n', (15358, 15372), False, 'import os\n'), ((16943, 16967), 'os.path.isdir', 'os.path.isdir', (['directory'], {}), '(directory)\n', (16956, 16967), False, 'import os\n'), ((17292, 17325), 'os.path.join', 'os.path.join', (['directory', 'filename'], {}), '(directory, filename)\n', (17304, 17325), False, 'import os\n'), ((17334, 17353), 'os.remove', 'os.remove', (['filepath'], {}), '(filepath)\n', (17343, 17353), False, 'import os\n'), ((1040, 1091), 'random.choice', 'random.choice', (['(string.ascii_letters + string.digits)'], {}), '(string.ascii_letters + string.digits)\n', (1053, 1091), False, 'import random\n'), ((2613, 2635), 'switchmap.utils.configuration.Config', 'configuration.Config', ([], {}), '()\n', (2633, 2635), False, 'from switchmap.utils import configuration\n'), ((2840, 2862), 
'switchmap.utils.configuration.Config', 'configuration.Config', ([], {}), '()\n', (2860, 2862), False, 'from switchmap.utils import configuration\n'), ((4715, 4769), 'yaml.dump', 'yaml.dump', (['bad_dict', 'f_handle'], {'default_flow_style': '(True)'}), '(bad_dict, f_handle, default_flow_style=True)\n', (4724, 4769), False, 'import yaml\n'), ((5267, 5321), 'yaml.dump', 'yaml.dump', (['bad_dict', 'f_handle'], {'default_flow_style': '(True)'}), '(bad_dict, f_handle, default_flow_style=True)\n', (5276, 5321), False, 'import yaml\n'), ((6220, 6274), 'yaml.dump', 'yaml.dump', (['bad_dict', 'f_handle'], {'default_flow_style': '(True)'}), '(bad_dict, f_handle, default_flow_style=True)\n', (6229, 6274), False, 'import yaml\n'), ((7909, 7963), 'yaml.dump', 'yaml.dump', (['bad_dict', 'f_handle'], {'default_flow_style': '(True)'}), '(bad_dict, f_handle, default_flow_style=True)\n', (7918, 7963), False, 'import yaml\n'), ((8482, 8536), 'yaml.dump', 'yaml.dump', (['bad_dict', 'f_handle'], {'default_flow_style': '(True)'}), '(bad_dict, f_handle, default_flow_style=True)\n', (8491, 8536), False, 'import yaml\n'), ((9643, 9697), 'yaml.dump', 'yaml.dump', (['bad_dict', 'f_handle'], {'default_flow_style': '(True)'}), '(bad_dict, f_handle, default_flow_style=True)\n', (9652, 9697), False, 'import yaml\n'), ((10195, 10249), 'yaml.dump', 'yaml.dump', (['bad_dict', 'f_handle'], {'default_flow_style': '(True)'}), '(bad_dict, f_handle, default_flow_style=True)\n', (10204, 10249), False, 'import yaml\n'), ((10675, 10697), 'os.path.exists', 'os.path.exists', (['result'], {}), '(result)\n', (10689, 10697), False, 'import os\n'), ((10730, 10751), 'os.path.isdir', 'os.path.isdir', (['result'], {}), '(result)\n', (10743, 10751), False, 'import os\n'), ((11198, 11220), 'os.path.exists', 'os.path.exists', (['result'], {}), '(result)\n', (11212, 11220), False, 'import os\n'), ((11253, 11274), 'os.path.isdir', 'os.path.isdir', (['result'], {}), '(result)\n', (11266, 11274), False, 'import os\n'), 
((12810, 12864), 'yaml.dump', 'yaml.dump', (['bad_dict', 'f_handle'], {'default_flow_style': '(True)'}), '(bad_dict, f_handle, default_flow_style=True)\n', (12819, 12864), False, 'import yaml\n'), ((13709, 13750), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'delete': '(False)'}), '(delete=False)\n', (13736, 13750), False, 'import tempfile\n'), ((17076, 17097), 'os.listdir', 'os.listdir', (['directory'], {}), '(directory)\n', (17086, 17097), False, 'import os\n'), ((13575, 13626), 'random.choice', 'random.choice', (['(string.ascii_letters + string.digits)'], {}), '(string.ascii_letters + string.digits)\n', (13588, 13626), False, 'import random\n'), ((17138, 17171), 'os.path.join', 'os.path.join', (['directory', 'filename'], {}), '(directory, filename)\n', (17150, 17171), False, 'import os\n')]
|
from django.views.generic import TemplateView
import re
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import login as auth_login
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.sites.models import Site
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
from django.utils.translation import ugettext_lazy as _
from tos.compat import get_runtime_user_model, get_request_site
from tos.models import has_user_agreed_latest_tos, TermsOfService, UserAgreement
class TosView(TemplateView):
    """Render the current Terms of Service document."""

    template_name = "tos/tos.html"

    def get_context_data(self, **kwargs):
        """Expose the latest TOS to the template as ``tos``."""
        ctx = super(TosView, self).get_context_data(**kwargs)
        ctx['tos'] = TermsOfService.objects.get_current_tos()
        return ctx
def _redirect_to(redirect_to):
    """Moved redirect_to logic here to avoid duplication in views.

    Sanitize a post-login redirect target; fall back to
    ``settings.LOGIN_REDIRECT_URL`` when the target looks unsafe.
    """
    # Light security check: reject empty targets and ones containing spaces.
    if not redirect_to or ' ' in redirect_to:
        return settings.LOGIN_REDIRECT_URL
    # Heavier security check: block absolute URLs such as
    # http://example.com while still allowing things like
    # /view/?param=http://example.com -- i.e. a '//' is only dangerous
    # when it appears *before* any question mark.
    if '//' in redirect_to and re.match(r'[^\?]*//', redirect_to):
        return settings.LOGIN_REDIRECT_URL
    return redirect_to
@csrf_protect
@never_cache
def check_tos(request, template_name='tos/tos_check.html',
              redirect_field_name=REDIRECT_FIELD_NAME,):
    """Handle the user's accept/decline decision on the current TOS.

    Expects ``tos_user`` (user pk) and ``tos_backend`` (auth backend dotted
    path) to have been stashed in the session by the ``login`` view below.
    """
    # Sanitize the redirect target taken from POST (GET as fallback).
    redirect_to = _redirect_to(request.POST.get(redirect_field_name, request.GET.get(redirect_field_name, '')))
    tos = TermsOfService.objects.get_current_tos()
    if request.method == "POST":
        if request.POST.get("accept", "") == "accept":
            # Re-load the pending user that login() stored in the session.
            user = get_runtime_user_model().objects.get(pk=request.session['tos_user'])
            # auth_login() requires user.backend; restore the one captured
            # at authentication time.
            user.backend = request.session['tos_backend']
            # Save the user agreement to the new TOS
            UserAgreement.objects.create(terms_of_service=tos, user=user)
            # Log the user in
            auth_login(request, user)
            if request.session.test_cookie_worked():
                request.session.delete_test_cookie()
            return HttpResponseRedirect(redirect_to)
        else:
            # User declined: explain why login is blocked and fall through
            # to re-render the TOS page.
            messages.error(
                request,
                _(u"You cannot login without agreeing to the terms of this site.")
            )
    return render_to_response(template_name, {
        'tos': tos,
        redirect_field_name: redirect_to,
    }, RequestContext(request))
@csrf_protect
@never_cache
def login(request, template_name='registration/login.html',
          redirect_field_name=REDIRECT_FIELD_NAME,
          authentication_form=AuthenticationForm):
    """Displays the login form and handles the login action.

    Users who have not yet agreed to the latest TOS are not logged in;
    instead their pk and auth backend are stashed in the session and the
    TOS accept/refuse page is rendered (completed by ``check_tos``).
    """
    redirect_to = request.POST.get(redirect_field_name, request.GET.get(redirect_field_name, ''))
    if request.method == "POST":
        form = authentication_form(data=request.POST)
        if form.is_valid():
            redirect_to = _redirect_to(redirect_to)
            # Okay, security checks complete. Check to see if user agrees
            # to terms
            user = form.get_user()
            if has_user_agreed_latest_tos(user):
                # Log the user in.
                auth_login(request, user)
                if request.session.test_cookie_worked():
                    request.session.delete_test_cookie()
                return HttpResponseRedirect(redirect_to)
            else:
                # user has not yet agreed to latest tos
                # force them to accept or refuse
                request.session['tos_user'] = user.pk
                # Pass the used backend as well since django will require it
                # and it can only be optained by calling authenticate, but we
                # got no credentials in check_tos.
                # see: https://docs.djangoproject.com/en/1.6/topics/auth/default/#how-to-log-a-user-in
                request.session['tos_backend'] = user.backend
                return render_to_response('tos/tos_check.html', {
                    redirect_field_name: redirect_to,
                    'tos': TermsOfService.objects.get_current_tos()
                }, RequestContext(request))
    else:
        form = authentication_form(request)
    # Cookie round-trip check so check_tos can verify sessions work.
    request.session.set_test_cookie()
    # NOTE(review): Site._meta.installed is a legacy (old-Django) check for
    # the sites framework -- verify it still exists on the targeted version.
    if Site._meta.installed:
        current_site = Site.objects.get_current()
    else:
        current_site = get_request_site()(request)
    return render_to_response(template_name, {
        'form': form,
        redirect_field_name: redirect_to,
        'site': current_site,
        'site_name': current_site.name,
    }, RequestContext(request))
|
[
"django.contrib.sites.models.Site.objects.get_current",
"tos.models.has_user_agreed_latest_tos",
"re.match",
"tos.models.UserAgreement.objects.create",
"tos.compat.get_request_site",
"tos.models.TermsOfService.objects.get_current_tos",
"tos.compat.get_runtime_user_model",
"django.http.HttpResponseRedirect",
"django.utils.translation.ugettext_lazy",
"django.contrib.auth.login",
"django.template.RequestContext"
] |
[((1958, 1998), 'tos.models.TermsOfService.objects.get_current_tos', 'TermsOfService.objects.get_current_tos', ([], {}), '()\n', (1996, 1998), False, 'from tos.models import has_user_agreed_latest_tos, TermsOfService, UserAgreement\n'), ((977, 1017), 'tos.models.TermsOfService.objects.get_current_tos', 'TermsOfService.objects.get_current_tos', ([], {}), '()\n', (1015, 1017), False, 'from tos.models import has_user_agreed_latest_tos, TermsOfService, UserAgreement\n'), ((2872, 2895), 'django.template.RequestContext', 'RequestContext', (['request'], {}), '(request)\n', (2886, 2895), False, 'from django.template import RequestContext\n'), ((4783, 4809), 'django.contrib.sites.models.Site.objects.get_current', 'Site.objects.get_current', ([], {}), '()\n', (4807, 4809), False, 'from django.contrib.sites.models import Site\n'), ((5060, 5083), 'django.template.RequestContext', 'RequestContext', (['request'], {}), '(request)\n', (5074, 5083), False, 'from django.template import RequestContext\n'), ((1577, 1611), 're.match', 're.match', (['"""[^\\\\?]*//"""', 'redirect_to'], {}), "('[^\\\\?]*//', redirect_to)\n", (1585, 1611), False, 'import re\n'), ((2299, 2360), 'tos.models.UserAgreement.objects.create', 'UserAgreement.objects.create', ([], {'terms_of_service': 'tos', 'user': 'user'}), '(terms_of_service=tos, user=user)\n', (2327, 2360), False, 'from tos.models import has_user_agreed_latest_tos, TermsOfService, UserAgreement\n'), ((2404, 2429), 'django.contrib.auth.login', 'auth_login', (['request', 'user'], {}), '(request, user)\n', (2414, 2429), True, 'from django.contrib.auth import login as auth_login\n'), ((2557, 2590), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['redirect_to'], {}), '(redirect_to)\n', (2577, 2590), False, 'from django.http import HttpResponseRedirect\n'), ((3568, 3600), 'tos.models.has_user_agreed_latest_tos', 'has_user_agreed_latest_tos', (['user'], {}), '(user)\n', (3594, 3600), False, 'from tos.models import 
has_user_agreed_latest_tos, TermsOfService, UserAgreement\n'), ((4843, 4861), 'tos.compat.get_request_site', 'get_request_site', ([], {}), '()\n', (4859, 4861), False, 'from tos.compat import get_runtime_user_model, get_request_site\n'), ((2674, 2740), 'django.utils.translation.ugettext_lazy', '_', (['u"""You cannot login without agreeing to the terms of this site."""'], {}), "(u'You cannot login without agreeing to the terms of this site.')\n", (2675, 2740), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3654, 3679), 'django.contrib.auth.login', 'auth_login', (['request', 'user'], {}), '(request, user)\n', (3664, 3679), True, 'from django.contrib.auth import login as auth_login\n'), ((3819, 3852), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['redirect_to'], {}), '(redirect_to)\n', (3839, 3852), False, 'from django.http import HttpResponseRedirect\n'), ((4611, 4634), 'django.template.RequestContext', 'RequestContext', (['request'], {}), '(request)\n', (4625, 4634), False, 'from django.template import RequestContext\n'), ((2106, 2130), 'tos.compat.get_runtime_user_model', 'get_runtime_user_model', ([], {}), '()\n', (2128, 2130), False, 'from tos.compat import get_runtime_user_model, get_request_site\n'), ((4551, 4591), 'tos.models.TermsOfService.objects.get_current_tos', 'TermsOfService.objects.get_current_tos', ([], {}), '()\n', (4589, 4591), False, 'from tos.models import has_user_agreed_latest_tos, TermsOfService, UserAgreement\n')]
|
import os, shutil, stat
# Source depot root holding the per-language exported XLF files.
SDpath=r'C:\Depots\MBSI\Projects\OOB\UI'
def del_rw(action, name, exc):
    """``shutil.rmtree`` onerror hook: make the file writable, then delete it.

    Clears the read-only attribute first so the removal can succeed on
    files that a plain delete would refuse.
    """
    os.chmod(name, stat.S_IWRITE)
    os.unlink(name)
#GitPath=r'C:\Gitprojects'
# Destination Git working tree that receives the localization files.
GitPath=r'C:\test\Talent'
# Short locale codes as used by the SD depot file names (60 entries),
# paired index-for-index with the full codes in langs2 below.
langs=["bg", "ca-ES", "cs", "da", "de", "de-AT", "de-CH", "el", "en-AU", "en-CA", "en-GB", "en-IE", "en-IN", "en-MY", "en-NZ", "en-SG", "en-ZA", "es", "es-MX", "et", "eu", "fi", "fr", "fr-BE", "fr-CA", "fr-CH", "gl", "hi", "hr-HR", "hu", "id", "is", "it", "it-CH", "ja", "kk", "ko", "lt", "lv", "ms-MY", "nb-NO", "nl", "nl-BE", "pl", "pt-BR", "pt-PT", "ro", "ru", "sk", "sl", "sr-Cyrl-RS", "sr-Latn-RS", "sv", "th", "tr", "uk", "vi", "zh-Hans", "zh-Hant", "zh-HK"]
# Full locale codes used for the renamed Git-side file names.
langs2=["bg-BG", "ca-ES", "cs-CZ", "da-DK", "de-DE", "de-AT", "de-CH", "el-GR", "en-AU", "en-CA", "en-GB", "en-IE", "en-IN", "en-MY", "en-NZ", "en-SG", "en-ZA", "es-ES", "es-MX", "et-EE", "eu-ES", "fi-FI", "fr-FR", "fr-BE", "fr-CA", "fr-CH", "gl-ES", "hi-IN", "hr-HR", "hu-HU", "id-ID", "is-is", "it-IT", "it-CH", "ja-JP", "kk-KZ", "ko-KR", "lt-LT", "lv-LV", "ms-MY", "nb-NO", "nl-NL", "nl-BE", "pl-PL", "pt-BR", "pt-PT", "ro-RO", "ru-RU", "sk-SK", "sl-SI", "sr-Cyrl-RS", "sr-Latn-RS", "sv-SE", "th-TH", "tr-TR", "uk-UA", "vi-VN", "zh-Hans-CN", "zh-Hant-TW", "zh-Hant-HK"]
# Client components whose localization folders are managed by this script.
components=["Dynamics365-HCM-OnboardingClient","Dynamics365-HCM-MsAssessClient","Dynamics365-HCM-Offermanagement","Dynamics365-HCM-TalentEngagement","Dynamics365-HCM-AppsPortalClient"]
# Cleanup pass: delete any existing messages.<full-locale>.xlf files in every
# component's localization folder (clearing the read-only bit first) so a
# fresh copy/rename run starts from a clean state.
for com in components:
    targetFolder=os.path.join(GitPath,com,'localization')
    for l in langs2:
        targetFile=os.path.join(targetFolder,'messages.'+l+'.xlf')
        print(targetFile)
        if os.path.isfile(targetFile):
            os.chmod(targetFile,stat.S_IWRITE)
            os.remove(targetFile)
def copymessages():
    """Copy per-language message XLF files from the SD depot into the Git tree.

    For every language that exists under ``SDpath``, copies
    ``messages.<lang>.xlf`` of each component from
    ``<SDpath>/<lang>/TalentEngagement/<component>/`` into
    ``<GitPath>/<component>/localization/``, creating the destination
    folder on demand.
    """
    for language in langs:
        if not os.path.exists(os.path.join(SDpath, language)):
            continue
        filename = 'messages.' + language + '.xlf'
        for component in components:
            source_file = os.path.join(SDpath, language, 'TalentEngagement', component, filename)
            dest_dir = os.path.join(GitPath, component, 'localization')
            print(source_file)
            print(dest_dir)
            if not os.path.exists(dest_dir):
                os.makedirs(dest_dir)
            if os.path.exists(source_file):
                shutil.copy2(source_file, os.path.join(dest_dir, filename))
#### rename the files ####
# from C:\test\TalentEngagement\Dynamics365-HCM-MsAssessClient\localization\messages.bg.xlf to
# to C:\test\TalentEngagement\Dynamics365-HCM-MsAssessClient\localization\messages.bg-BG.xlf
def renameMessages():
    """Rename copied XLF files from short to full locale codes.

    e.g. ``messages.bg.xlf`` -> ``messages.bg-BG.xlf`` in each component's
    localization folder.

    Improvements over the original:
    - iterates the ``(langs, langs2)`` pairs directly instead of a
      hard-coded ``range(60)``, so the lists can change length without
      touching this code;
    - skips a pair whose source file is missing instead of crashing in
      ``os.rename``, which also makes re-runs idempotent.
    """
    for com in components:
        for short_lang, full_lang in zip(langs, langs2):
            src = os.path.join(GitPath, com, 'localization', 'messages.' + short_lang + '.xlf')
            dst = os.path.join(GitPath, com, 'localization', 'messages.' + full_lang + '.xlf')
            # Only rename when the source exists and the target was not
            # already produced by an earlier run.
            if os.path.exists(src) and not os.path.exists(dst):
                os.rename(src, dst)
#copymessages()
#renameMessages()
|
[
"os.remove",
"os.chmod",
"os.makedirs",
"os.rename",
"os.path.exists",
"os.path.isfile",
"os.path.join"
] |
[((105, 134), 'os.chmod', 'os.chmod', (['name', 'stat.S_IWRITE'], {}), '(name, stat.S_IWRITE)\n', (113, 134), False, 'import os, shutil, stat\n'), ((140, 155), 'os.remove', 'os.remove', (['name'], {}), '(name)\n', (149, 155), False, 'import os, shutil, stat\n'), ((1484, 1526), 'os.path.join', 'os.path.join', (['GitPath', 'com', '"""localization"""'], {}), "(GitPath, com, 'localization')\n", (1496, 1526), False, 'import os, shutil, stat\n'), ((1567, 1619), 'os.path.join', 'os.path.join', (['targetFolder', "('messages.' + l + '.xlf')"], {}), "(targetFolder, 'messages.' + l + '.xlf')\n", (1579, 1619), False, 'import os, shutil, stat\n'), ((1664, 1690), 'os.path.isfile', 'os.path.isfile', (['targetFile'], {}), '(targetFile)\n', (1678, 1690), False, 'import os, shutil, stat\n'), ((1705, 1740), 'os.chmod', 'os.chmod', (['targetFile', 'stat.S_IWRITE'], {}), '(targetFile, stat.S_IWRITE)\n', (1713, 1740), False, 'import os, shutil, stat\n'), ((1753, 1774), 'os.remove', 'os.remove', (['targetFile'], {}), '(targetFile)\n', (1762, 1774), False, 'import os, shutil, stat\n'), ((3314, 3359), 'os.path.join', 'os.path.join', (['GitPath', 'com', '"""localization"""', 'f'], {}), "(GitPath, com, 'localization', f)\n", (3326, 3359), False, 'import os, shutil, stat\n'), ((3386, 3432), 'os.path.join', 'os.path.join', (['GitPath', 'com', '"""localization"""', 'f2'], {}), "(GitPath, com, 'localization', f2)\n", (3398, 3432), False, 'import os, shutil, stat\n'), ((1911, 1937), 'os.path.join', 'os.path.join', (['SDpath', 'lang'], {}), '(SDpath, lang)\n', (1923, 1937), False, 'import os, shutil, stat\n'), ((2352, 2406), 'os.path.join', 'os.path.join', (['SDpath', 'lang', '"""TalentEngagement"""', 'com', 'f'], {}), "(SDpath, lang, 'TalentEngagement', com, f)\n", (2364, 2406), False, 'import os, shutil, stat\n'), ((2440, 2482), 'os.path.join', 'os.path.join', (['GitPath', 'com', '"""localization"""'], {}), "(GitPath, com, 'localization')\n", (2452, 2482), False, 'import os, shutil, stat\n'), 
((2729, 2751), 'os.path.exists', 'os.path.exists', (['sdfile'], {}), '(sdfile)\n', (2743, 2751), False, 'import os, shutil, stat\n'), ((3550, 3569), 'os.path.exists', 'os.path.exists', (['dst'], {}), '(dst)\n', (3564, 3569), False, 'import os, shutil, stat\n'), ((3604, 3623), 'os.rename', 'os.rename', (['src', 'dst'], {}), '(src, dst)\n', (3613, 3623), False, 'import os, shutil, stat\n'), ((2614, 2633), 'os.path.exists', 'os.path.exists', (['dst'], {}), '(dst)\n', (2628, 2633), False, 'import os, shutil, stat\n'), ((2676, 2692), 'os.makedirs', 'os.makedirs', (['dst'], {}), '(dst)\n', (2687, 2692), False, 'import os, shutil, stat\n'), ((2814, 2834), 'os.path.join', 'os.path.join', (['dst', 'f'], {}), '(dst, f)\n', (2826, 2834), False, 'import os, shutil, stat\n')]
|
from pathlib import Path
from typing import Any, Callable, List, Sequence, Type, Union
import numpy as np
import pandas as pd
from exrec.core.interface import DataFrame, DataFrameGroupBy
class DataFrameImpl:
data: pd.DataFrame
def __init__(self, data):
self.data = pd.DataFrame(data)
def __str__(self) -> str:
return str(self.data)
def __len__(self) -> int:
return len(self.data)
def __contains__(self, key: str) -> bool:
return key in self.data
def __iter__(self):
return iter(self.data)
def __next__(self):
return next(self.data)
def __reversed__(self):
return reversed(self.data)
def __getitem__(self, key: Union[str, List[str]]):
return self.data[key]
def __setitem__(self, key: str, value) -> None:
self.data[key] = value
def __delitem__(self, key: str) -> None:
del self.data[key]
@property
def columns(self) -> List[str]:
return self.data.columns
def sort_values(self, key: str) -> DataFrame:
return DataFrameImpl(data=self.data.sort_values(key))
def groupby(self, by: Union[str, List[str]]) -> DataFrameGroupBy:
return DataFrameGroupByImpl(group_data=self.data.groupby(by=by))
def drop_duplicates(self) -> DataFrame:
return DataFrameImpl(data=self.data.drop_duplicates())
def copy(self, deep: bool = True) -> DataFrame:
return DataFrameImpl(data=self.data.copy(deep=deep))
def get_data(self) -> pd.DataFrame:
return self.data
def values(self, key: str) -> np.ndarray:
return self.data[key].values
def remove(self, remove: DataFrame, keys: List[str]) -> DataFrame:
return DataFrameImpl(
data=self.data[~self.data[keys].isin(remove.data[keys]).all(axis=1)]
)
def append(self, add: DataFrame) -> DataFrame:
return DataFrameImpl(data=self.data.append(add.get_data()))
def unique(self, key: str) -> Sequence[Any]:
return self.data[key].unique()
def astype(self, key: str, type: Type) -> None:
self.data[key] = self.data[key].astype(type)
def apply(self, f: Callable[[Any], Any], key: str, dest: str) -> None:
self.data[dest] = self.data[key].apply(f)
def to_csv(self, file_name: str, index: bool) -> None:
self.data.to_csv(file_name, index=False)
def read_csv(src_file: Path, sep: str = ",") -> DataFrame:
data = pd.read_csv(src_file, sep=sep)
return DataFrameImpl(data=data)
class DataFrameGroupByImpl:
group_data: pd.core.groupby.GroupBy
def __init__(self, group_data: pd.core.groupby.GroupBy):
self.group_data = group_data
def get_groups(self, keys: List[str]) -> dict:
return {
k: DataFrameImpl(data=v)
for k, v in dict(tuple(self.group_data[keys])).items()
}
def filter(self, func: Callable[..., bool]) -> DataFrame:
return DataFrameImpl(data=self.group_data.filter(func=func))
def get_dataframe(self) -> DataFrame:
return self.filter(lambda _: True)
def reduce_sum(self, key: str) -> DataFrame:
data = self.group_data[key].sum().reset_index()
return DataFrameImpl(data=data)
|
[
"pandas.read_csv",
"pandas.DataFrame"
] |
[((2452, 2482), 'pandas.read_csv', 'pd.read_csv', (['src_file'], {'sep': 'sep'}), '(src_file, sep=sep)\n', (2463, 2482), True, 'import pandas as pd\n'), ((285, 303), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {}), '(data)\n', (297, 303), True, 'import pandas as pd\n')]
|
from collections import namedtuple
class Ticker(
namedtuple(
"ticker",
[
"last",
"price_before_last",
"open",
"high",
"low",
"vwap",
"volume",
"bid",
"ask",
"json",
],
)
):
@classmethod
def create_from_json(cls, ticker):
return cls(
last=ticker["last"],
price_before_last=ticker["price_before_last"],
open=ticker["open"],
high=ticker["high"],
low=ticker["low"],
vwap=ticker["vwap"],
volume=ticker["volume"],
bid=ticker["bid"],
ask=ticker["ask"],
json=ticker,
)
class OrderBookEntry(
namedtuple(
"book_entry",
[
"price",
"amount",
],
)
):
@classmethod
def create_from_json(cls, book_entry):
return cls(
price=float(book_entry[0]),
amount=float(book_entry[1]),
)
class OrderBook(
namedtuple(
"order_book",
[
"asks",
"bids",
"json",
],
)
):
@classmethod
def create_from_json(cls, order_book):
return cls(
asks=[
OrderBookEntry.create_from_json(entry) for entry in order_book["asks"]
],
bids=[
OrderBookEntry.create_from_json(entry) for entry in order_book["bids"]
],
json=order_book,
)
class Transaction(
namedtuple(
"transaction",
[
"timestamp",
"id",
"price",
"amount",
],
)
):
@classmethod
def create_from_json(cls, transaction):
return cls(
timestamp=transaction[0],
id=transaction[1],
price=transaction[2],
amount=transaction[3],
)
|
[
"collections.namedtuple"
] |
[((55, 173), 'collections.namedtuple', 'namedtuple', (['"""ticker"""', "['last', 'price_before_last', 'open', 'high', 'low', 'vwap', 'volume',\n 'bid', 'ask', 'json']"], {}), "('ticker', ['last', 'price_before_last', 'open', 'high', 'low',\n 'vwap', 'volume', 'bid', 'ask', 'json'])\n", (65, 173), False, 'from collections import namedtuple\n'), ((787, 832), 'collections.namedtuple', 'namedtuple', (['"""book_entry"""', "['price', 'amount']"], {}), "('book_entry', ['price', 'amount'])\n", (797, 832), False, 'from collections import namedtuple\n'), ((1088, 1138), 'collections.namedtuple', 'namedtuple', (['"""order_book"""', "['asks', 'bids', 'json']"], {}), "('order_book', ['asks', 'bids', 'json'])\n", (1098, 1138), False, 'from collections import namedtuple\n'), ((1598, 1663), 'collections.namedtuple', 'namedtuple', (['"""transaction"""', "['timestamp', 'id', 'price', 'amount']"], {}), "('transaction', ['timestamp', 'id', 'price', 'amount'])\n", (1608, 1663), False, 'from collections import namedtuple\n')]
|
import unittest
import json
from alvarium.streams.contracts import ServiceInfo, StreamInfo, StreamType
class TestStreamInfo(unittest.TestCase):
def test_to_json_should_return_json_representation(self):
stream_info = StreamInfo(type=StreamType.MQTT, config={})
result = stream_info.to_json()
info_json = json.loads(result)
self.assertEqual(StreamType(info_json["type"]), StreamType.MQTT)
self.assertEqual(info_json["config"], {})
def test_from_json_should_return_stream_info(self):
info_json = {}
with open("./tests/streams/config-mqtt.json") as file:
info_json = json.loads(file.read())
stream_info = StreamInfo.from_json(json.dumps(info_json))
self.assertEqual(StreamType.MQTT, stream_info.type)
class TestServiceInfo(unittest.TestCase):
def test_to_json_should_return_json_representation(self):
host = "localhost"
protocol = "tcp"
port = 1883
service_info = ServiceInfo(host=host, protocol=protocol, port=port)
result = service_info.to_json()
info_json = json.loads(result)
self.assertEqual(host, info_json["host"])
self.assertEqual(protocol, info_json["protocol"])
self.assertEqual(port, info_json["port"])
def test_from_json_should_return_service_info_object(self):
host = "localhost"
protocol = "tcp"
port = 1883
info_json = {}
with open("./tests/streams/service-info.json") as file:
info_json = json.loads(file.read())
result = ServiceInfo.from_json(json.dumps(info_json))
self.assertEqual(result.host, host)
self.assertEqual(result.protocol, protocol)
self.assertEqual(result.port, port)
def test_uri_should_return_the_right_representation(self):
host = "localhost"
protocol = "tcp"
port = 1883
service_info = ServiceInfo(host=host, protocol=protocol, port=port)
uri = service_info.uri()
self.assertEqual(f"{protocol}://{host}:{port}", uri)
if __name__ == "__main__":
unittest.main()
|
[
"unittest.main",
"json.loads",
"alvarium.streams.contracts.StreamType",
"json.dumps",
"alvarium.streams.contracts.StreamInfo",
"alvarium.streams.contracts.ServiceInfo"
] |
[((2132, 2147), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2145, 2147), False, 'import unittest\n'), ((231, 274), 'alvarium.streams.contracts.StreamInfo', 'StreamInfo', ([], {'type': 'StreamType.MQTT', 'config': '{}'}), '(type=StreamType.MQTT, config={})\n', (241, 274), False, 'from alvarium.streams.contracts import ServiceInfo, StreamInfo, StreamType\n'), ((335, 353), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (345, 353), False, 'import json\n'), ((1009, 1061), 'alvarium.streams.contracts.ServiceInfo', 'ServiceInfo', ([], {'host': 'host', 'protocol': 'protocol', 'port': 'port'}), '(host=host, protocol=protocol, port=port)\n', (1020, 1061), False, 'from alvarium.streams.contracts import ServiceInfo, StreamInfo, StreamType\n'), ((1123, 1141), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (1133, 1141), False, 'import json\n'), ((1952, 2004), 'alvarium.streams.contracts.ServiceInfo', 'ServiceInfo', ([], {'host': 'host', 'protocol': 'protocol', 'port': 'port'}), '(host=host, protocol=protocol, port=port)\n', (1963, 2004), False, 'from alvarium.streams.contracts import ServiceInfo, StreamInfo, StreamType\n'), ((379, 408), 'alvarium.streams.contracts.StreamType', 'StreamType', (["info_json['type']"], {}), "(info_json['type'])\n", (389, 408), False, 'from alvarium.streams.contracts import ServiceInfo, StreamInfo, StreamType\n'), ((725, 746), 'json.dumps', 'json.dumps', (['info_json'], {}), '(info_json)\n', (735, 746), False, 'import json\n'), ((1624, 1645), 'json.dumps', 'json.dumps', (['info_json'], {}), '(info_json)\n', (1634, 1645), False, 'import json\n')]
|
# generate overexposure samples from clear images
# author: @LucasX
import argparse
import os
import random
from multiprocessing import Queue, Process
import cv2
import numpy as np
parser = argparse.ArgumentParser()
parser.add_argument('-orig_dir', type=str,
default='C:/Users/LucasX/Desktop/ShelfExposure/normal')
parser.add_argument('-outpur_dir', type=str,
default='C:/Users/LucasX/Desktop/ShelfExposure/exposure')
parser.add_argument('-alpha', type=float, default=2.0)
parser.add_argument('-beta', type=float, default=0.0)
parser.add_argument('-procs', type=int, default=2)
parser.add_argument('-show', type=bool, default=False)
args = vars(parser.parse_args())
print('-' * 100)
for key, value in args.items():
print('%s = %s' % (key, value))
print('-' * 100)
def modify_img_saturation(img_f):
"""
modify image saturation to imitate overexposure effect
:param img_f:
:return:
"""
if img_f.endswith('.jpg') or img_f.endswith('.png') or img_f.endswith('.jpeg'):
if not os.path.exists(args['outpur_dir']):
os.makedirs(args['outpur_dir'])
image = cv2.imread(img_f)
overexposure_image = np.zeros(image.shape, image.dtype)
# alpha = args['alpha']
alpha = random.randint(2, 10)
for y in range(image.shape[0]):
for x in range(image.shape[1]):
for c in range(image.shape[2]):
overexposure_image[y, x, c] = np.clip(alpha * image[y, x, c] + args['beta'], 0, 255)
if args['show'] and overexposure_image is not None:
cv2.imshow('overexposure_image', overexposure_image)
cv2.waitKey(0)
cv2.destroyAllWindows()
cv2.imwrite(os.path.join(args['outpur_dir'], os.path.basename(img_f)), overexposure_image)
print('[INFO] generate overexposure image {} successfully'.format(os.path.basename(img_f)))
def multi_proc_modify_img_saturation(imgs_queue):
"""
modify image saturation to imitate overexposure effect in multi-processing mode
:param imgs_queue:
:return:
"""
if not imgs_queue.empty():
img_f = imgs_queue.get()
if img_f.endswith('.jpg') or img_f.endswith('.png') or img_f.endswith('.jpeg'):
if not os.path.exists(args['outpur_dir']):
os.makedirs(args['outpur_dir'])
image = cv2.imread(img_f)
overexposure_image = np.zeros(image.shape, image.dtype)
# alpha = args['alpha']
alpha = random.randint(2, 10)
for y in range(image.shape[0]):
for x in range(image.shape[1]):
for c in range(image.shape[2]):
overexposure_image[y, x, c] = np.clip(alpha * image[y, x, c] + args['beta'], 0, 255)
if args['show'] and overexposure_image is not None:
cv2.imshow('overexposure_image', overexposure_image)
cv2.waitKey(0)
cv2.destroyAllWindows()
cv2.imwrite(os.path.join(args['outpur_dir'], os.path.basename(img_f)), overexposure_image)
print('[INFO] generate overexposure image {} successfully'.format(os.path.basename(img_f)))
if __name__ == '__main__':
# multi-thread processing version
imgs_queue = Queue()
for img_f in os.listdir(args['orig_dir']):
imgs_queue.put(os.path.join(args['orig_dir'], img_f))
for i in range(args['procs']):
Process(target=multi_proc_modify_img_saturation, args=(imgs_queue,)).start()
# single-thread processing version
# for img_f in os.listdir(args['orig_dir']):
# modify_img_saturation(os.path.join(args['orig_dir'], img_f))
|
[
"argparse.ArgumentParser",
"random.randint",
"os.makedirs",
"cv2.waitKey",
"cv2.destroyAllWindows",
"os.path.basename",
"numpy.zeros",
"os.path.exists",
"numpy.clip",
"cv2.imread",
"multiprocessing.Queue",
"multiprocessing.Process",
"cv2.imshow",
"os.path.join",
"os.listdir"
] |
[((192, 217), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (215, 217), False, 'import argparse\n'), ((3309, 3316), 'multiprocessing.Queue', 'Queue', ([], {}), '()\n', (3314, 3316), False, 'from multiprocessing import Queue, Process\n'), ((3334, 3362), 'os.listdir', 'os.listdir', (["args['orig_dir']"], {}), "(args['orig_dir'])\n", (3344, 3362), False, 'import os\n'), ((1148, 1165), 'cv2.imread', 'cv2.imread', (['img_f'], {}), '(img_f)\n', (1158, 1165), False, 'import cv2\n'), ((1196, 1230), 'numpy.zeros', 'np.zeros', (['image.shape', 'image.dtype'], {}), '(image.shape, image.dtype)\n', (1204, 1230), True, 'import numpy as np\n'), ((1279, 1300), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (1293, 1300), False, 'import random\n'), ((1052, 1086), 'os.path.exists', 'os.path.exists', (["args['outpur_dir']"], {}), "(args['outpur_dir'])\n", (1066, 1086), False, 'import os\n'), ((1100, 1131), 'os.makedirs', 'os.makedirs', (["args['outpur_dir']"], {}), "(args['outpur_dir'])\n", (1111, 1131), False, 'import os\n'), ((1613, 1665), 'cv2.imshow', 'cv2.imshow', (['"""overexposure_image"""', 'overexposure_image'], {}), "('overexposure_image', overexposure_image)\n", (1623, 1665), False, 'import cv2\n'), ((1678, 1692), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (1689, 1692), False, 'import cv2\n'), ((1705, 1728), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1726, 1728), False, 'import cv2\n'), ((2392, 2409), 'cv2.imread', 'cv2.imread', (['img_f'], {}), '(img_f)\n', (2402, 2409), False, 'import cv2\n'), ((2444, 2478), 'numpy.zeros', 'np.zeros', (['image.shape', 'image.dtype'], {}), '(image.shape, image.dtype)\n', (2452, 2478), True, 'import numpy as np\n'), ((2535, 2556), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (2549, 2556), False, 'import random\n'), ((3387, 3424), 'os.path.join', 'os.path.join', (["args['orig_dir']", 'img_f'], {}), "(args['orig_dir'], img_f)\n", 
(3399, 3424), False, 'import os\n'), ((1783, 1806), 'os.path.basename', 'os.path.basename', (['img_f'], {}), '(img_f)\n', (1799, 1806), False, 'import os\n'), ((1903, 1926), 'os.path.basename', 'os.path.basename', (['img_f'], {}), '(img_f)\n', (1919, 1926), False, 'import os\n'), ((2288, 2322), 'os.path.exists', 'os.path.exists', (["args['outpur_dir']"], {}), "(args['outpur_dir'])\n", (2302, 2322), False, 'import os\n'), ((2340, 2371), 'os.makedirs', 'os.makedirs', (["args['outpur_dir']"], {}), "(args['outpur_dir'])\n", (2351, 2371), False, 'import os\n'), ((2893, 2945), 'cv2.imshow', 'cv2.imshow', (['"""overexposure_image"""', 'overexposure_image'], {}), "('overexposure_image', overexposure_image)\n", (2903, 2945), False, 'import cv2\n'), ((2962, 2976), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (2973, 2976), False, 'import cv2\n'), ((2993, 3016), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (3014, 3016), False, 'import cv2\n'), ((3470, 3538), 'multiprocessing.Process', 'Process', ([], {'target': 'multi_proc_modify_img_saturation', 'args': '(imgs_queue,)'}), '(target=multi_proc_modify_img_saturation, args=(imgs_queue,))\n', (3477, 3538), False, 'from multiprocessing import Queue, Process\n'), ((1485, 1539), 'numpy.clip', 'np.clip', (["(alpha * image[y, x, c] + args['beta'])", '(0)', '(255)'], {}), "(alpha * image[y, x, c] + args['beta'], 0, 255)\n", (1492, 1539), True, 'import numpy as np\n'), ((3075, 3098), 'os.path.basename', 'os.path.basename', (['img_f'], {}), '(img_f)\n', (3091, 3098), False, 'import os\n'), ((3199, 3222), 'os.path.basename', 'os.path.basename', (['img_f'], {}), '(img_f)\n', (3215, 3222), False, 'import os\n'), ((2757, 2811), 'numpy.clip', 'np.clip', (["(alpha * image[y, x, c] + args['beta'])", '(0)', '(255)'], {}), "(alpha * image[y, x, c] + args['beta'], 0, 255)\n", (2764, 2811), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('blogapp', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='article',
name='categories',
),
migrations.RemoveField(
model_name='article',
name='name',
),
migrations.AddField(
model_name='article',
name='Author',
field=models.CharField(default='', max_length=50),
preserve_default=False,
),
migrations.AddField(
model_name='article',
name='Title',
field=models.CharField(default='', max_length=100),
preserve_default=False,
),
migrations.AddField(
model_name='article',
name='published_date',
field=models.DateField(null=True, blank=True),
),
migrations.AlterField(
model_name='article',
name='description',
field=models.TextField(max_length=500, blank=True),
),
migrations.DeleteModel(
name='Category',
),
]
|
[
"django.db.models.TextField",
"django.db.migrations.RemoveField",
"django.db.models.CharField",
"django.db.migrations.DeleteModel",
"django.db.models.DateField"
] |
[((240, 303), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""article"""', 'name': '"""categories"""'}), "(model_name='article', name='categories')\n", (262, 303), False, 'from django.db import models, migrations\n'), ((348, 405), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""article"""', 'name': '"""name"""'}), "(model_name='article', name='name')\n", (370, 405), False, 'from django.db import models, migrations\n'), ((1190, 1229), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""Category"""'}), "(name='Category')\n", (1212, 1229), False, 'from django.db import models, migrations\n'), ((550, 593), 'django.db.models.CharField', 'models.CharField', ([], {'default': '""""""', 'max_length': '(50)'}), "(default='', max_length=50)\n", (566, 593), False, 'from django.db import models, migrations\n'), ((749, 793), 'django.db.models.CharField', 'models.CharField', ([], {'default': '""""""', 'max_length': '(100)'}), "(default='', max_length=100)\n", (765, 793), False, 'from django.db import models, migrations\n'), ((958, 997), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (974, 997), False, 'from django.db import models, migrations\n'), ((1125, 1169), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(500)', 'blank': '(True)'}), '(max_length=500, blank=True)\n', (1141, 1169), False, 'from django.db import models, migrations\n')]
|
import json
import os
import ray
import ray._private.test_utils as test_utils
from ray.util.placement_group import placement_group, remove_placement_group
import time
import tqdm
if "SMOKE_TEST" in os.environ:
MAX_PLACEMENT_GROUPS = 20
else:
MAX_PLACEMENT_GROUPS = 1000
def test_many_placement_groups():
# @ray.remote(num_cpus=1, resources={"node": 0.02})
@ray.remote
class C1:
def ping(self):
return "pong"
# @ray.remote(num_cpus=1)
@ray.remote
class C2:
def ping(self):
return "pong"
# @ray.remote(resources={"node": 0.02})
@ray.remote
class C3:
def ping(self):
return "pong"
bundle1 = {"node": 0.02, "CPU": 1}
bundle2 = {"CPU": 1}
bundle3 = {"node": 0.02}
pgs = []
for _ in tqdm.trange(MAX_PLACEMENT_GROUPS, desc="Creating pgs"):
pg = placement_group(bundles=[bundle1, bundle2, bundle3])
pgs.append(pg)
for pg in tqdm.tqdm(pgs, desc="Waiting for pgs to be ready"):
ray.get(pg.ready())
actors = []
for pg in tqdm.tqdm(pgs, desc="Scheduling tasks"):
actors.append(C1.options(placement_group=pg).remote())
actors.append(C2.options(placement_group=pg).remote())
actors.append(C3.options(placement_group=pg).remote())
not_ready = [actor.ping.remote() for actor in actors]
for _ in tqdm.trange(len(actors)):
ready, not_ready = ray.wait(not_ready)
assert ray.get(*ready) == "pong"
for pg in tqdm.tqdm(pgs, desc="Cleaning up pgs"):
remove_placement_group(pg)
def no_resource_leaks():
return ray.available_resources() == ray.cluster_resources()
ray.init(address="auto")
test_utils.wait_for_condition(no_resource_leaks)
monitor_actor = test_utils.monitor_memory_usage()
start_time = time.time()
test_many_placement_groups()
end_time = time.time()
ray.get(monitor_actor.stop_run.remote())
used_gb, usage = ray.get(monitor_actor.get_peak_memory_info.remote())
print(f"Peak memory usage: {round(used_gb, 2)}GB")
print(f"Peak memory usage per processes:\n {usage}")
del monitor_actor
test_utils.wait_for_condition(no_resource_leaks)
rate = MAX_PLACEMENT_GROUPS / (end_time - start_time)
print(f"Success! Started {MAX_PLACEMENT_GROUPS} pgs in "
f"{end_time - start_time}s. ({rate} pgs/s)")
if "TEST_OUTPUT_JSON" in os.environ:
out_file = open(os.environ["TEST_OUTPUT_JSON"], "w")
results = {
"pgs_per_second": rate,
"num_pgs": MAX_PLACEMENT_GROUPS,
"time": end_time - start_time,
"success": "1",
"_peak_memory": round(used_gb, 2),
"_peak_process_memory": usage
}
json.dump(results, out_file)
|
[
"ray._private.test_utils.monitor_memory_usage",
"ray.init",
"tqdm.tqdm",
"json.dump",
"ray.available_resources",
"tqdm.trange",
"ray.get",
"time.time",
"ray._private.test_utils.wait_for_condition",
"ray.util.placement_group.remove_placement_group",
"ray.util.placement_group.placement_group",
"ray.wait",
"ray.cluster_resources"
] |
[((1678, 1702), 'ray.init', 'ray.init', ([], {'address': '"""auto"""'}), "(address='auto')\n", (1686, 1702), False, 'import ray\n'), ((1704, 1752), 'ray._private.test_utils.wait_for_condition', 'test_utils.wait_for_condition', (['no_resource_leaks'], {}), '(no_resource_leaks)\n', (1733, 1752), True, 'import ray._private.test_utils as test_utils\n'), ((1769, 1802), 'ray._private.test_utils.monitor_memory_usage', 'test_utils.monitor_memory_usage', ([], {}), '()\n', (1800, 1802), True, 'import ray._private.test_utils as test_utils\n'), ((1816, 1827), 'time.time', 'time.time', ([], {}), '()\n', (1825, 1827), False, 'import time\n'), ((1868, 1879), 'time.time', 'time.time', ([], {}), '()\n', (1877, 1879), False, 'import time\n'), ((2113, 2161), 'ray._private.test_utils.wait_for_condition', 'test_utils.wait_for_condition', (['no_resource_leaks'], {}), '(no_resource_leaks)\n', (2142, 2161), True, 'import ray._private.test_utils as test_utils\n'), ((808, 862), 'tqdm.trange', 'tqdm.trange', (['MAX_PLACEMENT_GROUPS'], {'desc': '"""Creating pgs"""'}), "(MAX_PLACEMENT_GROUPS, desc='Creating pgs')\n", (819, 862), False, 'import tqdm\n'), ((968, 1018), 'tqdm.tqdm', 'tqdm.tqdm', (['pgs'], {'desc': '"""Waiting for pgs to be ready"""'}), "(pgs, desc='Waiting for pgs to be ready')\n", (977, 1018), False, 'import tqdm\n'), ((1079, 1118), 'tqdm.tqdm', 'tqdm.tqdm', (['pgs'], {'desc': '"""Scheduling tasks"""'}), "(pgs, desc='Scheduling tasks')\n", (1088, 1118), False, 'import tqdm\n'), ((1510, 1548), 'tqdm.tqdm', 'tqdm.tqdm', (['pgs'], {'desc': '"""Cleaning up pgs"""'}), "(pgs, desc='Cleaning up pgs')\n", (1519, 1548), False, 'import tqdm\n'), ((2663, 2691), 'json.dump', 'json.dump', (['results', 'out_file'], {}), '(results, out_file)\n', (2672, 2691), False, 'import json\n'), ((877, 929), 'ray.util.placement_group.placement_group', 'placement_group', ([], {'bundles': '[bundle1, bundle2, bundle3]'}), '(bundles=[bundle1, bundle2, bundle3])\n', (892, 929), False, 'from 
ray.util.placement_group import placement_group, remove_placement_group\n'), ((1434, 1453), 'ray.wait', 'ray.wait', (['not_ready'], {}), '(not_ready)\n', (1442, 1453), False, 'import ray\n'), ((1558, 1584), 'ray.util.placement_group.remove_placement_group', 'remove_placement_group', (['pg'], {}), '(pg)\n', (1580, 1584), False, 'from ray.util.placement_group import placement_group, remove_placement_group\n'), ((1623, 1648), 'ray.available_resources', 'ray.available_resources', ([], {}), '()\n', (1646, 1648), False, 'import ray\n'), ((1652, 1675), 'ray.cluster_resources', 'ray.cluster_resources', ([], {}), '()\n', (1673, 1675), False, 'import ray\n'), ((1469, 1484), 'ray.get', 'ray.get', (['*ready'], {}), '(*ready)\n', (1476, 1484), False, 'import ray\n')]
|
import dateutil.parser
from argdeco import arg, command, main
@arg("-d", "--date", help="pass some date")
def arg_date(value):
# here we can do some validations
try:
v = dateutil.parser.parse(value)
return v
except StandardError as e:
raise e
@command("check_date", arg_date)
def check_date(date):
print(date)
if __name__ == "__main__":
main()
|
[
"argdeco.arg",
"argdeco.command",
"argdeco.main"
] |
[((65, 107), 'argdeco.arg', 'arg', (['"""-d"""', '"""--date"""'], {'help': '"""pass some date"""'}), "('-d', '--date', help='pass some date')\n", (68, 107), False, 'from argdeco import arg, command, main\n'), ((283, 314), 'argdeco.command', 'command', (['"""check_date"""', 'arg_date'], {}), "('check_date', arg_date)\n", (290, 314), False, 'from argdeco import arg, command, main\n'), ((385, 391), 'argdeco.main', 'main', ([], {}), '()\n', (389, 391), False, 'from argdeco import arg, command, main\n')]
|
from itertools import islice
import pytest
@pytest.fixture(autouse=True)
def add_np(doctest_namespace):
doctest_namespace["islice"] = islice
|
[
"pytest.fixture"
] |
[((47, 75), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (61, 75), False, 'import pytest\n')]
|
#!/usr/local/bin/python3
import sys
import os
import argparse
import msprime
import tskit
# simulate tree under smc model
def wrap_msprime(sample_size, Ne, length, recombination_rate, model, outpath, repnum):
tree_sequence = msprime.simulate(
sample_size=sample_size, Ne=Ne, # note, it thinks diploid pop
length=length, recombination_rate=recombination_rate,
model=model)
# recursively make out directories
if not os.path.exists(outpath):
os.makedirs(outpath)
# write out simulations to tab delimited file
f = open(os.path.join(outpath, "msprimesim_"+repnum+".tab.txt"), "w")
for tree in tree_sequence.trees():
for u in tree.nodes():
print(tree.index, tree.interval, u, tree.parent(u), tree.time(u), sep="\t", file = f )
f.close()
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("--sample_size", required = True, type = int, help = "The number of sampled individual in the population. For more information, see msprime documentations")
parser.add_argument("--Ne", required = True, type = float, help = "Effective population size. For more information, see msprime documentations")
parser.add_argument("--length", required = True, type = float, help = "Length of chromsome. For more information, see msprime documentations")
parser.add_argument("--recombination_rate", required = True, type = float, help = "Rate per base per generation. For more information, see msprime documentations")
parser.add_argument("--model", required = True, type = str, help = " For more information, see msprime documentations")
parser.add_argument("--outpath", required = True, type = str, help = "Path for tab-delimited file for msprime output")
parser.add_argument("--replicates", required = True, type = int, help = "Number of model replicates to perform")
return parser.parse_args()
def main():
args = parse_args()
for i in range(0, args.replicates):
wrap_msprime(args.sample_size, args.Ne, args.length, args.recombination_rate, args.model, args.outpath, str(i))
return 0
if __name__== '__main__':
main()
|
[
"os.makedirs",
"argparse.ArgumentParser",
"os.path.exists",
"msprime.simulate",
"os.path.join"
] |
[((232, 351), 'msprime.simulate', 'msprime.simulate', ([], {'sample_size': 'sample_size', 'Ne': 'Ne', 'length': 'length', 'recombination_rate': 'recombination_rate', 'model': 'model'}), '(sample_size=sample_size, Ne=Ne, length=length,\n recombination_rate=recombination_rate, model=model)\n', (248, 351), False, 'import msprime\n'), ((842, 867), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (865, 867), False, 'import argparse\n'), ((447, 470), 'os.path.exists', 'os.path.exists', (['outpath'], {}), '(outpath)\n', (461, 470), False, 'import os\n'), ((480, 500), 'os.makedirs', 'os.makedirs', (['outpath'], {}), '(outpath)\n', (491, 500), False, 'import os\n'), ((564, 622), 'os.path.join', 'os.path.join', (['outpath', "('msprimesim_' + repnum + '.tab.txt')"], {}), "(outpath, 'msprimesim_' + repnum + '.tab.txt')\n", (576, 622), False, 'import os\n')]
|
# Copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Integration test program for Subpar
Tests file extraction functionality (zip_safe=False)
"""
import os
import pkgutil
import sys
def main():
print('In extract.py main()')
# Test that imports are from real files on disk. Slightly tricky
# to test, since the 'subpar' package is imported before we
# extract and setup sys.path, so we can't "import subpar.test.something"
import extract_helper
assert os.path.isfile(extract_helper.__file__), (
extract_helper.__file__, sys.path)
import extract_helper_package
assert os.path.isfile(extract_helper_package.__file__), (
extract_helper_package.__file__, sys.path)
# Test resource extraction
dat = pkgutil.get_data('extract_helper_package', 'extract_dat.txt')
assert (dat == b'Dummy data file for extract.py\n'), dat
if __name__ == '__main__':
main()
|
[
"pkgutil.get_data",
"os.path.isfile"
] |
[((1028, 1067), 'os.path.isfile', 'os.path.isfile', (['extract_helper.__file__'], {}), '(extract_helper.__file__)\n', (1042, 1067), False, 'import os\n'), ((1159, 1206), 'os.path.isfile', 'os.path.isfile', (['extract_helper_package.__file__'], {}), '(extract_helper_package.__file__)\n', (1173, 1206), False, 'import os\n'), ((1303, 1364), 'pkgutil.get_data', 'pkgutil.get_data', (['"""extract_helper_package"""', '"""extract_dat.txt"""'], {}), "('extract_helper_package', 'extract_dat.txt')\n", (1319, 1364), False, 'import pkgutil\n')]
|
import json
class Config(object):
def __init__(self, config_file):
with open(config_file, 'r') as f:
config_dict = json.load(f)
self.__dict__.update(config_dict)
|
[
"json.load"
] |
[((140, 152), 'json.load', 'json.load', (['f'], {}), '(f)\n', (149, 152), False, 'import json\n')]
|
# This code has taken from link below.
# https://bitbucket.org/damienjadeduff/hashing_example/raw/master/hash_password.py
"""The below is a simple example of how you would use "hashing" to
compare passwords without having to save the password - you would-
save the "hash" of the password instead.
Written for the class BLG101E assignment 2."""
'''We need to import a function for generating hashes from byte
strings.'''
from hashlib import sha256
''' To make use of the above sha256 function we need to do a couple
of things. So we write our own function which takes a password and
returns a string consisting of a hash of the password.'''
def create_hash(password):
pw_bytestring = password.encode()
return sha256(pw_bytestring).hexdigest()
if __name__ == '__main__':
'''In the following example we get a password from the user, generate a
hash from it, then get another password and generate a hash, and
check if they are the same. But note that we do not compare the
passwords themselves - once we have the hashes we no longer need the
passwords.'''
pw1 = input('Please enter your password:')
hsh1 = create_hash(pw1)
print('The hash of that is', hsh1)
pw2 = input('Please enter another password that we can check against that:')
hsh2 = create_hash(pw2)
print('The hash of that is', hsh2)
if hsh1 == hsh2:
print('Those were the same passwords')
else:
print('Those were different passwords')
|
[
"hashlib.sha256"
] |
[((721, 742), 'hashlib.sha256', 'sha256', (['pw_bytestring'], {}), '(pw_bytestring)\n', (727, 742), False, 'from hashlib import sha256\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Nov 28 16:40:41 2020
<NAME>
<EMAIL>
BME Bogazici University
Istanbul / Uskudar
@author: abas
"""
import numpy as np
import pytorch_lightning as pl
import torch
import torchvision
from torch.utils.data import Dataset, DataLoader
from preprocess import transformation,scaler,normalizer,smoother
import pandas as pd
from scipy.fft import fft,ifft
class dataset(Dataset):
""" This function initializes the dataset.
Args:
path (string): Input path
gtpath (string,optional): Path for ground truths. Default to None
responseCol (string,optional): If ınput dataset has the response in it you can simply assign it to this value. Note: 0 for first, 1 for second column same as python. Take that into account
phase (str, optional): Defaults to 'train'.
preprocess (bool, optional): Switch for preprocess. Defaults to True.
smooth (bool, optional): Switch for smoothing. Defaults to True.
normalise (bool, optional): Switch for normalise. Defaults to True.
transform (bool, optional): Switch for yeo-johnson power transformation. Defaults to True.
"""
def __init__(self,path,gtpath=None,responseCol=-1,phase='train',preprocess=True,smooth=True,normalise=True,transform=True):
self.normalise=normalise
self.exc=pd.read_excel(path)
self.phase=phase
self.smooth=smooth
self.normalise=normalise
self.transform=transform
self.preprocess=preprocess
if gtpath is not None:
self.response=np.load(gtpath)
else:
self.response=np.array(self.exc.iloc[:,responseCol])
self.excarr=np.array(self.exc.drop(self.exc.columns[[responseCol]],axis=1))
if phase=='train':
self.excarr=np.array(self.exc)
if self.preprocess:
self.excarr=normalizer(self.excarr)
self.excarr=smoother(self.excarr)
self.excarr,self.scale=scaler(self.excarr)
self.excarr,self.transformater=transformation(self.excarr)
def __len__(self):
return len(self.excarr)
def __getitem__(self,idx=None):
spectrum=self.excarr[idx,:]
response=self.response[idx]
age=torch.tensor(self.exc.iloc[idx,1]).type(torch.float32)
return age,torch.tensor(spectrum).type(torch.float32).unsqueeze(0),torch.tensor(response).type(torch.long)
|
[
"preprocess.normalizer",
"numpy.load",
"preprocess.scaler",
"pandas.read_excel",
"preprocess.transformation",
"numpy.array",
"torch.tensor",
"preprocess.smoother"
] |
[((1431, 1450), 'pandas.read_excel', 'pd.read_excel', (['path'], {}), '(path)\n', (1444, 1450), True, 'import pandas as pd\n'), ((1661, 1676), 'numpy.load', 'np.load', (['gtpath'], {}), '(gtpath)\n', (1668, 1676), True, 'import numpy as np\n'), ((1717, 1756), 'numpy.array', 'np.array', (['self.exc.iloc[:, responseCol]'], {}), '(self.exc.iloc[:, responseCol])\n', (1725, 1756), True, 'import numpy as np\n'), ((1913, 1931), 'numpy.array', 'np.array', (['self.exc'], {}), '(self.exc)\n', (1921, 1931), True, 'import numpy as np\n'), ((2005, 2028), 'preprocess.normalizer', 'normalizer', (['self.excarr'], {}), '(self.excarr)\n', (2015, 2028), False, 'from preprocess import transformation, scaler, normalizer, smoother\n'), ((2057, 2078), 'preprocess.smoother', 'smoother', (['self.excarr'], {}), '(self.excarr)\n', (2065, 2078), False, 'from preprocess import transformation, scaler, normalizer, smoother\n'), ((2118, 2137), 'preprocess.scaler', 'scaler', (['self.excarr'], {}), '(self.excarr)\n', (2124, 2137), False, 'from preprocess import transformation, scaler, normalizer, smoother\n'), ((2185, 2212), 'preprocess.transformation', 'transformation', (['self.excarr'], {}), '(self.excarr)\n', (2199, 2212), False, 'from preprocess import transformation, scaler, normalizer, smoother\n'), ((2438, 2473), 'torch.tensor', 'torch.tensor', (['self.exc.iloc[idx, 1]'], {}), '(self.exc.iloc[idx, 1])\n', (2450, 2473), False, 'import torch\n'), ((2577, 2599), 'torch.tensor', 'torch.tensor', (['response'], {}), '(response)\n', (2589, 2599), False, 'import torch\n'), ((2521, 2543), 'torch.tensor', 'torch.tensor', (['spectrum'], {}), '(spectrum)\n', (2533, 2543), False, 'import torch\n')]
|
#!/usr/bin/env python3
# SPDX-FileCopyrightText: © 2022 Decompollaborate
# SPDX-License-Identifier: MIT
from __future__ import annotations
import dataclasses
import struct
# a.k.a. Sym (symbol)
@dataclasses.dataclass
class Elf32SymEntry:
name: int # word # 0x00
value: int # address # 0x04
size: int # word # 0x08
info: int # uchar # 0x0C
other: int # uchar # 0x0D
shndx: int # section # 0x0E
# 0x10
@property
def stBind(self):
return self.info >> 4
@property
def stType(self):
return self.info & 0xF
@staticmethod
def fromBytearray(array_of_bytes: bytearray, offset: int = 0) -> Elf32SymEntry:
entryFormat = ">IIIBBH"
unpacked = struct.unpack_from(entryFormat, array_of_bytes, offset)
return Elf32SymEntry(*unpacked)
class Elf32Syms:
def __init__(self, array_of_bytes: bytearray, offset: int, rawSize: int):
self.symbols: list[Elf32SymEntry] = list()
self.offset: int = offset
self.rawSize: int = rawSize
for i in range(rawSize // 0x10):
entry = Elf32SymEntry.fromBytearray(array_of_bytes, offset + i*0x10)
self.symbols.append(entry)
def __getitem__(self, key: int) -> Elf32SymEntry:
return self.symbols[key]
|
[
"struct.unpack_from"
] |
[((777, 832), 'struct.unpack_from', 'struct.unpack_from', (['entryFormat', 'array_of_bytes', 'offset'], {}), '(entryFormat, array_of_bytes, offset)\n', (795, 832), False, 'import struct\n')]
|
# -*- coding: utf-8 -*-
"""Console script for module_renamer."""
from __future__ import absolute_import
import sys
import click
from .commands.analyze_modifications import analyze_modifications
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
@click.group(context_settings=CONTEXT_SETTINGS)
def main():
"""
Console script for module_renamer.
"""
pass
@main.command()
@click.argument('project_path', type=click.Path(exists=True))
@click.option('--compare-with', default='master',
help='Branch to be compared with [Default: master]')
@click.option('--branch', default=False,
help='Branch that has the modifications [Default: current active branch]')
@click.option('--output-file', default='list_output.py',
help='Change the name of the output file [Default: list_output.py]')
def analyze(project_path, compare_with, branch, output_file):
"""
Generate the difference between the imports on two different branches.
Command to analyze all modifications made between two different branches. The output will be a
list written directly to a file (which will later be used by the script to rename the imports)
Ex.:
The following command generate a "list_output.py" with the difference between the
current branch (that contains the modification) against the master.
> renamer analyze project_path
It's possible to use the flag --branch to point to branch different than the current one.
It's possible to set --output-file to change the default output file name.
> renamer analyze project_path --branch=my-branch --output-file=my_file.py
And finally, it's possible to change the branch with which the modifications will be compared.
> renamer analyze project_path --branch=my-branch --compare-with=my-other-branch
"""
analyze_modifications(project_path, compare_with, branch, output_file)
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
|
[
"click.group",
"click.option",
"click.Path"
] |
[((261, 307), 'click.group', 'click.group', ([], {'context_settings': 'CONTEXT_SETTINGS'}), '(context_settings=CONTEXT_SETTINGS)\n', (272, 307), False, 'import click\n'), ((465, 571), 'click.option', 'click.option', (['"""--compare-with"""'], {'default': '"""master"""', 'help': '"""Branch to be compared with [Default: master]"""'}), "('--compare-with', default='master', help=\n 'Branch to be compared with [Default: master]')\n", (477, 571), False, 'import click\n'), ((582, 701), 'click.option', 'click.option', (['"""--branch"""'], {'default': '(False)', 'help': '"""Branch that has the modifications [Default: current active branch]"""'}), "('--branch', default=False, help=\n 'Branch that has the modifications [Default: current active branch]')\n", (594, 701), False, 'import click\n'), ((712, 841), 'click.option', 'click.option', (['"""--output-file"""'], {'default': '"""list_output.py"""', 'help': '"""Change the name of the output file [Default: list_output.py]"""'}), "('--output-file', default='list_output.py', help=\n 'Change the name of the output file [Default: list_output.py]')\n", (724, 841), False, 'import click\n'), ((439, 462), 'click.Path', 'click.Path', ([], {'exists': '(True)'}), '(exists=True)\n', (449, 462), False, 'import click\n')]
|
# -*- coding: utf-8 -*
import JackFramework as jf
from .Your_Model.inference import YourModelInterface
def model_zoo(args: object, name: str) -> object:
for case in jf.Switch(name):
if case('YourMode'):
jf.log.info("Enter the YourMode model")
model = YourModelInterface(args)
break
if case(''):
model = None
jf.log.error("The model's name is error!!!")
return model
|
[
"JackFramework.log.error",
"JackFramework.log.info",
"JackFramework.Switch"
] |
[((172, 187), 'JackFramework.Switch', 'jf.Switch', (['name'], {}), '(name)\n', (181, 187), True, 'import JackFramework as jf\n'), ((230, 269), 'JackFramework.log.info', 'jf.log.info', (['"""Enter the YourMode model"""'], {}), "('Enter the YourMode model')\n", (241, 269), True, 'import JackFramework as jf\n'), ((391, 435), 'JackFramework.log.error', 'jf.log.error', (['"""The model\'s name is error!!!"""'], {}), '("The model\'s name is error!!!")\n', (403, 435), True, 'import JackFramework as jf\n')]
|
#!/usr/bin/python
import sys
import os
import shutil
from readConfig import *
def get_stock_data_path():
config = ReadConfig()
path = config.find_path("config.ini")
config.__read__(path)
stock_path = config.get_stock("path")
if not os.path.exists(stock_path):
os.makedirs(stock_path)
return stock_path
def mkdir(directory):
"""
如果directory不存在则创建,如果存在删除该目录下所有内容
:param directory: 路径
:return:
"""
if os.path.exists(directory):
shutil.rmtree(directory)
os.makedirs(directory)
|
[
"shutil.rmtree",
"os.path.exists",
"os.makedirs"
] |
[((459, 484), 'os.path.exists', 'os.path.exists', (['directory'], {}), '(directory)\n', (473, 484), False, 'import os\n'), ((523, 545), 'os.makedirs', 'os.makedirs', (['directory'], {}), '(directory)\n', (534, 545), False, 'import os\n'), ((254, 280), 'os.path.exists', 'os.path.exists', (['stock_path'], {}), '(stock_path)\n', (268, 280), False, 'import os\n'), ((290, 313), 'os.makedirs', 'os.makedirs', (['stock_path'], {}), '(stock_path)\n', (301, 313), False, 'import os\n'), ((494, 518), 'shutil.rmtree', 'shutil.rmtree', (['directory'], {}), '(directory)\n', (507, 518), False, 'import shutil\n')]
|
from typing import Any, Type
from grapl_analyzerlib.analyzer import Analyzer, OneOrMany, A
from grapl_analyzerlib.counters import ParentChildCounter
from grapl_analyzerlib.execution import ExecutionHit
from grapl_analyzerlib.prelude import ProcessQuery, ProcessView, Not
from pydgraph import DgraphClient
class RareParentOfCmd(Analyzer):
def __init__(self, dgraph_client: DgraphClient, counter: ParentChildCounter):
super(RareParentOfCmd, self).__init__(dgraph_client)
self.counter = counter
@classmethod
def build(cls: Type[A], dgraph_client: DgraphClient) -> A:
counter = ParentChildCounter(dgraph_client)
return RareParentOfCmd(dgraph_client, counter)
def get_queries(self) -> OneOrMany[ProcessQuery]:
# TODO: We should be checking binary paths for these to ensure we handle impersonation
parent_whitelist = [
Not("svchost.exe"),
Not("RuntimeBroker.exe"),
Not("chrome.exe"),
Not("explorer.exe"),
Not("SIHClient.exe"),
Not("conhost.exe"),
Not("MpCmdRun.exe"),
Not("GoogleUpdateComRegisterShell64.exe"),
Not("GoogleUpdate.exe"),
Not("notepad.exe"),
Not("OneDrive.exe"),
Not("VBoxTray.exe"),
Not("Firefox Installer.exe"),
]
return (
ProcessQuery()
.with_process_name(eq=parent_whitelist)
.with_children(ProcessQuery().with_process_name(eq="cmd.exe"))
)
def on_response(self, response: ProcessView, output: Any) -> None:
count = self.counter.get_count_for(
parent_process_name=response.get_process_name(),
child_process_name="cmd.exe",
)
asset_id = response.get_asset().get_hostname()
if count <= 3:
output.send(
ExecutionHit(
analyzer_name="Rare Parent of cmd.exe",
node_view=response,
risk_score=10,
lenses=[("hostname", asset_id)],
)
)
|
[
"grapl_analyzerlib.prelude.Not",
"grapl_analyzerlib.counters.ParentChildCounter",
"grapl_analyzerlib.prelude.ProcessQuery",
"grapl_analyzerlib.execution.ExecutionHit"
] |
[((614, 647), 'grapl_analyzerlib.counters.ParentChildCounter', 'ParentChildCounter', (['dgraph_client'], {}), '(dgraph_client)\n', (632, 647), False, 'from grapl_analyzerlib.counters import ParentChildCounter\n'), ((894, 912), 'grapl_analyzerlib.prelude.Not', 'Not', (['"""svchost.exe"""'], {}), "('svchost.exe')\n", (897, 912), False, 'from grapl_analyzerlib.prelude import ProcessQuery, ProcessView, Not\n'), ((926, 950), 'grapl_analyzerlib.prelude.Not', 'Not', (['"""RuntimeBroker.exe"""'], {}), "('RuntimeBroker.exe')\n", (929, 950), False, 'from grapl_analyzerlib.prelude import ProcessQuery, ProcessView, Not\n'), ((964, 981), 'grapl_analyzerlib.prelude.Not', 'Not', (['"""chrome.exe"""'], {}), "('chrome.exe')\n", (967, 981), False, 'from grapl_analyzerlib.prelude import ProcessQuery, ProcessView, Not\n'), ((995, 1014), 'grapl_analyzerlib.prelude.Not', 'Not', (['"""explorer.exe"""'], {}), "('explorer.exe')\n", (998, 1014), False, 'from grapl_analyzerlib.prelude import ProcessQuery, ProcessView, Not\n'), ((1028, 1048), 'grapl_analyzerlib.prelude.Not', 'Not', (['"""SIHClient.exe"""'], {}), "('SIHClient.exe')\n", (1031, 1048), False, 'from grapl_analyzerlib.prelude import ProcessQuery, ProcessView, Not\n'), ((1062, 1080), 'grapl_analyzerlib.prelude.Not', 'Not', (['"""conhost.exe"""'], {}), "('conhost.exe')\n", (1065, 1080), False, 'from grapl_analyzerlib.prelude import ProcessQuery, ProcessView, Not\n'), ((1094, 1113), 'grapl_analyzerlib.prelude.Not', 'Not', (['"""MpCmdRun.exe"""'], {}), "('MpCmdRun.exe')\n", (1097, 1113), False, 'from grapl_analyzerlib.prelude import ProcessQuery, ProcessView, Not\n'), ((1127, 1168), 'grapl_analyzerlib.prelude.Not', 'Not', (['"""GoogleUpdateComRegisterShell64.exe"""'], {}), "('GoogleUpdateComRegisterShell64.exe')\n", (1130, 1168), False, 'from grapl_analyzerlib.prelude import ProcessQuery, ProcessView, Not\n'), ((1182, 1205), 'grapl_analyzerlib.prelude.Not', 'Not', (['"""GoogleUpdate.exe"""'], {}), "('GoogleUpdate.exe')\n", (1185, 
1205), False, 'from grapl_analyzerlib.prelude import ProcessQuery, ProcessView, Not\n'), ((1219, 1237), 'grapl_analyzerlib.prelude.Not', 'Not', (['"""notepad.exe"""'], {}), "('notepad.exe')\n", (1222, 1237), False, 'from grapl_analyzerlib.prelude import ProcessQuery, ProcessView, Not\n'), ((1251, 1270), 'grapl_analyzerlib.prelude.Not', 'Not', (['"""OneDrive.exe"""'], {}), "('OneDrive.exe')\n", (1254, 1270), False, 'from grapl_analyzerlib.prelude import ProcessQuery, ProcessView, Not\n'), ((1284, 1303), 'grapl_analyzerlib.prelude.Not', 'Not', (['"""VBoxTray.exe"""'], {}), "('VBoxTray.exe')\n", (1287, 1303), False, 'from grapl_analyzerlib.prelude import ProcessQuery, ProcessView, Not\n'), ((1317, 1345), 'grapl_analyzerlib.prelude.Not', 'Not', (['"""Firefox Installer.exe"""'], {}), "('Firefox Installer.exe')\n", (1320, 1345), False, 'from grapl_analyzerlib.prelude import ProcessQuery, ProcessView, Not\n'), ((1889, 2013), 'grapl_analyzerlib.execution.ExecutionHit', 'ExecutionHit', ([], {'analyzer_name': '"""Rare Parent of cmd.exe"""', 'node_view': 'response', 'risk_score': '(10)', 'lenses': "[('hostname', asset_id)]"}), "(analyzer_name='Rare Parent of cmd.exe', node_view=response,\n risk_score=10, lenses=[('hostname', asset_id)])\n", (1901, 2013), False, 'from grapl_analyzerlib.execution import ExecutionHit\n'), ((1481, 1495), 'grapl_analyzerlib.prelude.ProcessQuery', 'ProcessQuery', ([], {}), '()\n', (1493, 1495), False, 'from grapl_analyzerlib.prelude import ProcessQuery, ProcessView, Not\n'), ((1387, 1401), 'grapl_analyzerlib.prelude.ProcessQuery', 'ProcessQuery', ([], {}), '()\n', (1399, 1401), False, 'from grapl_analyzerlib.prelude import ProcessQuery, ProcessView, Not\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@author: lry
"""
import scipy.io as sio
import numpy as np
sep = [20,40,61,103] # please pay attention to this seperation , setted according to the spectral response function of different sensors.
# setting global parameters
patch_size = 16
patch_size1 = 16
patch_size2 = 16
patch_num = 2
batch_size = patch_size*patch_size*patch_num
gap = 12 # the interval between two adjacent patches, must smaller than 'patch_size'
sigmaInit = 0.01
lastTrain = 0 # go on the former training
Pretrain = 500 # the number of iteration for pretraining
Maxiter = 3000 # the max iterations for training # try 2000 3000 4000
step = 100 # save the model every "step" iterations
learning_rate = 0.0001
max_grad_norm = 0.1
# saving path
path = './result_fusion'
filename = "../processed_data/.."
print("Loading data")
data = sio.loadmat(filename)
Xl_3d = data['xl']
Xl_bicubic = data['xl_bicubic'] # this is the bibubic-interpolated xl image acquired with matlab
Xg_3d = data['xg']
scale = data['scale'][0][0]
Trans_data = data['P']
N1,N2,dimX = data['xh'].shape
s1,s2 = data['xl'].shape[0], data['xl'].shape[1]
dimXg = Xg_3d.shape[2]
Xl_2d = np.reshape(Xl_3d,[-1,dimX])
num = s1*s2 # the number of low-resolution pixels
f2_1 = 9 # 1D filter size
f3_1 = 5 # 2D filter size
hidden_size_local = 30
hidden_size_global = 20
gener_hidden_size = 20
enc_q = enc_k = hidden_size_global * 2
enc_v = hidden_size_global * 2
enc_k_z = enc_v_z = 30
dec_q = 30
filter_num_1 = 20
filter_num_2 = 20
filter_num_3 = hidden_size_global*2*dimXg #### hidden_size_global*2*dimXg
f_1 = 5
f_2 = 3
f_3 = 1
H3_1 = dimX
dimZ = dimXg*hidden_size_global # dimension of z
|
[
"numpy.reshape",
"scipy.io.loadmat"
] |
[((935, 956), 'scipy.io.loadmat', 'sio.loadmat', (['filename'], {}), '(filename)\n', (946, 956), True, 'import scipy.io as sio\n'), ((1262, 1291), 'numpy.reshape', 'np.reshape', (['Xl_3d', '[-1, dimX]'], {}), '(Xl_3d, [-1, dimX])\n', (1272, 1291), True, 'import numpy as np\n')]
|
import warnings
warnings.warn(
"pyschema.contrib.luigi is deprecated and will be removed.\n"
"Please use the pyschema_extensions.luigi package instead.",
DeprecationWarning,
stacklevel=2
)
import pyschema_extensions.luigi
from pyschema_extensions.luigi import *
|
[
"warnings.warn"
] |
[((17, 197), 'warnings.warn', 'warnings.warn', (['"""pyschema.contrib.luigi is deprecated and will be removed.\nPlease use the pyschema_extensions.luigi package instead."""', 'DeprecationWarning'], {'stacklevel': '(2)'}), '(\n """pyschema.contrib.luigi is deprecated and will be removed.\nPlease use the pyschema_extensions.luigi package instead."""\n , DeprecationWarning, stacklevel=2)\n', (30, 197), False, 'import warnings\n')]
|
import threading
import socketserver
import argparse
import time
class ThreadedUDPRequestHandler(socketserver.BaseRequestHandler):
def handle(self):
data = self.request[0].strip()
socket = self.request[1]
print(f'[{time.time()}] Incoming: {data}')
reply = data.upper()
print(f'[{time.time()}] Outgoing: {reply}')
socket.sendto(reply, self.client_address)
class ThreadedUDPServer(socketserver.ThreadingMixIn, socketserver.UDPServer):
pass
class UdpServerMock:
def __init__(self, host, port):
self._server = ThreadedUDPServer((host, port),
ThreadedUDPRequestHandler)
# Start a thread with the server -- that thread will then start one
# more thread for each request
self._server_thread = threading.Thread(
target=self._server.serve_forever)
# Exit the server thread when the main thread terminates
self._server_thread.daemon = True
self._server_thread.start()
def shutdown(self):
self._server.shutdown()
self._server_thread.join()
def main():
parser = argparse.ArgumentParser(description='UDP Server Mock.')
parser.add_argument('host_port', type=int, help='Host port')
parser.add_argument('--host_ip', type=str, help='Host ip')
args = parser.parse_args()
UdpServerMock(args.host_ip or '127.0.0.1', args.host_port)
while True:
try:
time.sleep(1)
except KeyboardInterrupt:
print('Mamba UDP Server Mock Finalized')
break
if __name__ == "__main__":
main()
|
[
"threading.Thread",
"time.time",
"argparse.ArgumentParser",
"time.sleep"
] |
[((1157, 1212), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""UDP Server Mock."""'}), "(description='UDP Server Mock.')\n", (1180, 1212), False, 'import argparse\n'), ((830, 881), 'threading.Thread', 'threading.Thread', ([], {'target': 'self._server.serve_forever'}), '(target=self._server.serve_forever)\n', (846, 881), False, 'import threading\n'), ((1479, 1492), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1489, 1492), False, 'import time\n'), ((246, 257), 'time.time', 'time.time', ([], {}), '()\n', (255, 257), False, 'import time\n'), ((328, 339), 'time.time', 'time.time', ([], {}), '()\n', (337, 339), False, 'import time\n')]
|
# assign random tasks to team member 1.
import random
import sys
# Prior information
n, m, d, r = list(map(int, input().split()))
task_difficulty = []
for i in range(n):
task_difficulty.append(list(map(int, input().split())))
task_dependency = [[] for _ in range(n)]
for i in range(r):
temp = list(map(int, input().split()))
task_dependency[temp[1] - 1].append(temp[0] - 1)
# -1: not started
# 0: started
# 1: completed
task_status = [-1] * n
# -1: not assigned
# k: assigned task k (1 <= k <= N)
member_status = -1
day = 0
while True:
day += 1
output = [0]
# random search for tasks
if member_status < 0:
tasklist = list(range(n))
random.shuffle(tasklist)
for task in tasklist:
if task_status[task] != -1:
continue
ok = True
for necessary in task_dependency[task]:
if task_status[necessary] != 1:
# the dependent tasks have not been completed
ok = False
break
if ok:
# assign the task to team member 1
task_status[task] = 0
member_status = task
output = [1, 1, task + 1]
break
str_output = map(str, output)
print(" ".join(str_output))
# After the output, you have to flush Standard Output
sys.stdout.flush()
temp = list(map(int, input().split()))
if len(temp) == 1:
if temp[0] == -1:
# elapsed days == 2000, or all the tasks have been completed
exit()
else:
# no change in state
pass
else:
# one task has been completed
task = member_status
task_status[task] = 1
member_status = -1
|
[
"random.shuffle",
"sys.stdout.flush"
] |
[((1428, 1446), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (1444, 1446), False, 'import sys\n'), ((707, 731), 'random.shuffle', 'random.shuffle', (['tasklist'], {}), '(tasklist)\n', (721, 731), False, 'import random\n')]
|
from tensorflow.keras.preprocessing.image import ImageDataGenerator
import os
def generator(BS, data_path, npix):
'''
Image Data Generator from Directory;
Given the path to the train, validation, test
directories located in data_path
preprocesses the images and the
target masks in batch sizes of BS.
'''
train_datagen = ImageDataGenerator(
rescale=1./255,
rotation_range = 45,
shear_range=0.2,
zoom_range=0.2,
horizontal_flip=True,
fill_mode = "nearest")
train_maskgen = ImageDataGenerator(
rescale=1.,
rotation_range = 45,
shear_range=0.2,
zoom_range=0.2,
horizontal_flip=True,
fill_mode = "nearest")
val_datagen = ImageDataGenerator(rescale=1./255)
val_maskgen = ImageDataGenerator(rescale=1.)
test_datagen = ImageDataGenerator(rescale=1./255)
test_maskgen = ImageDataGenerator(rescale=1.)
train_image_generator = train_datagen.flow_from_directory(
os.path.join(data_path, 'train/images/pre/'),
class_mode=None,
target_size = (npix, npix),
seed = 12345,
batch_size = BS)
train_mask_generator = train_maskgen.flow_from_directory(
os.path.join(data_path, 'train/targets/pre/'),
class_mode=None,
target_size = (npix, npix),
color_mode = "grayscale",
seed = 12345,
batch_size = BS)
val_image_generator = val_datagen.flow_from_directory(
os.path.join(data_path, 'test/images/pre/'),
class_mode=None,
target_size = (npix, npix),
seed = 123,
batch_size = BS)
val_mask_generator = val_maskgen.flow_from_directory(
os.path.join(data_path, 'test/targets/pre2/'),
class_mode=None,
target_size = (npix, npix),
color_mode = "grayscale",
seed = 123,
batch_size = BS)
test_image_generator = val_datagen.flow_from_directory(
os.path.join(data_path, 'hold/images/pre/'),
class_mode=None,
target_size = (npix, npix),
seed = 123,
batch_size = BS)
test_mask_generator = val_maskgen.flow_from_directory(
os.path.join(data_path, 'hold/targets/pre2/'),
class_mode=None,
target_size = (npix, npix),
color_mode = "grayscale",
seed = 123,
batch_size = BS)
train_generator = zip(train_image_generator, train_mask_generator)
valid_generator = zip(val_image_generator, val_mask_generator)
test_generator = zip(test_image_generator, test_mask_generator)
return train_generator, valid_generator, test_generator
|
[
"tensorflow.keras.preprocessing.image.ImageDataGenerator",
"os.path.join"
] |
[((343, 479), 'tensorflow.keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'rescale': '(1.0 / 255)', 'rotation_range': '(45)', 'shear_range': '(0.2)', 'zoom_range': '(0.2)', 'horizontal_flip': '(True)', 'fill_mode': '"""nearest"""'}), "(rescale=1.0 / 255, rotation_range=45, shear_range=0.2,\n zoom_range=0.2, horizontal_flip=True, fill_mode='nearest')\n", (361, 479), False, 'from tensorflow.keras.preprocessing.image import ImageDataGenerator\n'), ((554, 684), 'tensorflow.keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'rescale': '(1.0)', 'rotation_range': '(45)', 'shear_range': '(0.2)', 'zoom_range': '(0.2)', 'horizontal_flip': '(True)', 'fill_mode': '"""nearest"""'}), "(rescale=1.0, rotation_range=45, shear_range=0.2,\n zoom_range=0.2, horizontal_flip=True, fill_mode='nearest')\n", (572, 684), False, 'from tensorflow.keras.preprocessing.image import ImageDataGenerator\n'), ((752, 789), 'tensorflow.keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'rescale': '(1.0 / 255)'}), '(rescale=1.0 / 255)\n', (770, 789), False, 'from tensorflow.keras.preprocessing.image import ImageDataGenerator\n'), ((803, 834), 'tensorflow.keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'rescale': '(1.0)'}), '(rescale=1.0)\n', (821, 834), False, 'from tensorflow.keras.preprocessing.image import ImageDataGenerator\n'), ((852, 889), 'tensorflow.keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'rescale': '(1.0 / 255)'}), '(rescale=1.0 / 255)\n', (870, 889), False, 'from tensorflow.keras.preprocessing.image import ImageDataGenerator\n'), ((904, 935), 'tensorflow.keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'rescale': '(1.0)'}), '(rescale=1.0)\n', (922, 935), False, 'from tensorflow.keras.preprocessing.image import ImageDataGenerator\n'), ((1023, 1067), 'os.path.join', 'os.path.join', (['data_path', '"""train/images/pre/"""'], {}), 
"(data_path, 'train/images/pre/')\n", (1035, 1067), False, 'import os\n'), ((1326, 1371), 'os.path.join', 'os.path.join', (['data_path', '"""train/targets/pre/"""'], {}), "(data_path, 'train/targets/pre/')\n", (1338, 1371), False, 'import os\n'), ((1670, 1713), 'os.path.join', 'os.path.join', (['data_path', '"""test/images/pre/"""'], {}), "(data_path, 'test/images/pre/')\n", (1682, 1713), False, 'import os\n'), ((1956, 2001), 'os.path.join', 'os.path.join', (['data_path', '"""test/targets/pre2/"""'], {}), "(data_path, 'test/targets/pre2/')\n", (1968, 2001), False, 'import os\n'), ((2289, 2332), 'os.path.join', 'os.path.join', (['data_path', '"""hold/images/pre/"""'], {}), "(data_path, 'hold/images/pre/')\n", (2301, 2332), False, 'import os\n'), ((2576, 2621), 'os.path.join', 'os.path.join', (['data_path', '"""hold/targets/pre2/"""'], {}), "(data_path, 'hold/targets/pre2/')\n", (2588, 2621), False, 'import os\n')]
|
try: import ijson.backends.yajl2_cffi as ijson
except: import ijson
from datetime import datetime, date, timedelta
from collections import OrderedDict
from bs4 import BeautifulSoup
from warnings import warn
from copy import copy
from urllib.request import urlopen
import argparse
import requests
import zipfile
import json
import math
import time
import csv
import re
import io
import os
__title__ = "TokyoGTFS: Buses-GTFS"
__author__ = "<NAME>"
__email__ = "<EMAIL>"
__license__ = "CC BY 4.0"
GTFS_HEADERS = {
"agency.txt": ["agency_id", "agency_name", "agency_url", "agency_timezone", "agency_lang"],
"stops.txt": ["stop_id", "stop_name", "stop_code", "stop_lat", "stop_lon", "zone_id"],
"routes.txt": ["agency_id", "route_id", "route_short_name", "route_long_name", "route_type", "route_color", "route_text_color"],
"trips.txt": ["route_id", "trip_id", "service_id", "trip_headsign", "trip_pattern_id", "wheelchair_accessible"],
"stop_times.txt": ["trip_id", "stop_sequence", "stop_id", "arrival_time", "departure_time", "pickup_type", "drop_off_type"],
"calendar_dates.txt": ["service_id", "date", "exception_type"],
#"fare_attributes.txt": ["agency_id", "fare_id", "price", "currency_type", "payment_method", "transfers"],
#"fare_rules.txt": ["fare_id", "contains_id"],
"translations.txt": ["trans_id", "lang", "translation"]
}
BUILT_IN_CALENDARS = {"Weekday", "SaturdayHoliday", "Holiday", "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday"}
def _text_color(route_color: str):
"""Calculate if route_text_color should be white or black"""
# This isn't perfect, but works for what we're doing
red, green, blue = int(route_color[:2], base=16), int(route_color[2:4], base=16), int(route_color[4:6], base=16)
yiq = 0.299 * red + 0.587 * green + 0.114 * blue
if yiq > 128: return "000000"
else: return "FFFFFF"
def _holidays(year):
request = requests.get("https://www.officeholidays.com/countries/japan/{}.php".format(year), timeout=30)
soup = BeautifulSoup(request.text, "html.parser")
holidays = {datetime.strptime(h.find("time").string, "%Y-%m-%d").date() for h in soup.find_all("tr", class_="holiday")}
return holidays
class _Time:
"Represent a time value"
def __init__(self, seconds):
self.m, self.s = divmod(int(seconds), 60)
self.h, self.m = divmod(self.m, 60)
def __str__(self):
"Return GTFS-compliant string representation of time"
return ":".join(["0" + i if len(i) == 1 else i for i in map(str, [self.h, self.m, self.s])])
def __repr__(self): return "<Time " + self.__str__() + ">"
def __int__(self): return self.h * 3600 + self.m * 60 + self.s
def __add__(self, other): return _Time(self.__int__() + int(other))
def __sub__(self, other): return self.__int__() - int(other)
def __lt__(self, other): return self.__int__() < int(other)
def __le__(self, other): return self.__int__() <= int(other)
def __gt__(self, other): return self.__int__() > int(other)
def __ge__(self, other): return self.__int__() >= int(other)
def __eq__(self, other): return self.__int__() == int(other)
def __ne__(self, other): return self.__int__() != int(other)
@classmethod
def from_str(cls, string):
str_split = list(map(int, string.split(":")))
if len(str_split) == 2:
return cls(str_split[0]*3600 + str_split[1]*60)
elif len(str_split) == 3:
return cls(str_split[0]*3600 + str_split[1]*60 + str_split[2])
else:
raise ValueError("invalid string for _Time.from_str(), {} (should be HH:MM or HH:MM:SS)".format(string))
class BusesParser:
def __init__(self, apikey, verbose=True):
self.apikey = apikey
self.verbose = verbose
self.valid_stops = set()
self.stop_names = {}
self.pattern_map = {}
self.english_strings = {}
self.carmel_to_title = lambda i: re.sub(r"(?!^)([A-Z][a-z]+)", r" \1", i)
# Clean gtfs/ directory
if not os.path.exists("gtfs"): os.mkdir("gtfs")
for file in os.listdir("gtfs"): os.remove("gtfs/" + file)
# Get info on which routes to parse
self.operators = OrderedDict()
with open("data/bus_data.csv", mode="r", encoding="utf8", newline="") as buffer:
reader = csv.DictReader(buffer)
for row in reader:
if row["route_timetables_available"] != "1": continue # Ignores agencies without BusTimetables
self.operators[row["operator"]] = (row["color"].upper(), _text_color(row["color"]))
# Calendars
self.startdate = date.today()
self.enddate = self.startdate + timedelta(days=180)
self.used_calendars = OrderedDict()
def _legal_calendars(self):
calendars = requests.get("http://api-tokyochallenge.odpt.org/api/v4/odpt:Calendar.json", params={"acl:consumerKey": self.apikey}, timeout=30, stream=True)
calendars.raise_for_status()
calendars = ijson.items(calendars.raw, "item")
valid_calendars = set()
for calendar in calendars:
calendar_id = calendar["owl:sameAs"].split(":")[1]
if calendar_id in BUILT_IN_CALENDARS:
valid_calendars.add(calendar_id)
elif "odpt:day" in calendar and calendar["odpt:day"] != []:
dates = [datetime.strptime(i, "%Y-%m-%d").date() for i in calendar["odpt:day"]]
if min(dates) <= self.enddate and max(dates) >= self.startdate:
valid_calendars.add(calendar_id)
else:
warn("\033[1mno dates defined for calendar {}\033[0m".format(calendar_id))
calendars.close()
return valid_calendars
def agencies(self):
buffer = open("gtfs/agency.txt", mode="w", encoding="utf8", newline="")
writer = csv.DictWriter(buffer, GTFS_HEADERS["agency.txt"], extrasaction="ignore")
writer.writeheader()
with open("data/operators.csv", mode="r", encoding="utf8", newline="") as add_info_buff:
additional_info = {i["operator"]: i for i in csv.DictReader(add_info_buff)}
# Iterate over agencies
for operator in self.operators.keys():
# Get data fro moperators.csv
operator_data = additional_info.get(operator, {})
if not operator_data: warn("\033[1mno data defined for operator {}\033[0m".format(operator))
# Translations
if "name_en" in operator_data:
self.english_strings[operator_data["name"]] = operator_data["name_en"]
# Write to agency.txt
writer.writerow({
"agency_id": operator,
"agency_name": operator_data.get("name", operator),
"agency_url": operator_data.get("website", ""),
"agency_timezone": "Asia/Tokyo", "agency_lang": "ja"
})
buffer.close()
def feed_info(self):
with open(os.path.join("gtfs", "feed_info.txt"), mode="w", encoding="utf8", newline="") as file_buff:
file_wrtr = csv.writer(file_buff)
file_wrtr.writerow(["feed_publisher_name", "feed_publisher_url", "feed_lang"])
file_wrtr.writerow([
"<NAME> (via TokyoGTFS); Data provded by Open Data Challenge for Public Transportation in Tokyo",
"https://github.com/MKuranowski/TokyoGTFS",
"ja"
])
def stops(self):
"""Parse stops"""
# Get list of stops
stops = requests.get("http://api-tokyochallenge.odpt.org/api/v4/odpt:BusstopPole.json", params={"acl:consumerKey": self.apikey}, timeout=30, stream=True)
stops.raise_for_status()
stops = ijson.items(stops.raw, "item")
# Open files
buffer = open("gtfs/stops.txt", mode="w", encoding="utf8", newline="")
writer = csv.DictWriter(buffer, GTFS_HEADERS["stops.txt"], extrasaction="ignore")
writer.writeheader()
broken_stops_buff = open("broken_stops.csv", mode="w", encoding="utf8", newline="")
broken_stops_wrtr = csv.writer(broken_stops_buff)
broken_stops_wrtr.writerow(["stop_id", "stop_name", "stop_name_en", "stop_code"])
# Iterate over stops
for stop in stops:
stop_id = stop["owl:sameAs"].split(":")[1]
stop_code = stop.get("odpt:busstopPoleNumber", "")
stop_name = stop["dc:title"]
stop_name_en = self.carmel_to_title(stop_id.split(".")[1])
if self.verbose: print("\033[1A\033[KParsing stops:", stop_id)
self.stop_names[stop_id] = stop_name
# Stop name translation
if stop_name_en: self.english_strings[stop_name] = stop_name_en
# Stop operators
if type(stop["odpt:operator"]) is list:
operators = [i.split(":")[1] for i in stop["odpt:operator"]]
else:
operators = [stop["odpt:operator"].split(":")[1]]
# Ignore stops that belong to ignored agencies
if not set(operators).intersection(self.operators):
continue
# Correct stop position
if "geo:lat" in stop and "geo:long" in stop:
stop_lat = stop["geo:lat"]
stop_lon = stop["geo:long"]
# Output to GTFS or to incorrect stops
if stop_lat and stop_lon:
self.valid_stops.add(stop_id)
writer.writerow({
"stop_id": stop_id, "stop_code": stop_code, "zone_id": stop_id,
"stop_name": stop_name, "stop_lat": stop_lat, "stop_lon": stop_lon,
})
else:
broken_stops_wrtr.writerow([stop_id, stop_name, stop_name_en, stop_code])
stops.close()
buffer.close()
def routes(self):
patterns = requests.get("http://api-tokyochallenge.odpt.org/api/v4/odpt:BusroutePattern.json", params={"acl:consumerKey": self.apikey}, timeout=30, stream=True)
patterns.raise_for_status()
patterns = ijson.items(patterns.raw, "item")
buffer = open("gtfs/routes.txt", mode="w", encoding="utf8", newline="")
writer = csv.DictWriter(buffer, GTFS_HEADERS["routes.txt"], extrasaction="ignore")
writer.writeheader()
self.parsed_routes = set()
for pattern in patterns:
pattern_id = pattern["owl:sameAs"].split(":")[1]
if type(pattern["odpt:operator"]) is list: operator = pattern["odpt:operator"][0].split(":")[1]
else: operator = pattern["odpt:operator"].split(":")[1]
if operator not in self.operators: continue
if self.verbose: print("\033[1A\033[KParsing route patterns:", pattern_id)
# Get route_id
if "odpt:busroute" in pattern:
route_id = pattern["odpt:busroute"].split(":")[1]
else:
if operator == "JRBusKanto":
route_id = operator + "." + \
pattern_id.split(".")[1] + "." + \
pattern_id.split(".")[2]
else:
route_id = operator + "." + pattern_id.split(".")[1]
# Map pattern → route_id, as BusTimetable references patterns instead of routes
self.pattern_map[pattern_id] = route_id
# Get color from bus_colors.csv
route_code = pattern["dc:title"].split(" ")[0] # Toei appends direction to BusroutePattern's dc:title
route_color, route_text = self.operators[operator]
# Output to GTFS
if route_id not in self.parsed_routes:
self.parsed_routes.add(route_id)
writer.writerow({
"agency_id": operator,
"route_id": route_id,
"route_short_name": route_code,
"route_type": 3,
"route_color": route_color,
"route_text_color": route_text
})
patterns.close()
buffer.close()
def trips(self):
"""Parse trips & stop_times"""
# Some variables
available_calendars = self._legal_calendars()
# Get all trips
trips = requests.get("http://api-tokyochallenge.odpt.org/api/v4/odpt:BusTimetable.json", params={"acl:consumerKey": self.apikey}, timeout=90, stream=True)
trips.raise_for_status()
trips = ijson.items(trips.raw, "item")
# Open GTFS trips
buffer_trips = open("gtfs/trips.txt", mode="w", encoding="utf8", newline="")
writer_trips = csv.DictWriter(buffer_trips, GTFS_HEADERS["trips.txt"], extrasaction="ignore")
writer_trips.writeheader()
buffer_times = open("gtfs/stop_times.txt", mode="w", encoding="utf8", newline="")
writer_times = csv.DictWriter(buffer_times, GTFS_HEADERS["stop_times.txt"], extrasaction="ignore")
writer_times.writeheader()
# Iteratr over trips
for trip in trips:
operator = trip["odpt:operator"].split(":")[1]
pattern_id = trip["odpt:busroutePattern"].split(":")[1]
# Get route_id
if pattern_id in self.pattern_map:
route_id = self.pattern_map[pattern_id]
else:
if operator == "JRBusKanto":
route_id = operator + "." + \
pattern_id.split(".")[1] + "." + \
pattern_id.split(".")[2]
else:
route_id = operator + "." + pattern_id.split(".")[1]
trip_id = trip["owl:sameAs"].split(":")[1]
calendar = trip["odpt:calendar"].split(":")[1]
service_id = route_id + "/" + calendar
if self.verbose: print("\033[1A\033[KParsing times:", trip_id)
# Ignore non-parsed routes and non_active calendars
if operator not in self.operators:
continue
if route_id not in self.parsed_routes:
warn("\033[1mno route for pattern {}\033[0m".format(pattern_id))
continue
if calendar not in available_calendars:
continue
# Add calendar
if route_id not in self.used_calendars: self.used_calendars[route_id] = set()
self.used_calendars[route_id].add(calendar)
# Ignore one-stop trips
if len(trip["odpt:busTimetableObject"]) < 2:
continue
# Bus headsign
headsigns = [i["odpt:destinationSign"] for i in trip["odpt:busTimetableObject"] if i.get("odpt:destinationSign") != None]
if headsigns:
trip_headsign = headsigns[0]
else:
last_stop_id = trip["odpt:busTimetableObject"][-1]["odpt:busstopPole"].split(":")[1]
if last_stop_id in self.stop_names:
trip_headsign = self.stop_names[last_stop_id]
else:
trip_headsign = re.sub(r"(?!^)([A-Z][a-z]+)", r" \1", last_stop_id.split(".")[1])
warn("\033[1mno name for stop {}\033[0m".format(last_stop_id))
self.stop_names[last_stop_id] = trip_headsign
trip_headsign_en = self.english_strings.get(trip_headsign, "")
# Non-step bus (wheelchair accesibility)
if any([i.get("odpt:isNonStepBus") == False for i in trip["odpt:busTimetableObject"]]):
wheelchair = "2"
elif any([i.get("odpt:isNonStepBus") == True for i in trip["odpt:busTimetableObject"]]):
wheelchair = "1"
else:
wheelchair = "0"
# Do we start after midnight?
prev_departure = _Time(0)
if trip["odpt:busTimetableObject"][0].get("odpt:isMidnight", False):
first_time = trip["odpt:busTimetableObject"][0].get("odpt:departureTime") or \
trip["odpt:busTimetableObject"][0].get("odpt:arrivalTime")
# If that's a night bus, and the trip starts before 6 AM
# Add 24h to departure, as the trip starts "after-midnight"
if int(first_time.split(":")[0]) < 6: prev_departure = _Time(86400)
# Filter stops to include only active stops
trip["odpt:busTimetableObject"] = sorted([
i for i in trip["odpt:busTimetableObject"]
if i["odpt:busstopPole"].split(":")[1] in self.valid_stops
], key=lambda i: i["odpt:index"])
# Ignore trips with less then 1 stop
if len(trip["odpt:busTimetableObject"]) <= 1:
#warn("\033[1mno correct stops in trip {}\033[0m".format(trip_id))
continue
# Write to trips.txt
writer_trips.writerow({
"route_id": route_id, "trip_id": trip_id,
"service_id": service_id, "trip_headsign": trip_headsign,
"trip_pattern_id": pattern_id, "wheelchair_accessible": wheelchair
})
# Times
for idx, stop_time in enumerate(trip["odpt:busTimetableObject"]):
stop_id = stop_time["odpt:busstopPole"].split(":")[1]
# Get time
arrival = stop_time.get("odpt:arrivalTime") or stop_time.get("odpt:departureTime")
departure = stop_time.get("odpt:departureTime") or stop_time.get("odpt:arrivalTime")
if arrival: arrival = _Time.from_str(arrival)
if departure: departure = _Time.from_str(departure)
# Be sure arrival and departure exist
if not (arrival and departure): continue
# Fix for after-midnight trips. GTFS requires "24:23", while JSON data contains "00:23"
if arrival < prev_departure: arrival += 86400
if departure < arrival: departure += 86400
prev_departure = copy(departure)
# Can get on/off?
# None → no info → fallbacks to True, but bool(None) == False, so we have to explicitly comapre the value to False
pickup = "1" if stop_time.get("odpt:CanGetOn") == False else "0"
dropoff = "1" if stop_time.get("odpt:CanGetOff") == False else "0"
writer_times.writerow({
"trip_id": trip_id, "stop_sequence": idx, "stop_id": stop_id,
"arrival_time": str(arrival), "departure_time": str(departure),
"pickup_type": pickup, "drop_off_type": dropoff
})
trips.close()
buffer_trips.close()
buffer_times.close()
def translations(self):
buffer = open("gtfs/translations.txt", mode="w", encoding="utf8", newline="")
writer = csv.DictWriter(buffer, GTFS_HEADERS["translations.txt"], extrasaction="ignore")
writer.writeheader()
for ja_string, en_string in self.english_strings.items():
writer.writerow({"trans_id": ja_string, "lang": "ja", "translation": ja_string})
writer.writerow({"trans_id": ja_string, "lang": "en", "translation": en_string})
buffer.close()
    def calendars(self):
        """Build gtfs/calendar_dates.txt for every service used by the exported routes.

        Streams odpt:Calendar objects from the API to learn which *specific*
        calendars (explicit date lists) are active on which days, then walks every
        date in [self.startdate, self.enddate] for each route and emits one
        exception_type=1 row per active service on that date. Specific calendars
        take precedence over the generic weekday/holiday fallbacks below.
        """
        calendars = requests.get("http://api-tokyochallenge.odpt.org/api/v4/odpt:Calendar.json", params={"acl:consumerKey": self.apikey}, timeout=30, stream=True)
        calendars.raise_for_status()
        # Lazily parse the JSON array so the whole response never sits in memory.
        calendars = ijson.items(calendars.raw, "item")
        # Map each concrete date -> set of specific calendar ids active that day
        calendar_dates = {}
        for calendar in calendars:
            calendar_id = calendar["owl:sameAs"].split(":")[1]
            # Only calendars with an explicit, non-empty day list are "specific"
            if "odpt:day" in calendar and calendar["odpt:day"] != []:
                dates = [datetime.strptime(i, "%Y-%m-%d").date() for i in calendar["odpt:day"]]
                # Clamp to the feed's export window
                dates = [i for i in dates if self.startdate <= i <= self.enddate]
                for date in dates:
                    if date not in calendar_dates: calendar_dates[date] = set()
                    calendar_dates[date].add(calendar_id)
        # Public holidays for every year the export window touches
        if self.startdate.year == self.enddate.year: holidays = _holidays(self.startdate.year)
        else: holidays = _holidays(self.startdate.year) | _holidays(self.enddate.year)
        # Open output file
        buffer = open("gtfs/calendar_dates.txt", mode="w", encoding="utf8", newline="")
        writer = csv.DictWriter(buffer, GTFS_HEADERS["calendar_dates.txt"], extrasaction="ignore")
        writer.writeheader()
        # For every route, walk day by day and decide which of its services run
        for route, services in self.used_calendars.items():
            if self.verbose: print("\033[1A\033[KParsing calendars:", route)
            working_date = copy(self.startdate)
            while working_date <= self.enddate:
                active_services = []
                # 1) Specific calendars listing this exact date win outright
                if calendar_dates.get(working_date, set()).intersection(services):
                    active_services = [i for i in calendar_dates[working_date].intersection(services)]
                # 2) Generic fallbacks, checked in priority order
                elif working_date in holidays and "Holiday" in services:
                    active_services = ["Holiday"]
                elif working_date.isoweekday() == 7 and working_date not in holidays:
                    if "Sunday" in services: active_services = ["Sunday"]
                    # NOTE(review): this branch matches on "Holiday" but activates
                    # "Sunday" — presumably it was meant to be ["Holiday"]; confirm
                    # against the upstream TokyoGTFS source before changing.
                    elif "Holiday" in services: active_services = ["Sunday"]
                elif working_date.isoweekday() == 6 and working_date not in holidays and "Saturday" in services:
                    active_services = ["Saturday"]
                elif working_date.isoweekday() == 5 and working_date not in holidays and "Friday" in services:
                    active_services = ["Friday"]
                elif working_date.isoweekday() == 4 and working_date not in holidays and "Thursday" in services:
                    active_services = ["Thursday"]
                elif working_date.isoweekday() == 3 and working_date not in holidays and "Wednesday" in services:
                    active_services = ["Wednesday"]
                elif working_date.isoweekday() == 2 and working_date not in holidays and "Tuesday" in services:
                    active_services = ["Tuesday"]
                elif working_date.isoweekday() == 1 and working_date not in holidays and "Monday" in services:
                    active_services = ["Monday"]
                elif (working_date.isoweekday() >= 6 or working_date in holidays) and "SaturdayHoliday" in services:
                    active_services = ["SaturdayHoliday"]
                elif working_date.isoweekday() <= 5 and working_date not in holidays and "Weekday" in services:
                    active_services = ["Weekday"]
                if active_services:
                    for service in active_services:
                        # service_id is namespaced per route so identical generic
                        # calendars of different routes do not collide
                        writer.writerow({"service_id": route+"/"+service, "date": working_date.strftime("%Y%m%d"), "exception_type": 1})
                working_date += timedelta(days=1)
        calendars.close()
        buffer.close()
def trips_calendars_crosscheck(self):
# Sometimes BusTimetable references the "Holiday" service, which is »valid«,
# But sometimes specific calendars override every holiday inside the GTFS peroid
# This functions checks if service_id of every trips is inside calendar_dates.txt
valid_services = set()
remove_trips = set()
# Read valid services
if self.verbose: print("\033[1A\033[KTrips×Calendars cross-check: reading calendar_dates.txt")
buff = open("gtfs/calendar_dates.txt", "r", encoding="utf8")
reader = csv.DictReader(buff)
for row in reader:
valid_services.add(row["service_id"])
buff.close()
### FIX TRIPS.TXT ###
if self.verbose: print("\033[1A\033[KTrips×Calendars cross-check: rewriting trips.txt")
os.rename("gtfs/trips.txt", "gtfs/trips.txt.old")
# Old file
in_buffer = open("gtfs/trips.txt.old", mode="r", encoding="utf8", newline="")
reader = csv.DictReader(in_buffer)
# New file
out_buffer = open("gtfs/trips.txt", mode="w", encoding="utf8", newline="")
writer = csv.DictWriter(out_buffer, GTFS_HEADERS["trips.txt"], extrasaction="ignore")
writer.writeheader()
for row in reader:
if row["service_id"] in valid_services:
writer.writerow(row)
else:
remove_trips.add(row["trip_id"])
in_buffer.close()
out_buffer.close()
os.remove("gtfs/trips.txt.old")
del valid_services
### FIX STOP_TIMES.TXT ###
if self.verbose: print("\033[1A\033[KTrips×Calendars cross-check: rewriting stop_times.txt")
os.rename("gtfs/stop_times.txt", "gtfs/stop_times.txt.old")
# Old file
in_buffer = open("gtfs/stop_times.txt.old", mode="r", encoding="utf8", newline="")
reader = csv.DictReader(in_buffer)
# New file
out_buffer = open("gtfs/stop_times.txt", mode="w", encoding="utf8", newline="")
writer = csv.DictWriter(out_buffer, GTFS_HEADERS["stop_times.txt"], extrasaction="ignore")
writer.writeheader()
for row in reader:
if row["trip_id"] in remove_trips:
continue
else:
writer.writerow(row)
in_buffer.close()
out_buffer.close()
os.remove("gtfs/stop_times.txt.old")
def parse(self):
if self.verbose: print("Parsing agencies")
self.agencies()
self.feed_info()
if self.verbose: print("\033[1A\033[KParsing stops")
self.stops()
if self.verbose: print("\033[1A\033[KParsing stops: finished")
if self.verbose: print("\033[1A\033[KParsing routes")
self.routes()
if self.verbose: print("\033[1A\033[KParsing routes: finished")
if self.verbose: print("\033[1A\033[KParsing times")
self.trips()
if self.verbose: print("\033[1A\033[KParsing times: finished")
if self.verbose: print("\033[1A\033[KParsing translations")
self.translations()
if self.verbose: print("\033[1A\033[KParsing calendars")
self.calendars()
if self.verbose: print("\033[1A\033[KParsing calendars: finished")
if self.verbose: print("\033[1A\033[KTrips×Calendars cross-check")
self.trips_calendars_crosscheck()
if self.verbose: print("\033[1A\033[KTrips×Calendars cross-check: finished")
if self.verbose: print("\033[1A\033[KParsing finished!")
def compress(self):
"Compress all created files to tokyo_trains.zip"
archive = zipfile.ZipFile("tokyo_buses.zip", mode="w", compression=zipfile.ZIP_DEFLATED)
for file in os.listdir("gtfs"):
if file.endswith(".txt"):
archive.write(os.path.join("gtfs", file), arcname=file)
archive.close()
if __name__ == "__main__":
    # Command-line entry point: resolve the API key, run the parser, zip the feed.
    args_parser = argparse.ArgumentParser()
    args_parser.add_argument("-a", "--apikey", metavar="YOUR_APIKEY", help="apikey from developer-tokyochallenge.odpt.org")
    args_parser.add_argument("--no-verbose", action="store_false", dest="verbose", help="don't verbose")
    args = args_parser.parse_args()
    # API key resolution: CLI flag wins, then a local apikey.txt, else abort.
    if args.apikey:
        apikey = args.apikey
    elif os.path.exists("apikey.txt"):
        with open("apikey.txt", mode="r", encoding="utf8") as f:
            apikey = f.read().strip()
    else:
        raise RuntimeError("No apikey!\n Provide it inside command line argument '--apikey',\n Or put it inside a file named 'apikey.txt'.")
    # Wall-clock timing for the closing summary line.
    start_time = time.time()
    print("""
    |  _____     _                 ____ _____ _____ ____   |
    | |_   _|__ | | ___   _  ___  / ___|_   _|  ___/ ___|  |
    | | |/ _ \| |/ / | | |/ _ \| |  _   | | | |_  \___ \  |
    | | | (_) |   <| |_| | (_) | |_| |  | | |  _|  ___) | |
    | |_|\___/|_|\_\\\\__, |\___/ \____|  |_| |_| |____/  |
    |                |___/                               |
    """)
    print("=== Buses GTFS: Starting! ===")
    print("Initializing parser")
    parser = BusesParser(apikey=apikey, verbose=args.verbose)
    print("Starting data parse... This might take some time...")
    parser.parse()
    print("Compressing to tokyo_buses.zip")
    parser.compress()
    total_time = time.time() - start_time
    print("=== TokyoGTFS: Finished in {} s ===".format(round(total_time, 2)))
|
[
"os.mkdir",
"os.remove",
"argparse.ArgumentParser",
"os.path.join",
"csv.DictWriter",
"os.path.exists",
"datetime.timedelta",
"requests.get",
"re.sub",
"csv.writer",
"csv.DictReader",
"os.rename",
"datetime.date.today",
"datetime.datetime.strptime",
"ijson.items",
"bs4.BeautifulSoup",
"os.listdir",
"zipfile.ZipFile",
"copy.copy",
"time.time",
"collections.OrderedDict"
] |
[((2045, 2087), 'bs4.BeautifulSoup', 'BeautifulSoup', (['request.text', '"""html.parser"""'], {}), "(request.text, 'html.parser')\n", (2058, 2087), False, 'from bs4 import BeautifulSoup\n'), ((27100, 27125), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (27123, 27125), False, 'import argparse\n'), ((27780, 27791), 'time.time', 'time.time', ([], {}), '()\n', (27789, 27791), False, 'import time\n'), ((4125, 4143), 'os.listdir', 'os.listdir', (['"""gtfs"""'], {}), "('gtfs')\n", (4135, 4143), False, 'import os\n'), ((4241, 4254), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (4252, 4254), False, 'from collections import OrderedDict\n'), ((4676, 4688), 'datetime.date.today', 'date.today', ([], {}), '()\n', (4686, 4688), False, 'from datetime import datetime, date, timedelta\n'), ((4779, 4792), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (4790, 4792), False, 'from collections import OrderedDict\n'), ((4846, 4992), 'requests.get', 'requests.get', (['"""http://api-tokyochallenge.odpt.org/api/v4/odpt:Calendar.json"""'], {'params': "{'acl:consumerKey': self.apikey}", 'timeout': '(30)', 'stream': '(True)'}), "('http://api-tokyochallenge.odpt.org/api/v4/odpt:Calendar.json',\n params={'acl:consumerKey': self.apikey}, timeout=30, stream=True)\n", (4858, 4992), False, 'import requests\n'), ((5046, 5080), 'ijson.items', 'ijson.items', (['calendars.raw', '"""item"""'], {}), "(calendars.raw, 'item')\n", (5057, 5080), False, 'import ijson\n'), ((5904, 5977), 'csv.DictWriter', 'csv.DictWriter', (['buffer', "GTFS_HEADERS['agency.txt']"], {'extrasaction': '"""ignore"""'}), "(buffer, GTFS_HEADERS['agency.txt'], extrasaction='ignore')\n", (5918, 5977), False, 'import csv\n'), ((7592, 7741), 'requests.get', 'requests.get', (['"""http://api-tokyochallenge.odpt.org/api/v4/odpt:BusstopPole.json"""'], {'params': "{'acl:consumerKey': self.apikey}", 'timeout': '(30)', 'stream': '(True)'}), 
"('http://api-tokyochallenge.odpt.org/api/v4/odpt:BusstopPole.json',\n params={'acl:consumerKey': self.apikey}, timeout=30, stream=True)\n", (7604, 7741), False, 'import requests\n'), ((7787, 7817), 'ijson.items', 'ijson.items', (['stops.raw', '"""item"""'], {}), "(stops.raw, 'item')\n", (7798, 7817), False, 'import ijson\n'), ((7936, 8008), 'csv.DictWriter', 'csv.DictWriter', (['buffer', "GTFS_HEADERS['stops.txt']"], {'extrasaction': '"""ignore"""'}), "(buffer, GTFS_HEADERS['stops.txt'], extrasaction='ignore')\n", (7950, 8008), False, 'import csv\n'), ((8159, 8188), 'csv.writer', 'csv.writer', (['broken_stops_buff'], {}), '(broken_stops_buff)\n', (8169, 8188), False, 'import csv\n'), ((9936, 10094), 'requests.get', 'requests.get', (['"""http://api-tokyochallenge.odpt.org/api/v4/odpt:BusroutePattern.json"""'], {'params': "{'acl:consumerKey': self.apikey}", 'timeout': '(30)', 'stream': '(True)'}), "(\n 'http://api-tokyochallenge.odpt.org/api/v4/odpt:BusroutePattern.json',\n params={'acl:consumerKey': self.apikey}, timeout=30, stream=True)\n", (9948, 10094), False, 'import requests\n'), ((10141, 10174), 'ijson.items', 'ijson.items', (['patterns.raw', '"""item"""'], {}), "(patterns.raw, 'item')\n", (10152, 10174), False, 'import ijson\n'), ((10273, 10346), 'csv.DictWriter', 'csv.DictWriter', (['buffer', "GTFS_HEADERS['routes.txt']"], {'extrasaction': '"""ignore"""'}), "(buffer, GTFS_HEADERS['routes.txt'], extrasaction='ignore')\n", (10287, 10346), False, 'import csv\n'), ((12350, 12501), 'requests.get', 'requests.get', (['"""http://api-tokyochallenge.odpt.org/api/v4/odpt:BusTimetable.json"""'], {'params': "{'acl:consumerKey': self.apikey}", 'timeout': '(90)', 'stream': '(True)'}), "('http://api-tokyochallenge.odpt.org/api/v4/odpt:BusTimetable.json'\n , params={'acl:consumerKey': self.apikey}, timeout=90, stream=True)\n", (12362, 12501), False, 'import requests\n'), ((12546, 12576), 'ijson.items', 'ijson.items', (['trips.raw', '"""item"""'], {}), "(trips.raw, 
'item')\n", (12557, 12576), False, 'import ijson\n'), ((12712, 12790), 'csv.DictWriter', 'csv.DictWriter', (['buffer_trips', "GTFS_HEADERS['trips.txt']"], {'extrasaction': '"""ignore"""'}), "(buffer_trips, GTFS_HEADERS['trips.txt'], extrasaction='ignore')\n", (12726, 12790), False, 'import csv\n'), ((12940, 13028), 'csv.DictWriter', 'csv.DictWriter', (['buffer_times', "GTFS_HEADERS['stop_times.txt']"], {'extrasaction': '"""ignore"""'}), "(buffer_times, GTFS_HEADERS['stop_times.txt'], extrasaction=\n 'ignore')\n", (12954, 13028), False, 'import csv\n'), ((18961, 19040), 'csv.DictWriter', 'csv.DictWriter', (['buffer', "GTFS_HEADERS['translations.txt']"], {'extrasaction': '"""ignore"""'}), "(buffer, GTFS_HEADERS['translations.txt'], extrasaction='ignore')\n", (18975, 19040), False, 'import csv\n'), ((19393, 19539), 'requests.get', 'requests.get', (['"""http://api-tokyochallenge.odpt.org/api/v4/odpt:Calendar.json"""'], {'params': "{'acl:consumerKey': self.apikey}", 'timeout': '(30)', 'stream': '(True)'}), "('http://api-tokyochallenge.odpt.org/api/v4/odpt:Calendar.json',\n params={'acl:consumerKey': self.apikey}, timeout=30, stream=True)\n", (19405, 19539), False, 'import requests\n'), ((19593, 19627), 'ijson.items', 'ijson.items', (['calendars.raw', '"""item"""'], {}), "(calendars.raw, 'item')\n", (19604, 19627), False, 'import ijson\n'), ((20560, 20646), 'csv.DictWriter', 'csv.DictWriter', (['buffer', "GTFS_HEADERS['calendar_dates.txt']"], {'extrasaction': '"""ignore"""'}), "(buffer, GTFS_HEADERS['calendar_dates.txt'], extrasaction=\n 'ignore')\n", (20574, 20646), False, 'import csv\n'), ((23747, 23767), 'csv.DictReader', 'csv.DictReader', (['buff'], {}), '(buff)\n', (23761, 23767), False, 'import csv\n'), ((24001, 24050), 'os.rename', 'os.rename', (['"""gtfs/trips.txt"""', '"""gtfs/trips.txt.old"""'], {}), "('gtfs/trips.txt', 'gtfs/trips.txt.old')\n", (24010, 24050), False, 'import os\n'), ((24174, 24199), 'csv.DictReader', 'csv.DictReader', (['in_buffer'], {}), 
'(in_buffer)\n', (24188, 24199), False, 'import csv\n'), ((24320, 24396), 'csv.DictWriter', 'csv.DictWriter', (['out_buffer', "GTFS_HEADERS['trips.txt']"], {'extrasaction': '"""ignore"""'}), "(out_buffer, GTFS_HEADERS['trips.txt'], extrasaction='ignore')\n", (24334, 24396), False, 'import csv\n'), ((24673, 24704), 'os.remove', 'os.remove', (['"""gtfs/trips.txt.old"""'], {}), "('gtfs/trips.txt.old')\n", (24682, 24704), False, 'import os\n'), ((24877, 24936), 'os.rename', 'os.rename', (['"""gtfs/stop_times.txt"""', '"""gtfs/stop_times.txt.old"""'], {}), "('gtfs/stop_times.txt', 'gtfs/stop_times.txt.old')\n", (24886, 24936), False, 'import os\n'), ((25065, 25090), 'csv.DictReader', 'csv.DictReader', (['in_buffer'], {}), '(in_buffer)\n', (25079, 25090), False, 'import csv\n'), ((25216, 25302), 'csv.DictWriter', 'csv.DictWriter', (['out_buffer', "GTFS_HEADERS['stop_times.txt']"], {'extrasaction': '"""ignore"""'}), "(out_buffer, GTFS_HEADERS['stop_times.txt'], extrasaction=\n 'ignore')\n", (25230, 25302), False, 'import csv\n'), ((25545, 25581), 'os.remove', 'os.remove', (['"""gtfs/stop_times.txt.old"""'], {}), "('gtfs/stop_times.txt.old')\n", (25554, 25581), False, 'import os\n'), ((26801, 26879), 'zipfile.ZipFile', 'zipfile.ZipFile', (['"""tokyo_buses.zip"""'], {'mode': '"""w"""', 'compression': 'zipfile.ZIP_DEFLATED'}), "('tokyo_buses.zip', mode='w', compression=zipfile.ZIP_DEFLATED)\n", (26816, 26879), False, 'import zipfile\n'), ((26900, 26918), 'os.listdir', 'os.listdir', (['"""gtfs"""'], {}), "('gtfs')\n", (26910, 26918), False, 'import os\n'), ((27451, 27479), 'os.path.exists', 'os.path.exists', (['"""apikey.txt"""'], {}), "('apikey.txt')\n", (27465, 27479), False, 'import os\n'), ((28493, 28504), 'time.time', 'time.time', ([], {}), '()\n', (28502, 28504), False, 'import time\n'), ((3975, 4014), 're.sub', 're.sub', (['"""(?!^)([A-Z][a-z]+)"""', '""" \\\\1"""', 'i'], {}), "('(?!^)([A-Z][a-z]+)', ' \\\\1', i)\n", (3981, 4014), False, 'import re\n'), ((4064, 4086), 
'os.path.exists', 'os.path.exists', (['"""gtfs"""'], {}), "('gtfs')\n", (4078, 4086), False, 'import os\n'), ((4088, 4104), 'os.mkdir', 'os.mkdir', (['"""gtfs"""'], {}), "('gtfs')\n", (4096, 4104), False, 'import os\n'), ((4145, 4170), 'os.remove', 'os.remove', (["('gtfs/' + file)"], {}), "('gtfs/' + file)\n", (4154, 4170), False, 'import os\n'), ((4365, 4387), 'csv.DictReader', 'csv.DictReader', (['buffer'], {}), '(buffer)\n', (4379, 4387), False, 'import csv\n'), ((4729, 4748), 'datetime.timedelta', 'timedelta', ([], {'days': '(180)'}), '(days=180)\n', (4738, 4748), False, 'from datetime import datetime, date, timedelta\n'), ((7144, 7165), 'csv.writer', 'csv.writer', (['file_buff'], {}), '(file_buff)\n', (7154, 7165), False, 'import csv\n'), ((20856, 20876), 'copy.copy', 'copy', (['self.startdate'], {}), '(self.startdate)\n', (20860, 20876), False, 'from copy import copy\n'), ((7028, 7065), 'os.path.join', 'os.path.join', (['"""gtfs"""', '"""feed_info.txt"""'], {}), "('gtfs', 'feed_info.txt')\n", (7040, 7065), False, 'import os\n'), ((18108, 18123), 'copy.copy', 'copy', (['departure'], {}), '(departure)\n', (18112, 18123), False, 'from copy import copy\n'), ((23090, 23107), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (23099, 23107), False, 'from datetime import datetime, date, timedelta\n'), ((6162, 6191), 'csv.DictReader', 'csv.DictReader', (['add_info_buff'], {}), '(add_info_buff)\n', (6176, 6191), False, 'import csv\n'), ((26988, 27014), 'os.path.join', 'os.path.join', (['"""gtfs"""', 'file'], {}), "('gtfs', file)\n", (27000, 27014), False, 'import os\n'), ((19891, 19923), 'datetime.datetime.strptime', 'datetime.strptime', (['i', '"""%Y-%m-%d"""'], {}), "(i, '%Y-%m-%d')\n", (19908, 19923), False, 'from datetime import datetime, date, timedelta\n'), ((5410, 5442), 'datetime.datetime.strptime', 'datetime.strptime', (['i', '"""%Y-%m-%d"""'], {}), "(i, '%Y-%m-%d')\n", (5427, 5442), False, 'from datetime import datetime, date, 
timedelta\n')]
|
# MIT License
# Copyright 2020 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# ==============================================================================
# Adapted from: https://github.com/kennethreitz/setup.py
import io
import os
from setuptools import find_packages, setup
# Package meta-data.
NAME = "morpheus-core"
DESCRIPTION = "A framework for scaling pixel-level analysis to large images"
URL = "https://github.com/morpheus-project/morpheus-core"
EMAIL = "<EMAIL>"
AUTHOR = "<NAME> & <NAME>"
REQUIRES_PYTHON = ">=3.6"
# What packages are required for this module to be executed?
REQUIRED = ["dill", "numpy", "astropy", "tqdm"]
# The rest you shouldn't have to touch too much :)
# ------------------------------------------------
# Except, perhaps the License and Trove Classifiers!
# If you do change the License, remember to change the Trove Classifier for that!
here = os.path.abspath(os.path.dirname(__file__))
# Import the README and use it as the long-description.
# Note: this will only work if 'README.md' is present in your MANIFEST.in file!
try:
with io.open(os.path.join(here, "README.rst"), encoding="utf-8") as f:
long_description = "\n" + f.read()
except FileNotFoundError:
long_description = DESCRIPTION
with open("./morpheus_core/__version__.py", "r") as f:
version = f.readlines()[0].strip().replace('"', "")
# Where the magic happens:
setup(
name=NAME,
version=version,
description=DESCRIPTION,
long_description=long_description,
long_description_content_type="text/x-rst",
author=AUTHOR,
author_email=EMAIL,
python_requires=REQUIRES_PYTHON,
url=URL,
packages=find_packages(exclude=("morpheus_core.tests",)),
install_requires=REQUIRED,
license="MIT",
classifiers=[
# Trove classifiers
# Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: Implementation :: PyPy",
"License :: OSI Approved :: MIT License",
"Intended Audience :: Science/Research",
"Development Status :: 4 - Beta",
"Operating System :: POSIX :: Linux",
"Topic :: Scientific/Engineering",
],
)
|
[
"os.path.dirname",
"os.path.join",
"setuptools.find_packages"
] |
[((1923, 1948), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1938, 1948), False, 'import os\n'), ((2676, 2723), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "('morpheus_core.tests',)"}), "(exclude=('morpheus_core.tests',))\n", (2689, 2723), False, 'from setuptools import find_packages, setup\n'), ((2109, 2141), 'os.path.join', 'os.path.join', (['here', '"""README.rst"""'], {}), "(here, 'README.rst')\n", (2121, 2141), False, 'import os\n')]
|
# %%
import pandas as pd
import numpy as np
from datetime import datetime
import os
import pickle
import matplotlib.pyplot as plt
import scipy.special as sc
from scipy.stats import norm
from scipy.stats import lognorm
import copy
# Interactive smoke-test script. Directory paths come from env_vars.py; data
# objects (clean_data, latent_data, dict_knitted_with_puffmarker) and the
# model functions/classes exercised below are created by the exec'd setup
# scripts or earlier cells of the session -- TODO confirm exact provenance.
exec(open('../env_vars.py').read())
dir_data = os.environ['dir_data']
dir_picklejar = os.environ['dir_picklejar']
dir_code_methods = os.environ['dir_code_methods']
# %%
# Output of this script is the data frame data_day_limits
exec(open(os.path.join(os.path.realpath(dir_code_methods), 'setup-day-limits.py')).read())
# %%
# Each block below is disabled by default (execute_test = False); flip the
# flag to run it interactively. The None placeholders for ids/days must be
# filled in with real keys before running.
execute_test = False
if execute_test:
    # Sanity check: are there any duplicates in the hours_since_start_day column?
    for participant in dict_knitted_with_puffmarker.keys():
        for days in dict_knitted_with_puffmarker[participant].keys():
            current_data = dict_knitted_with_puffmarker[participant][days]
            if len(current_data.index) > 0:
                which_idx_dup = current_data['hours_since_start_day'].duplicated()
                which_idx_dup = np.array(which_idx_dup)
                if np.sum(which_idx_dup*1.)>0:
                    print((participant, days, np.cumsum(which_idx_dup))) # prints those participant-days with duplicates
# found: 1 selfreport and 1 random ema with exactly the same hours_since_start_day
# the selfreport will eventually be dropped since when_smoke=4
# %%
# Test out the function
execute_test = False
if execute_test:
    use_this_id = None
    use_this_days = None
    # Test out the function latent_poisson_process_ex1
    # pre-quit
    print(latent_poisson_process_ex1(latent_dict = latent_data[use_this_id][use_this_days], params = {'lambda': 0.14}))
    # post-quit
    print(latent_poisson_process_ex1(latent_dict = latent_data[use_this_id][use_this_days], params = {'lambda': 0.14}))
# %%
# Test out the function
execute_test = False
if execute_test:
    use_this_id = None
    use_this_days = None
    # Test out the function latent_poisson_process_ex2
    # pre-quit
    print(latent_poisson_process_ex2(latent_dict = latent_data[use_this_id][use_this_days], params = {'lambda_prequit': 0.14, 'lambda_postquit': 0.75}))
    # post-quit
    print(latent_poisson_process_ex2(latent_dict = latent_data[use_this_id][use_this_days], params = {'lambda_prequit': 0.14, 'lambda_postquit': 0.75}))
# %%
# Test out the class
execute_test = False
if execute_test:
    tmp_latent_data = copy.deepcopy(latent_data)
    lat_pp_ex1 = latent(data=tmp_latent_data, model=latent_poisson_process_ex1, params = {'lambda': 0.14})
    print(lat_pp_ex1.model)
    print(lat_pp_ex1.params)
    print(lat_pp_ex1.compute_total_pp(use_params = None))
    lat_pp_ex1.update_params(new_params = {'lambda': 0.77})
    print(lat_pp_ex1.model)
    print(lat_pp_ex1.params)
    print(lat_pp_ex1.compute_total_pp(use_params = None))
# %%
# Another test on the class
execute_test = False
if execute_test:
    tmp_latent_data = copy.deepcopy(latent_data)
    lat_pp_ex2 = latent(data=tmp_latent_data, model=latent_poisson_process_ex2, params = {'lambda_prequit': 0.14, 'lambda_postquit': 0.75})
    print(lat_pp_ex2.model)
    print(lat_pp_ex2.params)
    print(lat_pp_ex2.compute_total_pp(use_params = None))
    lat_pp_ex2.update_params(new_params = {'lambda_prequit': 0.05, 'lambda_postquit': 0.25})
    print(lat_pp_ex2.model)
    print(lat_pp_ex2.params)
    print(lat_pp_ex2.compute_total_pp(use_params = None))
# %%
# Test out the function
execute_test = False
if execute_test:
    use_participant = None
    use_days = None
    tmp_clean_data = copy.deepcopy(clean_data[use_participant][use_days]) # keep clean_data[use_participant][use_days] untouched
    tmp_latent_data = copy.deepcopy(latent_data[use_participant][use_days]) # keep latent_data[use_participant][use_days] untouched
    tmp_clean_data, tmp_latent_data = matching(observed_dict = tmp_clean_data, latent_dict = tmp_latent_data)
    print(tmp_clean_data)
    print(tmp_latent_data)
    print(clean_data[use_participant][use_days]) # Check that this object remains unmodified
    print(latent_data[use_participant][use_days]) # Check that this object remains unmodified
# %%
# Test out the function
execute_test = False
if execute_test:
    use_participant = None
    use_days = None
    tmp_clean_data = copy.deepcopy(clean_data[use_participant][use_days]) # keep clean_data[use_participant][use_days] untouched
    tmp_latent_data = copy.deepcopy(latent_data[use_participant][use_days])
    if len(tmp_latent_data['matched']) > 0:
        res = selfreport_mem(observed_dict = tmp_clean_data, latent_dict = tmp_latent_data)
        print(res)
# %%
# Test out the function
execute_test = False
if execute_test:
    use_participant = None
    use_days = None
    tmp_clean_data = copy.deepcopy(clean_data[use_participant][use_days]) # keep clean_data[use_participant][use_days] untouched
    tmp_latent_data = copy.deepcopy(latent_data[use_participant][use_days])
    res = selfreport_mem_total(observed_dict = tmp_clean_data, latent_dict = tmp_latent_data, params = {'p':0.9})
    print(res)
# %%
# Another test of the function
execute_test = False
if execute_test:
    tmp_clean_data = copy.deepcopy(clean_data) # keep clean_data untouched
    tmp_latent_data = copy.deepcopy(latent_data) # keep latent_data untouched
    # Sanity check: are there observed events which are NOT matched to latent events?
    all_matched = True
    for use_this_id in tmp_clean_data.keys():
        for use_this_days in tmp_clean_data[use_this_id].keys():
            observed = tmp_clean_data[use_this_id][use_this_days]
            latent = tmp_latent_data[use_this_id][use_this_days]
            res = selfreport_mem_total(observed_dict = observed, latent_dict = latent, params = {'p':0.9})
            # selfreport_mem_total signals an unmatched observed event with -inf
            if res== -np.inf:
                all_matched = False
                print(("NOT all matched", use_this_id, use_this_days, res))
    if all_matched:
        print("all observed events are matched to latent events")
# %%
# Test out the class
execute_test = False
if execute_test:
    tmp_clean_data = copy.deepcopy(clean_data) # keep clean_data untouched
    tmp_latent_data = copy.deepcopy(latent_data) # keep latent_data untouched
    sr_mem = measurement_model(data=tmp_clean_data, model=selfreport_mem_total, latent = tmp_latent_data, model_params={'p':0.9})
    print(sr_mem.model_params)
    print(sr_mem.compute_total_mem())
    sr_mem.update_params(new_params = {'p':0.4})
    print(sr_mem.model_params)
    print(sr_mem.compute_total_mem())
|
[
"copy.deepcopy",
"numpy.sum",
"os.path.realpath",
"numpy.cumsum",
"numpy.array"
] |
[((2457, 2483), 'copy.deepcopy', 'copy.deepcopy', (['latent_data'], {}), '(latent_data)\n', (2470, 2483), False, 'import copy\n'), ((2978, 3004), 'copy.deepcopy', 'copy.deepcopy', (['latent_data'], {}), '(latent_data)\n', (2991, 3004), False, 'import copy\n'), ((3608, 3660), 'copy.deepcopy', 'copy.deepcopy', (['clean_data[use_participant][use_days]'], {}), '(clean_data[use_participant][use_days])\n', (3621, 3660), False, 'import copy\n'), ((3739, 3792), 'copy.deepcopy', 'copy.deepcopy', (['latent_data[use_participant][use_days]'], {}), '(latent_data[use_participant][use_days])\n', (3752, 3792), False, 'import copy\n'), ((4341, 4393), 'copy.deepcopy', 'copy.deepcopy', (['clean_data[use_participant][use_days]'], {}), '(clean_data[use_participant][use_days])\n', (4354, 4393), False, 'import copy\n'), ((4472, 4525), 'copy.deepcopy', 'copy.deepcopy', (['latent_data[use_participant][use_days]'], {}), '(latent_data[use_participant][use_days])\n', (4485, 4525), False, 'import copy\n'), ((4820, 4872), 'copy.deepcopy', 'copy.deepcopy', (['clean_data[use_participant][use_days]'], {}), '(clean_data[use_participant][use_days])\n', (4833, 4872), False, 'import copy\n'), ((4951, 5004), 'copy.deepcopy', 'copy.deepcopy', (['latent_data[use_participant][use_days]'], {}), '(latent_data[use_participant][use_days])\n', (4964, 5004), False, 'import copy\n'), ((5231, 5256), 'copy.deepcopy', 'copy.deepcopy', (['clean_data'], {}), '(clean_data)\n', (5244, 5256), False, 'import copy\n'), ((5308, 5334), 'copy.deepcopy', 'copy.deepcopy', (['latent_data'], {}), '(latent_data)\n', (5321, 5334), False, 'import copy\n'), ((6144, 6169), 'copy.deepcopy', 'copy.deepcopy', (['clean_data'], {}), '(clean_data)\n', (6157, 6169), False, 'import copy\n'), ((6221, 6247), 'copy.deepcopy', 'copy.deepcopy', (['latent_data'], {}), '(latent_data)\n', (6234, 6247), False, 'import copy\n'), ((1037, 1060), 'numpy.array', 'np.array', (['which_idx_dup'], {}), '(which_idx_dup)\n', (1045, 1060), True, 'import numpy as 
np\n'), ((482, 516), 'os.path.realpath', 'os.path.realpath', (['dir_code_methods'], {}), '(dir_code_methods)\n', (498, 516), False, 'import os\n'), ((1080, 1107), 'numpy.sum', 'np.sum', (['(which_idx_dup * 1.0)'], {}), '(which_idx_dup * 1.0)\n', (1086, 1107), True, 'import numpy as np\n'), ((1154, 1178), 'numpy.cumsum', 'np.cumsum', (['which_idx_dup'], {}), '(which_idx_dup)\n', (1163, 1178), True, 'import numpy as np\n')]
|
from rest_framework.permissions import AllowAny
from rest_framework.routers import DefaultRouter
from rest_framework.viewsets import ModelViewSet
from .models import Order
from .serializers import OrderSerializer
from .serializers import OrderListSerializer
from .permissions import OwnerOnly
class OrderViewset(ModelViewSet):
    """CRUD endpoints for verified orders.

    Reads are open to anyone; mutating methods require ownership (OwnerOnly).
    """
    queryset = Order.objects.filter(is_verified=True).all()

    def get_serializer_class(self):
        # Lightweight read serializer for list/retrieve, full serializer for writes.
        if self.action in ["list", "retrieve"]:
            return OrderListSerializer
        return OrderSerializer

    def get_permissions(self) -> tuple:
        """Return a one-element tuple of permission instances for this request.

        Bug fix: the fallback used to be the tuple ``(AllowAny(),)``, which was
        then *called* like a permission class — raising ``TypeError: 'tuple'
        object is not callable`` for any HTTP method missing from the mapping
        (e.g. HEAD, OPTIONS). The fallback is now the ``AllowAny`` class itself,
        consistent with the other mapping values.
        """
        permission_classes: dict = {
            "GET": AllowAny,
            "POST": OwnerOnly,
            "PUT": OwnerOnly,
            "PATCH": OwnerOnly,
            "DELETE": OwnerOnly,
        }
        # Instantiate exactly one permission, as before.
        return (permission_classes.get(self.request.method, AllowAny)(),)
# Router wiring: exposes the viewset under /orders/ and hands the generated
# URL patterns to the project urlconf via `urls`.
order_router = DefaultRouter()
order_router.register(r'orders', OrderViewset)
urls = order_router.urls
|
[
"rest_framework.routers.DefaultRouter",
"rest_framework.permissions.AllowAny"
] |
[((940, 955), 'rest_framework.routers.DefaultRouter', 'DefaultRouter', ([], {}), '()\n', (953, 955), False, 'from rest_framework.routers import DefaultRouter\n'), ((883, 893), 'rest_framework.permissions.AllowAny', 'AllowAny', ([], {}), '()\n', (891, 893), False, 'from rest_framework.permissions import AllowAny\n')]
|
import unittest
from labsys.app import create_app, db
from labsys.admissions.models import (
Patient, Address, Admission, Symptom, ObservedSymptom, Method, Sample,
InfluenzaExam, Vaccine, Hospitalization, UTIHospitalization, ClinicalEvolution,)
from . import mock
class TestModelsRelationships(unittest.TestCase):
    """Integration tests for the SQLAlchemy relationships between the
    admissions models (patient, admission, symptoms, dated events).

    Each test runs against a fresh 'testing' database created in setUp and
    dropped in tearDown, so commits/deletes do not leak between tests.
    """
    def setUp(self):
        # Fresh app + pushed context so db operations bind to the test config.
        self.app = create_app('testing')
        self.app_context = self.app.app_context()
        self.app_context.push()
        db.create_all()
        self.client = self.app.test_client(use_cookies=True)
    def tearDown(self):
        # Discard the session and schema so every test starts clean.
        db.session.remove()
        db.drop_all()
        self.app_context.pop()
    def test_patient_address_1to1(self):
        """Patient <-> Address is a one-to-one with a working backref."""
        patient = mock.patient()
        residence = mock.address()
        patient.residence = residence
        self.assertEqual(patient.residence.patient, patient)
        db.session.add(patient)
        db.session.commit()
    def test_patient_admission_1toM(self):
        """Patient -> Admission works from either side of the relationship."""
        patient = mock.patient()
        admission = mock.admission()
        patient.admissions.append(admission)
        self.assertEqual(admission.patient, patient)
        self.assertEqual(len(patient.admissions.all()), 1)
        # Same check, but linking from the admission side instead.
        patient = mock.patient()
        admission = mock.admission()
        admission.patient = patient
        self.assertEqual(admission.patient, patient)
        self.assertEqual(len(patient.admissions.all()), 1)
    def test_admission_dated_event_1to1(self):
        '''
        Where dated event is a vaccine, hospitalization, utihospitalization or
        clinicalEvolution.
        That's why their constructor must be the same as MockDatedEvent.
        '''
        # Setup
        admission = mock.admission()
        vaccine = mock.dated_event(Vaccine)
        # Add to admission
        admission.vaccine = vaccine
        # Assert they are linked
        self.assertEqual(vaccine.admission.vaccine, vaccine)
        # Overrides previous vaccine (since it's one-to-one)
        vaccine2 = mock.dated_event(Vaccine)
        vaccine2.admission = admission
        # Assert it was replaced
        self.assertNotEqual(admission.vaccine, vaccine)
        self.assertEqual(admission.vaccine, vaccine2)
        # Ensures commit works
        db.session.add(admission)
        db.session.commit()
        self.assertEqual(vaccine2.id, 1)
        # The replaced vaccine became an orphan and was never persisted.
        self.assertIsNone(vaccine.id)
        self.assertEqual(len(Admission.query.all()), 1)
        self.assertEqual(len(Vaccine.query.all()), 1)
        # Ensures cascade all, delete-orphan works
        db.session.delete(admission)
        db.session.commit()
        self.assertEqual(len(Admission.query.all()), 0)
        self.assertEqual(len(Vaccine.query.all()), 0)
    def test_admission_symptoms_1toM(self):
        """Admission -> ObservedSymptom is one-to-many with delete-orphan."""
        # Generate mock models
        admission = mock.admission()
        obs_symptom0 = ObservedSymptom(
            observed=True,
            details='obs symptom details',
            admission=admission,
            symptom=Symptom(name='symptom1'),
        )
        obs_symptom1 = ObservedSymptom(
            observed=False,
            details='obs symptom details',
            admission=admission,
            symptom=Symptom(name='symptom2'),
        )
        # Assert relationship between is setup
        self.assertEqual(len(admission.symptoms), 2)
        self.assertEqual(obs_symptom0.admission, obs_symptom1.admission)
        self.assertEqual(admission.symptoms[0], obs_symptom0)
        self.assertEqual(admission.symptoms[1], obs_symptom1)
        # Assert they are correctly commited
        db.session.add(admission)
        db.session.commit()
        # Assert symptoms have the same admission_id
        self.assertEqual(obs_symptom0.admission_id, obs_symptom1.admission_id)
        # Assert cascade all, delete-orphan works
        db.session.delete(admission)
        db.session.commit()
        self.assertEqual(len(Admission.query.all()), 0)
        self.assertEqual(len(ObservedSymptom.query.all()), 0)
    def test_syptom_observations_Mto1(self):
        """Many ObservedSymptom rows may point at one Symptom."""
        symptom = Symptom(name='symptom')
        admission0 = mock.admission()
        admission1 = mock.admission()
        # id_lvrs_intern must be unique
        admission1.id_lvrs_intern += 'lvrs0002'
        # Generate mock models
        obs_symptom0 = ObservedSymptom(
            observed=True,
            details='obs symptom details',
            admission=admission0,
            symptom=symptom
        )
        obs_symptom1 = ObservedSymptom(
            observed=False,
            details='obs symptom details',
            admission=admission1,
            symptom=symptom,
        )
        # Assert relationship is correctly setup
        self.assertEqual(len(symptom.observations), 2)
        self.assertEqual(symptom.observations[0], obs_symptom0)
        # Collaterally, admission has relation with observed symptom
        self.assertEqual(admission0.symptoms[0], obs_symptom0)
        # Assert they are correctly commited
        db.session.add(symptom)
        db.session.commit()
        # Assert symptoms have the same admission_id
        self.assertEqual(obs_symptom0.symptom_id, symptom.id)
        # Assert cascade all, delete-orphan works
        db.session.delete(symptom)
        db.session.commit()
        self.assertEqual(len(Symptom.query.all()), 0)
        self.assertEqual(len(ObservedSymptom.query.all()), 0)
        # Collaterally, admission does not have the observed symptom
        self.assertEqual(len(admission0.symptoms), 0)
|
[
"labsys.app.db.drop_all",
"labsys.app.create_app",
"labsys.admissions.models.ObservedSymptom.query.all",
"labsys.app.db.session.delete",
"labsys.admissions.models.Symptom",
"labsys.admissions.models.Vaccine.query.all",
"labsys.app.db.create_all",
"labsys.admissions.models.ObservedSymptom",
"labsys.app.db.session.remove",
"labsys.app.db.session.add",
"labsys.admissions.models.Symptom.query.all",
"labsys.admissions.models.Admission.query.all",
"labsys.app.db.session.commit"
] |
[((366, 387), 'labsys.app.create_app', 'create_app', (['"""testing"""'], {}), "('testing')\n", (376, 387), False, 'from labsys.app import create_app, db\n'), ((478, 493), 'labsys.app.db.create_all', 'db.create_all', ([], {}), '()\n', (491, 493), False, 'from labsys.app import create_app, db\n'), ((588, 607), 'labsys.app.db.session.remove', 'db.session.remove', ([], {}), '()\n', (605, 607), False, 'from labsys.app import create_app, db\n'), ((616, 629), 'labsys.app.db.drop_all', 'db.drop_all', ([], {}), '()\n', (627, 629), False, 'from labsys.app import create_app, db\n'), ((878, 901), 'labsys.app.db.session.add', 'db.session.add', (['patient'], {}), '(patient)\n', (892, 901), False, 'from labsys.app import create_app, db\n'), ((910, 929), 'labsys.app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (927, 929), False, 'from labsys.app import create_app, db\n'), ((2250, 2275), 'labsys.app.db.session.add', 'db.session.add', (['admission'], {}), '(admission)\n', (2264, 2275), False, 'from labsys.app import create_app, db\n'), ((2284, 2303), 'labsys.app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (2301, 2303), False, 'from labsys.app import create_app, db\n'), ((2552, 2580), 'labsys.app.db.session.delete', 'db.session.delete', (['admission'], {}), '(admission)\n', (2569, 2580), False, 'from labsys.app import create_app, db\n'), ((2589, 2608), 'labsys.app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (2606, 2608), False, 'from labsys.app import create_app, db\n'), ((3581, 3606), 'labsys.app.db.session.add', 'db.session.add', (['admission'], {}), '(admission)\n', (3595, 3606), False, 'from labsys.app import create_app, db\n'), ((3615, 3634), 'labsys.app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3632, 3634), False, 'from labsys.app import create_app, db\n'), ((3825, 3853), 'labsys.app.db.session.delete', 'db.session.delete', (['admission'], {}), '(admission)\n', (3842, 3853), False, 'from labsys.app import create_app, 
db\n'), ((3862, 3881), 'labsys.app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3879, 3881), False, 'from labsys.app import create_app, db\n'), ((4064, 4087), 'labsys.admissions.models.Symptom', 'Symptom', ([], {'name': '"""symptom"""'}), "(name='symptom')\n", (4071, 4087), False, 'from labsys.admissions.models import Patient, Address, Admission, Symptom, ObservedSymptom, Method, Sample, InfluenzaExam, Vaccine, Hospitalization, UTIHospitalization, ClinicalEvolution\n'), ((4306, 4411), 'labsys.admissions.models.ObservedSymptom', 'ObservedSymptom', ([], {'observed': '(True)', 'details': '"""obs symptom details"""', 'admission': 'admission0', 'symptom': 'symptom'}), "(observed=True, details='obs symptom details', admission=\n admission0, symptom=symptom)\n", (4321, 4411), False, 'from labsys.admissions.models import Patient, Address, Admission, Symptom, ObservedSymptom, Method, Sample, InfluenzaExam, Vaccine, Hospitalization, UTIHospitalization, ClinicalEvolution\n'), ((4488, 4594), 'labsys.admissions.models.ObservedSymptom', 'ObservedSymptom', ([], {'observed': '(False)', 'details': '"""obs symptom details"""', 'admission': 'admission1', 'symptom': 'symptom'}), "(observed=False, details='obs symptom details', admission=\n admission1, symptom=symptom)\n", (4503, 4594), False, 'from labsys.admissions.models import Patient, Address, Admission, Symptom, ObservedSymptom, Method, Sample, InfluenzaExam, Vaccine, Hospitalization, UTIHospitalization, ClinicalEvolution\n'), ((5002, 5025), 'labsys.app.db.session.add', 'db.session.add', (['symptom'], {}), '(symptom)\n', (5016, 5025), False, 'from labsys.app import create_app, db\n'), ((5034, 5053), 'labsys.app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (5051, 5053), False, 'from labsys.app import create_app, db\n'), ((5227, 5253), 'labsys.app.db.session.delete', 'db.session.delete', (['symptom'], {}), '(symptom)\n', (5244, 5253), False, 'from labsys.app import create_app, db\n'), ((5262, 5281), 
'labsys.app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (5279, 5281), False, 'from labsys.app import create_app, db\n'), ((2412, 2433), 'labsys.admissions.models.Admission.query.all', 'Admission.query.all', ([], {}), '()\n', (2431, 2433), False, 'from labsys.admissions.models import Patient, Address, Admission, Symptom, ObservedSymptom, Method, Sample, InfluenzaExam, Vaccine, Hospitalization, UTIHospitalization, ClinicalEvolution\n'), ((2468, 2487), 'labsys.admissions.models.Vaccine.query.all', 'Vaccine.query.all', ([], {}), '()\n', (2485, 2487), False, 'from labsys.admissions.models import Patient, Address, Admission, Symptom, ObservedSymptom, Method, Sample, InfluenzaExam, Vaccine, Hospitalization, UTIHospitalization, ClinicalEvolution\n'), ((2638, 2659), 'labsys.admissions.models.Admission.query.all', 'Admission.query.all', ([], {}), '()\n', (2657, 2659), False, 'from labsys.admissions.models import Patient, Address, Admission, Symptom, ObservedSymptom, Method, Sample, InfluenzaExam, Vaccine, Hospitalization, UTIHospitalization, ClinicalEvolution\n'), ((2694, 2713), 'labsys.admissions.models.Vaccine.query.all', 'Vaccine.query.all', ([], {}), '()\n', (2711, 2713), False, 'from labsys.admissions.models import Patient, Address, Admission, Symptom, ObservedSymptom, Method, Sample, InfluenzaExam, Vaccine, Hospitalization, UTIHospitalization, ClinicalEvolution\n'), ((2995, 3019), 'labsys.admissions.models.Symptom', 'Symptom', ([], {'name': '"""symptom1"""'}), "(name='symptom1')\n", (3002, 3019), False, 'from labsys.admissions.models import Patient, Address, Admission, Symptom, ObservedSymptom, Method, Sample, InfluenzaExam, Vaccine, Hospitalization, UTIHospitalization, ClinicalEvolution\n'), ((3195, 3219), 'labsys.admissions.models.Symptom', 'Symptom', ([], {'name': '"""symptom2"""'}), "(name='symptom2')\n", (3202, 3219), False, 'from labsys.admissions.models import Patient, Address, Admission, Symptom, ObservedSymptom, Method, Sample, InfluenzaExam, 
Vaccine, Hospitalization, UTIHospitalization, ClinicalEvolution\n'), ((3911, 3932), 'labsys.admissions.models.Admission.query.all', 'Admission.query.all', ([], {}), '()\n', (3930, 3932), False, 'from labsys.admissions.models import Patient, Address, Admission, Symptom, ObservedSymptom, Method, Sample, InfluenzaExam, Vaccine, Hospitalization, UTIHospitalization, ClinicalEvolution\n'), ((3967, 3994), 'labsys.admissions.models.ObservedSymptom.query.all', 'ObservedSymptom.query.all', ([], {}), '()\n', (3992, 3994), False, 'from labsys.admissions.models import Patient, Address, Admission, Symptom, ObservedSymptom, Method, Sample, InfluenzaExam, Vaccine, Hospitalization, UTIHospitalization, ClinicalEvolution\n'), ((5311, 5330), 'labsys.admissions.models.Symptom.query.all', 'Symptom.query.all', ([], {}), '()\n', (5328, 5330), False, 'from labsys.admissions.models import Patient, Address, Admission, Symptom, ObservedSymptom, Method, Sample, InfluenzaExam, Vaccine, Hospitalization, UTIHospitalization, ClinicalEvolution\n'), ((5365, 5392), 'labsys.admissions.models.ObservedSymptom.query.all', 'ObservedSymptom.query.all', ([], {}), '()\n', (5390, 5392), False, 'from labsys.admissions.models import Patient, Address, Admission, Symptom, ObservedSymptom, Method, Sample, InfluenzaExam, Vaccine, Hospitalization, UTIHospitalization, ClinicalEvolution\n')]
|
from .alphabet import protein_alphabet, dna_alphabet, rna_alphabet
from .alignment import Alignment, ReferenceMapping
import numpy as np
from Bio import pairwise2
from Bio.SubsMat import MatrixInfo
def _get_substitution_matrix(alphabet):
    """Default ``(substitution_matrix, gap_open, gap_extend)`` for a known alphabet.

    Proteins get BLOSUM50 with gap penalties -8/-8; DNA and RNA get a
    +5 match / -4 mismatch table (differing only in T vs. U) with gap
    penalties -2/-0.5. Any other alphabet raises ``ValueError``.
    """
    if alphabet == protein_alphabet:
        return MatrixInfo.blosum50, -8, -8
    if alphabet == dna_alphabet:
        letters = ('A', 'C', 'G', 'T')
    elif alphabet == rna_alphabet:
        letters = ('A', 'C', 'G', 'U')
    else:
        raise ValueError('explicit substitution_matrix missing on alignment with alphabet that is not protein, dna,'
                         ' or rna')
    # Build the lower-triangular score table: +5 on the diagonal, -4 elsewhere.
    matrix = {}
    for row, a in enumerate(letters):
        for b in letters[:row + 1]:
            matrix[(a, b)] = 5 if a == b else -4
    return matrix, -2, -0.5
def search(align, seq, move_to_top=False, substitution_matrix=None, gap_open=None, gap_extend=None):
    """Return the index of the alignment row that best matches ``seq``.

    Each row is stripped of gaps and globally aligned against ``seq``; the
    highest-scoring row wins. With ``move_to_top=True`` the winner is swapped
    into position 0, but the returned index is its *original* position.

    When no substitution matrix is given, defaults are chosen per alphabet
    (BLOSUM50 for proteins, NUC.4.4 for DNA, a U-substituted NUC.4.4 for
    RNA); for any other alphabet an explicit dict of letter-pair scores is
    required. Only single-alphabet alignments are supported.
    """
    if len(align.alphabets) == 0 or len(align) == 0:
        raise ValueError('search on empty alignment.')
    if len(align.alphabets) > 1:
        raise ValueError('search not implemented on multi-alphabet alignments.')
    if len(seq) == 0:
        raise ValueError('search with empty sequence.')
    alphabet = align.alphabets[0][0]
    if substitution_matrix is None:
        # Fill in matrix and any unspecified penalties from the defaults.
        substitution_matrix, default_open, default_extend = _get_substitution_matrix(alphabet)
        gap_open = default_open if gap_open is None else gap_open
        gap_extend = default_extend if gap_extend is None else gap_extend
    # Work with a plain string.
    seq = ''.join(seq)
    if not alphabet.has_gap:
        raise ValueError('search requires the alignment alphabet to have a gap.')
    gap_char = alphabet[0]
    # One gap-free string per alignment row.
    candidates = [''.join(ch for ch in row if ch != gap_char)
                  for row in np.asarray(align[:, :])]
    scores = [
        pairwise2.align.globalds(seq, candidate, substitution_matrix,
                                 gap_open, gap_extend, one_alignment_only=True,
                                 score_only=True, penalize_end_gaps=False)
        for candidate in candidates
    ]
    best_id = np.argmax(scores)
    if move_to_top:
        align.swap(0, best_id)
    return best_id
def filter_rows(align, max_gaps=0.5):
    """Return a new alignment without the overly gappy rows.

    A row is kept when its gap fraction is at most ``max_gaps``; an empty
    input yields an empty ``Alignment``.
    """
    if len(align) == 0:
        return Alignment()
    per_row_gap_fraction = np.mean(align.get_gap_structure(), axis=1)
    keep = per_row_gap_fraction <= max_gaps
    return align[keep]
def align_to_sequence(align, seq, ref_idx_names=None, truncate=False, force_idx=None):
    """ Set the reference mapping for the alignment according to the given sequence.

    By default, the function searches for the best match to `seq` within the alignment, and uses this match to infer a
    mapping between alignment columns and positions in `seq`. Columns that do not match any position in `seq` are marked
    with `None`. If `truncate` is `True`, the columns that do not have a match in `seq` are removed.

    If `force_idx` is set, the search is not done, and only the sequence at that position is used for the matching.

    By default, the positions in `seq` are numbered consecutively, starting from 0. If `ref_idx_names` is given,
    position `i` in `seq` will have name `ref_idx_names[i]`, and these names will be used in the reference mapping that
    will be attached to the alignment.

    Currently this only works for single alphabet alignments, and the alphabet needs to be protein, DNA, or RNA.

    The position of the matched sequence in the alignment, and the accuracy of the match, are returned in a dictionary.
    """
    if len(align) == 0:
        # nothing to do
        return align
    if len(align.alphabets) > 1:
        raise ValueError('align_to_sequence not implemented on multi-alphabet alignments.')
    alphabet = align.alphabets[0][0]
    # Default matrix/penalties for this alphabet (raises for non-bio alphabets).
    substitution_matrix, gap_open, gap_extend = _get_substitution_matrix(alphabet)
    if force_idx is None:
        # find the best matching sequence
        force_idx = search(align, seq, substitution_matrix=substitution_matrix,
                           gap_open=gap_open, gap_extend=gap_extend)
    # find the best match
    gap_ch = alphabet[0]
    # need the alignment sequence without gaps
    align_seq = np.asarray(align.data[force_idx])[0]
    align_gap_mask = (align_seq == gap_ch)
    align_seq_no_gaps = align_seq[~align_gap_mask]
    align_seq_no_gaps_as_str = ''.join(align_seq_no_gaps)
    seq = ''.join(seq)
    # Pairwise-align the reference sequence against the chosen (ungapped) row;
    # p_al[0][0] is the gapped reference, p_al[0][1] the gapped alignment row.
    p_al = pairwise2.align.globalds(seq, align_seq_no_gaps_as_str, substitution_matrix, gap_open, gap_extend,
                                    penalize_end_gaps=False)
    # this will be the mapping from indices in alignment to indices in `seq`
    ref_idxs = np.asarray([None for _ in range(len(align_seq))])
    # the ungapped positions in p_al[0][0] correspond to positions in the reference sequence
    # let's label them
    p_al_ref_idxs = np.asarray([None for _ in range(len(p_al[0][0]))])
    p_al_ref_idxs[np.asarray(list(p_al[0][0])) != gap_ch] = list(range(len(seq)))
    # now the ungapped positions in p_al[0][1] correspond to ungapped positions in the alignment sequence
    ref_idxs[~align_gap_mask] = p_al_ref_idxs[np.asarray(list(p_al[0][1])) != gap_ch]
    # calculate some details
    # align_accuracy: fraction of matching letters over positions where
    # neither pairwise-aligned string has a gap.
    details = {'align_accuracy': np.mean(
        [a == b for a, b in zip(p_al[0][0], p_al[0][1]) if a != gap_ch and b != gap_ch]),
        'idx': force_idx}
    # do we want to truncate the alignment?
    if truncate:
        # noinspection PyComparisonWithNone
        # element-wise comparison against None on the object array (keep != None)
        truncate_mask = (ref_idxs != None)
        align.truncate_columns(truncate_mask, in_place=True)
        ref_idxs = ref_idxs[truncate_mask]
    if ref_idx_names is not None:
        # Translate numeric positions into the caller-supplied names.
        ref_seq = [ref_idx_names[_] if _ is not None else None for _ in ref_idxs]
    else:
        ref_seq = ref_idxs
    align.reference = ReferenceMapping(list(ref_seq))
    return details
|
[
"numpy.asarray",
"Bio.pairwise2.align.globalds",
"numpy.mean",
"numpy.argmax"
] |
[((3260, 3277), 'numpy.argmax', 'np.argmax', (['scores'], {}), '(scores)\n', (3269, 3277), True, 'import numpy as np\n'), ((3655, 3685), 'numpy.mean', 'np.mean', (['gap_structure'], {'axis': '(1)'}), '(gap_structure, axis=1)\n', (3662, 3685), True, 'import numpy as np\n'), ((5770, 5897), 'Bio.pairwise2.align.globalds', 'pairwise2.align.globalds', (['seq', 'align_seq_no_gaps_as_str', 'substitution_matrix', 'gap_open', 'gap_extend'], {'penalize_end_gaps': '(False)'}), '(seq, align_seq_no_gaps_as_str, substitution_matrix,\n gap_open, gap_extend, penalize_end_gaps=False)\n', (5794, 5897), False, 'from Bio import pairwise2\n'), ((5546, 5579), 'numpy.asarray', 'np.asarray', (['align.data[force_idx]'], {}), '(align.data[force_idx])\n', (5556, 5579), True, 'import numpy as np\n'), ((2848, 2871), 'numpy.asarray', 'np.asarray', (['align[:, :]'], {}), '(align[:, :])\n', (2858, 2871), True, 'import numpy as np\n'), ((2959, 3118), 'Bio.pairwise2.align.globalds', 'pairwise2.align.globalds', (['seq', 'align_seq', 'substitution_matrix', 'gap_open', 'gap_extend'], {'one_alignment_only': '(True)', 'score_only': '(True)', 'penalize_end_gaps': '(False)'}), '(seq, align_seq, substitution_matrix, gap_open,\n gap_extend, one_alignment_only=True, score_only=True, penalize_end_gaps\n =False)\n', (2983, 3118), False, 'from Bio import pairwise2\n')]
|
#!/usr/bin/python
# Copyright: (c) 2018, <NAME> <<EMAIL>>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = """
---
module: utm_proxy_auth_profile
author:
- <NAME> (@stearz)
short_description: create, update or destroy reverse_proxy auth_profile entry in Sophos UTM
description:
- Create, update or destroy a reverse_proxy auth_profile entry in SOPHOS UTM.
- This module needs to have the REST Ability of the UTM to be activated.
version_added: "2.8"
options:
name:
description:
- The name of the object. Will be used to identify the entry
required: true
aaa:
description:
- List of references to utm_aaa objects (allowed users or groups)
required: true
basic_prompt:
description:
- The message in the basic authentication prompt
required: true
backend_mode:
description:
- Specifies if the backend server needs authentication ([Basic|None])
default: None
choices:
- Basic
- None
backend_strip_basic_auth:
description:
- Should the login data be stripped when proxying the request to the backend host
type: bool
default: True
choices:
- True
- False
backend_user_prefix:
description:
- Prefix string to prepend to the username for backend authentication
default: ""
backend_user_suffix:
description:
- Suffix string to append to the username for backend authentication
default: ""
comment:
description:
- Optional comment string
default: ""
frontend_cookie:
description:
- Frontend cookie name
frontend_cookie_secret:
description:
- Frontend cookie secret
frontend_form:
description:
- Frontend authentication form name
frontend_form_template:
description:
- Frontend authentication form template
default: ""
frontend_login:
description:
- Frontend login name
frontend_logout:
description:
- Frontend logout name
frontend_mode:
description:
- Frontend authentication mode (Form|Basic)
default: Basic
choices:
- Basic
- Form
frontend_realm:
description:
- Frontend authentication realm
frontend_session_allow_persistency:
description:
- Allow session persistency
type: bool
default: False
choices:
- True
- False
frontend_session_lifetime:
description:
- session lifetime
required: true
frontend_session_lifetime_limited:
description:
- Specifies if limitation of session lifetime is active
type: bool
default: True
choices:
- True
- False
frontend_session_lifetime_scope:
description:
- scope for frontend_session_lifetime (days|hours|minutes)
default: hours
choices:
- days
- hours
- minutes
frontend_session_timeout:
description:
- session timeout
required: true
frontend_session_timeout_enabled:
description:
- Specifies if session timeout is active
type: bool
default: True
choices:
- True
- False
frontend_session_timeout_scope:
description:
- scope for frontend_session_timeout (days|hours|minutes)
default: minutes
choices:
- days
- hours
- minutes
logout_delegation_urls:
description:
- List of logout URLs that logouts are delegated to
default: []
logout_mode:
description:
- Mode of logout (None|Delegation)
default: None
choices:
- None
- Delegation
redirect_to_requested_url:
description:
- Should a redirect to the requested URL be made
type: bool
default: False
choices:
- True
- False
extends_documentation_fragment:
- utm
"""
EXAMPLES = """
- name: Create UTM proxy_auth_profile
utm_proxy_auth_profile:
utm_host: sophos.host.name
utm_token: <PASSWORD>
name: TestAuthProfileEntry
aaa: [REF_OBJECT_STRING,REF_ANOTHEROBJECT_STRING]
basic_prompt: "Authentication required: Please login"
frontend_session_lifetime: 1
frontend_session_timeout: 1
state: present
- name: Remove UTM proxy_auth_profile
utm_proxy_auth_profile:
utm_host: sophos.host.name
utm_token: abcdefghijkl<PASSWORD>
name: TestAuthProfileEntry
state: absent
- name: Read UTM proxy_auth_profile
utm_proxy_auth_profile:
utm_host: sophos.host.name
utm_token: abcdefgh<PASSWORD>
name: TestAuthProfileEntry
state: info
"""
RETURN = """
result:
description: The utm object that was created
returned: success
type: complex
contains:
_ref:
description: The reference name of the object
type: str
_locked:
description: Whether or not the object is currently locked
type: bool
_type:
description: The type of the object
type: str
name:
description: The name of the object
type: str
aaa:
description: List of references to utm_aaa objects (allowed users or groups)
type: list
basic_prompt:
description: The message in the basic authentication prompt
type: str
backend_mode:
description: Specifies if the backend server needs authentication ([Basic|None])
type: str
backend_strip_basic_auth:
description: Should the login data be stripped when proxying the request to the backend host
type: bool
backend_user_prefix:
description: Prefix string to prepend to the username for backend authentication
type: str
backend_user_suffix:
description: Suffix string to append to the username for backend authentication
type: str
comment:
description: Optional comment string
type: str
frontend_cookie:
description: Frontend cookie name
type: str
frontend_cookie_secret:
description: Frontend cookie secret
type: str
frontend_form:
description: Frontend authentication form name
type: str
frontend_form_template:
description: Frontend authentication form template
type: str
frontend_login:
description: Frontend login name
type: str
frontend_logout:
description: Frontend logout name
type: str
frontend_mode:
description: Frontend authentication mode (Form|Basic)
type: str
frontend_realm:
description: Frontend authentication realm
type: str
frontend_session_allow_persistency:
description: Allow session persistency
type: bool
frontend_session_lifetime:
description: session lifetime
type: int
frontend_session_lifetime_limited:
description: Specifies if limitation of session lifetime is active
type: bool
frontend_session_lifetime_scope:
description: scope for frontend_session_lifetime (days|hours|minutes)
type: str
frontend_session_timeout:
description: session timeout
type: int
frontend_session_timeout_enabled:
description: Specifies if session timeout is active
type: bool
frontend_session_timeout_scope:
description: scope for frontend_session_timeout (days|hours|minutes)
type: str
logout_delegation_urls:
description: List of logout URLs that logouts are delegated to
type: list
logout_mode:
description: Mode of logout (None|Delegation)
type: str
redirect_to_requested_url:
description: Should a redirect to the requested URL be made
type: bool
"""
from ansible.module_utils.utm_utils import UTM, UTMModule
from ansible.module_utils._text import to_native
def main():
    """Entry point of the Ansible module.

    Builds the UTMModule argument spec for the reverse_proxy/auth_profile
    endpoint and delegates create/update/delete/info handling to the shared
    UTM executor; any exception is reported through ``fail_json``.
    """
    endpoint = "reverse_proxy/auth_profile"
    # Fields compared against the existing UTM object to decide whether an
    # update request must be sent (i.e. whether "changed" is reported).
    key_to_check_for_changes = ["aaa", "basic_prompt", "backend_mode", "backend_strip_basic_auth",
                                "backend_user_prefix", "backend_user_suffix", "comment", "frontend_cookie",
                                "frontend_cookie_secret", "frontend_form", "frontend_form_template",
                                "frontend_login", "frontend_logout", "frontend_mode", "frontend_realm",
                                "frontend_session_allow_persistency", "frontend_session_lifetime",
                                "frontend_session_lifetime_limited", "frontend_session_lifetime_scope",
                                "frontend_session_timeout", "frontend_session_timeout_enabled",
                                "frontend_session_timeout_scope", "logout_delegation_urls", "logout_mode",
                                "redirect_to_requested_url"]
    # Argument spec mirrors the DOCUMENTATION block above (names, defaults,
    # choices); UTMModule adds the common utm_* connection parameters.
    module = UTMModule(
        argument_spec=dict(
            name=dict(type='str', required=True),
            aaa=dict(type='list', elements='str', required=True),
            basic_prompt=dict(type='str', required=True),
            backend_mode=dict(type='str', required=False, default="None", choices=['Basic', 'None']),
            backend_strip_basic_auth=dict(type='bool', required=False, default=True, choices=[True, False]),
            backend_user_prefix=dict(type='str', required=False, default=""),
            backend_user_suffix=dict(type='str', required=False, default=""),
            comment=dict(type='str', required=False, default=""),
            frontend_cookie=dict(type='str', required=False),
            frontend_cookie_secret=dict(type='str', required=False),
            frontend_form=dict(type='str', required=False),
            frontend_form_template=dict(type='str', required=False, default=""),
            frontend_login=dict(type='str', required=False),
            frontend_logout=dict(type='str', required=False),
            frontend_mode=dict(type='str', required=False, default="Basic", choices=['Basic', 'Form']),
            frontend_realm=dict(type='str', required=False),
            frontend_session_allow_persistency=dict(type='bool', required=False, default=False, choices=[True, False]),
            frontend_session_lifetime=dict(type='int', required=True),
            frontend_session_lifetime_limited=dict(type='bool', required=False, default=True, choices=[True, False]),
            frontend_session_lifetime_scope=dict(type='str', required=False, default="hours", choices=['days', 'hours', 'minutes']),
            frontend_session_timeout=dict(type='int', required=True),
            frontend_session_timeout_enabled=dict(type='bool', required=False, default=True, choices=[True, False]),
            frontend_session_timeout_scope=dict(type='str', required=False, default="minutes", choices=['days', 'hours', 'minutes']),
            logout_delegation_urls=dict(type='list', elements='str', required=False, default=[]),
            logout_mode=dict(type='str', required=False, default="None", choices=['None', 'Delegation']),
            redirect_to_requested_url=dict(type='bool', required=False, default=False, choices=[True, False])
        )
    )
    try:
        # UTM.execute() performs the REST calls and exits the module itself.
        UTM(module, endpoint, key_to_check_for_changes).execute()
    except Exception as e:
        module.fail_json(msg=to_native(e))
if __name__ == '__main__':
main()
|
[
"ansible.module_utils._text.to_native",
"ansible.module_utils.utm_utils.UTM"
] |
[((12069, 12116), 'ansible.module_utils.utm_utils.UTM', 'UTM', (['module', 'endpoint', 'key_to_check_for_changes'], {}), '(module, endpoint, key_to_check_for_changes)\n', (12072, 12116), False, 'from ansible.module_utils.utm_utils import UTM, UTMModule\n'), ((12183, 12195), 'ansible.module_utils._text.to_native', 'to_native', (['e'], {}), '(e)\n', (12192, 12195), False, 'from ansible.module_utils._text import to_native\n')]
|
# Internal modules
from views import get_handler
class Agent:
    """Streams agent records from the paginated ``agents`` API endpoint."""

    @staticmethod
    def data_agent():
        """Yield a trimmed record for each non-occasional agent.

        Pages through the API three agents at a time and stops at the
        first empty/falsy page.

        BUG FIX: the original method was declared without ``self`` and
        without ``@staticmethod``, so ``Agent().data_agent()`` raised
        TypeError; ``@staticmethod`` makes both ``Agent.data_agent()`` and
        instance calls work.
        """
        page = 1
        while True:
            agents = get_handler(f'agents?per_page=3&page={page}')
            if not agents:
                break
            for agent in agents:
                # Occasional (part-time) agents are skipped.
                if not agent['occasional']:
                    yield {
                        'name': agent['contact']['name'],
                        'email': agent['contact']['email'],
                        '_id': agent['id'],
                        'created_at': agent['created_at'],
                        'updated_at': agent['updated_at'],
                    }
            page += 1
|
[
"views.get_handler"
] |
[((146, 191), 'views.get_handler', 'get_handler', (['f"""agents?per_page=3&page={page}"""'], {}), "(f'agents?per_page=3&page={page}')\n", (157, 191), False, 'from views import get_handler\n')]
|
import json
from typing import Any, Dict
from aiogram import types
from aiogram.dispatcher import FSMContext
from ..bot import settings # type: ignore
from ..product_answers import get_product
from ..product_filters import ProductFilters
from ..utils import is_admin
async def prepare_order_data(
    state: FSMContext, pre_checkout_query: types.PreCheckoutQuery
) -> Dict[str, Any]:
    """Assemble the backend order payload from a Telegram pre-checkout query.

    The invoice payload carries ``[product_index, product_filters, size_id]``
    as JSON; the matching product is resolved from FSM state and combined
    with the buyer's contact and shipping details into the order dict.
    """
    product_index, raw_filters, size_id = json.loads(
        pre_checkout_query.invoice_payload
    )
    product = await get_product(state, product_index, ProductFilters(raw_filters))
    order_info = pre_checkout_query.order_info
    address = order_info.shipping_address
    return {
        "items": [{"shoes": product.id, "quantity": 1, "size": int(size_id)}],
        "full_name": order_info.name,
        "mobile_number": order_info.phone_number,
        "shipping_type": pre_checkout_query.shipping_option_id,
        "shipping_city": f"{address.state} {address.city}",
        "shipping_department": (
            f"{address.street_line1} {address.street_line2} {address.post_code}"
        ),
        "paytype": "online_payment",
        "status": "awaiting_fulfillment",
        # Orders placed by admins (or in DEBUG mode) are flagged as debug.
        "debug": settings.DEBUG or is_admin(pre_checkout_query.from_user.id),
    }
|
[
"json.loads"
] |
[((403, 449), 'json.loads', 'json.loads', (['pre_checkout_query.invoice_payload'], {}), '(pre_checkout_query.invoice_payload)\n', (413, 449), False, 'import json\n')]
|
# -*- coding: utf-8 -*-
import json
from abc import ABC
from typing import Optional, Any, Callable
import bottle
from pip_services3_commons.config import ConfigParams
from pip_services3_commons.config.IConfigurable import IConfigurable
from pip_services3_commons.data.FilterParams import FilterParams
from pip_services3_commons.data.PagingParams import PagingParams
from pip_services3_commons.errors.BadRequestException import BadRequestException
from pip_services3_commons.errors.ConflictException import ConflictException
from pip_services3_commons.errors.NotFoundException import NotFoundException
from pip_services3_commons.errors.UnauthorizedException import UnauthorizedException
from pip_services3_commons.errors.UnknownException import UnknownException
from pip_services3_commons.refer import IReferences
from pip_services3_commons.refer.DependencyResolver import DependencyResolver
from pip_services3_commons.refer.IReferenceable import IReferenceable
from pip_services3_components.count.CompositeCounters import CompositeCounters
from pip_services3_components.log.CompositeLogger import CompositeLogger
from .HttpResponseSender import HttpResponseSender
class RestOperations(IConfigurable, IReferenceable, ABC):
def __init__(self):
super().__init__()
self._logger: CompositeLogger = CompositeLogger()
self._counters: CompositeCounters = CompositeCounters()
self._dependency_resolver: DependencyResolver = DependencyResolver()
def configure(self, config: ConfigParams):
self._dependency_resolver.configure(config)
def set_references(self, references: IReferences):
self._logger.set_references(references)
self._counters.set_references(references)
self._dependency_resolver.set_references(references)
def get_param(self, param, default=None):
return bottle.request.params.get(param, default)
def _get_correlation_id(self) -> Optional[str]:
"""
Returns correlationId from request
:returns: Returns correlationId from request
"""
correlation_id = bottle.request.query.get('correlation_id')
if correlation_id is None or correlation_id == '':
correlation_id = bottle.request.headers.get('correlation_id')
return correlation_id
def _get_filter_params(self) -> FilterParams:
data = dict(bottle.request.query.decode())
data.pop('correlation_id', None)
data.pop('skip', None)
data.pop('take', None)
data.pop('total', None)
return FilterParams(data)
def _get_paging_params(self) -> PagingParams:
params = dict(bottle.request.query.decode())
skip = params.get('skip')
take = params.get('take')
total = params.get('total')
return PagingParams(skip, take, total)
def _get_data(self) -> Optional[str]:
data = bottle.request.json
if isinstance(data, str):
return json.loads(bottle.request.json)
elif bottle.request.json:
return bottle.request.json
else:
return None
def _send_result(self, result: Any = None) -> Optional[str]:
return HttpResponseSender.send_result(result)
def _send_empty_result(self, result: Any = None) -> Optional[str]:
return HttpResponseSender.send_empty_result(result)
def _send_created_result(self, result: Any = None) -> Optional[str]:
return HttpResponseSender.send_created_result(result)
def _send_deleted_result(self, result: Any = None) -> Optional[str]:
return HttpResponseSender.send_deleted_result(result)
def _send_error(self, error: Any = None) -> str:
return HttpResponseSender.send_error(error)
def _send_bad_request(self, message: str) -> str:
correlation_id = self._get_correlation_id()
error = BadRequestException(correlation_id, 'BAD_REQUEST', message)
return self._send_error(error)
def _send_unauthorized(self, message: str) -> str:
correlation_id = self._get_correlation_id()
error = UnauthorizedException(correlation_id, 'UNAUTHORIZED', message)
return self._send_error(error)
def _send_not_found(self, message: str) -> str:
correlation_id = self._get_correlation_id()
error = NotFoundException(correlation_id, 'NOT_FOUND', message)
return self._send_error(error)
def _send_conflict(self, message: str) -> str:
correlation_id = self._get_correlation_id()
error = ConflictException(correlation_id, 'CONFLICT', message)
return self._send_error(error)
def _send_session_expired(self, message: str) -> str:
correlation_id = self._get_correlation_id()
error = UnknownException(correlation_id, 'SESSION_EXPIRED', message)
error.status = 440
return self._send_error(error)
def _send_internal_error(self, message: str) -> str:
correlation_id = self._get_correlation_id()
error = UnknownException(correlation_id, 'INTERNAL', message)
return self._send_error(error)
def _send_server_unavailable(self, message: str) -> str:
correlation_id = self._get_correlation_id()
error = ConflictException(correlation_id, 'SERVER_UNAVAILABLE', message)
error.status = 503
return self._send_error(error)
def invoke(self, operation: str) -> Callable:
for attr in dir(self):
if attr in dir(self):
return lambda param=None: getattr(self, operation)(param)
|
[
"pip_services3_commons.errors.UnauthorizedException.UnauthorizedException",
"pip_services3_commons.data.FilterParams.FilterParams",
"bottle.request.query.get",
"pip_services3_commons.errors.ConflictException.ConflictException",
"pip_services3_commons.errors.UnknownException.UnknownException",
"json.loads",
"pip_services3_commons.errors.BadRequestException.BadRequestException",
"bottle.request.params.get",
"pip_services3_commons.errors.NotFoundException.NotFoundException",
"pip_services3_components.count.CompositeCounters.CompositeCounters",
"pip_services3_commons.data.PagingParams.PagingParams",
"bottle.request.headers.get",
"pip_services3_components.log.CompositeLogger.CompositeLogger",
"pip_services3_commons.refer.DependencyResolver.DependencyResolver",
"bottle.request.query.decode"
] |
[((1319, 1336), 'pip_services3_components.log.CompositeLogger.CompositeLogger', 'CompositeLogger', ([], {}), '()\n', (1334, 1336), False, 'from pip_services3_components.log.CompositeLogger import CompositeLogger\n'), ((1381, 1400), 'pip_services3_components.count.CompositeCounters.CompositeCounters', 'CompositeCounters', ([], {}), '()\n', (1398, 1400), False, 'from pip_services3_components.count.CompositeCounters import CompositeCounters\n'), ((1457, 1477), 'pip_services3_commons.refer.DependencyResolver.DependencyResolver', 'DependencyResolver', ([], {}), '()\n', (1475, 1477), False, 'from pip_services3_commons.refer.DependencyResolver import DependencyResolver\n'), ((1855, 1896), 'bottle.request.params.get', 'bottle.request.params.get', (['param', 'default'], {}), '(param, default)\n', (1880, 1896), False, 'import bottle\n'), ((2096, 2138), 'bottle.request.query.get', 'bottle.request.query.get', (['"""correlation_id"""'], {}), "('correlation_id')\n", (2120, 2138), False, 'import bottle\n'), ((2555, 2573), 'pip_services3_commons.data.FilterParams.FilterParams', 'FilterParams', (['data'], {}), '(data)\n', (2567, 2573), False, 'from pip_services3_commons.data.FilterParams import FilterParams\n'), ((2797, 2828), 'pip_services3_commons.data.PagingParams.PagingParams', 'PagingParams', (['skip', 'take', 'total'], {}), '(skip, take, total)\n', (2809, 2828), False, 'from pip_services3_commons.data.PagingParams import PagingParams\n'), ((3856, 3915), 'pip_services3_commons.errors.BadRequestException.BadRequestException', 'BadRequestException', (['correlation_id', '"""BAD_REQUEST"""', 'message'], {}), "(correlation_id, 'BAD_REQUEST', message)\n", (3875, 3915), False, 'from pip_services3_commons.errors.BadRequestException import BadRequestException\n'), ((4079, 4141), 'pip_services3_commons.errors.UnauthorizedException.UnauthorizedException', 'UnauthorizedException', (['correlation_id', '"""UNAUTHORIZED"""', 'message'], {}), "(correlation_id, 'UNAUTHORIZED', message)\n", 
(4100, 4141), False, 'from pip_services3_commons.errors.UnauthorizedException import UnauthorizedException\n'), ((4302, 4357), 'pip_services3_commons.errors.NotFoundException.NotFoundException', 'NotFoundException', (['correlation_id', '"""NOT_FOUND"""', 'message'], {}), "(correlation_id, 'NOT_FOUND', message)\n", (4319, 4357), False, 'from pip_services3_commons.errors.NotFoundException import NotFoundException\n'), ((4517, 4571), 'pip_services3_commons.errors.ConflictException.ConflictException', 'ConflictException', (['correlation_id', '"""CONFLICT"""', 'message'], {}), "(correlation_id, 'CONFLICT', message)\n", (4534, 4571), False, 'from pip_services3_commons.errors.ConflictException import ConflictException\n'), ((4738, 4798), 'pip_services3_commons.errors.UnknownException.UnknownException', 'UnknownException', (['correlation_id', '"""SESSION_EXPIRED"""', 'message'], {}), "(correlation_id, 'SESSION_EXPIRED', message)\n", (4754, 4798), False, 'from pip_services3_commons.errors.UnknownException import UnknownException\n'), ((4991, 5044), 'pip_services3_commons.errors.UnknownException.UnknownException', 'UnknownException', (['correlation_id', '"""INTERNAL"""', 'message'], {}), "(correlation_id, 'INTERNAL', message)\n", (5007, 5044), False, 'from pip_services3_commons.errors.UnknownException import UnknownException\n'), ((5214, 5278), 'pip_services3_commons.errors.ConflictException.ConflictException', 'ConflictException', (['correlation_id', '"""SERVER_UNAVAILABLE"""', 'message'], {}), "(correlation_id, 'SERVER_UNAVAILABLE', message)\n", (5231, 5278), False, 'from pip_services3_commons.errors.ConflictException import ConflictException\n'), ((2227, 2271), 'bottle.request.headers.get', 'bottle.request.headers.get', (['"""correlation_id"""'], {}), "('correlation_id')\n", (2253, 2271), False, 'import bottle\n'), ((2374, 2403), 'bottle.request.query.decode', 'bottle.request.query.decode', ([], {}), '()\n', (2401, 2403), False, 'import bottle\n'), ((2647, 2676), 
'bottle.request.query.decode', 'bottle.request.query.decode', ([], {}), '()\n', (2674, 2676), False, 'import bottle\n'), ((2960, 2991), 'json.loads', 'json.loads', (['bottle.request.json'], {}), '(bottle.request.json)\n', (2970, 2991), False, 'import json\n')]
|
import numpy as np
from ._Epsilon import Epsilon
class UCB1(Epsilon):
"""
Agente que soluciona el problema del el Bandido Multibrazo
(Multi-Armed Bandit) mediante el uso de una estrategia UCB1
Parámetros
----------
bandits : array of Bandit
Vector con los bandidos con los que se debe jugar
Métodos
-------
run :
Realiza una serie de tiradas con los bandidos seleccionados
por el algoritmo
update:
Actualiza los valores adicionales después de una tirada
select :
Selecciona un bandido para jugar en la próxima tirada
average_reward :
Obtención de la recompensa promedio
plot :
Representación gráfica del histórico de tiradas
References
----------
<NAME>, <NAME>, and <NAME>. "A survey of online
experiment design with the stochastic multi-armed bandit." arXiv preprint
arXiv:1510.00757 (2015).
"""
def select(self):
total = len(self._rewards)
if total < self._num_bandits:
bandit = total
else:
ucb = [0] * self._num_bandits
for i in range(self._num_bandits):
ucb[i] = self._mean[i] + np.sqrt(2 * np.log(total) / self._plays[i])
max_bandits = np.where(ucb == np.max(ucb))[0]
bandit = np.random.choice(max_bandits)
return bandit
class UCB2(Epsilon):
"""
Agente que soluciona el problema del el Bandido Multibrazo
(Multi-Armed Bandit) mediante el uso de una estrategia UCB2
Parámetros
----------
bandits : array of Bandit
Vector con los bandidos con los que se debe jugar
alpha : float
Parámetro que se influye en el ratio de aprendizaje del algoritmo
Métodos
-------
run :
Realiza una serie de tiradas con los bandidos seleccionados
por el algoritmo
update:
Actualiza los valores adicionales después de una tirada
select :
Selecciona un bandido para jugar en la próxima tirada
average_reward :
Obtención de la recompensa promedio
plot :
Representación gráfica del histórico de tiradas
References
----------
<NAME>, <NAME>, and <NAME>. "A survey of online
experiment design with the stochastic multi-armed bandit." arXiv preprint
arXiv:1510.00757 (2015).
"""
def __init__(self, bandits, alpha=0.1):
self.alpha = alpha
self._mean = [0] * len(bandits)
super(UCB2, self).__init__(bandits)
def select(self):
total = len(self._rewards)
if total == 0:
bandit = np.random.choice(self._num_bandits)
else:
ucb = [0] * num_bandits
for i in range(num_bandits):
try:
tau = int(np.ceil((1 + self.alpha) ** self._plays[i]))
if np.log(np.e * total / tau) > 0:
bonus = np.sqrt((1. + self.alpha) * np.log(np.e * total / tau) / (2 * tau))
else:
bonus = 0
except:
bonus = 0
if np.isnan(bonus):
ucb[i] = self._mean[i]
else:
ucb[i] = self._mean[i] + bonus
max_bandits = np.where(ucb == np.max(ucb))[0]
bandit = np.random.choice(max_bandits)
return bandit
class UCB1Tuned(Epsilon):
"""
Agente que soluciona el problema del el Bandido Multibrazo
(Multi-Armed Bandit) mediante el uso de una estrategia UCB1-Tuned
Parámetros
----------
bandits : array of Bandit
Vector con los bandidos con los que se debe jugar
Métodos
-------
run :
Realiza una serie de tiradas con los bandidos seleccionados
por el algoritmo
update:
Actualiza los valores adicionales después de una tirada
select :
Selecciona un bandido para jugar en la próxima tirada
average_reward :
Obtención de la recompensa promedio
plot :
Representación gráfica del histórico de tiradas
References
----------
<NAME>, <NAME>, and <NAME>. "A survey of online
experiment design with the stochastic multi-armed bandit." arXiv preprint
arXiv:1510.00757 (2015).
"""
def __init__(self, bandits):
self._mean2 = [0] * len(bandits)
super(UCB1Tuned, self).__init__(bandits)
def update(self, bandit, reward):
# Actualización de la media de los cuadrados
self._mean2[bandit] = (1 - 1.0/self._plays[bandit]) * self._mean2[bandit] \
+ 1.0/self._plays[bandit] * reward ** 2
def select(self):
total = len(self._rewards)
if total == 0:
bandit = np.random.choice(self._num_bandits)
else:
ucb = [0] * self._num_bandits
for i in range(self._num_bandits):
if self._plays[i] == 0:
v = self._mean2[i] - self._mean[i] ** 2 + np.sqrt(2 * np.log(total))
else:
v = self._mean2[i] - self._mean[i] ** 2 + np.sqrt(2 * np.log(total) / self._plays[i])
ucb[i] = self._mean[i] + np.sqrt(np.log(total) * np.min([1/4, v]))
max_bandits = np.where(ucb == np.max(ucb))[0]
bandit = np.random.choice(max_bandits)
return bandit
class UCBNormal(Epsilon):
"""
Agente que soluciona el problema del el Bandido Multibrazo
(Multi-Armed Bandit) mediante el uso de una estrategia UCB-Normal
Parámetros
----------
bandits : array of Bandit
Vector con los bandidos con los que se debe jugar
Métodos
-------
run :
Realiza una serie de tiradas con los bandidos seleccionados
por el algoritmo
update:
Actualiza los valores adicionales después de una tirada
select :
Selecciona un bandido para jugar en la próxima tirada
average_reward :
Obtención de la recompensa promedio
plot :
Representación gráfica del histórico de tiradas
References
----------
<NAME>, <NAME>, and <NAME>. "A survey of online
experiment design with the stochastic multi-armed bandit." arXiv preprint
arXiv:1510.00757 (2015).
"""
def __init__(self, bandits):
self._rewards2 = [0] * len(bandits)
super(UCBNormal, self).__init__(bandits)
def update(self, bandit, reward):
self._rewards2[bandit] += reward ** 2
def select(self):
total = len(self._rewards)
# Número de veces mínimo que debe jugar cada bandido
if total > 0:
min_plays = np.ceil(8 * np.log(total))
else:
min_plays = 1
# En caso de que algún bandido no jugase el mínimo de veces se selecciona ese
if np.any(np.array(self._plays) < min_plays):
min_bandit = np.where(np.array(self._plays) < min_plays)[0]
bandit = np.random.choice(min_bandit)
else:
ucb = [0] * self._num_bandits
for i in range(self._num_bandits):
if self._plays[i] > 1:
bonus = 16 * (self._rewards2[i] - self._plays[i] * self._mean[i]**2) / (self._plays[i] - 1)
bonus *= np.log(total - 1) / self._plays[i]
bonus = np.sqrt(bonus)
ucb[i] = self._mean[i] + bonus
else:
ucb[i] = self._mean[i]
max_bandits = np.where(ucb == np.max(ucb))[0]
bandit = np.random.choice(max_bandits)
return bandit
class UCBV(Epsilon):
"""
Agente que soluciona el problema del el Bandido Multibrazo
(Multi-Armed Bandit) mediante el uso de una estrategia UCBV
Parámetros
----------
bandits : array of Bandit
Vector con los bandidos con los que se debe jugar
b : float
Hiperparámetro para seleccionar el ration de aprendizaje
Métodos
-------
run :
Realiza una serie de tiradas con los bandidos seleccionados
por el algoritmo
update:
Actualiza los valores adicionales después de una tirada
select :
Selecciona un bandido para jugar en la próxima tirada
average_reward :
Obtención de la recompensa promedio
plot :
Representación gráfica del histórico de tiradas
References
----------
<NAME>, <NAME>, and <NAME>.
"Exploration-exploitation trade-off using variance estimates in multi-armed
bandits." Theoretical Computer Science, Volume 410, Issue 19, 28 April 2009,
Pages 1876-1902 (https://doi.org/10.1016/j.tcs.2009.01.016)
"""
def __init__(self, bandits, b=3):
self.b = b
self._mean2 = [0] * len(bandits)
super(UCBV, self).__init__(bandits)
def update(self, bandit, reward):
self._mean2[bandit] += reward**2
def select(self):
num_bandits = len(self.bandits)
total = len(self._rewards)
if total < num_bandits:
bandit = total
else:
ucb = [0] * num_bandits
for i in range(num_bandits):
var = self._mean2[i] / self._plays[i] - self._mean[i]**2
ucb[i] = self._mean[i]
ucb[i] += np.sqrt(2 * var * np.log(total) / self._plays[i])
ucb[i] += self.b * np.log(total) / self._plays[i]
max_bandits = np.where(ucb == np.max(ucb))[0]
bandit = np.random.choice(max_bandits)
|
[
"numpy.log",
"numpy.ceil",
"numpy.isnan",
"numpy.max",
"numpy.min",
"numpy.array",
"numpy.random.choice",
"numpy.sqrt"
] |
[((1368, 1397), 'numpy.random.choice', 'np.random.choice', (['max_bandits'], {}), '(max_bandits)\n', (1384, 1397), True, 'import numpy as np\n'), ((2717, 2752), 'numpy.random.choice', 'np.random.choice', (['self._num_bandits'], {}), '(self._num_bandits)\n', (2733, 2752), True, 'import numpy as np\n'), ((3484, 3513), 'numpy.random.choice', 'np.random.choice', (['max_bandits'], {}), '(max_bandits)\n', (3500, 3513), True, 'import numpy as np\n'), ((4978, 5013), 'numpy.random.choice', 'np.random.choice', (['self._num_bandits'], {}), '(self._num_bandits)\n', (4994, 5013), True, 'import numpy as np\n'), ((5571, 5600), 'numpy.random.choice', 'np.random.choice', (['max_bandits'], {}), '(max_bandits)\n', (5587, 5600), True, 'import numpy as np\n'), ((7279, 7307), 'numpy.random.choice', 'np.random.choice', (['min_bandit'], {}), '(min_bandit)\n', (7295, 7307), True, 'import numpy as np\n'), ((7898, 7927), 'numpy.random.choice', 'np.random.choice', (['max_bandits'], {}), '(max_bandits)\n', (7914, 7927), True, 'import numpy as np\n'), ((9943, 9972), 'numpy.random.choice', 'np.random.choice', (['max_bandits'], {}), '(max_bandits)\n', (9959, 9972), True, 'import numpy as np\n'), ((3262, 3277), 'numpy.isnan', 'np.isnan', (['bonus'], {}), '(bonus)\n', (3270, 3277), True, 'import numpy as np\n'), ((7150, 7171), 'numpy.array', 'np.array', (['self._plays'], {}), '(self._plays)\n', (7158, 7171), True, 'import numpy as np\n'), ((6982, 6995), 'numpy.log', 'np.log', (['total'], {}), '(total)\n', (6988, 6995), True, 'import numpy as np\n'), ((7667, 7681), 'numpy.sqrt', 'np.sqrt', (['bonus'], {}), '(bonus)\n', (7674, 7681), True, 'import numpy as np\n'), ((1331, 1342), 'numpy.max', 'np.max', (['ucb'], {}), '(ucb)\n', (1337, 1342), True, 'import numpy as np\n'), ((2908, 2951), 'numpy.ceil', 'np.ceil', (['((1 + self.alpha) ** self._plays[i])'], {}), '((1 + self.alpha) ** self._plays[i])\n', (2915, 2951), True, 'import numpy as np\n'), ((2976, 3002), 'numpy.log', 'np.log', (['(np.e * total / 
tau)'], {}), '(np.e * total / tau)\n', (2982, 3002), True, 'import numpy as np\n'), ((3447, 3458), 'numpy.max', 'np.max', (['ucb'], {}), '(ucb)\n', (3453, 3458), True, 'import numpy as np\n'), ((5534, 5545), 'numpy.max', 'np.max', (['ucb'], {}), '(ucb)\n', (5540, 5545), True, 'import numpy as np\n'), ((7220, 7241), 'numpy.array', 'np.array', (['self._plays'], {}), '(self._plays)\n', (7228, 7241), True, 'import numpy as np\n'), ((7604, 7621), 'numpy.log', 'np.log', (['(total - 1)'], {}), '(total - 1)\n', (7610, 7621), True, 'import numpy as np\n'), ((7861, 7872), 'numpy.max', 'np.max', (['ucb'], {}), '(ucb)\n', (7867, 7872), True, 'import numpy as np\n'), ((9807, 9820), 'numpy.log', 'np.log', (['total'], {}), '(total)\n', (9813, 9820), True, 'import numpy as np\n'), ((9906, 9917), 'numpy.max', 'np.max', (['ucb'], {}), '(ucb)\n', (9912, 9917), True, 'import numpy as np\n'), ((5445, 5458), 'numpy.log', 'np.log', (['total'], {}), '(total)\n', (5451, 5458), True, 'import numpy as np\n'), ((5461, 5479), 'numpy.min', 'np.min', (['[1 / 4, v]'], {}), '([1 / 4, v])\n', (5467, 5479), True, 'import numpy as np\n'), ((9740, 9753), 'numpy.log', 'np.log', (['total'], {}), '(total)\n', (9746, 9753), True, 'import numpy as np\n'), ((1248, 1261), 'numpy.log', 'np.log', (['total'], {}), '(total)\n', (1254, 1261), True, 'import numpy as np\n'), ((5244, 5257), 'numpy.log', 'np.log', (['total'], {}), '(total)\n', (5250, 5257), True, 'import numpy as np\n'), ((3068, 3094), 'numpy.log', 'np.log', (['(np.e * total / tau)'], {}), '(np.e * total / tau)\n', (3074, 3094), True, 'import numpy as np\n'), ((5355, 5368), 'numpy.log', 'np.log', (['total'], {}), '(total)\n', (5361, 5368), True, 'import numpy as np\n')]
|
from pathlib import Path
import platform
DB_NAME = "ucity"
if platform.system() == "Darwin":
GDRIVE_FOLDER = Path("/Volumes/GoogleDrive/Shared drives/U_City_FY_21")
elif platform.system() == "Windows":
GDRIVE_FOLDER = Path(r"G:\Shared drives\U_City_FY_21")
|
[
"platform.system",
"pathlib.Path"
] |
[((64, 81), 'platform.system', 'platform.system', ([], {}), '()\n', (79, 81), False, 'import platform\n'), ((115, 170), 'pathlib.Path', 'Path', (['"""/Volumes/GoogleDrive/Shared drives/U_City_FY_21"""'], {}), "('/Volumes/GoogleDrive/Shared drives/U_City_FY_21')\n", (119, 170), False, 'from pathlib import Path\n'), ((177, 194), 'platform.system', 'platform.system', ([], {}), '()\n', (192, 194), False, 'import platform\n'), ((229, 268), 'pathlib.Path', 'Path', (['"""G:\\\\Shared drives\\\\U_City_FY_21"""'], {}), "('G:\\\\Shared drives\\\\U_City_FY_21')\n", (233, 268), False, 'from pathlib import Path\n')]
|
from blackduck import Client
import argparse
import logging
from pprint import pprint
logging.basicConfig(
level=logging.DEBUG,
format="[%(asctime)s] {%(module)s:%(lineno)d} %(levelname)s - %(message)s"
)
parser = argparse.ArgumentParser("Quickstart demonstration with Client")
parser.add_argument("--base-url", required=True, help="Hub server URL e.g. https://your.blackduck.url")
parser.add_argument("--token-file", dest='token_file', required=True, help="containing access token")
parser.add_argument("--no-verify", dest='verify', action='store_false', help="disable TLS certificate verification")
args = parser.parse_args()
with open(args.token_file, 'r') as tf:
access_token = tf.readline().strip()
bd = Client(
base_url=args.base_url,
token=access_token,
verify=args.verify
)
for project in bd.get_resource('projects'):
print(f"Project: {project['name']}")
print("Project list_resources():")
pprint(bd.list_resources(project))
for version in bd.get_resource('versions', project):
print(f"Version: {version['versionName']}")
for bom_component in bd.get_resource('components', version):
print(f"BOM component: {bom_component['componentName']}:{bom_component['componentVersionName']}")
# print(f"Version list_resources():")
# pprint(bd.list_resources(version))
# print("Exiting after printing first project and version")
# quit(0)
|
[
"blackduck.Client",
"argparse.ArgumentParser",
"logging.basicConfig"
] |
[((88, 209), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG', 'format': '"""[%(asctime)s] {%(module)s:%(lineno)d} %(levelname)s - %(message)s"""'}), "(level=logging.DEBUG, format=\n '[%(asctime)s] {%(module)s:%(lineno)d} %(levelname)s - %(message)s')\n", (107, 209), False, 'import logging\n'), ((225, 288), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""Quickstart demonstration with Client"""'], {}), "('Quickstart demonstration with Client')\n", (248, 288), False, 'import argparse\n'), ((726, 796), 'blackduck.Client', 'Client', ([], {'base_url': 'args.base_url', 'token': 'access_token', 'verify': 'args.verify'}), '(base_url=args.base_url, token=access_token, verify=args.verify)\n', (732, 796), False, 'from blackduck import Client\n')]
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2018, <NAME> <<EMAIL>>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: scaleway_volume_facts
deprecated:
removed_in: '2.13'
why: Deprecated in favour of C(_info) module.
alternative: Use M(scaleway_volume_info) instead.
short_description: Gather facts about the Scaleway volumes available.
description:
- Gather facts about the Scaleway volumes available.
version_added: "2.7"
author:
- "<NAME> (@Spredzy)"
- "<NAME> (@sieben)"
extends_documentation_fragment: scaleway
options:
region:
version_added: "2.8"
description:
- Scaleway region to use (for example par1).
required: true
choices:
- ams1
- EMEA-NL-EVS
- par1
- EMEA-FR-PAR1
'''
EXAMPLES = r'''
- name: Gather Scaleway volumes facts
scaleway_volume_facts:
region: par1
'''
RETURN = r'''
---
scaleway_volume_facts:
description: Response from Scaleway API
returned: success
type: complex
sample:
"scaleway_volume_facts": [
{
"creation_date": "2018-08-14T20:56:24.949660+00:00",
"export_uri": null,
"id": "b8d51a06-daeb-4fef-9539-a8aea016c1ba",
"modification_date": "2018-08-14T20:56:24.949660+00:00",
"name": "test-volume",
"organization": "3f709602-5e6c-4619-b80c-e841c89734af",
"server": null,
"size": 50000000000,
"state": "available",
"volume_type": "l_ssd"
}
]
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.scaleway import (
Scaleway, ScalewayException, scaleway_argument_spec,
SCALEWAY_LOCATION)
class ScalewayVolumeFacts(Scaleway):
def __init__(self, module):
super(ScalewayVolumeFacts, self).__init__(module)
self.name = 'volumes'
region = module.params["region"]
self.module.params['api_url'] = SCALEWAY_LOCATION[region]["api_endpoint"]
def main():
argument_spec = scaleway_argument_spec()
argument_spec.update(dict(
region=dict(required=True, choices=SCALEWAY_LOCATION.keys()),
))
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
try:
module.exit_json(
ansible_facts={'scaleway_volume_facts': ScalewayVolumeFacts(module).get_resources()}
)
except ScalewayException as exc:
module.fail_json(msg=exc.message)
if __name__ == '__main__':
main()
|
[
"ansible.module_utils.scaleway.scaleway_argument_spec",
"ansible.module_utils.basic.AnsibleModule",
"ansible.module_utils.scaleway.SCALEWAY_LOCATION.keys"
] |
[((2301, 2325), 'ansible.module_utils.scaleway.scaleway_argument_spec', 'scaleway_argument_spec', ([], {}), '()\n', (2323, 2325), False, 'from ansible.module_utils.scaleway import Scaleway, ScalewayException, scaleway_argument_spec, SCALEWAY_LOCATION\n'), ((2448, 2516), 'ansible.module_utils.basic.AnsibleModule', 'AnsibleModule', ([], {'argument_spec': 'argument_spec', 'supports_check_mode': '(True)'}), '(argument_spec=argument_spec, supports_check_mode=True)\n', (2461, 2516), False, 'from ansible.module_utils.basic import AnsibleModule\n'), ((2400, 2424), 'ansible.module_utils.scaleway.SCALEWAY_LOCATION.keys', 'SCALEWAY_LOCATION.keys', ([], {}), '()\n', (2422, 2424), False, 'from ansible.module_utils.scaleway import Scaleway, ScalewayException, scaleway_argument_spec, SCALEWAY_LOCATION\n')]
|
from test.integration.base import DBTIntegrationTest, FakeArgs, use_profile
import json
class TestChangingPartitions(DBTIntegrationTest):
@property
def schema(self):
return "bigquery_test_022"
@property
def models(self):
return "partition-models"
def run_changes(self, before, after):
results = self.run_dbt(['run', '--vars', json.dumps(before)])
self.assertEqual(len(results), 1)
results = self.run_dbt(['run', '--vars', json.dumps(after)])
self.assertEqual(len(results), 1)
def test_partitions(self, expected):
test_results = self.run_dbt(['test', '--vars', json.dumps(expected)])
for result in test_results:
self.assertEqual(result.status, 'pass')
self.assertFalse(result.skipped)
self.assertEqual(int(result.message), 0)
@use_profile('bigquery')
def test_bigquery_add_partition(self):
before = {"partition_by": None, "cluster_by": None}
after = {"partition_by": {'field': 'cur_time',
'data_type': 'timestamp'}, "cluster_by": None}
self.run_changes(before, after)
self.test_partitions({"expected": 1})
@use_profile('bigquery')
def test_bigquery_add_partition_year(self):
before = {"partition_by": None, "cluster_by": None}
after = {"partition_by": {'field': 'cur_time', 'data_type': 'timestamp', 'granularity': 'year'}, "cluster_by": None}
self.run_changes(before, after)
self.test_partitions({"expected": 1})
@use_profile('bigquery')
def test_bigquery_add_partition_month(self):
before = {"partition_by": None, "cluster_by": None}
after = {"partition_by": {'field': 'cur_time', 'data_type': 'timestamp', 'granularity': 'month'}, "cluster_by": None}
self.run_changes(before, after)
self.test_partitions({"expected": 1})
@use_profile('bigquery')
def test_bigquery_add_partition_hour(self):
before = {"partition_by": None, "cluster_by": None}
after = {"partition_by": {'field': 'cur_time', 'data_type': 'timestamp', 'granularity': 'hour'}, "cluster_by": None}
self.run_changes(before, after)
self.test_partitions({"expected": 1})
@use_profile('bigquery')
def test_bigquery_remove_partition(self):
before = {"partition_by": {'field': 'cur_time',
'data_type': 'timestamp'}, "cluster_by": None}
after = {"partition_by": None, "cluster_by": None}
self.run_changes(before, after)
@use_profile('bigquery')
def test_bigquery_change_partitions(self):
before = {"partition_by": {'field': 'cur_time',
'data_type': 'timestamp'}, "cluster_by": None}
after = {"partition_by": {'field': "cur_date"}, "cluster_by": None}
self.run_changes(before, after)
self.test_partitions({"expected": 1})
self.run_changes(after, before)
self.test_partitions({"expected": 1})
@use_profile('bigquery')
def test_bigquery_change_partitions_from_int(self):
before = {"partition_by": {"field": "id", "data_type": "int64", "range": {
"start": 0, "end": 10, "interval": 1}}, "cluster_by": None}
after = {"partition_by": {"field": "cur_date",
"data_type": "date"}, "cluster_by": None}
self.run_changes(before, after)
self.test_partitions({"expected": 1})
self.run_changes(after, before)
self.test_partitions({"expected": 2})
@use_profile('bigquery')
def test_bigquery_add_clustering(self):
before = {"partition_by": {'field': 'cur_time',
'data_type': 'timestamp'}, "cluster_by": None}
after = {"partition_by": {'field': "cur_date"}, "cluster_by": "id"}
self.run_changes(before, after)
@use_profile('bigquery')
def test_bigquery_remove_clustering(self):
before = {"partition_by": {'field': 'cur_time',
'data_type': 'timestamp'}, "cluster_by": "id"}
after = {"partition_by": {'field': "cur_date"}, "cluster_by": None}
self.run_changes(before, after)
@use_profile('bigquery')
def test_bigquery_change_clustering(self):
before = {"partition_by": {'field': 'cur_time',
'data_type': 'timestamp'}, "cluster_by": "id"}
after = {"partition_by": {'field': "cur_date"}, "cluster_by": "name"}
self.run_changes(before, after)
@use_profile('bigquery')
def test_bigquery_change_clustering_strict(self):
before = {'partition_by': {'field': 'cur_time',
'data_type': 'timestamp'}, 'cluster_by': 'id'}
after = {'partition_by': {'field': 'cur_date',
'data_type': 'date'}, 'cluster_by': 'name'}
self.run_changes(before, after)
|
[
"test.integration.base.use_profile",
"json.dumps"
] |
[((863, 886), 'test.integration.base.use_profile', 'use_profile', (['"""bigquery"""'], {}), "('bigquery')\n", (874, 886), False, 'from test.integration.base import DBTIntegrationTest, FakeArgs, use_profile\n'), ((1218, 1241), 'test.integration.base.use_profile', 'use_profile', (['"""bigquery"""'], {}), "('bigquery')\n", (1229, 1241), False, 'from test.integration.base import DBTIntegrationTest, FakeArgs, use_profile\n'), ((1567, 1590), 'test.integration.base.use_profile', 'use_profile', (['"""bigquery"""'], {}), "('bigquery')\n", (1578, 1590), False, 'from test.integration.base import DBTIntegrationTest, FakeArgs, use_profile\n'), ((1918, 1941), 'test.integration.base.use_profile', 'use_profile', (['"""bigquery"""'], {}), "('bigquery')\n", (1929, 1941), False, 'from test.integration.base import DBTIntegrationTest, FakeArgs, use_profile\n'), ((2267, 2290), 'test.integration.base.use_profile', 'use_profile', (['"""bigquery"""'], {}), "('bigquery')\n", (2278, 2290), False, 'from test.integration.base import DBTIntegrationTest, FakeArgs, use_profile\n'), ((2580, 2603), 'test.integration.base.use_profile', 'use_profile', (['"""bigquery"""'], {}), "('bigquery')\n", (2591, 2603), False, 'from test.integration.base import DBTIntegrationTest, FakeArgs, use_profile\n'), ((3043, 3066), 'test.integration.base.use_profile', 'use_profile', (['"""bigquery"""'], {}), "('bigquery')\n", (3054, 3066), False, 'from test.integration.base import DBTIntegrationTest, FakeArgs, use_profile\n'), ((3587, 3610), 'test.integration.base.use_profile', 'use_profile', (['"""bigquery"""'], {}), "('bigquery')\n", (3598, 3610), False, 'from test.integration.base import DBTIntegrationTest, FakeArgs, use_profile\n'), ((3915, 3938), 'test.integration.base.use_profile', 'use_profile', (['"""bigquery"""'], {}), "('bigquery')\n", (3926, 3938), False, 'from test.integration.base import DBTIntegrationTest, FakeArgs, use_profile\n'), ((4246, 4269), 'test.integration.base.use_profile', 'use_profile', 
(['"""bigquery"""'], {}), "('bigquery')\n", (4257, 4269), False, 'from test.integration.base import DBTIntegrationTest, FakeArgs, use_profile\n'), ((4579, 4602), 'test.integration.base.use_profile', 'use_profile', (['"""bigquery"""'], {}), "('bigquery')\n", (4590, 4602), False, 'from test.integration.base import DBTIntegrationTest, FakeArgs, use_profile\n'), ((375, 393), 'json.dumps', 'json.dumps', (['before'], {}), '(before)\n', (385, 393), False, 'import json\n'), ((488, 505), 'json.dumps', 'json.dumps', (['after'], {}), '(after)\n', (498, 505), False, 'import json\n'), ((647, 667), 'json.dumps', 'json.dumps', (['expected'], {}), '(expected)\n', (657, 667), False, 'import json\n')]
|
import pyb
print("Executing main.py")
led = pyb.LED(1)
def _blink(duration_ms):
    """Switch the LED on for duration_ms milliseconds, then switch it off."""
    led.on()
    pyb.delay(duration_ms)
    led.off()
# Two 100 ms blinks separated by a 100 ms gap -- no trailing delay after the
# final off, matching the original on/delay/off/delay/on/delay/off sequence.
_blink(100)
pyb.delay(100)
_blink(100)
|
[
"pyb.delay",
"pyb.LED"
] |
[((46, 56), 'pyb.LED', 'pyb.LED', (['(1)'], {}), '(1)\n', (53, 56), False, 'import pyb\n'), ((67, 81), 'pyb.delay', 'pyb.delay', (['(100)'], {}), '(100)\n', (76, 81), False, 'import pyb\n'), ((92, 106), 'pyb.delay', 'pyb.delay', (['(100)'], {}), '(100)\n', (101, 106), False, 'import pyb\n'), ((116, 130), 'pyb.delay', 'pyb.delay', (['(100)'], {}), '(100)\n', (125, 130), False, 'import pyb\n')]
|
# Generated by Django 3.1.4 on 2021-01-27 23:10
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds Polish verbose names and default ordering to the Author, Book and
    Publisher models, and makes Publisher.name unique."""
    dependencies = [
        ('book', '0003_book_publisher'),
    ]
    operations = [
        # Authors sorted by first name, then last name.
        migrations.AlterModelOptions(
            name='author',
            options={'ordering': ['firstname', 'lastname'], 'verbose_name': 'autor', 'verbose_name_plural': 'autorzy'},
        ),
        # Books sorted alphabetically by title.
        migrations.AlterModelOptions(
            name='book',
            options={'ordering': ['title'], 'verbose_name': 'książka', 'verbose_name_plural': 'książki'},
        ),
        # Publishers sorted by name.
        migrations.AlterModelOptions(
            name='publisher',
            options={'ordering': ['name'], 'verbose_name': 'wydawca', 'verbose_name_plural': 'wydawcy'},
        ),
        # Publisher names must now be unique (still max_length=50).
        migrations.AlterField(
            model_name='publisher',
            name='name',
            field=models.CharField(max_length=50, unique=True),
        ),
    ]
|
[
"django.db.models.CharField",
"django.db.migrations.AlterModelOptions"
] |
[((228, 388), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""author"""', 'options': "{'ordering': ['firstname', 'lastname'], 'verbose_name': 'autor',\n 'verbose_name_plural': 'autorzy'}"}), "(name='author', options={'ordering': [\n 'firstname', 'lastname'], 'verbose_name': 'autor',\n 'verbose_name_plural': 'autorzy'})\n", (256, 388), False, 'from django.db import migrations, models\n'), ((424, 563), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""book"""', 'options': "{'ordering': ['title'], 'verbose_name': 'książka', 'verbose_name_plural':\n 'książki'}"}), "(name='book', options={'ordering': ['title'],\n 'verbose_name': 'książka', 'verbose_name_plural': 'książki'})\n", (452, 563), False, 'from django.db import migrations, models\n'), ((604, 748), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""publisher"""', 'options': "{'ordering': ['name'], 'verbose_name': 'wydawca', 'verbose_name_plural':\n 'wydawcy'}"}), "(name='publisher', options={'ordering': ['name'\n ], 'verbose_name': 'wydawca', 'verbose_name_plural': 'wydawcy'})\n", (632, 748), False, 'from django.db import migrations, models\n'), ((890, 934), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'unique': '(True)'}), '(max_length=50, unique=True)\n', (906, 934), False, 'from django.db import migrations, models\n')]
|
import time
import doctest
import unittest
from examples.lists.models import List, Item
from django.test import TestCase
class GenericTestCase(TestCase):
    """Exercises positioned Item ordering within a List: implicit and explicit
    insertion positions, renumbering on save/delete, negative indexes, and
    auto_now timestamp updates (see the section comments below)."""
    def setUp(self):
        pass
    def tearDown(self):
        pass
    # @unittest.skip("Some reason. If you are reading this in a test run someone did not fill this in.")
    def test_doctests_standin(self):
        """Single catch-all test converted from the module's old doctests."""
        # This code just contains the old doctests for this module. They should be most likely split out into their own
        # tests at some point.
        self.l = List.objects.create(name='To Do')
        # create a couple items using the default position
        result = self.l.items.create(name='Write Tests').name
        expected_result = 'Write Tests'
        self.assertEqual(result, expected_result)
        result = list(self.l.items.values_list('name', 'position'))
        expected_result = [(u'Write Tests', 0)]
        self.assertEqual(result, expected_result)
        result = self.l.items.create(name='Exercise').name
        expected_result = 'Exercise'
        self.assertEqual(result, expected_result)
        result = list(self.l.items.values_list('name', 'position').order_by('position'))
        expected_result = [(u'Write Tests', 0), (u'Exercise', 1)]
        self.assertEqual(result, expected_result)
        # create an item with an explicit position
        result = self.l.items.create(name='Learn to spell Exercise', position=0).name
        expected_result = 'Learn to spell Exercise'
        self.assertEqual(result, expected_result)
        result = list(self.l.items.values_list('name', 'position').order_by('position'))
        expected_result = [(u'Learn to spell Exercise', 0), (u'Write Tests', 1), (u'Exercise', 2)]
        self.assertEqual(result, expected_result)
        # save an item without changing it's position
        self.exercise = self.l.items.order_by('-position')[0]
        self.exercise.name = 'Exercise'
        self.exercise.save()
        result = list(self.l.items.values_list('name', 'position').order_by('position'))
        expected_result = [(u'Learn to spell Exercise', 0), (u'Write Tests', 1), (u'Exercise', 2)]
        self.assertEqual(result, expected_result)
        # delete an item
        self.learn_to_spell = self.l.items.order_by('position')[0]
        self.learn_to_spell.delete()
        result = list(self.l.items.values_list('name', 'position').order_by('position'))
        expected_result = [(u'Write Tests', 0), (u'Exercise', 1)]
        self.assertEqual(result, expected_result)
        # create a couple more items
        result = self.l.items.create(name='Drink less Coke').name
        expected_result = 'Drink less Coke'
        self.assertEqual(result, expected_result)
        result = self.l.items.create(name='Go to Bed').name
        expected_result = 'Go to Bed'
        self.assertEqual(result, expected_result)
        result = list(self.l.items.values_list('name', 'position').order_by('position'))
        expected_result = [(u'Write Tests', 0), (u'Exercise', 1), (u'Drink less Coke', 2), (u'Go to Bed', 3)]
        self.assertEqual(result, expected_result)
        # move item to end using None
        self.write_tests = self.l.items.order_by('position')[0]
        self.write_tests.position = None
        self.write_tests.save()
        result = list(self.l.items.values_list('name', 'position').order_by('position'))
        expected_result = [(u'Exercise', 0), (u'Drink less Coke', 1), (u'Go to Bed', 2), (u'Write Tests', 3)]
        self.assertEqual(result, expected_result)
        # move item using negative index
        self.write_tests.position = -3
        self.write_tests.save()
        result = list(self.l.items.values_list('name', 'position').order_by('position'))
        expected_result = [(u'Exercise', 0), (u'Write Tests', 1), (u'Drink less Coke', 2), (u'Go to Bed', 3)]
        self.assertEqual(result, expected_result)
        # move item to position
        self.write_tests.position = 2
        self.write_tests.save()
        result = list(self.l.items.values_list('name', 'position').order_by('position'))
        expected_result = [(u'Exercise', 0), (u'Drink less Coke', 1), (u'Write Tests', 2), (u'Go to Bed', 3)]
        self.assertEqual(result, expected_result)
        # move item to beginning
        self.sleep = self.l.items.order_by('-position')[0]
        self.sleep.position = 0
        self.sleep.save()
        result = list(self.l.items.values_list('name', 'position').order_by('position'))
        expected_result = [(u'Go to Bed', 0), (u'Exercise', 1), (u'Drink less Coke', 2), (u'Write Tests', 3)]
        self.assertEqual(result, expected_result)
        # check auto_now updates
        time.sleep(1) # sleep to guarantee updated time increases
        sleep_updated, exercise_updated, eat_better_updated, write_tests_updated = [i.updated for i in self.l.items.order_by('position')]
        self.eat_better = self.l.items.order_by('-position')[1]
        self.eat_better.position = 1
        self.eat_better.save()
        self.todo_list = list(self.l.items.order_by('position'))
        self.assertEqual(sleep_updated, self.todo_list[0].updated)
        self.assertLessEqual(eat_better_updated, self.todo_list[1].updated)
        self.assertLessEqual(exercise_updated, self.todo_list[2].updated)
        # create an item using negative index
        # http://github.com/jpwatts/django-positions/issues/#issue/5
        result = list(self.l.items.values_list('name', 'position').order_by('position'))
        expected_result = [(u'Go to Bed', 0), (u'Drink less Coke', 1), (u'Exercise', 2), (u'Write Tests', 3)]
        self.assertEqual(result, expected_result)
        self.fix_issue_5 = Item(list=self.l, name="Fix Issue #5")
        result = self.fix_issue_5.position
        expected_result = -1
        self.assertEqual(result, expected_result)
        self.fix_issue_5.position = -2
        result = self.fix_issue_5.position
        expected_result = -2
        self.assertEqual(result, expected_result)
        self.fix_issue_5.save()
        result = self.fix_issue_5.position
        expected_result = 3
        self.assertEqual(result, expected_result)
        result = list(self.l.items.values_list('name', 'position').order_by('position'))
        expected_result = [(u'Go to Bed', 0), (u'Drink less Coke', 1), (u'Exercise', 2), (u'Fix Issue #5', 3), (u'Write Tests', 4)]
        self.assertEqual(result, expected_result)
        # Try again, now that the model has been saved.
        self.fix_issue_5.position = -2
        self.fix_issue_5.save()
        result = self.fix_issue_5.position
        expected_result = 3
        self.assertEqual(result, expected_result)
        result = list(self.l.items.values_list('name', 'position').order_by('position'))
        expected_result = [(u'Go to Bed', 0), (u'Drink less Coke', 1), (u'Exercise', 2), (u'Fix Issue #5', 3), (u'Write Tests', 4)]
        self.assertEqual(result, expected_result)
        # create an item using with a position of zero
        # http://github.com/jpwatts/django-positions/issues#issue/7
        self.item0 = self.l.items.create(name="Fix Issue #7", position=0)
        result = self.item0.position
        expected_result = 0
        self.assertEqual(result, expected_result)
        result = list(self.l.items.values_list('name', 'position').order_by('position'))
        expected_result = [(u'Fix Issue #7', 0), (u'Go to Bed', 1), (u'Drink less Coke', 2), (u'Exercise', 3), (u'Fix Issue #5', 4), (u'Write Tests', 5)]
        self.assertEqual(result, expected_result)
|
[
"examples.lists.models.Item",
"examples.lists.models.List.objects.create",
"time.sleep"
] |
[((540, 573), 'examples.lists.models.List.objects.create', 'List.objects.create', ([], {'name': '"""To Do"""'}), "(name='To Do')\n", (559, 573), False, 'from examples.lists.models import List, Item\n'), ((4726, 4739), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (4736, 4739), False, 'import time\n'), ((5733, 5771), 'examples.lists.models.Item', 'Item', ([], {'list': 'self.l', 'name': '"""Fix Issue #5"""'}), "(list=self.l, name='Fix Issue #5')\n", (5737, 5771), False, 'from examples.lists.models import List, Item\n')]
|
# coding=utf-8
"""
Copyright 2016 Load Impact
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Without this the config will prompt for a token.
# The value is a dummy -- the loadimpact client calls are mocked out in the
# tests below, so no real API request ever uses it.  Must run before the
# loadimpactcli import.
import os
os.environ['LOADIMPACT_API_V3_TOKEN'] = 'token'
import unittest
from collections import namedtuple
from click.testing import CliRunner
from loadimpactcli import datastore_commands
try:
from unittest.mock import MagicMock
except ImportError:
from mock import MagicMock
class TestDataStores(unittest.TestCase):
    """CLI tests for the datastore commands (download/list/create/update/
    delete), driven through click's CliRunner with the API client mocked."""
    def setUp(self):
        # Fresh runner plus three fixture datastores (datastore3 has a
        # non-ASCII name to exercise unicode output).
        self.runner = CliRunner()
        DataStore = namedtuple('DataStore', ['id', 'name', 'status', 'public_url'])
        self.datastore1 = DataStore(1, u'First datastore', 'status1', 'www.example.com')
        self.datastore2 = DataStore(2, u'Second datastore', 'status2', 'www.example.com')
        self.datastore3 = DataStore(3, u'ÅÄÖåäö', 'status3', 'www.example.com')
    def test_download_csv(self):
        """download_csv with a valid id reports start and finish."""
        client = datastore_commands.client
        datastore_commands._download_csv = MagicMock()
        client.get_data_store = MagicMock(return_value=self.datastore1)
        result = self.runner.invoke(datastore_commands.download_csv, ['1'])
        assert result.exit_code == 0
        assert result.output == "Downloading CSV file, please wait.\nFinished download.\n"
    def test_download_csv_no_params(self):
        """Missing required id: click exits with usage error (code 2)."""
        result = self.runner.invoke(datastore_commands.download_csv, [])
        assert result.exit_code == 2
    def test_list_datastore(self):
        """list_datastore prints an id/name table for the project."""
        client = datastore_commands.client
        client.DEFAULT_PROJECT = 1
        client.list_data_stores = MagicMock(return_value=[self.datastore1, self.datastore2])
        result = self.runner.invoke(datastore_commands.list_datastore, ['--project_id', '1'])
        assert result.exit_code == 0
        assert result.output == u"ID:\tNAME:\n1\tFirst datastore\n2\tSecond datastore\n"
    def test_list_datastore_non_ascii_name(self):
        """Non-ASCII datastore names are printed correctly."""
        client = datastore_commands.client
        client.DEFAULT_PROJECT = 1
        client.list_data_stores = MagicMock(return_value=[self.datastore1, self.datastore3])
        result = self.runner.invoke(datastore_commands.list_datastore, ['--project_id', '1'])
        assert result.exit_code == 0
        assert result.output == u"ID:\tNAME:\n1\tFirst datastore\n3\tÅÄÖåäö\n"
    def test_list_datastore_missing_project_id(self):
        """Without --project_id the command asks for one (exit code 0)."""
        client = datastore_commands.client
        client.list_data_stores = MagicMock(return_value=[self.datastore1, self.datastore2])
        result = self.runner.invoke(datastore_commands.list_datastore, [])
        assert result.exit_code == 0
        assert result.output == "You need to provide a project id.\n"
    def test_create_datastore(self):
        """create_datastore reports the conversion status on success."""
        client = datastore_commands.client
        client.DEFAULT_PROJECT = 1
        client.create_data_store = MagicMock(return_value=self.datastore1)
        datastore_commands._wait_for_conversion = MagicMock(return_value=self.datastore1)
        result = self.runner.invoke(datastore_commands.create_datastore, ['NewDatastore',
                                                                            'tests/script',
                                                                            '--project_id',
                                                                            '1'])
        assert result.exit_code == 0
        assert result.output == "{0}\n".format("Data store conversion completed with status 'unknown'")
    def test_create_datastore_missing_params(self):
        """Missing the name argument: click usage error (code 2)."""
        client = datastore_commands.client
        client.DEFAULT_PROJECT = 1
        client.create_data_store = MagicMock(return_value=self.datastore1)
        datastore_commands._wait_for_conversion = MagicMock(return_value=self.datastore1)
        result = self.runner.invoke(datastore_commands.create_datastore, ['tests/script'])
        assert result.exit_code == 2
    def test_update_datastore(self):
        """update_datastore reports the conversion status on success."""
        client = datastore_commands.client
        client.DEFAULT_PROJECT = 1
        client.get_data_store = MagicMock(return_value=self.datastore2)
        client.update_data_store = MagicMock(return_value=self.datastore2)
        datastore_commands._wait_for_conversion = MagicMock(return_value=self.datastore2)
        result = self.runner.invoke(datastore_commands.update_datastore, ['1',
                                                                            'tests/script',
                                                                            '--name',
                                                                            'New name',
                                                                            '--project_id',
                                                                            '1'])
        assert result.exit_code == 0
        assert result.output == "{0}\n".format("Data store conversion completed with status 'unknown'")
    def test_update_datastore_missing_params(self):
        """Missing the script file argument: click usage error (code 2)."""
        client = datastore_commands.client
        client.DEFAULT_PROJECT = 1
        client.get_data_store = MagicMock(return_value=self.datastore2)
        client.update_data_store = MagicMock(return_value=self.datastore2)
        datastore_commands._wait_for_conversion = MagicMock(return_value=self.datastore2)
        result = self.runner.invoke(datastore_commands.update_datastore, ['1'])
        assert result.exit_code == 2
    def test_delete_datastore(self):
        """delete_datastore with --yes echoes the mocked deletion result."""
        datastore_commands.delete_store = MagicMock(return_value="Datastore1")
        result = self.runner.invoke(datastore_commands.delete_datastore, ['1', '--yes'])
        assert result.exit_code == 0
        assert result.output == 'Datastore1\n'
    def test_delete_datastore_no_params(self):
        """Missing required id: click usage error (code 2)."""
        result = self.runner.invoke(datastore_commands.delete_datastore, [])
        assert result.exit_code == 2
|
[
"click.testing.CliRunner",
"collections.namedtuple",
"mock.MagicMock"
] |
[((1002, 1013), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (1011, 1013), False, 'from click.testing import CliRunner\n'), ((1034, 1097), 'collections.namedtuple', 'namedtuple', (['"""DataStore"""', "['id', 'name', 'status', 'public_url']"], {}), "('DataStore', ['id', 'name', 'status', 'public_url'])\n", (1044, 1097), False, 'from collections import namedtuple\n'), ((1477, 1488), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1486, 1488), False, 'from mock import MagicMock\n'), ((1521, 1560), 'mock.MagicMock', 'MagicMock', ([], {'return_value': 'self.datastore1'}), '(return_value=self.datastore1)\n', (1530, 1560), False, 'from mock import MagicMock\n'), ((2067, 2125), 'mock.MagicMock', 'MagicMock', ([], {'return_value': '[self.datastore1, self.datastore2]'}), '(return_value=[self.datastore1, self.datastore2])\n', (2076, 2125), False, 'from mock import MagicMock\n'), ((2510, 2568), 'mock.MagicMock', 'MagicMock', ([], {'return_value': '[self.datastore1, self.datastore3]'}), '(return_value=[self.datastore1, self.datastore3])\n', (2519, 2568), False, 'from mock import MagicMock\n'), ((2912, 2970), 'mock.MagicMock', 'MagicMock', ([], {'return_value': '[self.datastore1, self.datastore2]'}), '(return_value=[self.datastore1, self.datastore2])\n', (2921, 2970), False, 'from mock import MagicMock\n'), ((3305, 3344), 'mock.MagicMock', 'MagicMock', ([], {'return_value': 'self.datastore1'}), '(return_value=self.datastore1)\n', (3314, 3344), False, 'from mock import MagicMock\n'), ((3395, 3434), 'mock.MagicMock', 'MagicMock', ([], {'return_value': 'self.datastore1'}), '(return_value=self.datastore1)\n', (3404, 3434), False, 'from mock import MagicMock\n'), ((4093, 4132), 'mock.MagicMock', 'MagicMock', ([], {'return_value': 'self.datastore1'}), '(return_value=self.datastore1)\n', (4102, 4132), False, 'from mock import MagicMock\n'), ((4183, 4222), 'mock.MagicMock', 'MagicMock', ([], {'return_value': 'self.datastore1'}), '(return_value=self.datastore1)\n', (4192, 
4222), False, 'from mock import MagicMock\n'), ((4500, 4539), 'mock.MagicMock', 'MagicMock', ([], {'return_value': 'self.datastore2'}), '(return_value=self.datastore2)\n', (4509, 4539), False, 'from mock import MagicMock\n'), ((4575, 4614), 'mock.MagicMock', 'MagicMock', ([], {'return_value': 'self.datastore2'}), '(return_value=self.datastore2)\n', (4584, 4614), False, 'from mock import MagicMock\n'), ((4665, 4704), 'mock.MagicMock', 'MagicMock', ([], {'return_value': 'self.datastore2'}), '(return_value=self.datastore2)\n', (4674, 4704), False, 'from mock import MagicMock\n'), ((5520, 5559), 'mock.MagicMock', 'MagicMock', ([], {'return_value': 'self.datastore2'}), '(return_value=self.datastore2)\n', (5529, 5559), False, 'from mock import MagicMock\n'), ((5595, 5634), 'mock.MagicMock', 'MagicMock', ([], {'return_value': 'self.datastore2'}), '(return_value=self.datastore2)\n', (5604, 5634), False, 'from mock import MagicMock\n'), ((5685, 5724), 'mock.MagicMock', 'MagicMock', ([], {'return_value': 'self.datastore2'}), '(return_value=self.datastore2)\n', (5694, 5724), False, 'from mock import MagicMock\n'), ((5923, 5959), 'mock.MagicMock', 'MagicMock', ([], {'return_value': '"""Datastore1"""'}), "(return_value='Datastore1')\n", (5932, 5959), False, 'from mock import MagicMock\n')]
|
import argparse
import asyncio
import logging
from pathlib import Path
import os
import signal
import sys
import aiohttp
import aiosqlite
import lissandra
from client import APIClient
# Global exit flag: set to True by shutdown() when SIGINT is received and
# polled by run() to terminate the crawl loop cleanly.
lissandra.exiting = False
def main():
    """Parse command-line options, configure logging, resolve the Riot API
    key and run the crawler event loop until SIGINT is received.

    Exits with status 2 when the arguments are invalid or no API key is
    available (neither -k nor the RIOT_API_KEY environment variable).
    """
    try:
        parser = argparse.ArgumentParser(
            description="Retrieves full match and summoner info.")
        parser.add_argument(
            "-k", "--key",
            help="developer or production key provided by Riot", nargs=1)
        parser.add_argument(
            "-v", "--verbose",
            help="verbose (debug) output", action="store_true", default=False)
    except argparse.ArgumentError as e:
        logging.error("Invalid argument: {0}.".format(e.message))
        sys.exit(2)
    args = parser.parse_args()
    # Sets logging: DEBUG when -v is given, INFO otherwise; silence the
    # chatty aiosqlite/asyncio loggers either way.
    log_level = logging.DEBUG if args.verbose else logging.INFO
    logging.basicConfig(
        format='%(asctime)s: %(message)s',
        level=log_level, datefmt='%d/%m/%Y %H:%M:%S')
    logging.getLogger("aiosqlite").setLevel(logging.WARNING)
    logging.getLogger("asyncio").setLevel(logging.WARNING)
    # Initializes API key; the command-line argument wins over the
    # environment variable.
    if args.key is not None:
        logging.info("API key set by command-line argument.")
        # argparse with nargs=1 stores a one-element list; unwrap it so a
        # string (not a list) is passed on to the API clients.
        key = args.key[0]
    else:
        try:
            key = os.environ["RIOT_API_KEY"]
            logging.info("API key set by environment variable RIOT_API_KEY.")
        except KeyError:
            logging.error(
                "API key was not set. Set key with -k argument or set "
                "environment variable RIOT_API_KEY.")
            sys.exit(2)
    parameters = {}
    parameters["regions"] = ["BR", "EUNE", "EUW", "JP", "KR",
                             "LAN", "LAS", "NA", "OCE", "TR", "RU"]
    parameters["data_path"] = Path('.', 'data')
    parameters["key"] = key
    loop = asyncio.get_event_loop()
    # Sets signal handler so Ctrl+C triggers a clean shutdown.
    loop.add_signal_handler(signal.SIGINT, shutdown)
    loop.run_until_complete(run(parameters))
    logging.info("Terminated")
async def run(parameters):
    """Create one APIClient per configured region inside a shared aiohttp
    session, wait until lissandra.exiting is set, then flag every client to
    stop and wait for each one to finish shutting down."""
    event_loop = asyncio.get_event_loop()
    async with aiohttp.ClientSession(loop=event_loop) as session:
        api_clients = {
            region: APIClient(
                region, session, parameters["key"], parameters["data_path"])
            for region in parameters["regions"]
        }
        # Poll the global exit flag once per second until ctrl+c is received.
        while not lissandra.exiting:
            await asyncio.sleep(1)
        # Exiting: tell every client to wind down first, then await each
        # client's shutdown event so the session closes only when all are done.
        for api_client in api_clients.values():
            api_client.exiting = True
        for api_client in api_clients.values():
            await api_client.shutdown.wait()
def shutdown():
    """SIGINT handler: raise the global exit flag so run() can terminate."""
    lissandra.exiting = True
    logging.info("Received shutdown signal")
# Run the crawler only when executed as a script (not on import).
if __name__ == "__main__":
    main()
|
[
"logging.error",
"asyncio.get_event_loop",
"argparse.ArgumentParser",
"logging.basicConfig",
"asyncio.sleep",
"logging.getLogger",
"aiohttp.ClientSession",
"logging.info",
"pathlib.Path",
"client.APIClient",
"sys.exit"
] |
[((1971, 1988), 'pathlib.Path', 'Path', (['"""."""', '"""data"""'], {}), "('.', 'data')\n", (1975, 1988), False, 'from pathlib import Path\n'), ((2032, 2056), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (2054, 2056), False, 'import asyncio\n'), ((2193, 2219), 'logging.info', 'logging.info', (['"""Terminated"""'], {}), "('Terminated')\n", (2205, 2219), False, 'import logging\n'), ((2264, 2288), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (2286, 2288), False, 'import asyncio\n'), ((2859, 2899), 'logging.info', 'logging.info', (['"""Received shutdown signal"""'], {}), "('Received shutdown signal')\n", (2871, 2899), False, 'import logging\n'), ((278, 356), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Retrieves full match and summoner info."""'}), "(description='Retrieves full match and summoner info.')\n", (301, 356), False, 'import argparse\n'), ((862, 970), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s: %(message)s"""', 'level': 'logging.DEBUG', 'datefmt': '"""%d/%m/%Y %H:%M:%S"""'}), "(format='%(asctime)s: %(message)s', level=logging.DEBUG,\n datefmt='%d/%m/%Y %H:%M:%S')\n", (881, 970), False, 'import logging\n'), ((1014, 1121), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s: %(message)s"""', 'level': 'logging.INFO', 'datefmt': '"""%d/%m/%Y %H:%M:%S"""'}), "(format='%(asctime)s: %(message)s', level=logging.INFO,\n datefmt='%d/%m/%Y %H:%M:%S')\n", (1033, 1121), False, 'import logging\n'), ((1376, 1429), 'logging.info', 'logging.info', (['"""API key set by command-line argument."""'], {}), "('API key set by command-line argument.')\n", (1388, 1429), False, 'import logging\n'), ((2323, 2355), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {'loop': 'loop'}), '(loop=loop)\n', (2344, 2355), False, 'import aiohttp\n'), ((763, 774), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (771, 774), False, 'import sys\n'), 
((1150, 1180), 'logging.getLogger', 'logging.getLogger', (['"""aiosqlite"""'], {}), "('aiosqlite')\n", (1167, 1180), False, 'import logging\n'), ((1212, 1240), 'logging.getLogger', 'logging.getLogger', (['"""asyncio"""'], {}), "('asyncio')\n", (1229, 1240), False, 'import logging\n'), ((1538, 1598), 'logging.info', 'logging.info', (['"""API key set by environment variable DEV_KEY."""'], {}), "('API key set by environment variable DEV_KEY.')\n", (1550, 1598), False, 'import logging\n'), ((2445, 2515), 'client.APIClient', 'APIClient', (['region', 'session', "parameters['key']", "parameters['data_path']"], {}), "(region, session, parameters['key'], parameters['data_path'])\n", (2454, 2515), False, 'from client import APIClient\n'), ((1638, 1747), 'logging.error', 'logging.error', (['"""API key was not set. Set key with -k argument or set environment variable DEV_KEY."""'], {}), "(\n 'API key was not set. Set key with -k argument or set environment variable DEV_KEY.'\n )\n", (1651, 1747), False, 'import logging\n'), ((1769, 1780), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (1777, 1780), False, 'import sys\n'), ((2621, 2637), 'asyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (2634, 2637), False, 'import asyncio\n')]
|
from utils import get_page
from lxml import etree
from db import RedisClient
import random
import time
class Crawl_ip(object):
    """Scrapes free proxy listings (xicidaili.com and 66ip.cn) and stores the
    discovered "host:port" strings through a RedisClient."""

    def __init__(self):
        # Proxy storage backend (project-local RedisClient).
        self.db = RedisClient()

    def ip_xici(self):
        """Collect up to 100 proxies from the xicidaili.com front page."""
        url = 'http://www.xicidaili.com/'
        con = get_page(url)
        if not con:
            # Download failed: skip instead of crashing inside etree.HTML()
            # (ip_66 already guards the same way).
            return
        html = etree.HTML(con)
        ip_list = html.xpath('//tr/td[2]/text()')
        ip_port = html.xpath('//tr/td[3]/text()')
        # zip() stops at the shorter column, so a page with fewer than 100
        # rows no longer raises IndexError (the old code indexed range(100)).
        for addr, port in list(zip(ip_list, ip_port))[:100]:
            self.db.add(addr + ':' + port)

    def ip_66(self):
        """Walk the paginated listings on 66ip.cn (pages 0..99)."""
        preurl = 'http://www.66ip.cn/'
        for page in range(100):  # distinct name: no longer shadowed below
            url = preurl + str(page) + '.html'
            con = get_page(url)
            if con:
                html = etree.HTML(con)
                rows = html.xpath('//tr')
                # Skip the first two <tr> rows (original behavior;
                # presumably table headers -- confirm against the site).
                for row in rows[2:]:
                    ip = row.xpath('td[1]/text()')[0] + ":" + row.xpath('td[2]/text()')[0]
                    # NOTE(review): only this source passes a second argument
                    # (10) to db.add -- verify against RedisClient.add and
                    # ip_xici's single-argument call.
                    self.db.add(ip, 10)
            # Sleep 0.5-1.5 s between pages to avoid hammering the site.
            intr = random.randint(5, 15)
            time.sleep(intr * 0.1)

    def run(self):
        """Crawl both proxy sources once."""
        self.ip_66()
        self.ip_xici()
self.ip_xici()
# Manual entry point: crawl both proxy sources once.
if __name__ == '__main__':
    crawl = Crawl_ip()
    crawl.run()
|
[
"db.RedisClient",
"random.randint",
"utils.get_page",
"time.sleep",
"lxml.etree.HTML"
] |
[((176, 189), 'db.RedisClient', 'RedisClient', ([], {}), '()\n', (187, 189), False, 'from db import RedisClient\n'), ((272, 285), 'utils.get_page', 'get_page', (['url'], {}), '(url)\n', (280, 285), False, 'from utils import get_page\n'), ((302, 317), 'lxml.etree.HTML', 'etree.HTML', (['con'], {}), '(con)\n', (312, 317), False, 'from lxml import etree\n'), ((679, 692), 'utils.get_page', 'get_page', (['url'], {}), '(url)\n', (687, 692), False, 'from utils import get_page\n'), ((738, 753), 'lxml.etree.HTML', 'etree.HTML', (['con'], {}), '(con)\n', (748, 753), False, 'from lxml import etree\n'), ((1015, 1036), 'random.randint', 'random.randint', (['(5)', '(15)'], {}), '(5, 15)\n', (1029, 1036), False, 'import random\n'), ((1053, 1075), 'time.sleep', 'time.sleep', (['(intr * 0.1)'], {}), '(intr * 0.1)\n', (1063, 1075), False, 'import time\n')]
|
import json
import logging
import os
from urllib.parse import urlparse
import connexion
from connexion import FlaskApi, ProblemException, problem
from flask import g, request
from flask_cors import CORS
from swagger_ui_bundle import swagger_ui_path
from backend.corpora.common.utils.aws import AwsSecret
from backend.corpora.common.utils.json import CustomJSONEncoder
DEPLOYMENT_STAGE = os.environ["DEPLOYMENT_STAGE"]
APP_NAME = os.environ["APP_NAME"]
def create_flask_app():
    """Build the connexion application, register the three OpenAPI specs
    (/dp, /curation, /wmg) with Swagger UI enabled, and return the
    underlying Flask app."""
    connexion_app = connexion.FlaskApp(f"{APP_NAME}-{DEPLOYMENT_STAGE}", specification_dir="backend/config")
    # From https://github.com/zalando/connexion/issues/346
    connexion_app.app.url_map.strict_slashes = False
    def add_api(base_path, spec_file):
        # Record the path for the landing page, then mount the spec.
        api_base_paths.append(base_path)
        # NOTE(review): callers pass base_path with a leading "/" (e.g.
        # "/dp"), so f"/{base_path}" yields "//dp" -- confirm connexion
        # normalizes the double slash.
        connexion_app.add_api(
            spec_file,
            validate_responses=True,
            base_path=f"/{base_path}",
            resolver_error=501,
            options={
                "serve_spec": True,
                "swagger_path": swagger_ui_path,
                "swagger_ui": True,
                "swagger_url": None,
                "verbose": True,
            },
        )
    add_api(base_path="/dp", spec_file="corpora-api.yml")
    add_api(base_path="/curation", spec_file="curation-api.yml")
    add_api(base_path="/wmg", spec_file="wmg-api.yml")
    return connexion_app.app
def configure_flask_app(flask_app):
    """Apply logging, CORS, secret-key and session-cookie configuration to
    the given Flask app and return it.

    Reads DEPLOYMENT_STAGE from the environment; outside the "test" stage it
    also fetches the Auth0 secret from AWS to obtain the real flask secret
    key and the frontend origin allowed for CORS.
    """
    # configure logging: reuse gunicorn's handlers/level for the app logger
    gunicorn_logger = logging.getLogger("gunicorn.error")
    flask_app.logger.handlers = gunicorn_logger.handlers
    flask_app.logger.setLevel(gunicorn_logger.level)
    flask_app.debug = False if DEPLOYMENT_STAGE == "prod" else True
    # set the flask secret key, needed for session cookies
    # (placeholder default, overridden from the AWS secret below)
    flask_secret_key = "OpenSesame"
    allowed_origins = []
    deployment_stage = os.environ["DEPLOYMENT_STAGE"]
    # Non-prod stages also allow local development origins.
    if deployment_stage not in ["prod"]:
        allowed_origins.extend([r"http://.*\.corporanet\.local:\d+", r"^http://localhost:\d+"])
    if os.getenv("FRONTEND_URL"):
        allowed_origins.append(os.getenv("FRONTEND_URL"))
    if deployment_stage != "test":  # pragma: no cover
        secret_name = f"corpora/backend/{deployment_stage}/auth0-secret"
        auth_secret = json.loads(AwsSecret(secret_name).value)
        if auth_secret:
            flask_secret_key = auth_secret.get("flask_secret_key", flask_secret_key)
            frontend = auth_secret.get("redirect_to_frontend", None)
            if frontend:
                # Normalize: strip trailing slash, keep scheme://host only.
                if frontend.endswith("/"):
                    frontend = frontend[:-1]
                frontend_parse = urlparse(frontend)
                allowed_origins.append(f"{frontend_parse.scheme}://{frontend_parse.netloc}")
    flask_app.logger.info(f"CORS allowed_origins: {allowed_origins}")
    CORS(flask_app, max_age=600, supports_credentials=True, origins=allowed_origins, allow_headers=["Content-Type"])
    # FIXME, enforce that the flask_secret_key is found once all secrets are setup for all environments
    # Secure cookies are on unless DEV_MODE_COOKIES is set (local http dev).
    require_secure_cookies = not bool(os.getenv("DEV_MODE_COOKIES"))
    flask_app.config.update(
        SECRET_KEY=flask_secret_key,
        SESSION_COOKIE_SECURE=require_secure_cookies,
        SESSION_COOKIE_HTTPONLY=True,
        SESSION_COOKIE_SAMESITE="Lax",
    )
    flask_app.json_encoder = CustomJSONEncoder
    return flask_app
# Populated by create_flask_app() with each API's base path ("/dp",
# "/curation", "/wmg"); read by the landing page route below.
api_base_paths = []
app = configure_flask_app(create_flask_app())
@app.route("/")
def apis_landing_page() -> str:
    """Render a minimal HTML index page linking to every registered API."""
    # TODO: use jinja2 template to render this
    rendered_links = "".join(
        f'<a href="{base_path}/ui/">{base_path}</a></br>' for base_path in api_base_paths
    )
    return f"""
    <html>
    <head><title>cellxgene Platform APIs</title></head>
    <body><h1>cellxgene Platform APIs</h1>{rendered_links}</body>
    </html>
    """
@app.before_request
def pre_request_logging():
    """Log one JSON line (path, method, scheme) for every incoming request."""
    request_info = {"url": request.path, "method": request.method, "schema": request.scheme}
    app.logger.info(json.dumps(request_info))
@app.teardown_appcontext
def close_db(e=None):
    # Drop the request-scoped "db_session" from flask.g (no-op if absent).
    g.pop("db_session", None)
@app.errorhandler(ProblemException)
def handle_corpora_error(exception):
    # Render any connexion ProblemException raised by the APIs as a
    # structured "problem" response, forwarding the status, title, detail,
    # type, instance, extra headers and extension members carried on the
    # exception.
    return FlaskApi.get_response(
        problem(
            exception.status,
            exception.title,
            exception.detail,
            exception.type,
            exception.instance,
            exception.headers,
            exception.ext,
        )
    )
if __name__ == "__main__":
    # Local development entry point: Flask's built-in server, debug on,
    # listening on all interfaces.
    app.run(host="0.0.0.0", debug=True)
|
[
"backend.corpora.common.utils.aws.AwsSecret",
"flask_cors.CORS",
"flask.g.pop",
"connexion.FlaskApp",
"urllib.parse.urlparse",
"connexion.problem",
"os.getenv",
"logging.getLogger"
] |
[((501, 594), 'connexion.FlaskApp', 'connexion.FlaskApp', (['f"""{APP_NAME}-{DEPLOYMENT_STAGE}"""'], {'specification_dir': '"""backend/config"""'}), "(f'{APP_NAME}-{DEPLOYMENT_STAGE}', specification_dir=\n 'backend/config')\n", (519, 594), False, 'import connexion\n'), ((1477, 1512), 'logging.getLogger', 'logging.getLogger', (['"""gunicorn.error"""'], {}), "('gunicorn.error')\n", (1494, 1512), False, 'import logging\n'), ((2010, 2035), 'os.getenv', 'os.getenv', (['"""FRONTEND_URL"""'], {}), "('FRONTEND_URL')\n", (2019, 2035), False, 'import os\n'), ((2796, 2913), 'flask_cors.CORS', 'CORS', (['flask_app'], {'max_age': '(600)', 'supports_credentials': '(True)', 'origins': 'allowed_origins', 'allow_headers': "['Content-Type']"}), "(flask_app, max_age=600, supports_credentials=True, origins=\n allowed_origins, allow_headers=['Content-Type'])\n", (2800, 2913), False, 'from flask_cors import CORS\n'), ((4081, 4106), 'flask.g.pop', 'g.pop', (['"""db_session"""', 'None'], {}), "('db_session', None)\n", (4086, 4106), False, 'from flask import g, request\n'), ((4224, 4358), 'connexion.problem', 'problem', (['exception.status', 'exception.title', 'exception.detail', 'exception.type', 'exception.instance', 'exception.headers', 'exception.ext'], {}), '(exception.status, exception.title, exception.detail, exception.type,\n exception.instance, exception.headers, exception.ext)\n', (4231, 4358), False, 'from connexion import FlaskApi, ProblemException, problem\n'), ((2068, 2093), 'os.getenv', 'os.getenv', (['"""FRONTEND_URL"""'], {}), "('FRONTEND_URL')\n", (2077, 2093), False, 'import os\n'), ((3052, 3081), 'os.getenv', 'os.getenv', (['"""DEV_MODE_COOKIES"""'], {}), "('DEV_MODE_COOKIES')\n", (3061, 3081), False, 'import os\n'), ((2256, 2278), 'backend.corpora.common.utils.aws.AwsSecret', 'AwsSecret', (['secret_name'], {}), '(secret_name)\n', (2265, 2278), False, 'from backend.corpora.common.utils.aws import AwsSecret\n'), ((2610, 2628), 'urllib.parse.urlparse', 'urlparse', 
(['frontend'], {}), '(frontend)\n', (2618, 2628), False, 'from urllib.parse import urlparse\n')]
|
####
# test_basic.py:
#
# Basic CMake tests.
#
####
import os
import platform
import cmake
# Test a normal build, with the ref executable and static libraries
BUILD_DIR = os.path.join("<FPRIME>")
OPTIONS = {"CMAKE_BUILD_TYPE": "TESTING"}
TARGETS = ["all", "check"]
EXPECTED = [
os.path.join("bin", platform.system(), "CFDP_Checksum_ut_exe"),
os.path.join("bin", platform.system(), "Fw_FilePacket_ut_exe"),
os.path.join("bin", platform.system(), "Fw_Log_ut_exe"),
os.path.join("bin", platform.system(), "Fw_SerializableFile_ut_exe"),
os.path.join("bin", platform.system(), "Fw_Time_ut_exe"),
os.path.join("bin", platform.system(), "Fw_Tlm_ut_exe"),
os.path.join("bin", platform.system(), "Fw_Types_ut_exe"),
os.path.join("bin", platform.system(), "Svc_ActiveLogger_ut_exe"),
os.path.join("bin", platform.system(), "Svc_ActiveRateGroup_ut_exe"),
os.path.join("bin", platform.system(), "Svc_BufferManager_ut_exe"),
os.path.join("bin", platform.system(), "Svc_CmdDispatcher_ut_exe"),
os.path.join("bin", platform.system(), "Svc_CmdSequencer_ut_exe"),
os.path.join("bin", platform.system(), "Svc_ComLogger_ut_exe"),
os.path.join("bin", platform.system(), "Svc_FileDownlink_ut_exe"),
os.path.join("bin", platform.system(), "Svc_FileManager_ut_exe"),
os.path.join("bin", platform.system(), "Svc_FileUplink_ut_exe"),
os.path.join("bin", platform.system(), "Svc_LinuxTime_ut_exe"),
os.path.join("bin", platform.system(), "Svc_PolyDb_ut_exe"),
os.path.join("bin", platform.system(), "Svc_PrmDb_ut_exe"),
os.path.join("bin", platform.system(), "Svc_RateGroupDriver_ut_exe"),
os.path.join("bin", platform.system(), "Svc_TlmChan_ut_exe"),
]
cmake.register_test(__name__, "fp-uts")
|
[
"platform.system",
"cmake.register_test",
"os.path.join"
] |
[((172, 196), 'os.path.join', 'os.path.join', (['"""<FPRIME>"""'], {}), "('<FPRIME>')\n", (184, 196), False, 'import os\n'), ((1713, 1752), 'cmake.register_test', 'cmake.register_test', (['__name__', '"""fp-uts"""'], {}), "(__name__, 'fp-uts')\n", (1732, 1752), False, 'import cmake\n'), ((303, 320), 'platform.system', 'platform.system', ([], {}), '()\n', (318, 320), False, 'import platform\n'), ((371, 388), 'platform.system', 'platform.system', ([], {}), '()\n', (386, 388), False, 'import platform\n'), ((439, 456), 'platform.system', 'platform.system', ([], {}), '()\n', (454, 456), False, 'import platform\n'), ((500, 517), 'platform.system', 'platform.system', ([], {}), '()\n', (515, 517), False, 'import platform\n'), ((574, 591), 'platform.system', 'platform.system', ([], {}), '()\n', (589, 591), False, 'import platform\n'), ((636, 653), 'platform.system', 'platform.system', ([], {}), '()\n', (651, 653), False, 'import platform\n'), ((697, 714), 'platform.system', 'platform.system', ([], {}), '()\n', (712, 714), False, 'import platform\n'), ((760, 777), 'platform.system', 'platform.system', ([], {}), '()\n', (775, 777), False, 'import platform\n'), ((831, 848), 'platform.system', 'platform.system', ([], {}), '()\n', (846, 848), False, 'import platform\n'), ((905, 922), 'platform.system', 'platform.system', ([], {}), '()\n', (920, 922), False, 'import platform\n'), ((977, 994), 'platform.system', 'platform.system', ([], {}), '()\n', (992, 994), False, 'import platform\n'), ((1049, 1066), 'platform.system', 'platform.system', ([], {}), '()\n', (1064, 1066), False, 'import platform\n'), ((1120, 1137), 'platform.system', 'platform.system', ([], {}), '()\n', (1135, 1137), False, 'import platform\n'), ((1188, 1205), 'platform.system', 'platform.system', ([], {}), '()\n', (1203, 1205), False, 'import platform\n'), ((1259, 1276), 'platform.system', 'platform.system', ([], {}), '()\n', (1274, 1276), False, 'import platform\n'), ((1329, 1346), 'platform.system', 
'platform.system', ([], {}), '()\n', (1344, 1346), False, 'import platform\n'), ((1398, 1415), 'platform.system', 'platform.system', ([], {}), '()\n', (1413, 1415), False, 'import platform\n'), ((1466, 1483), 'platform.system', 'platform.system', ([], {}), '()\n', (1481, 1483), False, 'import platform\n'), ((1531, 1548), 'platform.system', 'platform.system', ([], {}), '()\n', (1546, 1548), False, 'import platform\n'), ((1595, 1612), 'platform.system', 'platform.system', ([], {}), '()\n', (1610, 1612), False, 'import platform\n'), ((1669, 1686), 'platform.system', 'platform.system', ([], {}), '()\n', (1684, 1686), False, 'import platform\n')]
|
import sys
import logging
import pymysql
import json
import os
#rds settings
rds_endpoint = os.environ['rds_endpoint']
username=os.environ['username']
password=os.environ['password']
db_name=os.environ['db_name']
logger = logging.getLogger()
logger.setLevel(logging.INFO)
#Connection
try:
connection = pymysql.connect(host=rds_endpoint, user=username,
passwd=password, db=db_name)
except pymysql.MySQLError as e:
logger.error("ERROR: Unexpected error: Could not connect to MySQL instance.")
logger.error(e)
sys.exit()
logger.info("SUCCESS: Connection to RDS MySQL instance succeeded")
def handler(event, context):
cur = connection.cursor()
## Retrieve Data
query = "DELETE FROM OpeningHours where branchId = '{}' AND dayOfWeek= '{}'".format(event['branchId'],event['dayOfWeek'])
cur.execute(query)
connection.commit()
print(cur.rowcount, "record(s) affected")
## Construct body of the response object
transactionResponse = {}
# Construct http response object
responseObject = {}
# responseObject['statusCode'] = 200
# responseObject['headers'] = {}
# responseObject['headers']['Content-Type']='application/json'
# responseObject['headers']['Access-Control-Allow-Origin']='*'
responseObject['data'] = json.dumps(transactionResponse, sort_keys=True,default=str)
#k = json.loads(responseObject['body'])
#print(k['uin'])
return responseObject
|
[
"sys.exit",
"pymysql.connect",
"logging.getLogger",
"json.dumps"
] |
[((224, 243), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (241, 243), False, 'import logging\n'), ((309, 387), 'pymysql.connect', 'pymysql.connect', ([], {'host': 'rds_endpoint', 'user': 'username', 'passwd': 'password', 'db': 'db_name'}), '(host=rds_endpoint, user=username, passwd=password, db=db_name)\n', (324, 387), False, 'import pymysql\n'), ((1282, 1342), 'json.dumps', 'json.dumps', (['transactionResponse'], {'sort_keys': '(True)', 'default': 'str'}), '(transactionResponse, sort_keys=True, default=str)\n', (1292, 1342), False, 'import json\n'), ((534, 544), 'sys.exit', 'sys.exit', ([], {}), '()\n', (542, 544), False, 'import sys\n')]
|
import Globals
import tkinter as tk
from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL,simpledialog,\
PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, \
FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk
import os
from os.path import normpath, basename
from PIL import Image, ImageTk
import cv2
from cv2 import imread, IMREAD_ANYCOLOR, IMREAD_ANYDEPTH, imwrite
import pydicom
from matplotlib.figure import Figure
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
import matplotlib as mpl
from matplotlib import cm
import matplotlib.pyplot as plt
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg, NavigationToolbar2Tk
import numpy as np
def drawProfiles(even):
#LAG DVH PLOT
return
def processDoseplan_usingReferencePoint(only_one):
################ RT Plan ######################
#Find each coordinate in mm to isocenter relative to first element in doseplan
iso_1 = abs(Globals.DVH_dataset_doseplan.ImagePositionPatient[0] - Globals.DVH_isocenter_mm[0])
iso_2 = abs(Globals.DVH_dataset_doseplan.ImagePositionPatient[1] - Globals.DVH_isocenter_mm[1])
iso_3 = abs(Globals.DVH_dataset_doseplan.ImagePositionPatient[2] - Globals.DVH_isocenter_mm[2])
#Given as [x,y,z] in patient coordinates
Globals.DVH_isocenter_mm = [iso_1, iso_2, iso_3]
try:
Globals.DVH_vertical = int(Globals.DVH_vertical)
except:
messagebox.showerror("Error", "Could not read the vertical displacements\n (Code: displacements to integer)")
return
try:
Globals.DVH_lateral = int(Globals.DVH_lateral)
except:
messagebox.showerror("Error", "Could not read the lateral displacements\n (Code: displacements to integer)")
return
try:
Globals.DVH_longitudinal = int(Globals.DVH_longitudinal)
except:
messagebox.showerror("Error", "Could not read the longitudinal displacements\n (Code: displacements to integer)")
return
lateral = Globals.DVH_lateral
longit = Globals.DVHlongitudinal
vertical = Globals.DVH_vertical
isocenter_px = np.zeros(3)
distance_in_doseplan_ROI_reference_point_px = []
if(Globals.DVH_dataset_doseplan.PixelSpacing==[1, 1]):
#make isocenter coordinates into pixel values
isocenter_px[0] = np.round(iso_1)
isocenter_px[1] = np.round(iso_2)
isocenter_px[2] = np.round(iso_3)
#find the pixel distance from reference point to ROI corners
distance_in_doseplan_ROI_reference_point_px.append([np.round(Globals.DVH_distance_reference_point_ROI[0][0]),\
np.round(Globals.DVH_distance_reference_point_ROI[0][1])])
distance_in_doseplan_ROI_reference_point_px.append([np.round(Globals.DVH_distance_reference_point_ROI[1][0]),\
np.round(Globals.DVH_distance_reference_point_ROI[1][1])])
distance_in_doseplan_ROI_reference_point_px.append([np.round(Globals.DVH_distance_reference_point_ROI[2][0]),\
np.round(Globals.DVH_distance_reference_point_ROI[2][1])])
distance_in_doseplan_ROI_reference_point_px.append([np.round(Globals.DVH_distance_reference_point_ROI[3][0]),\
np.round(Globals.DVH_distance_reference_point_ROI[3][1])])
#Input to px
lateral_px = np.round(lateral)
vertical_px = np.round(vertical)
longit_px = np.round(longit)
#displacment to px
doseplan_lateral_displacement_px = np.round(Globals.DVH_doseplan_lateral_displacement)
doseplan_vertical_displacement_px = np.round(Globals.DVH_doseplan_vertical_displacement)
doseplan_longitudinal_displacement_px = np.round(Globals.DVH_doseplan_longitudianl_displacement)
elif(Globals.DVH_dataset_doseplan.PixelSpacing==[2, 2]):
#make isocenter coordinates into pixel values
isocenter_px[0] = np.round(iso_1/2)
isocenter_px[1] = np.round(iso_2/2)
isocenter_px[2] = np.round(iso_3/2)
#find the pixel distance from reference point to ROI corners
distance_in_doseplan_ROI_reference_point_px.append([np.round((Globals.DVH_distance_reference_point_ROI[0][0])/2),\
np.round((Globals.DVH_distance_reference_point_ROI[0][1])/2)])
distance_in_doseplan_ROI_reference_point_px.append([np.round((Globals.DVH_distance_reference_point_ROI[1][0])/2),\
np.round((Globals.DVH_distance_reference_point_ROI[1][1])/2)])
distance_in_doseplan_ROI_reference_point_px.append([np.round((Globals.DVH_distance_reference_point_ROI[2][0])/2),\
np.round((Globals.DVH_distance_reference_point_ROI[2][1])/2)])
distance_in_doseplan_ROI_reference_point_px.append([np.round((Globals.DVH_distance_reference_point_ROI[3][0])/2),\
np.round((Globals.DVH_distance_reference_point_ROI[3][1])/2)])
#Input to px
lateral_px = np.round(lateral/2)
vertical_px = np.round(vertical/2)
longit_px = np.round(longit/2)
#displacment to pc
doseplan_lateral_displacement_px = np.round((Globals.DVH_doseplan_lateral_displacement)/2)
doseplan_vertical_displacement_px = np.round((Globals.DVH_doseplan_vertical_displacement)/2)
doseplan_longitudinal_displacement_px = np.round((Globals.DVH_doseplan_longitudianl_displacement)/2)
else:
#make isocenter coordinates into pixel values
isocenter_px[0] = np.round(iso_1/3)
isocenter_px[1] = np.round(iso_2/3)
isocenter_px[2] = np.round(iso_3/3)
#find the pixel distance from reference point to ROI corners
distance_in_doseplan_ROI_reference_point_px.append([np.round((Globals.DVH_distance_reference_point_ROI[0][0])/3),\
np.round((Globals.DVH_distance_reference_point_ROI[0][1])/3)])
distance_in_doseplan_ROI_reference_point_px.append([np.round((Globals.DVH_distance_reference_point_ROI[1][0])/3),\
np.round((Globals.DVH_distance_reference_point_ROI[1][1])/3)])
distance_in_doseplan_ROI_reference_point_px.append([np.round((Globals.DVH_distance_reference_point_ROI[2][0])/3),\
np.round((Globals.DVH_distance_reference_point_ROI[2][1])/3)])
distance_in_doseplan_ROI_reference_point_px.append([np.round((Globals.DVH_distance_reference_point_ROI[3][0])/3),\
np.round((Globals.DVH_distance_reference_point_ROI[3][1])/3)])
#Input to px
lateral_px = np.round(lateral/3)
vertical_px = np.round(vertical/3)
longit_px = np.round(longit/3)
#displacment to pc
doseplan_lateral_displacement_px = np.round((Globals.DVH_doseplan_lateral_displacement)/3)
doseplan_vertical_displacement_px = np.round((Globals.DVH_doseplan_vertical_displacement)/3)
doseplan_longitudinal_displacement_px = np.round((Globals.DVH_doseplan_longitudianl_displacement)/3)
temp_ref_point_doseplan = np.zeros(3)
#Finding reference point in doseplan
if(Globals.DVH_doseplan_patient_position=='HFS'):
temp_ref_point_doseplan[0] = int(isocenter_px[0]+ doseplan_lateral_displacement_px - lateral_px)
temp_ref_point_doseplan[1] = int(isocenter_px[1]- doseplan_vertical_displacement_px + vertical_px)
temp_ref_point_doseplan[2] = int(isocenter_px[2]+ doseplan_longitudinal_displacement_px - longit_px)
elif(Globals.DVH_doseplan_patient_position=='HFP'):
temp_ref_point_doseplan[0] = isocenter_px[0]- doseplan_lateral_displacement_px+ lateral_px
temp_ref_point_doseplan[1] = isocenter_px[1]+ doseplan_vertical_displacement_px - vertical_px
temp_ref_point_doseplan[2] = isocenter_px[2]+ doseplan_longitudinal_displacement_px - longit_px
elif(Globals.DVH_doseplan_patient_position=='HFDR'):
temp_ref_point_doseplan[0] = isocenter_px[0]- doseplan_vertical_displacement_px + vertical_px
temp_ref_point_doseplan[1] = isocenter_px[1]+ doseplan_lateral_displacement_px - lateral_px
temp_ref_point_doseplan[2] = isocenter_px[2]+ doseplan_longitudinal_displacement_px - longit_px
elif(Globals.DVH_doseplan_patient_position=='HFDL'):
temp_ref_point_doseplan[0] = isocenter_px[0]+ doseplan_vertical_displacement_px - vertical_px
temp_ref_point_doseplan[1] = isocenter_px[1]- doseplan_lateral_displacement_px + lateral_px
temp_ref_point_doseplan[2] = isocenter_px[2]+ doseplan_longitudinal_displacement_px - longit_px
elif(Globals.DVH_doseplan_patient_position=='FFS'):
temp_ref_point_doseplan[0] = isocenter_px[0]- doseplan_lateral_displacement_px + lateral_px
temp_ref_point_doseplan[1] = isocenter_px[1]+ doseplan_vertical_displacement_px - vertical_px
temp_ref_point_doseplan[2] = isocenter_px[2]- doseplan_longitudinal_displacement_px + longit_px
elif(Globals.DVH_doseplan_patient_position=='FFP'):
temp_ref_point_doseplan[0] = isocenter_px[0]+ doseplan_lateral_displacement_px- lateral_px
temp_ref_point_doseplan[1] = isocenter_px[1]- doseplan_vertical_displacement_px + vertical_px
temp_ref_point_doseplan[2] = isocenter_px[2]- doseplan_longitudinal_displacement_px + longit_px
elif(Globals.DVH_doseplan_patient_position=='FFDR'):
temp_ref_point_doseplan[0] = isocenter_px[0]- doseplan_vertical_displacement_px + vertical_px
temp_ref_point_doseplan[1] = isocenter_px[1]- doseplan_lateral_displacement_px + lateral_px
temp_ref_point_doseplan[2] = isocenter_px[2]- doseplan_longitudinal_displacement_px + longit_px
else:
temp_ref_point_doseplan[0] = isocenter_px[0] + doseplan_vertical_displacement_px - vertical_px
temp_ref_point_doseplan[1] = isocenter_px[1] + doseplan_lateral_displacement_px - lateral_px
temp_ref_point_doseplan[2] = isocenter_px[2]- doseplan_longitudinal_displacement_px + longit_px
Globals.DVH_reference_point_in_doseplan = temp_ref_point_doseplan
reference_point = np.zeros(3)
######################## Doseplan ##################################
#dataset_swapped is now the dataset entered the same way as expected with film (slice, rows, columns)
#isocenter_px and reference_point is not turned according to the doseplan and film orientation.
if(Globals.DVH_dataset_doseplan.ImageOrientationPatient==[1, 0, 0, 0, 1, 0]):
reference_point[0] = temp_ref_point_doseplan[2]
reference_point[1] = temp_ref_point_doseplan[1]
reference_point[2] = temp_ref_point_doseplan[0]
if(Globals.DVH_film_orientation.get()=='Coronal'):
dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 0,1)
temp_ref = reference_point[0]
reference_point[0] = reference_point[1]
reference_point[1] = temp_ref
elif(Globals.DVH_film_orientation.get()=='Sagittal'):
dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 0,2)
temp_ref = reference_point[0]
reference_point[0] = reference_point[2]
reference_point[2] = temp_ref
elif(Globals.DVH_film_orientation.get()=='Axial'):
dataset_swapped = Globals.DVH_dataset_doseplan.pixel_array
else:
messagebox.showerror("Error", "Something has gone wrong here.")
clearAll()
return
elif(Globals.DVH_dataset_doseplan.ImageOrientationPatient==[1, 0, 0, 0, 0, 1]):
reference_point[0] = temp_ref_point_doseplan[1]
reference_point[1] = temp_ref_point_doseplan[2]
reference_point[2] = temp_ref_point_doseplan[0]
if(Globals.DVH_film_orientation.get()=='Coronal'):
dataset_swapped = Globals.DVH_dataset_doseplan.pixel_array
elif(Globals.DCH_film_orientation.get()=='Sagittal'):
dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 0,2)
temp_ref = reference_point[0]
reference_point[0] = reference_point[2]
reference_point[2] = temp_ref
dataset_swapped = np.swapaxes(dataset_swapped, 1,2)
temp_ref = reference_point[1]
reference_point[1] = reference_point[2]
reference_point[2] = temp_ref
elif(Globals.DVH_film_orientation.get()=='Axial'):
dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 0,1)
temp_ref = reference_point[0]
reference_point[0] = reference_point[1]
reference_point[1] = temp_ref
else:
messagebox.showerror("Error", "Something has gone wrong.")
clearAll()
return
elif(Globals.DVH_dataset_doseplan.ImageOrientationPatient==[0, 1, 0, 1, 0, 0]):
reference_point[0] = temp_ref_point_doseplan[2]
reference_point[1] = temp_ref_point_doseplan[0]
reference_point[2] = temp_ref_point_doseplan[1]
if(Globals.DVH_film_orientation.get()=='Coronal'):
dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 0,2)
temp_ref = reference_point[0]
reference_point[0] = reference_point[2]
reference_point[2] = temp_ref
dataset_swapped = np.swapaxes(dataset_swapped, 1,2)
temp_ref = reference_point[1]
reference_point[1] = reference_point[2]
reference_point[2] = temp_ref
elif(Globals.DVH_film_orientation.get()=='Sagittal'):
dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 0,1)
temp_ref = reference_point[0]
reference_point[0] = reference_point[1]
reference_point[1] = temp_ref
dataset_swapped = np.swapaxes(dataset_swapped, 1,2)
temp_ref = reference_point[1]
reference_point[1] = reference_point[2]
reference_point[2] = temp_ref
elif(Globals.DVH_film_orientation.get()=='Axial'):
dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 1,2)
temp_ref = reference_point[1]
reference_point[1] = reference_point[2]
reference_point[2] = temp_ref
else:
messagebox.showerror("Error", "Something has gone wrong.")
clearAll()
return
elif(Globals.DVH_dataset_doseplan.ImageOrientationPatient==[0, 1, 0, 0, 0, 1]):
reference_point[0] = temp_ref_point_doseplan[0]
reference_point[1] = temp_ref_point_doseplan[2]
reference_point[2] = temp_ref_point_doseplan[1]
if(Globals.DVH_film_orientation.get()=='Coronal'):
dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 0,2)
temp_ref = reference_point[0]
reference_point[0] = reference_point[2]
reference_point[2] = temp_ref
elif(Globals.DVH_film_orientation.get()=='Sagittal'):
dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 1,2)
temp_ref = reference_point[1]
reference_point[1] = reference_point[2]
reference_point[2] = temp_ref
elif(Globals.DVH_film_orientation.get()=='Axial'):
dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 0,1)
temp_ref = reference_point[0]
reference_point[0] = reference_point[1]
reference_point[1] = temp_ref
dataset_swapped = np.swapaxes(dataset_swapped, 1,2)
temp_ref = reference_point[1]
reference_point[1] = reference_point[2]
reference_point[2] = temp_ref
else:
messagebox.showerror("Error", "Something has gone wrong.")
clearAll()
return
elif(Globals.DVH_dataset_doseplan.ImageOrientationPatient==[0, 0, 1, 1, 0, 0]):
reference_point[0] = temp_ref_point_doseplan[1]
reference_point[1] = temp_ref_point_doseplan[0]
reference_point[2] = temp_ref_point_doseplan[2]
if(Globals.DVH_film_orientation.get()=='Coronal'):
dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 1,2)
temp_ref = reference_point[1]
reference_point[1] = reference_point[2]
reference_point[2] = temp_ref
elif(Globals.DVH_film_orientation.get()=='Sagittal'):
dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 0,1)
temp_ref = reference_point[0]
reference_point[0] = reference_point[1]
reference_point[1] = temp_ref
elif(Globals.DVH_film_orientation.get()=='Axial'):
dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 0,1)
temp_ref = reference_point[0]
reference_point[0] = reference_point[1]
reference_point[1] = temp_ref
dataset_swapped = np.swapaxes(dataset_swapped, 0,2)
temp_ref = reference_point[0]
reference_point[0] = reference_point[2]
reference_point[2] = temp_ref
else:
messagebox.showerror("Error", "Something has gone wrong.")
clearAll()
return
elif(Globals.DVH_dataset_doseplan.ImageOrientationPatient==[0, 0, 1, 0, 1, 0]):
reference_point[0] = temp_ref_point_doseplan[0]
reference_point[1] = temp_ref_point_doseplan[1]
reference_point[2] = temp_ref_point_doseplan[2]
if(Globals.DVH_film_orientation.get()=='Coronal'):
dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 0,2)
temp_ref = reference_point[0]
reference_point[0] = reference_point[2]
reference_point[2] = temp_ref
dataset_swapped = np.swapaxes(dataset_swapped, 0,1)
temp_ref = reference_point[0]
reference_point[0] = reference_point[1]
reference_point[1] = temp_ref
elif(Globals.DVH_film_orientation.get()=='Sagittal'):
dataset_swapped = Globals.DVH_dataset_doseplan.pixel_array
elif(Globals.DCH_film_orientation.get()=='Axial'):
dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 0,2)
temp_ref = reference_point[0]
reference_point[0] = reference_point[2]
reference_point[2] = temp_ref
else:
messagebox.showerror("Error", "Something has gone wrong.")
clearAll()
return
else:
messagebox.showerror("Error", "Something has gone wrong.")
clearAll()
return
if(reference_point[0]<0 or reference_point[0]>dataset_swapped.shape[0]):
messagebox.showerror("Error", "Reference point is outside of dosematrix\n\
(Code: first dimension, number of frames in dosematrix)")
return
if(reference_point[1]<0 or reference_point[1]>dataset_swapped.shape[1]):
messagebox.showerror("Error", "Reference point is outside of dosematrix\n\
(Code: second dimension, rows in dosematrix)")
return
if(reference_point[2]<0 or reference_point[2]>dataset_swapped.shape[2]):
messagebox.showerror("Error", "Reference point is outside of dosematrix\n\
(Code: third dimension, columns in dosematrix)")
return
dose_slice = dataset_swapped[int(reference_point[0]),:,:]
#calculate the coordinates of the Region of Interest in doseplan (marked on the film)
#and checks if it actualy exists in dosematrix
doseplan_ROI_coords = []
top_left_test_side = False; top_left_test_down = False
top_right_test_side = False; top_right_test_down = False
bottom_left_test_side = False; bottom_left_test_down = False
bottom_right_test_side = False; bottom_right_test_down = False
top_left_side_corr = 0; top_left_down_corr = 0
top_right_side_corr = 0; top_right_down_corr = 0
bottom_left_side_corr = 0; bottom_left_down_corr = 0
bottom_right_side_corr = 0; bottom_right_down_corr = 0
top_left_to_side = reference_point[2] - distance_in_doseplan_ROI_reference_point_px[0][0]
top_left_down = reference_point[1] - distance_in_doseplan_ROI_reference_point_px[0][1]
if(top_left_to_side < 0):
top_left_test_side = True
top_left_side_corr = abs(top_left_to_side)
top_left_to_side = 0
if(top_left_to_side > dose_slice.shape[1]):
messagebox.showerror("Fatal Error", "Fatal error: marked ROI is out of range in doseplan. Try again")
clearAll()
return
if(top_left_down < 0):
top_left_test_down = True
top_left_down_corr = abs(top_left_down)
top_left_down = 0
if(top_left_down > dose_slice.shape[0]):
messagebox.showerror("Fatal Error", "Fatal error: marked ROI is out of range in doseplan. Try again")
clearAll()
return
top_right_to_side = reference_point[2] - distance_in_doseplan_ROI_reference_point_px[1][0]
top_right_down = reference_point[1] - distance_in_doseplan_ROI_reference_point_px[1][1]
if(top_right_to_side < 0):
messagebox.showerror("Fatal Error", "Fatal error: marked ROI is out of range in doseplan. Try again")
clearAll()
return
if(top_right_to_side > dose_slice.shape[1]):
top_right_test_side = True
top_right_side_corr = top_right_to_side - dose_slice.shape[1]
top_right_to_side = dose_slice.shape[1]
if(top_right_down < 0):
top_right_test_down = True
top_right_down_corr = abs(top_right_down)
top_right_down = 0
if(top_right_down > dose_slice.shape[0]):
messagebox.showerror("Fatal Error", "Fatal error: marked ROI is out of range in doseplan. Try again")
clearAll()
return
bottom_left_to_side = reference_point[2] - distance_in_doseplan_ROI_reference_point_px[2][0]
bottom_left_down = reference_point[1] - distance_in_doseplan_ROI_reference_point_px[2][1]
if(bottom_left_to_side < 0):
bottom_left_test_side = True
bottom_left_side_corr = abs(bottom_left_to_side)
bottom_left_to_side = 0
if(bottom_left_to_side > dose_slice.shape[1]):
messagebox.showerror("Fatal Error", "Fatal error: marked ROI is out of range in doseplan. Try again")
clearAll()
return
if(bottom_left_down < 0):
messagebox.showerror("Fatal Error", "Fatal error: marked ROI is out of range in doseplan. Try again")
clearAll()
return
if(bottom_left_down > dose_slice.shape[0]):
bottom_left_down_corr = bottom_left_down - dose_slice.shape[0]
bottom_left_down = dose_slice.shape[0]
bottom_left_test_down = True
bottom_right_to_side = reference_point[2] - distance_in_doseplan_ROI_reference_point_px[3][0]
bottom_right_down = reference_point[1] - distance_in_doseplan_ROI_reference_point_px[3][1]
if(bottom_right_to_side < 0):
messagebox.showerror("Fatal Error", "Fatal error: marked ROI is out of range in doseplan. Try again")
clearAll()
return
if(bottom_right_to_side > dose_slice.shape[1]):
bottom_right_side_corr = bottom_right_to_side - dose_slice.shape[1]
bottom_right_to_side = dose_slice.shape[1]
bottom_right_test_side = True
if(bottom_right_down < 0):
messagebox.showerror("Fatal Error", "Fatal error: marked ROI is out of range in doseplan. Try again")
clearAll()
return
if(bottom_right_down > dose_slice.shape[0]):
bottom_right_down_corr = bottom_right_down - dose_slice.shape[0]
bottom_right_down = dose_slice.shape[0]
bottom_right_test_down = True
if(top_right_test_side or top_right_test_down or top_left_test_side or top_left_test_down \
or bottom_right_test_side or bottom_right_test_down or bottom_left_test_side or bottom_left_test_down):
ROI_info = "Left side: " + str(max(top_left_side_corr, bottom_left_side_corr)) + " pixels.\n"\
+ "Right side: " + str(max(top_right_side_corr, bottom_right_side_corr)) + " pixels.\n "\
+ "Top side: " + str(max(top_left_down_corr, top_right_down_corr)) + " pixels.\n"\
+ "Bottom side: " + str(max(bottom_left_down_corr, bottom_right_down_corr)) + " pixels."
messagebox.showinfo("ROI info", "The ROI marked on the film did not fit with the size of the doseplan and had to \
be cut.\n" + ROI_info )
doseplan_ROI_coords.append([top_left_to_side, top_left_down])
doseplan_ROI_coords.append([top_right_to_side, top_right_down])
doseplan_ROI_coords.append([bottom_left_to_side, bottom_left_down])
doseplan_ROI_coords.append([bottom_right_to_side, bottom_right_down])
if only_one:
Globals.DVH_doseplan_dataset_ROI = \
dose_slice[int(top_left_down):int(bottom_left_down), int(top_left_to_side):int(top_right_to_side)]
img=Globals.DVH_doseplan_dataset_ROI
if(Globals.DVH_dataset_doseplan.PixelSpacing==[1, 1]):
img = cv2.resize(img, dsize=(img.shape[1]*5,img.shape[0]*5))
elif(Globals.DVH_dataset_doseplan.PixelSpacing==[2, 2]):
img = cv2.resize(img, dsize=(img.shape[1]*10,img.shape[0]*10))
else:
img = cv2.resize(img, dsize=(img.shape[1]*15,img.shape[0]*15))
mx=np.max(img)
Globals.DVH_max_dose_doseplan = mx*Globals.DVH_dose_scaling_doseplan
img = img/mx
PIL_img_doseplan_ROI = Image.fromarray(np.uint8(cm.viridis(img)*255))
wid = PIL_img_doseplan_ROI.width;heig = PIL_img_doseplan_ROI.height
doseplan_canvas = tk.Canvas(Globals.DVH_film_panedwindow)
doseplan_canvas.grid(row=2, column=0, sticky=N+S+W+E)
Globals.DVH_film_panedwindow.add(doseplan_canvas, \
height=max(heig, Globals.profiles_doseplan_text_image.height()), \
width=wid + Globals.profiles_doseplan_text_image.width())
doseplan_canvas.config(bg='#ffffff', relief=FLAT, highlightthickness=0, \
height=max(heig, Globals.profiles_doseplan_text_image.height()), \
width=wid + Globals.profiles_doseplan_text_image.width())
Globals.DVH_doseplan_write_image = tk.Canvas(doseplan_canvas)
Globals.DVH_doseplan_write_image.grid(row=0,column=1,sticky=N+S+W+E)
Globals.DVH_doseplan_write_image.config(bg='#ffffff', relief=FLAT, highlightthickness=0, width=wid, height=heig)
doseplan_text_image_canvas = tk.Canvas(doseplan_canvas)
doseplan_text_image_canvas.grid(row=0,column=0,sticky=N+S+W+E)
doseplan_text_image_canvas.config(bg='#ffffff', relief=FLAT, highlightthickness=0, \
width=Globals.profiles_doseplan_text_image.width(), height=Globals.profiles_doseplan_text_image.height())
scaled_image_visual = PIL_img_doseplan_ROI
scaled_image_visual = ImageTk.PhotoImage(image=scaled_image_visual)
Globals.DVH_doseplan_write_image_width = scaled_image_visual.width()
Globals.DVH_doseplan_write_image_height = scaled_image_visual.height()
Globals.DVH_doseplan_write_image.create_image(0,0,image=scaled_image_visual, anchor="nw")
Globals.DVH_doseplan_write_image.image = scaled_image_visual
doseplan_text_image_canvas.create_image(0,0,image=Globals.profiles_doseplan_text_image, anchor="nw")
doseplan_text_image_canvas.image=Globals.profiles_doseplan_text_image
drawProfiles(False)
else:
img=dose_slice[int(top_left_down):int(bottom_left_down), int(top_left_to_side):int(top_right_to_side)]
Globals.DVH_doseplan_dataset_ROI_several.append(img)
Globals.DVH_number_of_doseplans+=1
if(Globals.DVH_dataset_doseplan.PixelSpacing==[1, 1]):
Globals.DVH_several_img.append(cv2.resize(img, dsize=(img.shape[1]*5,img.shape[0]*5)))
elif(Globals.DVH_dataset_doseplan.PixelSpacing==[2, 2]):
Globals.DVH_several_img.append(cv2.resize(img, dsize=(img.shape[1]*10,img.shape[0]*10)))
else:
Globals.DVH_several_img.append(cv2.resize(img, dsize=(img.shape[1]*15,img.shape[0]*15)))
def processDoseplan_usingIsocenter(only_one):
    """Cut the film's marked ROI out of the doseplan, positioned via the RT-plan isocenter.

    Steps:
      1. Convert the isocenter from patient coordinates (mm) to pixel indices
         relative to the doseplan origin (ImagePositionPatient).
      2. Reorient the dose matrix so axis 0 is the slice direction matching the
         film orientation (Coronal/Sagittal/Axial), swapping the reference-point
         components in lockstep with every axis swap.
      3. Select the dose slice at the film position (plus Globals.DVH_offset),
         clip the marked ROI against the slice bounds, and extract it.
      4. only_one=True: render the ROI into the DVH tab immediately.
         only_one=False: append the ROI to the lists used for multi-plan summing.

    Parameters:
        only_one: True when a single doseplan is used, False when several
            doseplans are uploaded and combined later.

    Side effects: writes numerous Globals.DVH_* fields; shows error boxes and
    calls clearAll() on unrecoverable geometry errors.
    """
    ################ RT Plan ######################
    #Find each coordinate in mm to isocenter relative to first element in doseplan
    iso_1 = abs(Globals.DVH_dataset_doseplan.ImagePositionPatient[0] - Globals.DVH_isocenter_mm[0])
    iso_2 = abs(Globals.DVH_dataset_doseplan.ImagePositionPatient[1] - Globals.DVH_isocenter_mm[1])
    iso_3 = abs(Globals.DVH_dataset_doseplan.ImagePositionPatient[2] - Globals.DVH_isocenter_mm[2])
    #Given as [x,y,z] in patient coordinates
    Globals.DVH_isocenter_mm = [iso_1, iso_2, iso_3]
    #Isocenter in pixel relative to the first element in the doseplan
    isocenter_px = np.zeros(3)
    distance_in_doseplan_ROI_reference_point_px = []
    # Pixel spacing is restricted to 1, 2 or 3 mm (validated in UploadDoseplan),
    # so the mm -> pixel conversion is a division by the spacing followed by rounding.
    if(Globals.DVH_dataset_doseplan.PixelSpacing==[1, 1]):
        isocenter_px[0] = np.round(iso_1)#np.round(Globals.profiles_isocenter_mm[0])
        isocenter_px[1] = np.round(iso_2)#np.round(Globals.profiles_isocenter_mm[1])
        isocenter_px[2] = np.round(iso_3)#np.round(Globals.profiles_isocenter_mm[2])
        #Change distance in film to pixel in doseplan
        distance_in_doseplan_ROI_reference_point_px.append([np.round(Globals.DVH_distance_isocenter_ROI[0][0]),\
            np.round(Globals.DVH_distance_isocenter_ROI[0][1])])
        distance_in_doseplan_ROI_reference_point_px.append([np.round(Globals.DVH_distance_isocenter_ROI[1][0]),\
            np.round(Globals.DVH_distance_isocenter_ROI[1][1])])
        distance_in_doseplan_ROI_reference_point_px.append([np.round(Globals.DVH_distance_isocenter_ROI[2][0]),\
            np.round(Globals.DVH_distance_isocenter_ROI[2][1])])
        distance_in_doseplan_ROI_reference_point_px.append([np.round(Globals.DVH_distance_isocenter_ROI[3][0]),\
            np.round(Globals.DVH_distance_isocenter_ROI[3][1])])
    elif(Globals.DVH_dataset_doseplan.PixelSpacing==[2, 2]):
        isocenter_px[0] = np.round(iso_1/2)#np.round(Globals.profiles_isocenter_mm[0]/2)
        isocenter_px[1] = np.round(iso_2/2)#np.round(Globals.profiles_isocenter_mm[1]/2)
        isocenter_px[2] = np.round(iso_3/2)#np.round(Globals.profiles_isocenter_mm[2]/2)
        #Change distance in film to pixel in doseplan
        distance_in_doseplan_ROI_reference_point_px.append([np.round((Globals.DVH_distance_isocenter_ROI[0][0])/2),\
            np.round((Globals.DVH_distance_isocenter_ROI[0][1])/2)])
        distance_in_doseplan_ROI_reference_point_px.append([np.round((Globals.DVH_distance_isocenter_ROI[1][0])/2),\
            np.round((Globals.DVH_distance_isocenter_ROI[1][1])/2)])
        distance_in_doseplan_ROI_reference_point_px.append([np.round((Globals.DVH_distance_isocenter_ROI[2][0])/2),\
            np.round((Globals.DVH_distance_isocenter_ROI[2][1])/2)])
        distance_in_doseplan_ROI_reference_point_px.append([np.round((Globals.DVH_distance_isocenter_ROI[3][0])/2),\
            np.round((Globals.DVH_distance_isocenter_ROI[3][1])/2)])
    else:
        isocenter_px[0] = np.round(iso_1/3)#np.round(Globals.profiles_isocenter_mm[0]/3)
        isocenter_px[1] = np.round(iso_2/3)#np.round(Globals.profiles_isocenter_mm[1]/3)
        isocenter_px[2] = np.round(iso_3/3)#np.round(Globals.profiles_isocenter_mm[2]/3)
        #Change distance in film to pixel in doseplan
        distance_in_doseplan_ROI_reference_point_px.append([np.round((Globals.DVH_distance_isocenter_ROI[0][0])/3),\
            np.round((Globals.DVH_distance_isocenter_ROI[0][1])/3)])
        distance_in_doseplan_ROI_reference_point_px.append([np.round((Globals.DVH_distance_isocenter_ROI[1][0])/3),\
            np.round((Globals.DVH_distance_isocenter_ROI[1][1])/3)])
        distance_in_doseplan_ROI_reference_point_px.append([np.round((Globals.DVH_distance_isocenter_ROI[2][0])/3),\
            np.round((Globals.DVH_distance_isocenter_ROI[2][1])/3)])
        distance_in_doseplan_ROI_reference_point_px.append([np.round((Globals.DVH_distance_isocenter_ROI[3][0])/3),\
            np.round((Globals.DVH_distance_isocenter_ROI[3][1])/3)])
    reference_point = np.zeros(3)
    ######################## Doseplan ##################################
    #dataset_swapped is now the dataset entered the same way as expected with film (slice, rows, columns)
    #isocenter_px and reference_point is not turned according to the doseplan and film orientation.
    # Each branch below handles one of the six axis-aligned ImageOrientationPatient
    # vectors accepted by UploadDoseplan. Inside each branch, np.swapaxes brings the
    # film-orientation slice axis to position 0, and every matrix-axis swap is
    # mirrored by swapping the corresponding reference_point components.
    if(Globals.DVH_dataset_doseplan.ImageOrientationPatient==[1, 0, 0, 0, 1, 0]):
        reference_point[0] = isocenter_px[2]
        reference_point[1] = isocenter_px[1]
        reference_point[2] = isocenter_px[0]
        if(Globals.DVH_film_orientation.get()=='Coronal'):
            dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 0,1)
            temp_ref = reference_point[0]
            reference_point[0] = reference_point[1]
            reference_point[1] = temp_ref
        elif(Globals.DVH_film_orientation.get()=='Sagittal'):
            dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 0,2)
            temp_ref = reference_point[0]
            reference_point[0] = reference_point[2]
            reference_point[2] = temp_ref
        elif(Globals.DVH_film_orientation.get()=='Axial'):
            dataset_swapped = Globals.DVH_dataset_doseplan.pixel_array
        else:
            messagebox.showerror("Error", "Something has gone wrong here.")
            clearAll()
            return
    elif(Globals.DVH_dataset_doseplan.ImageOrientationPatient==[1, 0, 0, 0, 0, 1]):
        reference_point[0] = isocenter_px[1]
        reference_point[1] = isocenter_px[2]
        reference_point[2] = isocenter_px[0]
        if(Globals.DVH_film_orientation.get()=='Coronal'):
            dataset_swapped = Globals.DVH_dataset_doseplan.pixel_array
        elif(Globals.DVH_film_orientation.get()=='Sagittal'):
            dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 0,2)
            temp_ref = reference_point[0]
            reference_point[0] = reference_point[2]
            reference_point[2] = temp_ref
            dataset_swapped = np.swapaxes(dataset_swapped, 1,2)
            temp_ref = reference_point[1]
            reference_point[1] = reference_point[2]
            reference_point[2] = temp_ref
        elif(Globals.DVH_film_orientation.get()=='Axial'):
            dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 0,1)
            temp_ref = reference_point[0]
            reference_point[0] = reference_point[1]
            reference_point[1] = temp_ref
        else:
            messagebox.showerror("Error", "Something has gone wrong.")
            clearAll()
            return
    elif(Globals.DVH_dataset_doseplan.ImageOrientationPatient==[0, 1, 0, 1, 0, 0]):
        reference_point[0] = isocenter_px[2]
        reference_point[1] = isocenter_px[0]
        reference_point[2] = isocenter_px[1]
        if(Globals.DVH_film_orientation.get()=='Coronal'):
            dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 0,2)
            temp_ref = reference_point[0]
            reference_point[0] = reference_point[2]
            reference_point[2] = temp_ref
            dataset_swapped = np.swapaxes(dataset_swapped, 1,2)
            temp_ref = reference_point[1]
            reference_point[1] = reference_point[2]
            reference_point[2] = temp_ref
        elif(Globals.DVH_film_orientation.get()=='Sagittal'):
            dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 0,1)
            temp_ref = reference_point[0]
            reference_point[0] = reference_point[1]
            reference_point[1] = temp_ref
            dataset_swapped = np.swapaxes(dataset_swapped, 1,2)
            temp_ref = reference_point[1]
            reference_point[1] = reference_point[2]
            reference_point[2] = temp_ref
        elif(Globals.DVH_film_orientation.get()=='Axial'):
            dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 1,2)
            temp_ref = reference_point[1]
            reference_point[1] = reference_point[2]
            reference_point[2] = temp_ref
        else:
            messagebox.showerror("Error", "Something has gone wrong.")
            clearAll()
            return
    elif(Globals.DVH_dataset_doseplan.ImageOrientationPatient==[0, 1, 0, 0, 0, 1]):
        reference_point[0] = isocenter_px[0]
        reference_point[1] = isocenter_px[2]
        reference_point[2] = isocenter_px[1]
        if(Globals.DVH_film_orientation.get()=='Coronal'):
            dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 0,2)
            temp_ref = reference_point[0]
            reference_point[0] = reference_point[2]
            reference_point[2] = temp_ref
        elif(Globals.DVH_film_orientation.get()=='Sagittal'):
            dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 1,2)
            temp_ref = reference_point[1]
            reference_point[1] = reference_point[2]
            reference_point[2] = temp_ref
        elif(Globals.DVH_film_orientation.get()=='Axial'):
            dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 0,1)
            temp_ref = reference_point[0]
            reference_point[0] = reference_point[1]
            reference_point[1] = temp_ref
            dataset_swapped = np.swapaxes(dataset_swapped, 1,2)
            temp_ref = reference_point[1]
            reference_point[1] = reference_point[2]
            reference_point[2] = temp_ref
        else:
            messagebox.showerror("Error", "Something has gone wrong.")
            clearAll()
            return
    elif(Globals.DVH_dataset_doseplan.ImageOrientationPatient==[0, 0, 1, 1, 0, 0]):
        reference_point[0] = isocenter_px[1]
        reference_point[1] = isocenter_px[0]
        reference_point[2] = isocenter_px[2]
        if(Globals.DVH_film_orientation.get()=='Coronal'):
            dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 1,2)
            temp_ref = reference_point[1]
            reference_point[1] = reference_point[2]
            reference_point[2] = temp_ref
        elif(Globals.DVH_film_orientation.get()=='Sagittal'):
            dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 0,1)
            temp_ref = reference_point[0]
            reference_point[0] = reference_point[1]
            reference_point[1] = temp_ref
        elif(Globals.DVH_film_orientation.get()=='Axial'):
            dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 0,1)
            temp_ref = reference_point[0]
            reference_point[0] = reference_point[1]
            reference_point[1] = temp_ref
            dataset_swapped = np.swapaxes(dataset_swapped, 0,2)
            temp_ref = reference_point[0]
            reference_point[0] = reference_point[2]
            reference_point[2] = temp_ref
        else:
            messagebox.showerror("Error", "Something has gone wrong.")
            clearAll()
            return
    elif(Globals.DVH_dataset_doseplan.ImageOrientationPatient==[0, 0, 1, 0, 1, 0]):
        reference_point[0] = isocenter_px[0]
        reference_point[1] = isocenter_px[1]
        reference_point[2] = isocenter_px[2]
        if(Globals.DVH_film_orientation.get()=='Coronal'):
            dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 0,2)
            temp_ref = reference_point[0]
            reference_point[0] = reference_point[2]
            reference_point[2] = temp_ref
            dataset_swapped = np.swapaxes(dataset_swapped, 0,1)
            temp_ref = reference_point[0]
            reference_point[0] = reference_point[1]
            reference_point[1] = temp_ref
        elif(Globals.DVH_film_orientation.get()=='Sagittal'):
            dataset_swapped = Globals.DVH_dataset_doseplan.pixel_array
        elif(Globals.DVH_film_orientation.get()=='Axial'):
            dataset_swapped = np.swapaxes(Globals.DVH_dataset_doseplan.pixel_array, 0,2)
            temp_ref = reference_point[0]
            reference_point[0] = reference_point[2]
            reference_point[2] = temp_ref
        else:
            messagebox.showerror("Error", "Something has gone wrong.")
            clearAll()
            return
    else:
        messagebox.showerror("Error", "Something has gone wrong.")
        clearAll()
        return
    ####################### Match film and doseplan ###############################
    #Pick the slice where the reference point is (this is the slice-position of the film)
    # The user-entered offset (mm) is converted to slices using the same 1/2/3 mm spacing.
    if Globals.DVH_dataset_doseplan.PixelSpacing == [1, 1]:
        offset = int(np.round(Globals.DVH_offset))
        dose_slice = dataset_swapped[int(reference_point[0]) + offset]
    elif Globals.DVH_dataset_doseplan.PixelSpacing == [2, 2]:
        offset = int(np.round(Globals.DVH_offset/2))
        # NOTE(review): int() placement differs from the other two branches;
        # equivalent here because reference_point holds np.round()-ed whole numbers.
        dose_slice = dataset_swapped[int(reference_point[0] + offset)]
    else:
        offset = int(np.round(Globals.DVH_offset/3))
        dose_slice = dataset_swapped[int(reference_point[0]) + offset]
    #calculate the coordinates of the Region of Interest in doseplan (marked on the film)
    #and checks if it actualy exists in dosematrix
    doseplan_ROI_coords = []
    # For each ROI corner: *_test_* records that the corner fell outside the dose
    # slice and was clamped; *_corr holds how many pixels were cut off, reported
    # to the user below. Corners that can never be clamped in a given direction
    # (e.g. the right edge past the left bound) are fatal errors instead.
    top_left_test_side = False; top_left_test_down = False
    top_right_test_side = False; top_right_test_down = False
    bottom_left_test_side = False; bottom_left_test_down = False
    bottom_right_test_side = False; bottom_right_test_down = False
    top_left_side_corr = 0; top_left_down_corr = 0
    top_right_side_corr = 0; top_right_down_corr = 0
    bottom_left_side_corr = 0; bottom_left_down_corr = 0
    bottom_right_side_corr = 0; bottom_right_down_corr = 0
    top_left_to_side = reference_point[2] - distance_in_doseplan_ROI_reference_point_px[0][0]
    top_left_down = reference_point[1] - distance_in_doseplan_ROI_reference_point_px[0][1]
    if(top_left_to_side < 0):
        top_left_test_side = True
        top_left_side_corr = abs(top_left_to_side)
        top_left_to_side = 0
    if(top_left_to_side > dose_slice.shape[1]):
        messagebox.showerror("Fatal Error", "Fatal error: marked ROI is out of range in doseplan. Try again")
        clearAll()
        return
    if(top_left_down < 0):
        top_left_test_down = True
        top_left_down_corr = abs(top_left_down)
        top_left_down = 0
    if(top_left_down > dose_slice.shape[0]):
        messagebox.showerror("Fatal Error", "Fatal error: marked ROI is out of range in doseplan. Try again")
        clearAll()
        return
    top_right_to_side = reference_point[2] - distance_in_doseplan_ROI_reference_point_px[1][0]
    top_right_down = reference_point[1] - distance_in_doseplan_ROI_reference_point_px[1][1]
    if(top_right_to_side < 0):
        messagebox.showerror("Fatal Error", "Fatal error: marked ROI is out of range in doseplan. Try again")
        clearAll()
        return
    if(top_right_to_side > dose_slice.shape[1]):
        top_right_test_side = True
        top_right_side_corr = top_right_to_side - dose_slice.shape[1]
        top_right_to_side = dose_slice.shape[1]
    if(top_right_down < 0):
        top_right_test_down = True
        top_right_down_corr = abs(top_right_down)
        top_right_down = 0
    if(top_right_down > dose_slice.shape[0]):
        messagebox.showerror("Fatal Error", "Fatal error: marked ROI is out of range in doseplan. Try again")
        clearAll()
        return
    bottom_left_to_side = reference_point[2] - distance_in_doseplan_ROI_reference_point_px[2][0]
    bottom_left_down = reference_point[1] - distance_in_doseplan_ROI_reference_point_px[2][1]
    if(bottom_left_to_side < 0):
        bottom_left_test_side = True
        bottom_left_side_corr = abs(bottom_left_to_side)
        bottom_left_to_side = 0
    if(bottom_left_to_side > dose_slice.shape[1]):
        messagebox.showerror("Fatal Error", "Fatal error: marked ROI is out of range in doseplan. Try again")
        clearAll()
        return
    if(bottom_left_down < 0):
        messagebox.showerror("Fatal Error", "Fatal error: marked ROI is out of range in doseplan. Try again")
        clearAll()
        return
    if(bottom_left_down > dose_slice.shape[0]):
        bottom_left_down_corr = bottom_left_down - dose_slice.shape[0]
        bottom_left_down = dose_slice.shape[0]
        bottom_left_test_down = True
    bottom_right_to_side = reference_point[2] - distance_in_doseplan_ROI_reference_point_px[3][0]
    bottom_right_down = reference_point[1] - distance_in_doseplan_ROI_reference_point_px[3][1]
    if(bottom_right_to_side < 0):
        messagebox.showerror("Fatal Error", "Fatal error: marked ROI is out of range in doseplan. Try again")
        clearAll()
        return
    if(bottom_right_to_side > dose_slice.shape[1]):
        bottom_right_side_corr = bottom_right_to_side - dose_slice.shape[1]
        bottom_right_to_side = dose_slice.shape[1]
        bottom_right_test_side = True
    if(bottom_right_down < 0):
        messagebox.showerror("Fatal Error", "Fatal error: marked ROI is out of range in doseplan. Try again")
        clearAll()
        return
    if(bottom_right_down > dose_slice.shape[0]):
        bottom_right_down_corr = bottom_right_down - dose_slice.shape[0]
        bottom_right_down = dose_slice.shape[0]
        bottom_right_test_down = True
    # If any corner was clamped, tell the user how much of the ROI was cut.
    if(top_right_test_side or top_right_test_down or top_left_test_side or top_left_test_down \
        or bottom_right_test_side or bottom_right_test_down or bottom_left_test_side or bottom_left_test_down):
        ROI_info = "Left side: " + str(max(top_left_side_corr, bottom_left_side_corr)) + " pixels.\n"\
            + "Right side: " + str(max(top_right_side_corr, bottom_right_side_corr)) + " pixels.\n "\
            + "Top side: " + str(max(top_left_down_corr, top_right_down_corr)) + " pixels.\n"\
            + "Bottom side: " + str(max(bottom_left_down_corr, bottom_right_down_corr)) + " pixels."
        messagebox.showinfo("ROI info", "The ROI marked on the film did not fit with the size of the doseplan and had to \
            be cut.\n" + ROI_info )
    doseplan_ROI_coords.append([top_left_to_side, top_left_down])
    doseplan_ROI_coords.append([top_right_to_side, top_right_down])
    doseplan_ROI_coords.append([bottom_left_to_side, bottom_left_down])
    doseplan_ROI_coords.append([bottom_right_to_side, bottom_right_down])
    #dose_slice = cv2.flip(dose_slice, 1)
    if(only_one):
        # Single-plan mode: extract the ROI, upscale it to film resolution
        # (5/10/15 px per dose pixel for 1/2/3 mm spacing), and render it.
        Globals.DVH_doseplan_dataset_ROI = \
        dose_slice[int(top_left_down):int(bottom_left_down), int(top_left_to_side):int(top_right_to_side)]
        img=Globals.DVH_doseplan_dataset_ROI
        if(Globals.DVH_dataset_doseplan.PixelSpacing==[1, 1]):
            img = cv2.resize(img, dsize=(img.shape[1]*5,img.shape[0]*5))
        elif(Globals.DVH_dataset_doseplan.PixelSpacing==[2, 2]):
            img = cv2.resize(img, dsize=(img.shape[1]*10,img.shape[0]*10))
        else:
            img = cv2.resize(img, dsize=(img.shape[1]*15,img.shape[0]*15))
        mx=np.max(img)
        Globals.DVH_max_dose_doseplan = mx*Globals.DVH_dose_scaling_doseplan
        max_dose = mx*Globals.DVH_dose_scaling_doseplan
        # Normalize to [0,1] for colormap rendering; dose values keep the scaling above.
        img = img/mx
        PIL_img_doseplan_ROI = Image.fromarray(np.uint8(cm.viridis(img)*255))
        wid = PIL_img_doseplan_ROI.width;heig = PIL_img_doseplan_ROI.height
        doseplan_canvas = tk.Canvas(Globals.DVH_film_panedwindow)
        doseplan_canvas.grid(row=2, column=0, sticky=N+S+W+E)
        Globals.DVH_film_panedwindow.add(doseplan_canvas, \
            height=max(heig, Globals.profiles_doseplan_text_image.height()), \
            width=wid + Globals.profiles_doseplan_text_image.width())
        doseplan_canvas.config(bg='#ffffff', relief=FLAT, highlightthickness=0, \
            height=max(heig, Globals.profiles_doseplan_text_image.height()), \
            width=wid + Globals.profiles_doseplan_text_image.width())
        Globals.DVH_doseplan_write_image = tk.Canvas(doseplan_canvas)
        Globals.DVH_doseplan_write_image.grid(row=0,column=1,sticky=N+S+W+E)
        Globals.DVH_doseplan_write_image.config(bg='#ffffff', relief=FLAT, highlightthickness=0, width=wid, height=heig)
        doseplan_text_image_canvas = tk.Canvas(doseplan_canvas)
        doseplan_text_image_canvas.grid(row=0,column=0,sticky=N+S+W+E)
        doseplan_text_image_canvas.config(bg='#ffffff', relief=FLAT, highlightthickness=0, \
            width=Globals.profiles_doseplan_text_image.width(), height=Globals.profiles_doseplan_text_image.height())
        scaled_image_visual = PIL_img_doseplan_ROI
        scaled_image_visual = ImageTk.PhotoImage(image=scaled_image_visual)
        Globals.DVH_doseplan_write_image_width = scaled_image_visual.width()
        Globals.DVH_doseplan_write_image_height = scaled_image_visual.height()
        Globals.DVH_doseplan_write_image.create_image(0,0,image=scaled_image_visual, anchor="nw")
        # Keep a reference on the canvas so Tk's garbage collector does not drop the image.
        Globals.DVH_doseplan_write_image.image = scaled_image_visual
        doseplan_text_image_canvas.create_image(0,0,image=Globals.profiles_doseplan_text_image, anchor="nw")
        doseplan_text_image_canvas.image=Globals.profiles_doseplan_text_image
        drawProfiles(False)
    else:
        # Multi-plan mode: only accumulate; rendering happens when all plans are summed.
        img=dose_slice[int(top_left_down):int(bottom_left_down), int(top_left_to_side):int(top_right_to_side)]
        Globals.DVH_doseplan_dataset_ROI_several.append(img)
        Globals.DVH_number_of_doseplans+=1
        if(Globals.DVH_dataset_doseplan.PixelSpacing==[1, 1]):
            Globals.DVH_several_img.append(cv2.resize(img, dsize=(img.shape[1]*5,img.shape[0]*5)))
        elif(Globals.DVH_dataset_doseplan.PixelSpacing==[2, 2]):
            Globals.DVH_several_img.append(cv2.resize(img, dsize=(img.shape[1]*10,img.shape[0]*10)))
        else:
            Globals.DVH_several_img.append(cv2.resize(img, dsize=(img.shape[1]*15,img.shape[0]*15)))
def UploadDoseplan(only_one):
    """Ask for a doseplan DICOM file (*.dcm), validate it and hand it to processing.

    Validates file type, dose-grid resolution (must be 1x1x1, 2x2x2 or 3x3x3 mm)
    and axis-aligned image orientation. When several plans are combined, also
    checks that resolution and dose-grid scaling match the previously uploaded
    plan. On success the plan is processed via the isocenter or reference point
    (depending on the RT-plan choice) and, in multi-plan mode, a filename/factor
    row is added to the doseplan list window.

    Parameters:
        only_one: True when a single doseplan is used, False when several
            doseplans are uploaded and summed.
    """
    file = filedialog.askopenfilename()
    ext = os.path.splitext(file)[-1].lower()
    if not ext == '.dcm':
        if ext == "":
            # The file dialog was cancelled; nothing to do.
            return
        messagebox.showerror("Error", "The file must be a *.dcm file")
        return
    # Read the dataset from its own folder, then restore the working directory
    # immediately so no early return below can leave the cwd changed.
    current_folder = os.getcwd()
    os.chdir(os.path.dirname(file))
    dataset = pydicom.dcmread(file)
    os.chdir(current_folder)
    try:
        dose_summation_type = dataset.DoseSummationType
    except Exception:
        messagebox.showerror("Error", "Could not upload the doseplan correctly. Try again or another file.\n (Code: dose summation)")
        return
    if not dose_summation_type == "PLAN":
        ok = messagebox.askokcancel("Dose summation", "You did not upload the full doseplan. Do you want to continue?")
        if not ok:
            return
    # Force decoding of the pixel data here so an unreadable dose matrix
    # fails before any state is stored (value itself is re-read later).
    doseplan_dataset = dataset.pixel_array
    #Check that the resolution is either 1x1x1, 2x2x2 or 3x3x3
    if(not((dataset.PixelSpacing==[1, 1] and dataset.SliceThickness==1) \
        or (dataset.PixelSpacing==[2, 2] and dataset.SliceThickness==2) \
        or (dataset.PixelSpacing==[3, 3] and dataset.SliceThickness==3))):
        messagebox.showerror("Error", "The resolution in doseplan must be 1x1x1, 2x2x2 or 3x3x3")
        return
    #Check that the datamatrix is in right angles to the coordinate system
    if(not(dataset.ImageOrientationPatient==[1, 0, 0, 0, 1, 0] or \
        dataset.ImageOrientationPatient==[1, 0, 0, 0, 0, 1] or \
        dataset.ImageOrientationPatient==[0, 1, 0, 1, 0, 0] or \
        dataset.ImageOrientationPatient==[0, 1, 0, 0, 0, 1] or \
        dataset.ImageOrientationPatient==[0, 0, 1, 1, 0, 0] or \
        dataset.ImageOrientationPatient==[0, 0, 1, 0, 1, 0])):
        messagebox.showerror("Error", "The Image Orientation (Patient) must be parallel to one of the main axis and perpendicular to the two others.")
        return
    # BUGFIX: was "> 1", which skipped the consistency check for the second plan
    # (the counter is 1 once the first plan has been processed).
    if not only_one and Globals.DVH_number_of_doseplans >= 1:
        if not Globals.DVH_dataset_doseplan.PixelSpacing == dataset.PixelSpacing:
            messagebox.showerror("Error", "Resolution of the doseplans must be equal. \n(Code: UploadDoseplan)")
            return
        if not Globals.DVH_dataset_doseplan.DoseGridScaling == dataset.DoseGridScaling:
            messagebox.showerror("Error", "Dose grid scaling of the doseplans must be equal. \n(Code: UploadDoseplan)")
            return
    Globals.DVH_dataset_doseplan = dataset
    Globals.DVH_dose_scaling_doseplan = dataset.DoseGridScaling
    Globals.DVH_test_if_added_doseplan = True
    if Globals.DVH_test_if_added_rtplan:
        if Globals.DVH_isocenter_or_reference_point == "Isocenter":
            processDoseplan_usingIsocenter(only_one)
        elif Globals.DVH_isocenter_or_reference_point == "Ref_point":
            processDoseplan_usingReferencePoint(only_one)
        else:
            messagebox.showerror("Error", "Something went wrong. Try again.\n (Code: processDoseplan)")
            return
    if only_one:
        Globals.DVH_upload_button_doseplan.config(state=DISABLED)
    if not only_one:
        # Add a row to the multi-plan window: read-only filename, a "Factor:"
        # label and an editable factor entry (read back in closeUploadDoseplans).
        filename = basename(normpath(file))
        textbox_filename = tk.Text(Globals.DVH_doseplans_scroll_frame, width = 30, height = 1)
        textbox_filename.insert(INSERT, filename)
        textbox_filename.config(bg='#ffffff', font=('calibri', '12'), state=DISABLED, relief=FLAT)
        textbox_filename.grid(row = Globals.DVH_number_of_doseplans_row_count, column = 0, sticky=N+S+W+E, pady=(10,10), padx=(10,10))
        Globals.DVH_doseplans_scroll_frame.grid_columnconfigure(Globals.DVH_doseplans_grid_config_count, weight=0)
        Globals.DVH_doseplans_scroll_frame.grid_rowconfigure(Globals.DVH_doseplans_grid_config_count, weight=0)
        Globals.DVH_doseplans_filenames.append(textbox_filename)
        Globals.DVH_doseplans_grid_config_count += 1
        textbox_factor = tk.Text(Globals.DVH_doseplans_scroll_frame, width = 6, height = 1)
        textbox_factor.insert(INSERT, "Factor: ")
        textbox_factor.config(bg='#ffffff', font=('calibri', '12'), state=DISABLED, relief=FLAT)
        # BUGFIX: this row previously used Globals.profiles_number_of_doseplans_row_count
        # (copy-paste from the profiles tab); every other widget in this window
        # is placed with the DVH counter.
        textbox_factor.grid(row = Globals.DVH_number_of_doseplans_row_count, column = 1, sticky=N+S+W+E, pady=(10,10), padx=(10,10))
        Globals.DVH_doseplans_scroll_frame.grid_columnconfigure(Globals.DVH_doseplans_grid_config_count, weight=0)
        Globals.DVH_doseplans_scroll_frame.grid_rowconfigure(Globals.DVH_doseplans_grid_config_count, weight=0)
        Globals.DVH_doseplans_factor_text.append(textbox_factor)
        Globals.DVH_doseplans_grid_config_count += 1
        textbox_factor_input = tk.Text(Globals.DVH_doseplans_scroll_frame)
        textbox_factor_input.insert(INSERT, " ")
        textbox_factor_input.config(bg='#E5f9ff', font=('calibri', '12'), state=NORMAL, bd = 2)
        textbox_factor_input.grid(row = Globals.DVH_number_of_doseplans_row_count, column = 1, sticky=N+S+W+E, pady=(10,10), padx=(30,10))
        Globals.DVH_doseplans_scroll_frame.grid_columnconfigure(Globals.DVH_doseplans_grid_config_count, weight=0)
        Globals.DVH_doseplans_scroll_frame.grid_rowconfigure(Globals.DVH_doseplans_grid_config_count, weight=0)
        Globals.DVH_doseplans_factor_input.append(textbox_factor_input)
        Globals.DVH_number_of_doseplans_row_count += 1
        Globals.DVH_doseplans_grid_config_count += 1
def UploadDoseplan_button_function():
    """Handler for the DVH tab's 'upload doseplan' button.

    First asks whether several (possibly weighted) doseplans will be combined.
    If not, delegates directly to UploadDoseplan(True). Otherwise it opens a
    modal, scrollable window where each plan is uploaded with an optional
    multiplication factor; pressing 'Done' sums the weighted ROIs and renders
    the result (see the nested closeUploadDoseplans).
    """
    yes = messagebox.askyesno("Question", "Are you going to upload several doseplans and/or use a factor on a plan?")
    if not yes:
        UploadDoseplan(True)
        return
    several_doseplans_window = tk.Toplevel(Globals.tab5_canvas)
    several_doseplans_window.geometry("600x500+10+10")
    # Modal: keep focus in this window while plans are being uploaded.
    several_doseplans_window.grab_set()
    doseplans_over_all_frame = tk.Frame(several_doseplans_window, bd=0, relief=FLAT)
    doseplans_over_all_canvas = Canvas(doseplans_over_all_frame)
    doseplans_xscrollbar = Scrollbar(doseplans_over_all_frame, orient=HORIZONTAL, command=doseplans_over_all_canvas.xview)
    doseplans_yscrollbar = Scrollbar(doseplans_over_all_frame, command=doseplans_over_all_canvas.yview)
    Globals.DVH_doseplans_scroll_frame = ttk.Frame(doseplans_over_all_canvas)
    # Grow the scrollregion whenever rows are added to the inner frame.
    Globals.DVH_doseplans_scroll_frame.bind("<Configure>", lambda e: doseplans_over_all_canvas.configure(scrollregion=doseplans_over_all_canvas.bbox('all')))
    doseplans_over_all_canvas.create_window((0,0), window=Globals.DVH_doseplans_scroll_frame, anchor='nw')
    doseplans_over_all_canvas.configure(xscrollcommand=doseplans_xscrollbar.set, yscrollcommand=doseplans_yscrollbar.set)
    doseplans_over_all_frame.config(highlightthickness=0, bg='#ffffff')
    doseplans_over_all_canvas.config(highlightthickness=0, bg='#ffffff')
    doseplans_over_all_frame.pack(expand=True, fill=BOTH)
    doseplans_over_all_canvas.grid(row=0, column=0, sticky=N+S+E+W)
    doseplans_over_all_frame.grid_columnconfigure(0, weight=1)
    doseplans_over_all_frame.grid_rowconfigure(0, weight=1)
    doseplans_xscrollbar.grid(row=1, column=0, sticky=E+W)
    doseplans_over_all_frame.grid_columnconfigure(1, weight=0)
    doseplans_over_all_frame.grid_rowconfigure(1, weight=0)
    doseplans_yscrollbar.grid(row=0, column=1, sticky=N+S)
    doseplans_over_all_frame.grid_columnconfigure(2, weight=0)
    doseplans_over_all_frame.grid_rowconfigure(2, weight=0)
    upload_doseplan_frame = tk.Frame(Globals.DVH_doseplans_scroll_frame)
    upload_doseplan_frame.grid(row=0, column = 0, padx = (30,30), pady=(30,0), sticky=N+S+E+W)
    Globals.DVH_doseplans_scroll_frame.grid_columnconfigure(0, weight=0)
    Globals.DVH_doseplans_scroll_frame.grid_rowconfigure(0, weight=0)
    upload_doseplan_frame.config(bg = '#ffffff')
    # Each click uploads one more plan (UploadDoseplan(False) appends a row).
    upload_button_doseplan = tk.Button(upload_doseplan_frame, text='Browse', image=Globals.profiles_add_doseplans_button_image,\
        cursor='hand2', font=('calibri', '14'), relief=FLAT, state=ACTIVE, command=lambda: UploadDoseplan(False))
    upload_button_doseplan.pack(expand=True, fill=BOTH)
    upload_button_doseplan.configure(bg='#ffffff', activebackground='#ffffff', activeforeground='#ffffff', highlightthickness=0)
    upload_button_doseplan.image = Globals.profiles_add_doseplans_button_image
    def closeUploadDoseplans():
        """Sum the uploaded ROIs weighted by their factors and render the result.

        Reads each factor entry (blank means 1), accumulates the weighted dose
        ROIs and display images, draws the combined image on the DVH tab and
        closes this window. Shows an error and aborts if a factor is not numeric.
        """
        if(len(Globals.DVH_doseplan_dataset_ROI_several) == 0):
            messagebox.showinfo("INFO", "No doseplan has been uploaded")
            return
        for i in range(len(Globals.DVH_doseplan_dataset_ROI_several)):
            # An untouched entry still holds the single space inserted at creation.
            if Globals.DVH_doseplans_factor_input[i].get("1.0", 'end-1c') == " ":
                factor = 1
            else:
                try:
                    factor = float(Globals.DVH_doseplans_factor_input[i].get("1.0", 'end-1c'))
                except:
                    messagebox.showerror("Error", "Invalid factor. Must be number.\n (Code: closeUploadDoseplans)")
                    return
            if i == 0:
                doseplan_ROI = Globals.DVH_doseplan_dataset_ROI_several[i]
                doseplan_ROI= doseplan_ROI*factor
                img_ROI = Globals.DVH_several_img[i]
                img_ROI = img_ROI*factor
            else:
                doseplan_ROI+= factor*Globals.DVH_doseplan_dataset_ROI_several[i]
                img_ROI+= factor*Globals.DVH_several_img[i]
        mx=np.max(img_ROI)
        #max_dose = mx*Globals.DVH_dose_scaling_doseplan
        # Normalize for colormap display only; doseplan_ROI keeps the raw weighted sum.
        img_ROI = img_ROI/mx
        PIL_img_doseplan_ROI = Image.fromarray(np.uint8(cm.viridis(img_ROI)*255))
        wid = PIL_img_doseplan_ROI.width;heig = PIL_img_doseplan_ROI.height
        doseplan_canvas = tk.Canvas(Globals.DVH_film_panedwindow)
        doseplan_canvas.grid(row=2, column=0, sticky=N+S+W+E)
        Globals.DVH_film_panedwindow.add(doseplan_canvas, \
            height=max(heig, Globals.profiles_doseplan_text_image.height()), \
            width=wid + Globals.profiles_doseplan_text_image.width())
        doseplan_canvas.config(bg='#ffffff', relief=FLAT, highlightthickness=0, \
            height=max(heig, Globals.profiles_doseplan_text_image.height()), \
            width=wid + Globals.profiles_doseplan_text_image.width())
        Globals.DVH_doseplan_write_image = tk.Canvas(doseplan_canvas)
        Globals.DVH_doseplan_write_image.grid(row=0,column=1,sticky=N+S+W+E)
        Globals.DVH_doseplan_write_image.config(bg='#ffffff', relief=FLAT, highlightthickness=0, width=wid, height=heig)
        doseplan_text_image_canvas = tk.Canvas(doseplan_canvas)
        doseplan_text_image_canvas.grid(row=0,column=0,sticky=N+S+W+E)
        doseplan_text_image_canvas.config(bg='#ffffff', relief=FLAT, highlightthickness=0, \
            width=Globals.profiles_doseplan_text_image.width(), height=Globals.profiles_doseplan_text_image.height())
        scaled_image_visual = PIL_img_doseplan_ROI
        scaled_image_visual = ImageTk.PhotoImage(image=scaled_image_visual)
        Globals.DVH_doseplan_write_image_width = scaled_image_visual.width()
        Globals.DVH_doseplan_write_image_height = scaled_image_visual.height()
        Globals.DVH_doseplan_write_image.create_image(0,0,image=scaled_image_visual, anchor="nw")
        # Keep a reference so Tk does not garbage-collect the PhotoImage.
        Globals.DVH_doseplan_write_image.image = scaled_image_visual
        doseplan_text_image_canvas.create_image(0,0,image=Globals.profiles_doseplan_text_image, anchor="nw")
        doseplan_text_image_canvas.image=Globals.profiles_doseplan_text_image
        Globals.DVH_doseplan_dataset_ROI = doseplan_ROI
        Globals.DVH_upload_button_doseplan.config(state=DISABLED)
        # Short delay before closing so the user sees the button press take effect.
        several_doseplans_window.after(500, lambda: several_doseplans_window.destroy())
        drawProfiles(False)
    doseplans_done_button_frame = tk.Frame(Globals.DVH_doseplans_scroll_frame)
    doseplans_done_button_frame.grid(row=0, column = 1, padx=(0,40), pady=(30,0), sticky=N+S+W+E)
    doseplans_done_button_frame.config(bg='#ffffff')
    Globals.DVH_doseplans_scroll_frame.grid_rowconfigure(3, weight=0)
    Globals.DVH_doseplans_scroll_frame.grid_columnconfigure(3, weight=0)
    doseplans_done_button = tk.Button(doseplans_done_button_frame, text='Done', image=Globals.done_button_image,\
        cursor='hand2', font=('calibri', '14'), relief=FLAT, state=ACTIVE, command=closeUploadDoseplans)
    doseplans_done_button.pack(expand=True, fill=BOTH)
    doseplans_done_button.configure(bg='#ffffff', activebackground='#ffffff', activeforeground='#ffffff', highlightthickness=0)
    doseplans_done_button.image = Globals.done_button_image
    filename_title = tk.Text(Globals.DVH_doseplans_scroll_frame, width = 15, height= 1)
    filename_title.insert(INSERT, "Filename")
    filename_title.grid(row=2, column=0, sticky=N+S+E+W, pady=(40,0), padx=(45,15))
    filename_title.config(bg='#ffffff', relief=FLAT, state=DISABLED, font=('calibri', '15', 'bold'))
    Globals.DVH_doseplans_scroll_frame.grid_rowconfigure(1, weight=0)
    Globals.DVH_doseplans_scroll_frame.grid_columnconfigure(1, weight=0)
    factor_title = tk.Text(Globals.DVH_doseplans_scroll_frame, width=30, height=2)
    factor_title.insert(INSERT, "Here you can write a factor to use \non the doseplan. Defaults to 1.")
    factor_title.grid(row=2, column=1, sticky=N+W+S+E, pady=(37,10), padx=(15,25))
    factor_title.config(bg='#ffffff', relief=FLAT, state=DISABLED, font=('calibri', '15', 'bold'))
    Globals.DVH_doseplans_scroll_frame.grid_columnconfigure(2,weight=0)
    Globals.DVH_doseplans_scroll_frame.grid_rowconfigure(2, weight=0)
def UploadRTplan():
    """Ask for an RT-plan DICOM file and read the data needed for positioning.

    Reads the isocenter position, the three table-top setup displacements and
    the patient position into Globals. On success the doseplan upload button is
    enabled and this (RT plan) button disabled. Any read failure shows an error
    box and aborts without enabling the doseplan upload.
    """
    file = filedialog.askopenfilename()
    ext = os.path.splitext(file)[-1].lower()
    if not ext == '.dcm':
        if ext == "":
            # File dialog was cancelled.
            return
        messagebox.showerror("Error", "The file must be a *.dcm file")
        return
    # Read the dataset from its own folder, then restore the working directory.
    current_folder = os.getcwd()
    os.chdir(os.path.dirname(file))
    dataset = pydicom.dcmread(file)
    os.chdir(current_folder)
    Globals.DVH_dataset_rtplan = dataset
    #Isocenter given in mm from origo in patient coordinate system
    try:
        isocenter_mm = dataset.BeamSequence[0].ControlPointSequence[0].IsocenterPosition
        Globals.DVH_isocenter_mm = isocenter_mm
    except Exception:
        messagebox.showerror("Error", "Could not read the RT plan file. Try again or try another file.\n\
            (Code: isocenter reading)")
        return
    try:
        Globals.DVH_doseplan_vertical_displacement = dataset.PatientSetupSequence[0].TableTopVerticalSetupDisplacement
    except Exception:
        messagebox.showerror("Error", "Could not read the RT plan file. Try again or try another file. \n\
            (Code: vertical table displacement)")
        # BUGFIX: previously fell through and continued without a vertical displacement.
        return
    try:
        Globals.DVH_doseplan_lateral_displacement = dataset.PatientSetupSequence[0].TableTopLateralSetupDisplacement
    except Exception:
        messagebox.showerror("Error", "Could not read the RT plan file. Try again or try another file-\n\
            (Code: lateral table displacement)")
        # BUGFIX: previously fell through and continued without a lateral displacement.
        return
    try:
        Globals.DVH_doseplan_longitudianl_displacement = dataset.PatientSetupSequence[0].TableTopLongitudinalSetupDisplacement
    except Exception:
        messagebox.showerror("Error", "Could not read the RT plan file. Try again or try another file\n\
            (Code: longitudinal table displacement)")
        # BUGFIX: previously fell through and continued without a longitudinal displacement.
        return
    try:
        patient_position = dataset.PatientSetupSequence[0].PatientPosition
        Globals.DVH_doseplan_patient_position = patient_position
    except Exception:
        messagebox.showerror("Error", "Could not read the RT plan file. Try again or try another file\n\
            (Code: Patient position)")
        # BUGFIX: without this return the check below raised NameError on the
        # undefined patient_position.
        return
    if patient_position not in ('HFS', 'HFP', 'HFDR', 'HFDL', 'FFDR', 'FFDL', 'FFP', 'FFS'):
        messagebox.showerror("Error", "Fidora does only support patient positions: \n\
            HFS, HFP, HFDR, HFDL, FFP, FFS, FFDR, FFDL")
        return
    Globals.DVH_test_if_added_rtplan = True
    Globals.DVH_upload_button_doseplan.config(state=ACTIVE)
    Globals.DVH_upload_button_rtplan.config(state=DISABLED)
def pixel_to_dose(P,a,b,c):
    """Map a film pixel value P to dose via the calibration curve c + b/(P - a).

    Parameters:
        P: pixel value (scalar or numpy array).
        a, b, c: fitted calibration coefficients.
    Returns:
        The dose corresponding to P.
    """
    return c + b / (P - a)
def markIsocenter(img, new_window_isocenter_tab, image_canvas, cv2Img):
    """Open a modal window in which the user clicks the two film marks; derive
    the isocenter from those two clicks and draw it on the preview canvas.

    The first click supplies the isocenter's x coordinate, the second click
    supplies its y coordinate (see DVH_film_isocenter assembly below). Results
    are stored in Globals.DVH_film_isocenter / DVH_iscoenter_coords, and the
    cross-hair lines plus oval are drawn at half scale on `image_canvas`.

    Parameters:
        img: PIL image of the film, shown full-size in the picking window.
        new_window_isocenter_tab: parent widget for the modal Toplevel.
        image_canvas: half-scale preview canvas the result is drawn onto.
        cv2Img: scanned film array; appears unused in this function —
            presumably kept for a call signature uniform with sibling
            functions (TODO confirm).
    """
    # If an isocenter was already marked, erase the old graphics and reset
    # the stored coordinates so the user can redo the pick from scratch.
    if(len(Globals.DVH_mark_isocenter_oval)>0):
        image_canvas.delete(Globals.DVH_mark_isocenter_up_down_line[0])
        image_canvas.delete(Globals.DVH_mark_isocenter_right_left_line[0])
        image_canvas.delete(Globals.DVH_mark_isocenter_oval[0])
        Globals.DVH_mark_isocenter_oval=[]
        Globals.DVH_mark_isocenter_right_left_line=[]
        Globals.DVH_mark_isocenter_up_down_line=[]
        Globals.DVH_iscoenter_coords = []
    img_mark_isocenter = ImageTk.PhotoImage(image=img)
    # Modal picking window; grab_set() makes it capture all input until closed.
    mark_isocenter_window = tk.Toplevel(new_window_isocenter_tab)
    mark_isocenter_window.geometry("1035x620+10+10")
    mark_isocenter_window.grab_set()
    # Scrollable frame/canvas scaffolding so the full-size film fits the window.
    mark_isocenter_over_all_frame = tk.Frame(mark_isocenter_window, bd=0, relief=FLAT)
    mark_isocenter_over_all_canvas = Canvas(mark_isocenter_over_all_frame)
    mark_isocenter_xscrollbar = Scrollbar(mark_isocenter_over_all_frame, orient=HORIZONTAL, command=mark_isocenter_over_all_canvas.xview)
    mark_isocenter_yscrollbar = Scrollbar(mark_isocenter_over_all_frame, command=mark_isocenter_over_all_canvas.yview)
    mark_isocenter_scroll_frame = ttk.Frame(mark_isocenter_over_all_canvas)
    # Keep the scrollregion in sync with the frame's actual content size.
    mark_isocenter_scroll_frame.bind("<Configure>", lambda e: mark_isocenter_over_all_canvas.configure(scrollregion=mark_isocenter_over_all_canvas.bbox('all')))
    mark_isocenter_over_all_canvas.create_window((0,0), window=mark_isocenter_scroll_frame, anchor='nw')
    mark_isocenter_over_all_canvas.configure(xscrollcommand=mark_isocenter_xscrollbar.set, yscrollcommand=mark_isocenter_yscrollbar.set)
    mark_isocenter_over_all_frame.config(highlightthickness=0, bg='#ffffff')
    mark_isocenter_over_all_canvas.config(highlightthickness=0, bg='#ffffff')
    mark_isocenter_over_all_frame.pack(expand=True, fill=BOTH)
    mark_isocenter_over_all_canvas.grid(row=0, column=0, sticky=N+S+E+W)
    mark_isocenter_over_all_frame.grid_columnconfigure(0, weight=1)
    mark_isocenter_over_all_frame.grid_rowconfigure(0, weight=1)
    mark_isocenter_xscrollbar.grid(row=1, column=0, sticky=E+W)
    mark_isocenter_over_all_frame.grid_columnconfigure(1, weight=0)
    mark_isocenter_over_all_frame.grid_rowconfigure(1, weight=0)
    mark_isocenter_yscrollbar.grid(row=0, column=1, sticky=N+S)
    mark_isocenter_over_all_frame.grid_columnconfigure(2, weight=0)
    mark_isocenter_over_all_frame.grid_rowconfigure(2, weight=0)
    # Canvas that actually shows the full-size film image for clicking.
    mark_isocenter_image_canvas = tk.Canvas(mark_isocenter_scroll_frame)
    mark_isocenter_image_canvas.grid(row=0,column=0, rowspan=10, columnspan=3, sticky=N+S+E+W, padx=(0,0), pady=(0,0))
    mark_isocenter_scroll_frame.grid_columnconfigure(0, weight=0)
    mark_isocenter_scroll_frame.grid_rowconfigure(0, weight=0)
    mark_isocenter_image_canvas.create_image(0,0,image=img_mark_isocenter,anchor="nw")
    # Keep a reference on the widget so the PhotoImage is not garbage-collected.
    mark_isocenter_image_canvas.image = img_mark_isocenter
    mark_isocenter_image_canvas.config(cursor='hand2', bg='#ffffff', relief=FLAT, bd=0, \
        scrollregion=mark_isocenter_image_canvas.bbox(ALL), height=img_mark_isocenter.height(), width=img_mark_isocenter.width())
    mark_isocenter_image_canvas.grid_propagate(0)

    def findCoords(event):
        """Record one click per call; after the second click compute and draw
        the isocenter, then auto-close the window after 500 ms."""
        # Mark the clicked spot on the full-size image.
        mark_isocenter_image_canvas.create_oval(event.x-2, event.y-2, event.x+2, event.y+2, fill='red')
        if(Globals.DVH_iscoenter_coords==[]):
            # First click: store it and wait for the second.
            Globals.DVH_iscoenter_coords.append([event.x, event.y])
            mark_isocenter_image_canvas.config(cursor='hand2')
        elif(len(Globals.DVH_iscoenter_coords)==1):
            Globals.DVH_iscoenter_coords.append([event.x, event.y])
            # Isocenter = x of first click combined with y of second click.
            Globals.DVH_film_isocenter = [Globals.DVH_iscoenter_coords[0][0], Globals.DVH_iscoenter_coords[1][1]]
            x1,y1 = Globals.DVH_iscoenter_coords[0]
            x4,y4 = Globals.DVH_iscoenter_coords[1]
            # Mirror each click through the isocenter to get the line endpoints.
            x2 = x1;y3=y4
            y2=2*Globals.DVH_film_isocenter[1]-y1
            x3=2*Globals.DVH_film_isocenter[0]-x4
            # Draw the cross-hair on the half-scale preview canvas (coords / 2).
            up_down_line = image_canvas.create_line(int(x1/2),int(y1/2),int(x2/2),int(y2/2),fill='purple', smooth=1, width=2)
            right_left_line = image_canvas.create_line(int(x3/2),int(y3/2),int(x4/2),int(y4/2), fill='purple', smooth=1, width=2)
            oval = image_canvas.create_oval(int(Globals.DVH_film_isocenter[0]/2)-3, int(Globals.DVH_film_isocenter[1]/2)-3,\
                int(Globals.DVH_film_isocenter[0]/2)+3, int(Globals.DVH_film_isocenter[1]/2)+3, fill='red')
            # Remember the drawn item ids so a re-pick can delete them later.
            Globals.DVH_mark_isocenter_up_down_line.append(up_down_line)
            Globals.DVH_mark_isocenter_right_left_line.append(right_left_line)
            Globals.DVH_mark_isocenter_oval.append(oval)
            # Brief pause so the user sees the result before the window closes.
            mark_isocenter_window.after(500, lambda: mark_isocenter_window.destroy())
            Globals.DVH_isocenter_check = True
            # Enable "done" only once both isocenter and ROI are marked.
            if(Globals.DVH_ROI_check):
                Globals.DVH_done_button.config(state=ACTIVE)

    mark_isocenter_image_canvas.bind("<Button 1>",findCoords)
def markReferencePoint(img, new_window_reference_point_tab, image_canvas_reference_tab, cv2Img):
    """Open a modal window in which the user clicks a single reference point on
    the film; store it and draw it on the preview canvas.

    The click is stored in Globals.DVH_film_reference_point and echoed at half
    scale as a red oval on `image_canvas_reference_tab`. The window auto-closes
    500 ms after the click.

    Parameters:
        img: PIL image of the film, shown full-size in the picking window.
        new_window_reference_point_tab: parent widget for the modal Toplevel.
        image_canvas_reference_tab: half-scale preview canvas for the marker.
        cv2Img: scanned film array; appears unused in this function —
            presumably kept for a call signature uniform with sibling
            functions (TODO confirm).
    """
    # If a reference point was already marked, erase it so the user can redo.
    if(len(Globals.DVH_mark_reference_point_oval)>0):
        image_canvas_reference_tab.delete(Globals.DVH_mark_reference_point_oval[0])
        Globals.DVH_mark_reference_point_oval=[]
    img_mark_reference_point = ImageTk.PhotoImage(image=img)
    # Modal picking window; grab_set() captures all input until it closes.
    mark_reference_point_window = tk.Toplevel(new_window_reference_point_tab)
    mark_reference_point_window.geometry("1035x620+10+10")
    mark_reference_point_window.grab_set()
    # Scrollable frame/canvas scaffolding so the full-size film fits the window.
    mark_reference_point_over_all_frame = tk.Frame(mark_reference_point_window, bd=0, relief=FLAT)
    mark_reference_point_over_all_canvas = Canvas(mark_reference_point_over_all_frame)
    mark_reference_point_xscrollbar = Scrollbar(mark_reference_point_over_all_frame, orient=HORIZONTAL, command=mark_reference_point_over_all_canvas.xview)
    mark_reference_point_yscrollbar = Scrollbar(mark_reference_point_over_all_frame, command=mark_reference_point_over_all_canvas.yview)
    mark_reference_point_scroll_frame = ttk.Frame(mark_reference_point_over_all_canvas)
    # Keep the scrollregion in sync with the frame's actual content size.
    mark_reference_point_scroll_frame.bind("<Configure>", lambda e: mark_reference_point_over_all_canvas.configure(scrollregion=mark_reference_point_over_all_canvas.bbox('all')))
    mark_reference_point_over_all_canvas.create_window((0,0), window=mark_reference_point_scroll_frame, anchor='nw')
    mark_reference_point_over_all_canvas.configure(xscrollcommand=mark_reference_point_xscrollbar.set, yscrollcommand=mark_reference_point_yscrollbar.set)
    mark_reference_point_over_all_frame.config(highlightthickness=0, bg='#ffffff')
    mark_reference_point_over_all_canvas.config(highlightthickness=0, bg='#ffffff')
    mark_reference_point_over_all_frame.pack(expand=True, fill=BOTH)
    mark_reference_point_over_all_canvas.grid(row=0, column=0, sticky=N+S+E+W)
    mark_reference_point_over_all_frame.grid_columnconfigure(0, weight=1)
    mark_reference_point_over_all_frame.grid_rowconfigure(0, weight=1)
    mark_reference_point_xscrollbar.grid(row=1, column=0, sticky=E+W)
    mark_reference_point_over_all_frame.grid_columnconfigure(1, weight=0)
    mark_reference_point_over_all_frame.grid_rowconfigure(1, weight=0)
    mark_reference_point_yscrollbar.grid(row=0, column=1, sticky=N+S)
    mark_reference_point_over_all_frame.grid_columnconfigure(2, weight=0)
    mark_reference_point_over_all_frame.grid_rowconfigure(2, weight=0)
    # Canvas that actually shows the full-size film image for clicking.
    mark_reference_point_image_canvas = tk.Canvas(mark_reference_point_scroll_frame)
    mark_reference_point_image_canvas.grid(row=0,column=0, rowspan=10, columnspan=3, sticky=N+S+E+W, padx=(0,0), pady=(0,0))
    mark_reference_point_scroll_frame.grid_columnconfigure(0, weight=0)
    mark_reference_point_scroll_frame.grid_rowconfigure(0, weight=0)
    mark_reference_point_image_canvas.create_image(0,0,image=img_mark_reference_point,anchor="nw")
    # Keep a reference on the widget so the PhotoImage is not garbage-collected.
    mark_reference_point_image_canvas.image = img_mark_reference_point
    mark_reference_point_image_canvas.config(cursor='hand2', bg='#ffffff', relief=FLAT, bd=0, \
        scrollregion=mark_reference_point_image_canvas.bbox(ALL), height=img_mark_reference_point.height(), width=img_mark_reference_point.width())
    mark_reference_point_image_canvas.grid_propagate(0)

    def findCoords(event):
        """Store the clicked reference point, echo it on the preview canvas,
        and auto-close the window after 500 ms."""
        # Mark the clicked spot on the full-size image.
        mark_reference_point_image_canvas.create_oval(event.x-2, event.y-2, event.x+2, event.y+2, fill='red')
        Globals.DVH_film_reference_point = [event.x, event.y]
        # Echo the point at half scale (coords / 2) on the preview canvas.
        oval = image_canvas_reference_tab.create_oval(int(Globals.DVH_film_reference_point[0]/2)-3, \
            int(Globals.DVH_film_reference_point[1]/2)-3, int(Globals.DVH_film_reference_point[0]/2)+3, \
            int(Globals.DVH_film_reference_point[1]/2)+3, fill='red')
        # Remember the drawn item id so a re-pick can delete it later.
        Globals.DVH_mark_reference_point_oval.append(oval)
        mark_reference_point_window.after(500, lambda: mark_reference_point_window.destroy())
        Globals.DVH_reference_point_check = True
        # Enable "done" only once both reference point and ROI are marked.
        if(Globals.DVH_ROI_reference_point_check):
            Globals.DVH_done_button_reference_point.config(state=ACTIVE)

    mark_reference_point_image_canvas.bind("<Button 1>",findCoords)
def markROI(img, tab, canvas, ref_point_test):
    """Open a modal window in which the user drags a rectangle marking the
    region of interest (ROI) on the film.

    The four corner coordinates are appended to Globals.DVH_ROI_coords in the
    order top-left, top-right, bottom-left, bottom-right, and the rectangle is
    echoed at half scale on `canvas`. The window auto-closes 500 ms after the
    mouse button is released.

    Parameters:
        img: PIL image of the film, shown full-size in the picking window.
        tab: parent widget for the modal Toplevel (isocenter or reference tab).
        canvas: half-scale preview canvas the ROI rectangle is drawn onto.
        ref_point_test: True when called from the reference-point tab; selects
            which Globals check-flags and "done" button to update.
    """
    # If a ROI was already marked, erase it and reset the stored corners.
    if(len(Globals.DVH_mark_ROI_rectangle)>0):
        canvas.delete(Globals.DVH_mark_ROI_rectangle[0])
        Globals.DVH_mark_ROI_rectangle = []
        Globals.DVH_ROI_coords = []
    img_mark_ROI = ImageTk.PhotoImage(image=img)
    # Modal picking window; grab_set() captures all input until it closes.
    mark_ROI_window = tk.Toplevel(tab)
    mark_ROI_window.geometry("1035x620+10+10")
    mark_ROI_window.grab_set()
    # Scrollable frame/canvas scaffolding so the full-size film fits the window.
    mark_ROI_over_all_frame = tk.Frame(mark_ROI_window, bd=0, relief=FLAT)
    mark_ROI_over_all_canvas = Canvas(mark_ROI_over_all_frame)
    mark_ROI_xscrollbar = Scrollbar(mark_ROI_over_all_frame, orient=HORIZONTAL, command=mark_ROI_over_all_canvas.xview)
    mark_ROI_yscrollbar = Scrollbar(mark_ROI_over_all_frame, command=mark_ROI_over_all_canvas.yview)
    mark_ROI_scroll_frame = ttk.Frame(mark_ROI_over_all_canvas)
    # Keep the scrollregion in sync with the frame's actual content size.
    mark_ROI_scroll_frame.bind("<Configure>", lambda e: mark_ROI_over_all_canvas.configure(scrollregion=mark_ROI_over_all_canvas.bbox('all')))
    mark_ROI_over_all_canvas.create_window((0,0), window=mark_ROI_scroll_frame, anchor='nw')
    mark_ROI_over_all_canvas.configure(xscrollcommand=mark_ROI_xscrollbar.set, yscrollcommand=mark_ROI_yscrollbar.set)
    mark_ROI_over_all_frame.config(highlightthickness=0, bg='#ffffff')
    mark_ROI_over_all_canvas.config(highlightthickness=0, bg='#ffffff')
    mark_ROI_over_all_frame.pack(expand=True, fill=BOTH)
    mark_ROI_over_all_canvas.grid(row=0, column=0, sticky=N+S+E+W)
    mark_ROI_over_all_frame.grid_columnconfigure(0, weight=1)
    mark_ROI_over_all_frame.grid_rowconfigure(0, weight=1)
    mark_ROI_xscrollbar.grid(row=1, column=0, sticky=E+W)
    mark_ROI_over_all_frame.grid_columnconfigure(1, weight=0)
    mark_ROI_over_all_frame.grid_rowconfigure(1, weight=0)
    mark_ROI_yscrollbar.grid(row=0, column=1, sticky=N+S)
    mark_ROI_over_all_frame.grid_columnconfigure(2, weight=0)
    mark_ROI_over_all_frame.grid_rowconfigure(2, weight=0)
    # Canvas that actually shows the full-size film image for dragging.
    mark_ROI_image_canvas = tk.Canvas(mark_ROI_scroll_frame)
    mark_ROI_image_canvas.grid(row=0,column=0, rowspan=10, columnspan=3, sticky=N+S+E+W, padx=(0,0), pady=(0,0))
    mark_ROI_scroll_frame.grid_columnconfigure(0, weight=0)
    mark_ROI_scroll_frame.grid_rowconfigure(0, weight=0)
    mark_ROI_image_canvas.create_image(0,0,image=img_mark_ROI,anchor="nw")
    # Keep a reference on the widget so the PhotoImage is not garbage-collected.
    mark_ROI_image_canvas.image = img_mark_ROI
    mark_ROI_image_canvas.config(bg='#E5f9ff', relief=FLAT, bd=0, \
        scrollregion=mark_ROI_image_canvas.bbox(ALL), height=img_mark_ROI.height(), width=img_mark_ROI.width())
    mark_ROI_image_canvas.grid_propagate(0)
    # Rubber-band rectangle; resized while the mouse is dragged.
    rectangle = mark_ROI_image_canvas.create_rectangle(0,0,0,0,outline='green')
    rectangle_top_corner = []
    rectangle_bottom_corner = []

    def buttonPushed(event):
        # Press: remember the first (top) corner of the drag.
        rectangle_top_corner.append([event.x, event.y])

    def buttonMoving(event):
        # Drag: stretch the rubber-band rectangle to the current mouse position.
        mark_ROI_image_canvas.coords(rectangle, rectangle_top_corner[0][0], rectangle_top_corner[0][1], \
            event.x, event.y)

    def buttonReleased(event):
        # Release: fix the final rectangle, record corners, and close the window.
        rectangle_bottom_corner.append([event.x, event.y])
        mark_ROI_image_canvas.coords(rectangle, rectangle_top_corner[0][0], rectangle_top_corner[0][1],\
            rectangle_bottom_corner[0][0], rectangle_bottom_corner[0][1])
        mark_ROI_image_canvas.itemconfig(rectangle, outline='Blue')
        ### Remember: coordinates run across first, then down! Top left - top right - bottom left - bottom right
        Globals.DVH_ROI_coords.append([rectangle_top_corner[0][0], rectangle_top_corner[0][1]])
        Globals.DVH_ROI_coords.append([rectangle_bottom_corner[0][0], rectangle_top_corner[0][1]])
        Globals.DVH_ROI_coords.append([rectangle_top_corner[0][0], rectangle_bottom_corner[0][1]])
        Globals.DVH_ROI_coords.append([rectangle_bottom_corner[0][0], rectangle_bottom_corner[0][1]])
        # Echo the ROI at half scale (coords / 2) on the preview canvas.
        rect = canvas.create_rectangle(int((rectangle_top_corner[0][0])/2), int((rectangle_top_corner[0][1])/2),\
            int((rectangle_bottom_corner[0][0])/2), int((rectangle_bottom_corner[0][1])/2), outline='Blue', width=2)
        Globals.DVH_mark_ROI_rectangle.append(rect)
        # Update the check-flags for whichever tab invoked us; the "done"
        # button is enabled only when both marks for that tab are complete.
        if(ref_point_test):
            Globals.DVH_ROI_reference_point_check = True
            if(Globals.DVH_reference_point_check):
                Globals.DVH_done_button_reference_point.config(state=ACTIVE)
        else:
            Globals.DVH_ROI_check = True
            if(Globals.DVH_isocenter_check):
                Globals.DVH_done_button.config(state=ACTIVE)
        # Brief pause so the user sees the result before the window closes.
        mark_ROI_window.after(500, lambda: mark_ROI_window.destroy())

    mark_ROI_image_canvas.bind("<B1-Motion>", buttonMoving)
    mark_ROI_image_canvas.bind("<Button-1>", buttonPushed)
    mark_ROI_image_canvas.bind("<ButtonRelease-1>", buttonReleased)
def UploadFilm():
if(Globals.DVH_film_orientation.get() == '-'):
messagebox.showerror("Missing parameter", "Film orientation missing \n (Code: UploadFilm)")
return
if Globals.DVH_film_factor_input.get("1.0", 'end-1c') == " ":
Globals.DVH_film_factor = 1
else:
try:
Globals.DVH_film_factor = float(Globals.DVH_film_factor_input.get("1.0", 'end-1c'))
except:
messagebox.showerror("Missing parameter", "Film factor invalid format. \n (Code: UploadFilm)")
return
file = filedialog.askopenfilename()
ext = os.path.splitext(file)[-1].lower()
if(ext == '.tif'):
current_folder = os.getcwd()
parent = os.path.dirname(file)
os.chdir(parent)
img = Image.open(file)
img = img.transpose(Image.FLIP_LEFT_RIGHT)
cv2Img = cv2.imread(basename(normpath(file)), cv2.IMREAD_ANYCOLOR | cv2.IMREAD_ANYDEPTH)
cv2Img = cv2.medianBlur(cv2Img, 5)
if(cv2Img is None):
messagebox.showerror("Error", "Something has gone wrong. Check that the filename does not contain Æ,Ø,Å")
return
if(cv2Img.shape[2] == 3):
if(cv2Img.shape[0]==1270 and cv2Img.shape[1]==1016):
cv2Img = abs(cv2Img-Globals.correctionMatrix127)
cv2Img = np.clip(cv2Img, 0, 65535)
cv2Img = cv2.flip(cv2Img,1)
img_scaled = img.resize((508, 635), Image.ANTIALIAS)
img_scaled = ImageTk.PhotoImage(image=img_scaled)
Globals.DVH_film_dataset = cv2Img
Globals.DVH_film_dataset_red_channel = cv2Img[:,:,2]
else:
messagebox.showerror("Error","The resolution of the image is not consistent with dpi")
return
else:
messagebox.showerror("Error","The uploaded image need to be in RGB-format")
return
os.chdir(current_folder)
if(not (img.width == 1016)):
messagebox.showerror("Error", "Dpi in image has to be 127")
return
Globals.DVH_film_orientation_menu.configure(state=DISABLED)
Globals.DVH_film_factor_input.config(state=DISABLED)
h = 635 + 20
w = 508 + 625
new_window = tk.Toplevel(Globals.tab5)
new_window.geometry("%dx%d+0+0" % (w, h))
new_window.grab_set()
new_window_over_all_frame = tk.Frame(new_window, bd=0, relief=FLAT)
new_window_over_all_canvas = Canvas(new_window_over_all_frame)
new_window_xscrollbar = Scrollbar(new_window_over_all_frame, orient=HORIZONTAL, command=new_window_over_all_canvas.xview)
new_window_yscrollbar = Scrollbar(new_window_over_all_frame, command=new_window_over_all_canvas.yview)
new_window_scroll_frame = ttk.Frame(new_window_over_all_canvas)
new_window_scroll_frame.bind("<Configure>", lambda e: new_window_over_all_canvas.configure(scrollregion=new_window_over_all_canvas.bbox('all')))
new_window_over_all_canvas.create_window((0,0), window=new_window_scroll_frame, anchor='nw')
new_window_over_all_canvas.configure(xscrollcommand=new_window_xscrollbar.set, yscrollcommand=new_window_yscrollbar.set)
new_window_over_all_frame.config(highlightthickness=0, bg='#ffffff')
new_window_over_all_canvas.config(highlightthickness=0, bg='#ffffff')
new_window_over_all_frame.pack(expand=True, fill=BOTH)
new_window_over_all_canvas.grid(row=0, column=0, sticky=N+S+E+W)
new_window_over_all_frame.grid_columnconfigure(0, weight=1)
new_window_over_all_frame.grid_rowconfigure(0, weight=1)
new_window_xscrollbar.grid(row=1, column=0, sticky=E+W)
new_window_over_all_frame.grid_columnconfigure(1, weight=0)
new_window_over_all_frame.grid_rowconfigure(1, weight=0)
new_window_yscrollbar.grid(row=0, column=1, sticky=N+S)
new_window_over_all_frame.grid_columnconfigure(2, weight=0)
new_window_over_all_frame.grid_rowconfigure(2, weight=0)
new_window_explain_text = tk.Text(new_window_scroll_frame, height= 3, width=120)
new_window_explain_text.insert(INSERT, \
"To match the film with the doseplan you have to mark either isocenter or a reference point\
on the film of your choice.In the case of the reference point you \nwill be asked to input the \
lenght in lateral, longitudinal and vertical to a reference point used in the linac. It the \
reference point in the film is the same as \nthe one in the phantom/linac you can input all zeros,\
in other cases your input is in mm. Later you will have the oppertunity to make small\
adjustments \nto the placement of either the reference point or isocenter.")
new_window_explain_text.config(state=DISABLED, font=('calibri', '13', 'bold'), bg = '#ffffff', relief=FLAT)
new_window_explain_text.grid(row=0, column=0, columnspan=5, sticky=N+S+W+E, pady=(15,5), padx=(10,10))
new_window_scroll_frame.grid_rowconfigure(0, weight=0)
new_window_scroll_frame.grid_columnconfigure(0, weight=0)
new_window_notebook = ttk.Notebook(new_window_scroll_frame)
new_window_notebook.borderWidth=0
new_window_notebook.grid(row=2, column=0, columnspan=5, sticky=E+W+N+S, pady=(0,0), padx =(0,0))
new_window_scroll_frame.grid_rowconfigure(4, weight=0)
new_window_scroll_frame.grid_columnconfigure(4, weight=0)
new_window_isocenter_tab = ttk.Frame(new_window_notebook)
new_window_notebook.add(new_window_isocenter_tab, text='Isocenter')
new_window_reference_point_tab = ttk.Frame(new_window_notebook)
new_window_notebook.add(new_window_reference_point_tab, text='Reference point')
new_window_manually_tab = ttk.Frame(new_window_notebook)
new_window_notebook.add(new_window_manually_tab, text='Manually')
image_canvas = tk.Canvas(new_window_isocenter_tab)
image_canvas.grid(row=0,column=0, rowspan=12, columnspan=3, sticky=N+S+E+W, padx=(0,0), pady=(0,0))
new_window_isocenter_tab.grid_rowconfigure(1, weight=0)
new_window_isocenter_tab.grid_columnconfigure(1, weight=0)
image_canvas.create_image(0,0,image=img_scaled,anchor="nw")
image_canvas.image = img_scaled
image_canvas.config(bg='#ffffff', relief=FLAT, bd=0, scrollregion=image_canvas.bbox(ALL), \
height=img_scaled.height(), width=img_scaled.width())
image_canvas.grid_propagate(0)
image_canvas_reference_tab = tk.Canvas(new_window_reference_point_tab)
image_canvas_reference_tab.grid(row=0,column=0, rowspan=10, columnspan=3, sticky=N+S+E+W, padx=(0,0), pady=(0,0))
new_window_reference_point_tab.grid_rowconfigure(1, weight=0)
new_window_reference_point_tab.grid_columnconfigure(1, weight=0)
image_canvas_reference_tab.create_image(0,0,image=img_scaled,anchor="nw")
image_canvas_reference_tab.image = img_scaled
image_canvas_reference_tab.config(bg='#ffffff', relief=FLAT, bd=0, scrollregion=image_canvas.bbox(ALL), \
height=img_scaled.height(), width=img_scaled.width())
image_canvas_reference_tab.grid_propagate(0)
film_window_mark_isocenter_text = tk.Text(new_window_isocenter_tab, width=55, height=7)
film_window_mark_isocenter_text.insert(INSERT, \
"When clicking the button \"Mark isocenter\" a window showing \n\
the image will appear and you are to click on the markers \n\
made on the film upon irradiation to find the isocenter. Start \n\
with the marker showing the direction of the film (see the \n\
specifications in main window). When both marks are made \n\
you will see the isocenter in the image. If you are not happy \n\
with the placement click the button again and repeat.")
film_window_mark_isocenter_text.config(bg='#ffffff', relief=FLAT, bd=0, state=DISABLED, font=('calibri', '11'))
film_window_mark_isocenter_text.grid(row=0, column=3, rowspan=3, sticky=N+S+E+W, padx=(10,10), pady=(10,0))
new_window_isocenter_tab.columnconfigure(2, weight=0)
new_window_isocenter_tab.rowconfigure(2, weight=0)
film_window_mark_reference_point_text = tk.Text(new_window_reference_point_tab, width=55, height=5)
film_window_mark_reference_point_text.insert(INSERT, \
"When clicking the button \"Mark point\" a window showing \n\
the image will appear and you are to click on the marker \n\
made on the film upon irradiation to find the point. When\n\
the mark are made you will see the isocenter in the image.\n\
If you are not happy with the placement click the button \n\
again and repeat.")
film_window_mark_reference_point_text.config(bg='#ffffff', relief=FLAT, bd=0, state=DISABLED, font=('calibri', '11'))
film_window_mark_reference_point_text.grid(row=0, column=3, rowspan=3, sticky=N+S+E+W, padx=(10,10), pady=(5,0))
new_window_reference_point_tab.columnconfigure(2, weight=0)
new_window_reference_point_tab.rowconfigure(2, weight=0)
mark_isocenter_button_frame = tk.Frame(new_window_isocenter_tab)
mark_isocenter_button_frame.grid(row=3, column=3, padx=(10,10), pady=(0,10))
mark_isocenter_button_frame.configure(bg='#ffffff')
new_window_isocenter_tab.grid_columnconfigure(3, weight=0)
new_window_isocenter_tab.grid_rowconfigure(3, weight=0)
mark_isocenter_button = tk.Button(mark_isocenter_button_frame, text='Browse', image=Globals.profiles_mark_isocenter_button_image,\
cursor='hand2',font=('calibri', '14'), relief=FLAT, state=ACTIVE, command=lambda: markIsocenter(img, new_window_isocenter_tab, image_canvas, cv2Img))
mark_isocenter_button.pack(expand=True, fill=BOTH)
mark_isocenter_button.config(bg='#ffffff', activebackground='#ffffff', activeforeground='#ffffff', highlightthickness=0)
mark_isocenter_button.image=Globals.profiles_mark_isocenter_button_image
mark_point_button_frame = tk.Frame(new_window_reference_point_tab)
mark_point_button_frame.grid(row=3, column=3, padx=(10,10), pady=(30,0))
mark_point_button_frame.configure(bg='#ffffff')
new_window_reference_point_tab.grid_columnconfigure(3, weight=0)
new_window_reference_point_tab.grid_rowconfigure(3, weight=0)
mark_point_button = tk.Button(mark_point_button_frame, text='Browse', image=Globals.profiles_mark_point_button_image,\
cursor='hand2',font=('calibri', '14'), relief=FLAT, state=ACTIVE, command=lambda: \
markReferencePoint(img, new_window_reference_point_tab, image_canvas_reference_tab, cv2Img))
mark_point_button.pack(expand=True, fill=BOTH)
mark_point_button.config(bg='#ffffff', activebackground='#ffffff', activeforeground='#ffffff', highlightthickness=0)
mark_point_button.image=Globals.profiles_mark_point_button_image
write_displacement_relative_to_reference_point = tk.Text(new_window_reference_point_tab, width = 55, height=3)
write_displacement_relative_to_reference_point.insert(INSERT, "\
If the marked reference points in the film does not match\n\
the reference point in the phantom you can write the\n\
displacemnet here (in mm). Defaults to zero ")
write_displacement_relative_to_reference_point.grid(row=4, column=3, rowspan=2, sticky=N+S+E+W, padx=(10,10), pady=(0,10))
write_displacement_relative_to_reference_point.config(bg='#ffffff', relief=FLAT, bd=0, state=DISABLED, font=('calibri', '11'))
new_window_reference_point_tab.grid_rowconfigure(6, weight=0)
new_window_reference_point_tab.grid_columnconfigure(6, weight=0)
input_lateral_text = tk.Text(new_window_reference_point_tab, width=12, height=1)
input_lateral_text.insert(INSERT, "Lateral:")
input_lateral_text.config(bg='#ffffff', relief=FLAT, bd=0, state=DISABLED, font=('calibri', '10'))
input_lateral_text.grid(row=5, column=3, sticky=N+S, padx=(0,250), pady=(25,0))
new_window_reference_point_tab.grid_rowconfigure(10, weight=0)
new_window_reference_point_tab.grid_rowconfigure(10, weight=0)
Globals.DVH_input_lateral_displacement = tk.Text(new_window_reference_point_tab, width=5, height=1)
Globals.DVH_input_lateral_displacement.insert(INSERT, " ")
Globals.DVH_input_lateral_displacement.config(bg='#E5f9ff', relief=GROOVE, bd=2, state=NORMAL, font=('calibri', '11'))
Globals.DVH_input_lateral_displacement.grid(row=5, column=3, padx=(0,285), pady=(35,0))
new_window_reference_point_tab.grid_rowconfigure(7, weight=0)
new_window_reference_point_tab.grid_columnconfigure(7, weight=0)
input_vertical_text = tk.Text(new_window_reference_point_tab, width=12, height=1)
input_vertical_text.insert(INSERT, "Vertical:")
input_vertical_text.config(bg='#ffffff', relief=FLAT, bd=0, state=DISABLED, font=('calibri', '10'))
input_vertical_text.grid(row=5, column=3, sticky=N+S, padx=(0,0), pady=(25,0))
new_window_reference_point_tab.grid_rowconfigure(11, weight=0)
new_window_reference_point_tab.grid_rowconfigure(11, weight=0)
Globals.DVH_input_vertical_displacement = tk.Text(new_window_reference_point_tab, width=4, height=1)
Globals.DVH_input_vertical_displacement.insert(INSERT, " ")
Globals.DVH_input_vertical_displacement.config(bg='#E5f9ff', relief=GROOVE, bd=2, state=NORMAL, font=('calibri', '11'))
Globals.DVH_input_vertical_displacement.grid(row=5, column=3, padx=(0,25), pady=(35,0))
new_window_reference_point_tab.grid_rowconfigure(8, weight=0)
new_window_reference_point_tab.grid_columnconfigure(8, weight=0)
input_long_text = tk.Text(new_window_reference_point_tab, width=12, height=1)
input_long_text.insert(INSERT, "Longitudinal:")
input_long_text.config(bg='#ffffff', relief=FLAT, bd=0, state=DISABLED, font=('calibri', '10'))
input_long_text.grid(row=5, column=3, sticky=N+S, padx=(250,0), pady=(25,0))
new_window_reference_point_tab.grid_rowconfigure(12, weight=0)
new_window_reference_point_tab.grid_rowconfigure(12, weight=0)
Globals.DVH_input_longitudinal_displacement = tk.Text(new_window_reference_point_tab, width=5, height=1)
Globals.DVH_input_longitudinal_displacement.insert(INSERT, " ")
Globals.DVH_input_longitudinal_displacement.config(bg='#E5f9ff', relief=GROOVE, bd=2, state=NORMAL, font=('calibri', '11'))
Globals.DVH_input_longitudinal_displacement.grid(row=5, column=3, padx=(240,0), pady=(35,0))
new_window_reference_point_tab.grid_rowconfigure(9, weight=0)
new_window_reference_point_tab.grid_columnconfigure(9, weight=0)
film_window_mark_ROI_text = tk.Text(new_window_isocenter_tab, width=55, height=7)
film_window_mark_ROI_text.insert(INSERT, \
"When clicking the button \"Mark ROI\" a window showing the\n\
image will appear and you are to drag a rectangle marking \n\
the region of interest. Fidora will assume the film has been\n\
scanned in either portrait or landscape orientation. When\n\
the ROI has been marked it will appear on the image. If you\n\
are not happy with the placement click the button again.")
film_window_mark_ROI_text.config(bg='#ffffff', relief=FLAT, bd=0, state=DISABLED, font=('calibri', '11'))
film_window_mark_ROI_text.grid(row=5, column=3, rowspan=4, sticky=N+S+E+W, padx=(10,10), pady=(0,0))
new_window_isocenter_tab.grid_columnconfigure(4, weight=0)
new_window_isocenter_tab.grid_rowconfigure(4, weight=0)
film_window_mark_ROI_reference_point_text = tk.Text(new_window_reference_point_tab, width=55, height=5)
film_window_mark_ROI_reference_point_text.insert(INSERT, \
"When clicking the button \"Mark ROI\" a window showing the\n\
image will appear and you are to drag a rectangle marking \n\
the region of interest. Fidora will assume the film has been\n\
scanned in either portrait or landscape orientation. When\n\
the ROI has been marked it will appear on the image. If you\n\
are not happy with the placement click the button again.")
film_window_mark_ROI_reference_point_text.config(bg='#ffffff', relief=FLAT, bd=0, state=DISABLED, font=('calibri', '11'))
film_window_mark_ROI_reference_point_text.grid(row=6, column=3, rowspan=3, sticky=N+E+W, padx=(10,10), pady=(10,0))
new_window_reference_point_tab.grid_columnconfigure(4, weight=0)
new_window_reference_point_tab.grid_rowconfigure(4, weight=0)
mark_ROI_button_frame = tk.Frame(new_window_isocenter_tab)
mark_ROI_button_frame.grid(row=8, column=3, padx=(10,0), pady=(0,5))
mark_ROI_button_frame.configure(bg='#ffffff')
new_window_isocenter_tab.grid_columnconfigure(5, weight=0)
new_window_isocenter_tab.grid_rowconfigure(5, weight=0)
mark_ROI_button = tk.Button(mark_ROI_button_frame, text='Browse', image=Globals.profiles_mark_ROI_button_image,\
cursor='hand2',font=('calibri', '14'), relief=FLAT, state=ACTIVE, command=lambda: markROI(img, new_window_isocenter_tab, image_canvas, False))
mark_ROI_button.pack(expand=True, fill=BOTH)
mark_ROI_button.config(bg='#ffffff', activebackground='#ffffff', activeforeground='#ffffff', highlightthickness=0)
mark_ROI_button.image=Globals.profiles_mark_ROI_button_image
slice_offset_text = tk.Text(new_window_isocenter_tab, width=25, height=1)
slice_offset_text.insert(INSERT, "Slice offset, mm (default 0):")
slice_offset_text.config(state=DISABLED, font=('calibri', '10'), bd = 0, relief=FLAT)
slice_offset_text.grid(row=9, column=3, padx=(5,110), pady=(0,0))
new_window_isocenter_tab.grid_columnconfigure(6, weight=0)
new_window_isocenter_tab.grid_rowconfigure(6, weight=0)
Globals.DVH_slice_offset = tk.Text(new_window_isocenter_tab, width=8, height=1)
Globals.DVH_slice_offset.grid(row=9, column=3, padx=(110,10), pady=(0,0))
Globals.DVH_slice_offset.insert(INSERT, " ")
Globals.DVH_slice_offset.config(state=NORMAL, font=('calibri', '10'), bd = 2, bg='#ffffff')
new_window_isocenter_tab.grid_columnconfigure(7, weight=0)
new_window_isocenter_tab.grid_rowconfigure(7, weight=0)
mark_ROI_button_reference_point_frame = tk.Frame(new_window_reference_point_tab)
mark_ROI_button_reference_point_frame.grid(row=9, column=3, padx=(10,10), pady=(0,5))
mark_ROI_button_reference_point_frame.configure(bg='#ffffff')
new_window_reference_point_tab.grid_columnconfigure(5, weight=0)
new_window_reference_point_tab.grid_rowconfigure(5, weight=0)
mark_ROI_reference_point_button = tk.Button(mark_ROI_button_reference_point_frame, text='Browse', image=Globals.profiles_mark_ROI_button_image,\
cursor='hand2',font=('calibri', '14'), relief=FLAT, state=ACTIVE, command=lambda: markROI(img, new_window_reference_point_tab, image_canvas_reference_tab, True))
mark_ROI_reference_point_button.pack(expand=True, fill=BOTH)
mark_ROI_reference_point_button.config(bg='#ffffff', activebackground='#ffffff', activeforeground='#ffffff', highlightthickness=0)
mark_ROI_reference_point_button.image=Globals.profiles_mark_ROI_button_image
def finishFilmMarkers(ref_test):
Globals.DVH_slice_offset.config(state=DISABLED)
if(ref_test):
if(not(Globals.DVH_input_lateral_displacement.get("1.0",'end-1c')==" ")):
try:
test = float(Globals.DVH_input_lateral_displacement.get("1.0",'end-1c'))
Globals.DVH_lateral = test
except:
messagebox.showerror("Error", "The displacements must be numbers\n (Code: lateral displacement)")
return
else:
Globals.DVH_lateral = 0
if(not(Globals.DVH_input_longitudinal_displacement.get("1.0",'end-1c')==" ")):
try:
test = float(Globals.DVH_input_longitudinal_displacement.get("1.0", 'end-1c'))
Globals.DVH_longitudinal = test
except:
messagebox.showerror("Error", "The displacements must be numbers\n (Code: longitudinal displacement)")
return
else:
Globals.DVH_longitudinal = 0
if(not(Globals.DVH_input_vertical_displacement.get("1.0",'end-1c')==" ")):
try:
test = float(Globals.DVH_input_vertical_displacement.get("1.0", 'end-1c'))
Globals.DVH_vertical = test
except:
messagebox.showerror("Error", "The displacements must be numbers\n (Code: vertical displacement)")
return
else:
Globals.DVH_vertical = 0
Globals.DVH_input_vertical_displacement.config(state=DISABLED)
Globals.DVH_input_longitudinal_displacement.config(state=DISABLED)
Globals.DVH_input_lateral_displacement.config(state=DISABLED)
else:
if not Globals.DVH_slice_offset.get("1.0",'end-1c')==" ":
try:
offset = float(Globals.DVH_slice_offset.get("1.0",'end-1c'))
Globals.DVH_offset = offset
except:
messagebox.showerror("Error", "Slice offset must be a number \n(Code: finishFilmMarkers(false)")
return
else:
Globals.DVH_offset = 0
if(ref_test):
choose_batch_window = tk.Toplevel(new_window_reference_point_tab)
else:
choose_batch_window = tk.Toplevel(new_window_isocenter_tab)
choose_batch_window.geometry("670x380+50+50")
choose_batch_window.grab_set()
choose_batch_frame = tk.Frame(choose_batch_window)
choose_batch_frame.pack(expand=True, fill=BOTH)
choose_batch_frame.configure(bg='#ffffff')
batch_cnt = 0
weight_cnt = 0
read = open('calibration.txt', 'r')
lines = read.readlines()
read.close()
row_cnt=0
for l in lines:
words = l.split()
line = "Batch nr. : " + words[2] + ". Date: " + words[0] + " " + words[1] + "."
write_batch_nr = tk.Text(choose_batch_frame, width=10, height=1)
write_batch_nr.grid(row=row_cnt, column=0, sticky=N+S+W+E, padx=(10,5), pady=(10,10))
choose_batch_frame.grid_columnconfigure(weight_cnt, weight=0)
choose_batch_frame.grid_rowconfigure(weight_cnt, weight=0)
write_batch_nr.insert(INSERT, "Batch nr.: ")
write_batch_nr.config(state=DISABLED, bd = 0, font=('calibri', '12', 'bold'))
weight_cnt+=1
write_batch = tk.Text(choose_batch_frame, width=20, height=1)
write_batch.grid(row=row_cnt, column=1, sticky=N+S+W+E, padx=(10,5), pady=(10,10))
choose_batch_frame.grid_columnconfigure(weight_cnt, weight=0)
choose_batch_frame.grid_rowconfigure(weight_cnt, weight=0)
write_batch.insert(INSERT, words[2])
write_batch.config(state=DISABLED, bd = 0, font=('calibri', '12'))
weight_cnt+=1
write_batch_date = tk.Text(choose_batch_frame, width=8, height=1)
write_batch_date.grid(row=row_cnt, column=2, sticky=N+S+W+E, padx=(10,5), pady=(10,10))
choose_batch_frame.grid_columnconfigure(weight_cnt, weight=0)
choose_batch_frame.grid_rowconfigure(weight_cnt, weight=0)
write_batch_date.insert(INSERT, "Date: ")
write_batch_date.config(state=DISABLED, bd = 0, font=('calibri', '12', 'bold'))
weight_cnt+=1
write_date = tk.Text(choose_batch_frame, width=30, height=1)
write_date.grid(row=row_cnt, column=3, sticky=N+S+W+E, padx=(10,5), pady=(10,10))
choose_batch_frame.grid_columnconfigure(weight_cnt, weight=0)
choose_batch_frame.grid_rowconfigure(weight_cnt, weight=0)
write_date.insert(INSERT, words[0] + ", " + words[1] + "")
write_date.config(state=DISABLED, bd = 0, font=('calibri', '12'))
weight_cnt+=1
Radiobutton(choose_batch_frame, text='',bg='#ffffff', cursor='hand2',font=('calibri', '14'), \
variable=Globals.DVH_film_batch, value=batch_cnt).grid(row=row_cnt, \
column=4, sticky=N+S+W+E, padx=(5,5), pady=(10,10))
choose_batch_frame.grid_columnconfigure(weight_cnt, weight=0)
choose_batch_frame.grid_rowconfigure(weight_cnt, weight=0)
weight_cnt+=1;row_cnt+=1;batch_cnt+=1
def set_batch():
    """Apply the calibration batch selected in the batch-chooser window.

    Reads the chosen batch's red-channel calibration coefficients from
    'calibration.txt', converts the scanned film (both the ROI crop and the
    full image) from pixel values to dose, and pushes the resulting images
    onto the DVH canvases. Finally closes both pop-up windows.

    Relies on closure variables from the enclosing scope:
    choose_batch_window, scaled_image_visual, film_scanned_image_text_canvas,
    film_dose_map_image_text_canvas, new_window.
    """
    choose_batch_window.destroy()
    # Line index in calibration.txt == selected radiobutton value.
    f = open('calibration.txt', 'r')
    lines = f.readlines()
    words = lines[Globals.DVH_film_batch.get()].split()
    # Columns 3..5 hold the fit parameters (a, b, c) for the red channel.
    Globals.DVH_popt_red[0] = float(words[3])
    Globals.DVH_popt_red[1] = float(words[4])
    Globals.DVH_popt_red[2] = float(words[5])
    f.close()
    # Pixel -> dose conversion for the ROI crop (element-wise).
    Globals.DVH_film_dataset_ROI_red_channel_dose = np.zeros((Globals.DVH_film_dataset_ROI_red_channel.shape[0],\
        Globals.DVH_film_dataset_ROI_red_channel.shape[1]))
    for i in range(Globals.DVH_film_dataset_ROI_red_channel_dose.shape[0]):
        for j in range(Globals.DVH_film_dataset_ROI_red_channel_dose.shape[1]):
            Globals.DVH_film_dataset_ROI_red_channel_dose[i,j] = Globals.DVH_film_factor*\
                pixel_to_dose(Globals.DVH_film_dataset_ROI_red_channel[i,j], \
                Globals.DVH_popt_red[0], Globals.DVH_popt_red[1], Globals.DVH_popt_red[2])
    # Same conversion for the full (uncropped) scanned film.
    Globals.DVH_film_dataset_red_channel_dose = np.zeros((Globals.DVH_film_dataset_red_channel.shape[0],\
        Globals.DVH_film_dataset_red_channel.shape[1]))
    for i in range(Globals.DVH_film_dataset_red_channel_dose.shape[0]):
        for j in range(Globals.DVH_film_dataset_red_channel_dose.shape[1]):
            Globals.DVH_film_dataset_red_channel_dose[i,j] = Globals.DVH_film_factor*\
                pixel_to_dose(Globals.DVH_film_dataset_red_channel[i,j], \
                Globals.DVH_popt_red[0], Globals.DVH_popt_red[1], Globals.DVH_popt_red[2])
    # Show the raw scan; keep a reference on the canvas so Tk does not
    # garbage-collect the PhotoImage.
    Globals.DVH_film_write_image.create_image(0,0,image=scaled_image_visual, anchor="nw")
    Globals.DVH_film_write_image.image = scaled_image_visual
    # Normalize the dose map to its maximum and render it with the
    # viridis colormap as an 8-bit RGB image.
    mx_film=np.max(Globals.DVH_film_dataset_ROI_red_channel_dose)
    Globals.DVH_max_dose_film = mx_film
    img_film = Globals.DVH_film_dataset_ROI_red_channel_dose
    img_film = img_film/mx_film
    PIL_img_film = Image.fromarray(np.uint8(cm.viridis(img_film)*255))
    scaled_image_visual_film = ImageTk.PhotoImage(image=PIL_img_film)
    Globals.DVH_film_dose_write_image.create_image(0,0,image=scaled_image_visual_film, anchor="nw")
    Globals.DVH_film_dose_write_image.image = scaled_image_visual_film
    # Caption images next to the two canvases (references kept alive).
    film_scanned_image_text_canvas.create_image(0,0,image=Globals.profiles_scanned_image_text_image, anchor="nw")
    film_scanned_image_text_canvas.image = Globals.profiles_scanned_image_text_image
    film_dose_map_image_text_canvas.create_image(0,0, image=Globals.profiles_film_dose_map_text_image, anchor="nw")
    film_dose_map_image_text_canvas.image=Globals.profiles_film_dose_map_text_image
    new_window.destroy()
set_batch_button_frame = tk.Frame(choose_batch_frame)
set_batch_button_frame.grid(row=row_cnt, column=1, columnspan=3, padx=(10,0), pady=(5,5))
set_batch_button_frame.configure(bg='#ffffff')
choose_batch_frame.grid_columnconfigure(weight_cnt, weight=0)
choose_batch_frame.grid_rowconfigure(weight_cnt, weight=0)
set_batch_button = tk.Button(set_batch_button_frame, text='OK', image=Globals.done_button_image, cursor='hand2',\
font=('calibri', '14'), relief=FLAT, state=ACTIVE, command=set_batch)
set_batch_button.pack(expand=True, fill=BOTH)
set_batch_button.image=Globals.done_button_image
img_ROI = Globals.DVH_film_dataset[Globals.DVH_ROI_coords[0][1]:Globals.DVH_ROI_coords[2][1],\
Globals.DVH_ROI_coords[0][0]:Globals.DVH_ROI_coords[1][0], :]
img_ROI_red_channel = img_ROI[:,:,2]
Globals.DVH_film_variable_ROI_coords = [Globals.DVH_ROI_coords[0][1], Globals.DVH_ROI_coords[2][1],\
Globals.DVH_ROI_coords[0][0], Globals.DVH_ROI_coords[1][0]]
Globals.DVH_film_dataset_ROI = img_ROI
Globals.DVH_film_dataset_ROI_red_channel = img_ROI_red_channel
R = img_ROI[:,:,2];B = img_ROI[:,:,0]; G = img_ROI[:,:,1]
img_ROI_RGB = np.zeros(img_ROI.shape)
img_ROI_RGB[:,:,0]=R; img_ROI_RGB[:,:,1]=G; img_ROI_RGB[:,:,2]=B
PIL_img_ROI = (img_ROI_RGB/256).astype('uint8')
PIL_img_ROI = Image.fromarray(PIL_img_ROI, 'RGB')
#PIL_img_ROI = Image.fromarray((img_ROI_RGB * 255).astype(np.uint8), 'RGB')
wid = PIL_img_ROI.width;heig = PIL_img_ROI.height
#film_window_write_image = tk.Canvas(film_window_scroll_frame)
film_image_canvas = tk.Canvas(Globals.DVH_film_panedwindow)
film_image_canvas.grid(row=0,column=0, sticky=N+S+W+E)
Globals.DVH_film_panedwindow.add(film_image_canvas, \
height=max(heig,Globals.profiles_scanned_image_text_image.height()), \
width=wid + Globals.profiles_scanned_image_text_image.width())
film_image_canvas.config(bg='#ffffff', relief=FLAT, highlightthickness=0, \
height=max(heig,Globals.profiles_scanned_image_text_image.height()), \
width=wid + Globals.profiles_scanned_image_text_image.width())
film_dose_canvas = tk.Canvas(Globals.DVH_film_panedwindow)
film_dose_canvas.grid(row=1,column=0, sticky=N+S+W+E)
Globals.DVH_film_panedwindow.add(film_dose_canvas, \
height=max(heig,Globals.profiles_film_dose_map_text_image.height()), \
width=wid + Globals.profiles_film_dose_map_text_image.width())
film_dose_canvas.config(bg='#ffffff', relief=FLAT, highlightthickness=0, \
height=max(heig,Globals.profiles_film_dose_map_text_image.height()), \
width=wid + Globals.profiles_film_dose_map_text_image.width())
Globals.DVH_film_write_image = tk.Canvas(film_image_canvas)
Globals.DVH_film_write_image.grid(row=0,column=1,sticky=N+S+W+E)
Globals.DVH_film_write_image.config(bg='#ffffff', relief=FLAT, highlightthickness=0, width=wid, height=heig)
Globals.DVH_film_dose_write_image = tk.Canvas(film_dose_canvas)
Globals.DVH_film_dose_write_image.grid(row=0,column=1,sticky=N+S+W+E)
Globals.DVH_film_dose_write_image.config(bg='#ffffff', relief=FLAT, highlightthickness=0, width=wid, height=heig)
film_scanned_image_text_canvas=tk.Canvas(film_image_canvas)
film_scanned_image_text_canvas.grid(row=0,column=0,sticky=N+S+W+E)
film_scanned_image_text_canvas.config(bg='#ffffff', relief=FLAT, highlightthickness=0, \
height=Globals.profiles_scanned_image_text_image.height(), width=Globals.profiles_scanned_image_text_image.width())
film_dose_map_image_text_canvas=tk.Canvas(film_dose_canvas)
film_dose_map_image_text_canvas.grid(row=0,column=0,sticky=N+S+W+E)
film_dose_map_image_text_canvas.config(bg='#ffffff', relief=FLAT, highlightthickness=0, \
height=Globals.profiles_film_dose_map_text_image.height(), width=Globals.profiles_film_dose_map_text_image.width())
scaled_image_visual = PIL_img_ROI
scaled_image_visual = ImageTk.PhotoImage(image=scaled_image_visual)
Globals.DVH_upload_button_doseplan.config(state=DISABLED)
Globals.DVH_upload_button_rtplan.config(state=ACTIVE)
Globals.DVH_upload_button_film.config(state=DISABLED)
#Beregne avstand mellom ROI og isocenter gitt i mm
# [top left[mot venstre, oppover], top right[mot venstre (høyre blir negativ), oppover], bottom left, bottom right]
if(ref_test):
Globals.DVH_distance_reference_point_ROI.append([(Globals.DVH_film_reference_point[0]-Globals.DVH_ROI_coords[0][0])*0.2, \
(Globals.DVH_film_reference_point[1] -Globals.DVH_ROI_coords[0][1])*0.2])
Globals.DVH_distance_reference_point_ROI.append([(Globals.DVH_film_reference_point[0] - Globals.DVH_ROI_coords[1][0])*0.2,\
(Globals.DVH_film_reference_point[1] - Globals.DVH_ROI_coords[1][1])*0.2])
Globals.DVH_distance_reference_point_ROI.append([(Globals.DVH_film_reference_point[0] - Globals.DVH_ROI_coords[2][0])*0.2,\
(Globals.DVH_film_reference_point[1] - Globals.DVH_ROI_coords[2][1])*0.2])
Globals.DVH_distance_reference_point_ROI.append([(Globals.DVH_film_reference_point[0] - Globals.DVH_ROI_coords[3][0])*0.2,\
(Globals.DVH_film_reference_point[1] - Globals.DVH_ROI_coords[3][1])*0.2])
Globals.DVH_isocenter_or_reference_point = "Ref_point"
else:
Globals.DVH_distance_isocenter_ROI.append([(Globals.DVH_film_isocenter[0]-Globals.DVH_ROI_coords[0][0])*0.2, \
(Globals.DVH_film_isocenter[1] -Globals.DVH_ROI_coords[0][1])*0.2])
Globals.DVH_distance_isocenter_ROI.append([(Globals.DVH_film_isocenter[0] - Globals.DVH_ROI_coords[1][0])*0.2,\
(Globals.DVH_film_isocenter[1] - Globals.DVH_ROI_coords[1][1])*0.2])
Globals.DVH_distance_isocenter_ROI.append([(Globals.DVH_film_isocenter[0] - Globals.DVH_ROI_coords[2][0])*0.2,\
(Globals.DVH_film_isocenter[1] - Globals.DVH_ROI_coords[2][1])*0.2])
Globals.DVH_distance_isocenter_ROI.append([(Globals.DVH_film_isocenter[0] - Globals.DVH_ROI_coords[3][0])*0.2,\
(Globals.DVH_film_isocenter[1] - Globals.DVH_ROI_coords[3][1])*0.2])
Globals.DVH_isocenter_or_reference_point = "Isocenter"
done_button_frame = tk.Frame(new_window_isocenter_tab)
done_button_frame.grid(row=10, column=3, padx=(10,10), pady=(5,5), sticky=N+S+W+E)
done_button_frame.configure(bg='#ffffff')
new_window_isocenter_tab.grid_columnconfigure(5, weight=0)
new_window_isocenter_tab.grid_rowconfigure(5, weight=0)
Globals.DVH_done_button = tk.Button(done_button_frame, text='Done', image=Globals.done_button_image,\
cursor='hand2', font=('calibri', '14'), relief=FLAT, state=DISABLED, command=lambda: finishFilmMarkers(False))
Globals.DVH_done_button.pack(expand=True, fill=BOTH)
Globals.DVH_done_button.config(bg='#ffffff', activebackground='#ffffff', activeforeground='#ffffff', highlightthickness=0)
Globals.DVH_done_button.image=Globals.done_button_image
done_button_reference_point_frame = tk.Frame(new_window_reference_point_tab)
done_button_reference_point_frame.grid(row=10, column=3, padx=(10,10), pady=(5,5), sticky=N+S+W+E)
done_button_reference_point_frame.configure(bg='#ffffff')
new_window_reference_point_tab.grid_columnconfigure(5, weight=0)
new_window_reference_point_tab.grid_rowconfigure(5, weight=0)
Globals.DVH_done_button_reference_point= tk.Button(done_button_reference_point_frame, text='Done', image=Globals.done_button_image,\
cursor='hand2', font=('calibri', '14'), relief=FLAT, state=DISABLED, command=lambda: finishFilmMarkers(True))
Globals.DVH_done_button_reference_point.pack(expand=True, fill=BOTH)
Globals.DVH_done_button_reference_point.config(bg='#ffffff', activebackground='#ffffff', activeforeground='#ffffff', highlightthickness=0)
Globals.DVH_done_button_reference_point.image=Globals.done_button_image
elif(ext==""):
return
else:
messagebox.showerror("Error", "The file must be a *.tif file")
def help_showPlanes():
    """Open a modal pop-up on tab 5 displaying the 'planes' help illustration."""
    help_img = Globals.profiles_showPlanes_image
    img_w = help_img.width()
    img_h = help_img.height()
    popup = tk.Toplevel(Globals.tab5)
    popup.geometry("%dx%d+0+0" % (img_w, img_h))
    popup.grab_set()  # make the pop-up modal until dismissed
    help_canvas = tk.Canvas(popup)
    help_canvas.config(relief=FLAT, bg='#ffffff', highlightthickness=0)
    help_canvas.create_image(0, 0, image=help_img, anchor='nw')
    help_canvas.pack(expand=True, fill=BOTH)
|
[
"Globals.profiles_doseplan_text_image.width",
"Globals.DVH_doseplans_filenames.append",
"Globals.DVH_input_lateral_displacement.config",
"Globals.profiles_showPlanes_image.width",
"Globals.DVH_distance_reference_point_ROI.append",
"Globals.DCH_film_orientation.get",
"os.path.dirname",
"tkinter.filedialog.askopenfilename",
"tkinter.ttk.Frame",
"numpy.max",
"Globals.profiles_film_dose_map_text_image.height",
"Globals.DVH_doseplans_scroll_frame.grid_rowconfigure",
"Globals.DVH_doseplan_dataset_ROI_several.append",
"tkinter.messagebox.showinfo",
"Globals.DVH_input_vertical_displacement.config",
"cv2.flip",
"Globals.DVH_doseplans_factor_input.append",
"tkinter.ttk.Notebook",
"Globals.DVH_slice_offset.get",
"PIL.ImageTk.PhotoImage",
"PIL.Image.open",
"Globals.DVH_mark_isocenter_up_down_line.append",
"Globals.DVH_done_button.config",
"Globals.DVH_input_lateral_displacement.get",
"tkinter.messagebox.askyesno",
"Globals.profiles_doseplan_text_image.height",
"tkinter.Text",
"Globals.DVH_slice_offset.grid",
"Globals.DVH_film_factor_input.get",
"Globals.profiles_film_dose_map_text_image.width",
"Globals.DVH_input_longitudinal_displacement.grid",
"os.chdir",
"Globals.DVH_film_write_image.grid",
"tkinter.Toplevel",
"os.path.normpath",
"Globals.DVH_mark_reference_point_oval.append",
"Globals.DVH_input_vertical_displacement.grid",
"Globals.DVH_film_orientation.get",
"Globals.DVH_input_longitudinal_displacement.insert",
"tkinter.Canvas",
"Globals.DVH_film_orientation_menu.configure",
"tkinter.Scrollbar",
"Globals.DVH_film_dose_write_image.create_image",
"Globals.DVH_distance_isocenter_ROI.append",
"os.path.splitext",
"Globals.DVH_mark_ROI_rectangle.append",
"PIL.Image.fromarray",
"Globals.DVH_doseplans_scroll_frame.grid_columnconfigure",
"Globals.DVH_film_write_image.config",
"Globals.DVH_film_factor_input.config",
"Globals.DVH_upload_button_doseplan.config",
"Globals.DVH_done_button.pack",
"Globals.DVH_film_dose_write_image.config",
"Globals.DVH_input_longitudinal_displacement.config",
"Globals.DVH_done_button_reference_point.config",
"numpy.clip",
"Globals.DVH_upload_button_rtplan.config",
"Globals.DVH_iscoenter_coords.append",
"tkinter.Frame",
"Globals.DVH_doseplan_write_image.create_image",
"Globals.profiles_scanned_image_text_image.width",
"Globals.DVH_ROI_coords.append",
"Globals.DVH_slice_offset.insert",
"Globals.profiles_scanned_image_text_image.height",
"Globals.DVH_film_write_image.create_image",
"Globals.profiles_showPlanes_image.height",
"numpy.swapaxes",
"cv2.resize",
"Globals.DVH_input_lateral_displacement.insert",
"pydicom.dcmread",
"Globals.DVH_film_batch.get",
"Globals.DVH_input_vertical_displacement.get",
"tkinter.messagebox.showerror",
"os.getcwd",
"numpy.zeros",
"tkinter.Radiobutton",
"Globals.DVH_done_button_reference_point.pack",
"cv2.medianBlur",
"Globals.DVH_doseplans_factor_text.append",
"numpy.round",
"tkinter.Button",
"Globals.DVH_doseplan_write_image.config",
"Globals.DVH_mark_isocenter_oval.append",
"Globals.DVH_slice_offset.config",
"matplotlib.cm.viridis",
"Globals.DVH_input_lateral_displacement.grid",
"tkinter.messagebox.askokcancel",
"Globals.DVH_upload_button_film.config",
"Globals.DVH_mark_isocenter_right_left_line.append",
"Globals.DVH_input_vertical_displacement.insert",
"Globals.DVH_doseplan_write_image.grid",
"Globals.DVH_film_dose_write_image.grid",
"Globals.DVH_input_longitudinal_displacement.get"
] |
[((2165, 2176), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (2173, 2176), True, 'import numpy as np\n'), ((6927, 6938), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (6935, 6938), True, 'import numpy as np\n'), ((9940, 9951), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (9948, 9951), True, 'import numpy as np\n'), ((28706, 28717), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (28714, 28717), True, 'import numpy as np\n'), ((32116, 32127), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (32124, 32127), True, 'import numpy as np\n'), ((50068, 50096), 'tkinter.filedialog.askopenfilename', 'filedialog.askopenfilename', ([], {}), '()\n', (50094, 50096), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((50346, 50357), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (50355, 50357), False, 'import os\n'), ((50371, 50392), 'os.path.dirname', 'os.path.dirname', (['file'], {}), '(file)\n', (50386, 50392), False, 'import os\n'), ((50397, 50413), 'os.chdir', 'os.chdir', (['parent'], {}), '(parent)\n', (50405, 50413), False, 'import os\n'), ((50428, 50449), 'pydicom.dcmread', 'pydicom.dcmread', (['file'], {}), '(file)\n', (50443, 50449), False, 'import pydicom\n'), ((50887, 50911), 'os.chdir', 'os.chdir', (['current_folder'], {}), '(current_folder)\n', (50895, 50911), False, 'import os\n'), ((55491, 55602), 'tkinter.messagebox.askyesno', 'messagebox.askyesno', (['"""Question"""', '"""Are you going to upload several doseplans and/or use a factor on a plan?"""'], {}), "('Question',\n 'Are you going to upload several doseplans and/or use a factor on a plan?')\n", (55510, 55602), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, 
FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((55695, 55727), 'tkinter.Toplevel', 'tk.Toplevel', (['Globals.tab5_canvas'], {}), '(Globals.tab5_canvas)\n', (55706, 55727), True, 'import tkinter as tk\n'), ((55859, 55912), 'tkinter.Frame', 'tk.Frame', (['several_doseplans_window'], {'bd': '(0)', 'relief': 'FLAT'}), '(several_doseplans_window, bd=0, relief=FLAT)\n', (55867, 55912), True, 'import tkinter as tk\n'), ((55945, 55977), 'tkinter.Canvas', 'Canvas', (['doseplans_over_all_frame'], {}), '(doseplans_over_all_frame)\n', (55951, 55977), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((56006, 56106), 'tkinter.Scrollbar', 'Scrollbar', (['doseplans_over_all_frame'], {'orient': 'HORIZONTAL', 'command': 'doseplans_over_all_canvas.xview'}), '(doseplans_over_all_frame, orient=HORIZONTAL, command=\n doseplans_over_all_canvas.xview)\n', (56015, 56106), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((56129, 56205), 'tkinter.Scrollbar', 'Scrollbar', (['doseplans_over_all_frame'], {'command': 'doseplans_over_all_canvas.yview'}), '(doseplans_over_all_frame, command=doseplans_over_all_canvas.yview)\n', (56138, 56205), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((56248, 56284), 'tkinter.ttk.Frame', 'ttk.Frame', (['doseplans_over_all_canvas'], {}), '(doseplans_over_all_canvas)\n', (56257, 56284), False, 'from tkinter import 
filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((57461, 57505), 'tkinter.Frame', 'tk.Frame', (['Globals.DVH_doseplans_scroll_frame'], {}), '(Globals.DVH_doseplans_scroll_frame)\n', (57469, 57505), True, 'import tkinter as tk\n'), ((57605, 57673), 'Globals.DVH_doseplans_scroll_frame.grid_columnconfigure', 'Globals.DVH_doseplans_scroll_frame.grid_columnconfigure', (['(0)'], {'weight': '(0)'}), '(0, weight=0)\n', (57660, 57673), False, 'import Globals\n'), ((57678, 57743), 'Globals.DVH_doseplans_scroll_frame.grid_rowconfigure', 'Globals.DVH_doseplans_scroll_frame.grid_rowconfigure', (['(0)'], {'weight': '(0)'}), '(0, weight=0)\n', (57730, 57743), False, 'import Globals\n'), ((61764, 61808), 'tkinter.Frame', 'tk.Frame', (['Globals.DVH_doseplans_scroll_frame'], {}), '(Globals.DVH_doseplans_scroll_frame)\n', (61772, 61808), True, 'import tkinter as tk\n'), ((61964, 62029), 'Globals.DVH_doseplans_scroll_frame.grid_rowconfigure', 'Globals.DVH_doseplans_scroll_frame.grid_rowconfigure', (['(3)'], {'weight': '(0)'}), '(3, weight=0)\n', (62016, 62029), False, 'import Globals\n'), ((62034, 62102), 'Globals.DVH_doseplans_scroll_frame.grid_columnconfigure', 'Globals.DVH_doseplans_scroll_frame.grid_columnconfigure', (['(3)'], {'weight': '(0)'}), '(3, weight=0)\n', (62089, 62102), False, 'import Globals\n'), ((62132, 62322), 'tkinter.Button', 'tk.Button', (['doseplans_done_button_frame'], {'text': '"""Done"""', 'image': 'Globals.done_button_image', 'cursor': '"""hand2"""', 'font': "('calibri', '14')", 'relief': 'FLAT', 'state': 'ACTIVE', 'command': 'closeUploadDoseplans'}), "(doseplans_done_button_frame, text='Done', image=Globals.\n done_button_image, cursor='hand2', font=('calibri', '14'), relief=FLAT,\n state=ACTIVE, command=closeUploadDoseplans)\n", (62141, 62322), True, 'import tkinter as tk\n'), 
((62593, 62656), 'tkinter.Text', 'tk.Text', (['Globals.DVH_doseplans_scroll_frame'], {'width': '(15)', 'height': '(1)'}), '(Globals.DVH_doseplans_scroll_frame, width=15, height=1)\n', (62600, 62656), True, 'import tkinter as tk\n'), ((62895, 62960), 'Globals.DVH_doseplans_scroll_frame.grid_rowconfigure', 'Globals.DVH_doseplans_scroll_frame.grid_rowconfigure', (['(1)'], {'weight': '(0)'}), '(1, weight=0)\n', (62947, 62960), False, 'import Globals\n'), ((62965, 63033), 'Globals.DVH_doseplans_scroll_frame.grid_columnconfigure', 'Globals.DVH_doseplans_scroll_frame.grid_columnconfigure', (['(1)'], {'weight': '(0)'}), '(1, weight=0)\n', (63020, 63033), False, 'import Globals\n'), ((63054, 63117), 'tkinter.Text', 'tk.Text', (['Globals.DVH_doseplans_scroll_frame'], {'width': '(30)', 'height': '(2)'}), '(Globals.DVH_doseplans_scroll_frame, width=30, height=2)\n', (63061, 63117), True, 'import tkinter as tk\n'), ((63408, 63476), 'Globals.DVH_doseplans_scroll_frame.grid_columnconfigure', 'Globals.DVH_doseplans_scroll_frame.grid_columnconfigure', (['(2)'], {'weight': '(0)'}), '(2, weight=0)\n', (63463, 63476), False, 'import Globals\n'), ((63480, 63545), 'Globals.DVH_doseplans_scroll_frame.grid_rowconfigure', 'Globals.DVH_doseplans_scroll_frame.grid_rowconfigure', (['(2)'], {'weight': '(0)'}), '(2, weight=0)\n', (63532, 63545), False, 'import Globals\n'), ((63578, 63606), 'tkinter.filedialog.askopenfilename', 'filedialog.askopenfilename', ([], {}), '()\n', (63604, 63606), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((63856, 63867), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (63865, 63867), False, 'import os\n'), ((63881, 63902), 'os.path.dirname', 'os.path.dirname', (['file'], {}), '(file)\n', (63896, 63902), False, 'import os\n'), ((63907, 63923), 'os.chdir', 
'os.chdir', (['parent'], {}), '(parent)\n', (63915, 63923), False, 'import os\n'), ((63938, 63959), 'pydicom.dcmread', 'pydicom.dcmread', (['file'], {}), '(file)\n', (63953, 63959), False, 'import pydicom\n'), ((63964, 63988), 'os.chdir', 'os.chdir', (['current_folder'], {}), '(current_folder)\n', (63972, 63988), False, 'import os\n'), ((66091, 66146), 'Globals.DVH_upload_button_doseplan.config', 'Globals.DVH_upload_button_doseplan.config', ([], {'state': 'ACTIVE'}), '(state=ACTIVE)\n', (66132, 66146), False, 'import Globals\n'), ((66151, 66206), 'Globals.DVH_upload_button_rtplan.config', 'Globals.DVH_upload_button_rtplan.config', ([], {'state': 'DISABLED'}), '(state=DISABLED)\n', (66190, 66206), False, 'import Globals\n'), ((66818, 66847), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', ([], {'image': 'img'}), '(image=img)\n', (66836, 66847), False, 'from PIL import Image, ImageTk\n'), ((66876, 66913), 'tkinter.Toplevel', 'tk.Toplevel', (['new_window_isocenter_tab'], {}), '(new_window_isocenter_tab)\n', (66887, 66913), True, 'import tkinter as tk\n'), ((67041, 67091), 'tkinter.Frame', 'tk.Frame', (['mark_isocenter_window'], {'bd': '(0)', 'relief': 'FLAT'}), '(mark_isocenter_window, bd=0, relief=FLAT)\n', (67049, 67091), True, 'import tkinter as tk\n'), ((67129, 67166), 'tkinter.Canvas', 'Canvas', (['mark_isocenter_over_all_frame'], {}), '(mark_isocenter_over_all_frame)\n', (67135, 67166), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((67200, 67310), 'tkinter.Scrollbar', 'Scrollbar', (['mark_isocenter_over_all_frame'], {'orient': 'HORIZONTAL', 'command': 'mark_isocenter_over_all_canvas.xview'}), '(mark_isocenter_over_all_frame, orient=HORIZONTAL, command=\n mark_isocenter_over_all_canvas.xview)\n', (67209, 67310), False, 'from tkinter import filedialog, 
INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((67338, 67429), 'tkinter.Scrollbar', 'Scrollbar', (['mark_isocenter_over_all_frame'], {'command': 'mark_isocenter_over_all_canvas.yview'}), '(mark_isocenter_over_all_frame, command=\n mark_isocenter_over_all_canvas.yview)\n', (67347, 67429), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((67460, 67501), 'tkinter.ttk.Frame', 'ttk.Frame', (['mark_isocenter_over_all_canvas'], {}), '(mark_isocenter_over_all_canvas)\n', (67469, 67501), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((68760, 68798), 'tkinter.Canvas', 'tk.Canvas', (['mark_isocenter_scroll_frame'], {}), '(mark_isocenter_scroll_frame)\n', (68769, 68798), True, 'import tkinter as tk\n'), ((71568, 71597), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', ([], {'image': 'img'}), '(image=img)\n', (71586, 71597), False, 'from PIL import Image, ImageTk\n'), ((71632, 71675), 'tkinter.Toplevel', 'tk.Toplevel', (['new_window_reference_point_tab'], {}), '(new_window_reference_point_tab)\n', (71643, 71675), True, 'import tkinter as tk\n'), ((71821, 71877), 'tkinter.Frame', 'tk.Frame', (['mark_reference_point_window'], {'bd': '(0)', 'relief': 'FLAT'}), '(mark_reference_point_window, bd=0, relief=FLAT)\n', (71829, 71877), True, 'import tkinter as tk\n'), ((71921, 71964), 'tkinter.Canvas', 'Canvas', (['mark_reference_point_over_all_frame'], {}), '(mark_reference_point_over_all_frame)\n', (71927, 
71964), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((72004, 72126), 'tkinter.Scrollbar', 'Scrollbar', (['mark_reference_point_over_all_frame'], {'orient': 'HORIZONTAL', 'command': 'mark_reference_point_over_all_canvas.xview'}), '(mark_reference_point_over_all_frame, orient=HORIZONTAL, command=\n mark_reference_point_over_all_canvas.xview)\n', (72013, 72126), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((72160, 72263), 'tkinter.Scrollbar', 'Scrollbar', (['mark_reference_point_over_all_frame'], {'command': 'mark_reference_point_over_all_canvas.yview'}), '(mark_reference_point_over_all_frame, command=\n mark_reference_point_over_all_canvas.yview)\n', (72169, 72263), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((72300, 72347), 'tkinter.ttk.Frame', 'ttk.Frame', (['mark_reference_point_over_all_canvas'], {}), '(mark_reference_point_over_all_canvas)\n', (72309, 72347), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((73732, 73776), 'tkinter.Canvas', 'tk.Canvas', (['mark_reference_point_scroll_frame'], {}), '(mark_reference_point_scroll_frame)\n', (73741, 73776), True, 'import tkinter as tk\n'), ((75638, 75667), 'PIL.ImageTk.PhotoImage', 
'ImageTk.PhotoImage', ([], {'image': 'img'}), '(image=img)\n', (75656, 75667), False, 'from PIL import Image, ImageTk\n'), ((75690, 75706), 'tkinter.Toplevel', 'tk.Toplevel', (['tab'], {}), '(tab)\n', (75701, 75706), True, 'import tkinter as tk\n'), ((75816, 75860), 'tkinter.Frame', 'tk.Frame', (['mark_ROI_window'], {'bd': '(0)', 'relief': 'FLAT'}), '(mark_ROI_window, bd=0, relief=FLAT)\n', (75824, 75860), True, 'import tkinter as tk\n'), ((75892, 75923), 'tkinter.Canvas', 'Canvas', (['mark_ROI_over_all_frame'], {}), '(mark_ROI_over_all_frame)\n', (75898, 75923), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((75951, 76049), 'tkinter.Scrollbar', 'Scrollbar', (['mark_ROI_over_all_frame'], {'orient': 'HORIZONTAL', 'command': 'mark_ROI_over_all_canvas.xview'}), '(mark_ROI_over_all_frame, orient=HORIZONTAL, command=\n mark_ROI_over_all_canvas.xview)\n', (75960, 76049), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((76071, 76145), 'tkinter.Scrollbar', 'Scrollbar', (['mark_ROI_over_all_frame'], {'command': 'mark_ROI_over_all_canvas.yview'}), '(mark_ROI_over_all_frame, command=mark_ROI_over_all_canvas.yview)\n', (76080, 76145), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((76175, 76210), 'tkinter.ttk.Frame', 'ttk.Frame', (['mark_ROI_over_all_canvas'], {}), '(mark_ROI_over_all_canvas)\n', (76184, 76210), False, 'from tkinter import filedialog, INSERT, 
DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((77343, 77375), 'tkinter.Canvas', 'tk.Canvas', (['mark_ROI_scroll_frame'], {}), '(mark_ROI_scroll_frame)\n', (77352, 77375), True, 'import tkinter as tk\n'), ((80666, 80694), 'tkinter.filedialog.askopenfilename', 'filedialog.askopenfilename', ([], {}), '()\n', (80692, 80694), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((117283, 117308), 'tkinter.Toplevel', 'tk.Toplevel', (['Globals.tab5'], {}), '(Globals.tab5)\n', (117294, 117308), True, 'import tkinter as tk\n'), ((117317, 117358), 'Globals.profiles_showPlanes_image.width', 'Globals.profiles_showPlanes_image.width', ([], {}), '()\n', (117356, 117358), False, 'import Globals\n'), ((117367, 117409), 'Globals.profiles_showPlanes_image.height', 'Globals.profiles_showPlanes_image.height', ([], {}), '()\n', (117407, 117409), False, 'import Globals\n'), ((117496, 117517), 'tkinter.Canvas', 'tk.Canvas', (['new_window'], {}), '(new_window)\n', (117505, 117517), True, 'import tkinter as tk\n'), ((2369, 2384), 'numpy.round', 'np.round', (['iso_1'], {}), '(iso_1)\n', (2377, 2384), True, 'import numpy as np\n'), ((2411, 2426), 'numpy.round', 'np.round', (['iso_2'], {}), '(iso_2)\n', (2419, 2426), True, 'import numpy as np\n'), ((2453, 2468), 'numpy.round', 'np.round', (['iso_3'], {}), '(iso_3)\n', (2461, 2468), True, 'import numpy as np\n'), ((3342, 3359), 'numpy.round', 'np.round', (['lateral'], {}), '(lateral)\n', (3350, 3359), True, 'import numpy as np\n'), ((3382, 3400), 'numpy.round', 'np.round', (['vertical'], {}), '(vertical)\n', (3390, 3400), True, 'import numpy as np\n'), ((3421, 3437), 'numpy.round', 
'np.round', (['longit'], {}), '(longit)\n', (3429, 3437), True, 'import numpy as np\n'), ((3509, 3560), 'numpy.round', 'np.round', (['Globals.DVH_doseplan_lateral_displacement'], {}), '(Globals.DVH_doseplan_lateral_displacement)\n', (3517, 3560), True, 'import numpy as np\n'), ((3605, 3657), 'numpy.round', 'np.round', (['Globals.DVH_doseplan_vertical_displacement'], {}), '(Globals.DVH_doseplan_vertical_displacement)\n', (3613, 3657), True, 'import numpy as np\n'), ((3706, 3762), 'numpy.round', 'np.round', (['Globals.DVH_doseplan_longitudianl_displacement'], {}), '(Globals.DVH_doseplan_longitudianl_displacement)\n', (3714, 3762), True, 'import numpy as np\n'), ((18578, 18732), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Reference point is outside of dosematrix\n (Code: first dimension, number of frames in dosematrix)"""'], {}), '(\'Error\',\n """Reference point is outside of dosematrix\n (Code: first dimension, number of frames in dosematrix)"""\n )\n', (18598, 18732), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((18823, 18966), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Reference point is outside of dosematrix\n (Code: second dimension, rows in dosematrix)"""'], {}), '(\'Error\',\n """Reference point is outside of dosematrix\n (Code: second dimension, rows in dosematrix)"""\n )\n', (18843, 18966), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((19057, 19202), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Reference point is outside of dosematrix\n (Code: 
third dimension, columns in dosematrix)"""'], {}), '(\'Error\',\n """Reference point is outside of dosematrix\n (Code: third dimension, columns in dosematrix)"""\n )\n', (19077, 19202), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((20318, 20423), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Fatal Error"""', '"""Fatal error: marked ROI is out of range in doseplan. Try again"""'], {}), "('Fatal Error',\n 'Fatal error: marked ROI is out of range in doseplan. Try again')\n", (20338, 20423), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((20642, 20747), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Fatal Error"""', '"""Fatal error: marked ROI is out of range in doseplan. Try again"""'], {}), "('Fatal Error',\n 'Fatal error: marked ROI is out of range in doseplan. Try again')\n", (20662, 20747), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((21009, 21114), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Fatal Error"""', '"""Fatal error: marked ROI is out of range in doseplan. Try again"""'], {}), "('Fatal Error',\n 'Fatal error: marked ROI is out of range in doseplan. 
Try again')\n", (21029, 21114), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((21541, 21646), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Fatal Error"""', '"""Fatal error: marked ROI is out of range in doseplan. Try again"""'], {}), "('Fatal Error',\n 'Fatal error: marked ROI is out of range in doseplan. Try again')\n", (21561, 21646), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((22087, 22192), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Fatal Error"""', '"""Fatal error: marked ROI is out of range in doseplan. Try again"""'], {}), "('Fatal Error',\n 'Fatal error: marked ROI is out of range in doseplan. Try again')\n", (22107, 22192), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((22261, 22366), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Fatal Error"""', '"""Fatal error: marked ROI is out of range in doseplan. Try again"""'], {}), "('Fatal Error',\n 'Fatal error: marked ROI is out of range in doseplan. 
Try again')\n", (22281, 22366), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((22840, 22945), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Fatal Error"""', '"""Fatal error: marked ROI is out of range in doseplan. Try again"""'], {}), "('Fatal Error',\n 'Fatal error: marked ROI is out of range in doseplan. Try again')\n", (22860, 22945), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((23232, 23337), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Fatal Error"""', '"""Fatal error: marked ROI is out of range in doseplan. Try again"""'], {}), "('Fatal Error',\n 'Fatal error: marked ROI is out of range in doseplan. 
Try again')\n", (23252, 23337), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((24201, 24361), 'tkinter.messagebox.showinfo', 'messagebox.showinfo', (['"""ROI info"""', '("""The ROI marked on the film did not fit with the size of the doseplan and had to be cut.\n"""\n + ROI_info)'], {}), '(\'ROI info\', \n """The ROI marked on the film did not fit with the size of the doseplan and had to be cut.\n"""\n + ROI_info)\n', (24220, 24361), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((25233, 25244), 'numpy.max', 'np.max', (['img'], {}), '(img)\n', (25239, 25244), True, 'import numpy as np\n'), ((25524, 25563), 'tkinter.Canvas', 'tk.Canvas', (['Globals.DVH_film_panedwindow'], {}), '(Globals.DVH_film_panedwindow)\n', (25533, 25563), True, 'import tkinter as tk\n'), ((26119, 26145), 'tkinter.Canvas', 'tk.Canvas', (['doseplan_canvas'], {}), '(doseplan_canvas)\n', (26128, 26145), True, 'import tkinter as tk\n'), ((26154, 26230), 'Globals.DVH_doseplan_write_image.grid', 'Globals.DVH_doseplan_write_image.grid', ([], {'row': '(0)', 'column': '(1)', 'sticky': '(N + S + W + E)'}), '(row=0, column=1, sticky=N + S + W + E)\n', (26191, 26230), False, 'import Globals\n'), ((26231, 26347), 'Globals.DVH_doseplan_write_image.config', 'Globals.DVH_doseplan_write_image.config', ([], {'bg': '"""#ffffff"""', 'relief': 'FLAT', 'highlightthickness': '(0)', 'width': 'wid', 'height': 'heig'}), "(bg='#ffffff', relief=FLAT,\n highlightthickness=0, width=wid, height=heig)\n", (26270, 26347), False, 'import Globals\n'), ((26382, 26408), 'tkinter.Canvas', 'tk.Canvas', 
(['doseplan_canvas'], {}), '(doseplan_canvas)\n', (26391, 26408), True, 'import tkinter as tk\n'), ((26773, 26818), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', ([], {'image': 'scaled_image_visual'}), '(image=scaled_image_visual)\n', (26791, 26818), False, 'from PIL import Image, ImageTk\n'), ((26983, 27079), 'Globals.DVH_doseplan_write_image.create_image', 'Globals.DVH_doseplan_write_image.create_image', (['(0)', '(0)'], {'image': 'scaled_image_visual', 'anchor': '"""nw"""'}), "(0, 0, image=\n scaled_image_visual, anchor='nw')\n", (27028, 27079), False, 'import Globals\n'), ((27492, 27544), 'Globals.DVH_doseplan_dataset_ROI_several.append', 'Globals.DVH_doseplan_dataset_ROI_several.append', (['img'], {}), '(img)\n', (27539, 27544), False, 'import Globals\n'), ((28856, 28871), 'numpy.round', 'np.round', (['iso_1'], {}), '(iso_1)\n', (28864, 28871), True, 'import numpy as np\n'), ((28941, 28956), 'numpy.round', 'np.round', (['iso_2'], {}), '(iso_2)\n', (28949, 28956), True, 'import numpy as np\n'), ((29026, 29041), 'numpy.round', 'np.round', (['iso_3'], {}), '(iso_3)\n', (29034, 29041), True, 'import numpy as np\n'), ((42210, 42315), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Fatal Error"""', '"""Fatal error: marked ROI is out of range in doseplan. Try again"""'], {}), "('Fatal Error',\n 'Fatal error: marked ROI is out of range in doseplan. Try again')\n", (42230, 42315), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((42534, 42639), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Fatal Error"""', '"""Fatal error: marked ROI is out of range in doseplan. Try again"""'], {}), "('Fatal Error',\n 'Fatal error: marked ROI is out of range in doseplan. 
Try again')\n", (42554, 42639), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((42901, 43006), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Fatal Error"""', '"""Fatal error: marked ROI is out of range in doseplan. Try again"""'], {}), "('Fatal Error',\n 'Fatal error: marked ROI is out of range in doseplan. Try again')\n", (42921, 43006), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((43433, 43538), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Fatal Error"""', '"""Fatal error: marked ROI is out of range in doseplan. Try again"""'], {}), "('Fatal Error',\n 'Fatal error: marked ROI is out of range in doseplan. Try again')\n", (43453, 43538), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((43979, 44084), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Fatal Error"""', '"""Fatal error: marked ROI is out of range in doseplan. Try again"""'], {}), "('Fatal Error',\n 'Fatal error: marked ROI is out of range in doseplan. 
Try again')\n", (43999, 44084), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((44153, 44258), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Fatal Error"""', '"""Fatal error: marked ROI is out of range in doseplan. Try again"""'], {}), "('Fatal Error',\n 'Fatal error: marked ROI is out of range in doseplan. Try again')\n", (44173, 44258), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((44732, 44837), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Fatal Error"""', '"""Fatal error: marked ROI is out of range in doseplan. Try again"""'], {}), "('Fatal Error',\n 'Fatal error: marked ROI is out of range in doseplan. Try again')\n", (44752, 44837), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((45124, 45229), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Fatal Error"""', '"""Fatal error: marked ROI is out of range in doseplan. Try again"""'], {}), "('Fatal Error',\n 'Fatal error: marked ROI is out of range in doseplan. 
Try again')\n", (45144, 45229), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((46093, 46253), 'tkinter.messagebox.showinfo', 'messagebox.showinfo', (['"""ROI info"""', '("""The ROI marked on the film did not fit with the size of the doseplan and had to be cut.\n"""\n + ROI_info)'], {}), '(\'ROI info\', \n """The ROI marked on the film did not fit with the size of the doseplan and had to be cut.\n"""\n + ROI_info)\n', (46112, 46253), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((47174, 47185), 'numpy.max', 'np.max', (['img'], {}), '(img)\n', (47180, 47185), True, 'import numpy as np\n'), ((47521, 47560), 'tkinter.Canvas', 'tk.Canvas', (['Globals.DVH_film_panedwindow'], {}), '(Globals.DVH_film_panedwindow)\n', (47530, 47560), True, 'import tkinter as tk\n'), ((48116, 48142), 'tkinter.Canvas', 'tk.Canvas', (['doseplan_canvas'], {}), '(doseplan_canvas)\n', (48125, 48142), True, 'import tkinter as tk\n'), ((48151, 48227), 'Globals.DVH_doseplan_write_image.grid', 'Globals.DVH_doseplan_write_image.grid', ([], {'row': '(0)', 'column': '(1)', 'sticky': '(N + S + W + E)'}), '(row=0, column=1, sticky=N + S + W + E)\n', (48188, 48227), False, 'import Globals\n'), ((48228, 48344), 'Globals.DVH_doseplan_write_image.config', 'Globals.DVH_doseplan_write_image.config', ([], {'bg': '"""#ffffff"""', 'relief': 'FLAT', 'highlightthickness': '(0)', 'width': 'wid', 'height': 'heig'}), "(bg='#ffffff', relief=FLAT,\n highlightthickness=0, width=wid, height=heig)\n", (48267, 48344), False, 'import Globals\n'), ((48379, 48405), 'tkinter.Canvas', 'tk.Canvas', 
(['doseplan_canvas'], {}), '(doseplan_canvas)\n', (48388, 48405), True, 'import tkinter as tk\n'), ((48770, 48815), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', ([], {'image': 'scaled_image_visual'}), '(image=scaled_image_visual)\n', (48788, 48815), False, 'from PIL import Image, ImageTk\n'), ((48980, 49076), 'Globals.DVH_doseplan_write_image.create_image', 'Globals.DVH_doseplan_write_image.create_image', (['(0)', '(0)'], {'image': 'scaled_image_visual', 'anchor': '"""nw"""'}), "(0, 0, image=\n scaled_image_visual, anchor='nw')\n", (49025, 49076), False, 'import Globals\n'), ((49485, 49537), 'Globals.DVH_doseplan_dataset_ROI_several.append', 'Globals.DVH_doseplan_dataset_ROI_several.append', (['img'], {}), '(img)\n', (49532, 49537), False, 'import Globals\n'), ((50738, 50848), 'tkinter.messagebox.askokcancel', 'messagebox.askokcancel', (['"""Dose summation"""', '"""You did not upload the full doseplan. Do you want to continue?"""'], {}), "('Dose summation',\n 'You did not upload the full doseplan. 
Do you want to continue?')\n", (50760, 50848), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((51249, 51342), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""The resolution in doseplan must be 1x1x1, 2x2x2 or 3x3x3"""'], {}), "('Error',\n 'The resolution in doseplan must be 1x1x1, 2x2x2 or 3x3x3')\n", (51269, 51342), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((51828, 51979), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""The Image Orientation (Patient) must be parallel to one of the main axis and perpendicular to the two others."""'], {}), "('Error',\n 'The Image Orientation (Patient) must be parallel to one of the main axis and perpendicular to the two others.'\n )\n", (51848, 51979), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((53106, 53163), 'Globals.DVH_upload_button_doseplan.config', 'Globals.DVH_upload_button_doseplan.config', ([], {'state': 'DISABLED'}), '(state=DISABLED)\n', (53147, 53163), False, 'import Globals\n'), ((53257, 53320), 'tkinter.Text', 'tk.Text', (['Globals.DVH_doseplans_scroll_frame'], {'width': '(30)', 'height': '(1)'}), '(Globals.DVH_doseplans_scroll_frame, width=30, height=1)\n', (53264, 53320), True, 'import tkinter as tk\n'), ((53617, 53728), 'Globals.DVH_doseplans_scroll_frame.grid_columnconfigure', 
'Globals.DVH_doseplans_scroll_frame.grid_columnconfigure', (['Globals.DVH_doseplans_grid_config_count'], {'weight': '(0)'}), '(Globals.\n DVH_doseplans_grid_config_count, weight=0)\n', (53672, 53728), False, 'import Globals\n'), ((53732, 53840), 'Globals.DVH_doseplans_scroll_frame.grid_rowconfigure', 'Globals.DVH_doseplans_scroll_frame.grid_rowconfigure', (['Globals.DVH_doseplans_grid_config_count'], {'weight': '(0)'}), '(Globals.\n DVH_doseplans_grid_config_count, weight=0)\n', (53784, 53840), False, 'import Globals\n'), ((53844, 53900), 'Globals.DVH_doseplans_filenames.append', 'Globals.DVH_doseplans_filenames.append', (['textbox_filename'], {}), '(textbox_filename)\n', (53882, 53900), False, 'import Globals\n'), ((53980, 54042), 'tkinter.Text', 'tk.Text', (['Globals.DVH_doseplans_scroll_frame'], {'width': '(6)', 'height': '(1)'}), '(Globals.DVH_doseplans_scroll_frame, width=6, height=1)\n', (53987, 54042), True, 'import tkinter as tk\n'), ((54340, 54451), 'Globals.DVH_doseplans_scroll_frame.grid_columnconfigure', 'Globals.DVH_doseplans_scroll_frame.grid_columnconfigure', (['Globals.DVH_doseplans_grid_config_count'], {'weight': '(0)'}), '(Globals.\n DVH_doseplans_grid_config_count, weight=0)\n', (54395, 54451), False, 'import Globals\n'), ((54455, 54563), 'Globals.DVH_doseplans_scroll_frame.grid_rowconfigure', 'Globals.DVH_doseplans_scroll_frame.grid_rowconfigure', (['Globals.DVH_doseplans_grid_config_count'], {'weight': '(0)'}), '(Globals.\n DVH_doseplans_grid_config_count, weight=0)\n', (54507, 54563), False, 'import Globals\n'), ((54567, 54623), 'Globals.DVH_doseplans_factor_text.append', 'Globals.DVH_doseplans_factor_text.append', (['textbox_factor'], {}), '(textbox_factor)\n', (54607, 54623), False, 'import Globals\n'), ((54709, 54752), 'tkinter.Text', 'tk.Text', (['Globals.DVH_doseplans_scroll_frame'], {}), '(Globals.DVH_doseplans_scroll_frame)\n', (54716, 54752), True, 'import tkinter as tk\n'), ((55045, 55156), 
'Globals.DVH_doseplans_scroll_frame.grid_columnconfigure', 'Globals.DVH_doseplans_scroll_frame.grid_columnconfigure', (['Globals.DVH_doseplans_grid_config_count'], {'weight': '(0)'}), '(Globals.\n DVH_doseplans_grid_config_count, weight=0)\n', (55100, 55156), False, 'import Globals\n'), ((55160, 55268), 'Globals.DVH_doseplans_scroll_frame.grid_rowconfigure', 'Globals.DVH_doseplans_scroll_frame.grid_rowconfigure', (['Globals.DVH_doseplans_grid_config_count'], {'weight': '(0)'}), '(Globals.\n DVH_doseplans_grid_config_count, weight=0)\n', (55212, 55268), False, 'import Globals\n'), ((55272, 55335), 'Globals.DVH_doseplans_factor_input.append', 'Globals.DVH_doseplans_factor_input.append', (['textbox_factor_input'], {}), '(textbox_factor_input)\n', (55313, 55335), False, 'import Globals\n'), ((59396, 59411), 'numpy.max', 'np.max', (['img_ROI'], {}), '(img_ROI)\n', (59402, 59411), True, 'import numpy as np\n'), ((59683, 59722), 'tkinter.Canvas', 'tk.Canvas', (['Globals.DVH_film_panedwindow'], {}), '(Globals.DVH_film_panedwindow)\n', (59692, 59722), True, 'import tkinter as tk\n'), ((60278, 60304), 'tkinter.Canvas', 'tk.Canvas', (['doseplan_canvas'], {}), '(doseplan_canvas)\n', (60287, 60304), True, 'import tkinter as tk\n'), ((60313, 60389), 'Globals.DVH_doseplan_write_image.grid', 'Globals.DVH_doseplan_write_image.grid', ([], {'row': '(0)', 'column': '(1)', 'sticky': '(N + S + W + E)'}), '(row=0, column=1, sticky=N + S + W + E)\n', (60350, 60389), False, 'import Globals\n'), ((60390, 60506), 'Globals.DVH_doseplan_write_image.config', 'Globals.DVH_doseplan_write_image.config', ([], {'bg': '"""#ffffff"""', 'relief': 'FLAT', 'highlightthickness': '(0)', 'width': 'wid', 'height': 'heig'}), "(bg='#ffffff', relief=FLAT,\n highlightthickness=0, width=wid, height=heig)\n", (60429, 60506), False, 'import Globals\n'), ((60541, 60567), 'tkinter.Canvas', 'tk.Canvas', (['doseplan_canvas'], {}), '(doseplan_canvas)\n', (60550, 60567), True, 'import tkinter as tk\n'), ((60932, 60977), 
'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', ([], {'image': 'scaled_image_visual'}), '(image=scaled_image_visual)\n', (60950, 60977), False, 'from PIL import Image, ImageTk\n'), ((61142, 61238), 'Globals.DVH_doseplan_write_image.create_image', 'Globals.DVH_doseplan_write_image.create_image', (['(0)', '(0)'], {'image': 'scaled_image_visual', 'anchor': '"""nw"""'}), "(0, 0, image=\n scaled_image_visual, anchor='nw')\n", (61187, 61238), False, 'import Globals\n'), ((61554, 61611), 'Globals.DVH_upload_button_doseplan.config', 'Globals.DVH_upload_button_doseplan.config', ([], {'state': 'DISABLED'}), '(state=DISABLED)\n', (61595, 61611), False, 'import Globals\n'), ((65887, 66032), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Fidora does only support patient positions: \n HFS, HFP, HFDR, HFDL, FFP, FFS, FFDR, FFDL"""'], {}), '(\'Error\',\n """Fidora does only support patient positions: \n HFS, HFP, HFDR, HFDL, FFP, FFS, FFDR, FFDL"""\n )\n', (65907, 66032), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((75001, 75051), 'Globals.DVH_mark_reference_point_oval.append', 'Globals.DVH_mark_reference_point_oval.append', (['oval'], {}), '(oval)\n', (75045, 75051), False, 'import Globals\n'), ((78799, 78890), 'Globals.DVH_ROI_coords.append', 'Globals.DVH_ROI_coords.append', (['[rectangle_top_corner[0][0], rectangle_top_corner[0][1]]'], {}), '([rectangle_top_corner[0][0],\n rectangle_top_corner[0][1]])\n', (78828, 78890), False, 'import Globals\n'), ((78895, 78989), 'Globals.DVH_ROI_coords.append', 'Globals.DVH_ROI_coords.append', (['[rectangle_bottom_corner[0][0], rectangle_top_corner[0][1]]'], {}), '([rectangle_bottom_corner[0][0],\n rectangle_top_corner[0][1]])\n', (78924, 78989), False, 'import Globals\n'), ((78994, 79088), 
'Globals.DVH_ROI_coords.append', 'Globals.DVH_ROI_coords.append', (['[rectangle_top_corner[0][0], rectangle_bottom_corner[0][1]]'], {}), '([rectangle_top_corner[0][0],\n rectangle_bottom_corner[0][1]])\n', (79023, 79088), False, 'import Globals\n'), ((79093, 79190), 'Globals.DVH_ROI_coords.append', 'Globals.DVH_ROI_coords.append', (['[rectangle_bottom_corner[0][0], rectangle_bottom_corner[0][1]]'], {}), '([rectangle_bottom_corner[0][0],\n rectangle_bottom_corner[0][1]])\n', (79122, 79190), False, 'import Globals\n'), ((79427, 79470), 'Globals.DVH_mark_ROI_rectangle.append', 'Globals.DVH_mark_ROI_rectangle.append', (['rect'], {}), '(rect)\n', (79464, 79470), False, 'import Globals\n'), ((80132, 80166), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (80164, 80166), False, 'import Globals\n'), ((80184, 80282), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Missing parameter"""', '"""Film orientation missing \n (Code: UploadFilm)"""'], {}), '(\'Missing parameter\',\n """Film orientation missing \n (Code: UploadFilm)""")\n', (80204, 80282), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((80298, 80348), 'Globals.DVH_film_factor_input.get', 'Globals.DVH_film_factor_input.get', (['"""1.0"""', '"""end-1c"""'], {}), "('1.0', 'end-1c')\n", (80331, 80348), False, 'import Globals\n'), ((80788, 80799), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (80797, 80799), False, 'import os\n'), ((80817, 80838), 'os.path.dirname', 'os.path.dirname', (['file'], {}), '(file)\n', (80832, 80838), False, 'import os\n'), ((80847, 80863), 'os.chdir', 'os.chdir', (['parent'], {}), '(parent)\n', (80855, 80863), False, 'import os\n'), ((80878, 80894), 'PIL.Image.open', 'Image.open', (['file'], {}), '(file)\n', (80888, 80894), False, 'from 
PIL import Image, ImageTk\n'), ((81060, 81085), 'cv2.medianBlur', 'cv2.medianBlur', (['cv2Img', '(5)'], {}), '(cv2Img, 5)\n', (81074, 81085), False, 'import cv2\n'), ((82056, 82080), 'os.chdir', 'os.chdir', (['current_folder'], {}), '(current_folder)\n', (82064, 82080), False, 'import os\n'), ((82219, 82278), 'Globals.DVH_film_orientation_menu.configure', 'Globals.DVH_film_orientation_menu.configure', ([], {'state': 'DISABLED'}), '(state=DISABLED)\n', (82262, 82278), False, 'import Globals\n'), ((82287, 82339), 'Globals.DVH_film_factor_input.config', 'Globals.DVH_film_factor_input.config', ([], {'state': 'DISABLED'}), '(state=DISABLED)\n', (82323, 82339), False, 'import Globals\n'), ((82405, 82430), 'tkinter.Toplevel', 'tk.Toplevel', (['Globals.tab5'], {}), '(Globals.tab5)\n', (82416, 82430), True, 'import tkinter as tk\n'), ((82548, 82587), 'tkinter.Frame', 'tk.Frame', (['new_window'], {'bd': '(0)', 'relief': 'FLAT'}), '(new_window, bd=0, relief=FLAT)\n', (82556, 82587), True, 'import tkinter as tk\n'), ((82625, 82658), 'tkinter.Canvas', 'Canvas', (['new_window_over_all_frame'], {}), '(new_window_over_all_frame)\n', (82631, 82658), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((82692, 82794), 'tkinter.Scrollbar', 'Scrollbar', (['new_window_over_all_frame'], {'orient': 'HORIZONTAL', 'command': 'new_window_over_all_canvas.xview'}), '(new_window_over_all_frame, orient=HORIZONTAL, command=\n new_window_over_all_canvas.xview)\n', (82701, 82794), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((82822, 82900), 'tkinter.Scrollbar', 'Scrollbar', 
(['new_window_over_all_frame'], {'command': 'new_window_over_all_canvas.yview'}), '(new_window_over_all_frame, command=new_window_over_all_canvas.yview)\n', (82831, 82900), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((82936, 82973), 'tkinter.ttk.Frame', 'ttk.Frame', (['new_window_over_all_canvas'], {}), '(new_window_over_all_canvas)\n', (82945, 82973), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((84212, 84265), 'tkinter.Text', 'tk.Text', (['new_window_scroll_frame'], {'height': '(3)', 'width': '(120)'}), '(new_window_scroll_frame, height=3, width=120)\n', (84219, 84265), True, 'import tkinter as tk\n'), ((85251, 85288), 'tkinter.ttk.Notebook', 'ttk.Notebook', (['new_window_scroll_frame'], {}), '(new_window_scroll_frame)\n', (85263, 85288), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((85601, 85631), 'tkinter.ttk.Frame', 'ttk.Frame', (['new_window_notebook'], {}), '(new_window_notebook)\n', (85610, 85631), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((85749, 85779), 'tkinter.ttk.Frame', 'ttk.Frame', (['new_window_notebook'], {}), '(new_window_notebook)\n', (85758, 85779), False, 'from tkinter import filedialog, INSERT, DISABLED, 
messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((85902, 85932), 'tkinter.ttk.Frame', 'ttk.Frame', (['new_window_notebook'], {}), '(new_window_notebook)\n', (85911, 85932), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((86031, 86066), 'tkinter.Canvas', 'tk.Canvas', (['new_window_isocenter_tab'], {}), '(new_window_isocenter_tab)\n', (86040, 86066), True, 'import tkinter as tk\n'), ((86657, 86698), 'tkinter.Canvas', 'tk.Canvas', (['new_window_reference_point_tab'], {}), '(new_window_reference_point_tab)\n', (86666, 86698), True, 'import tkinter as tk\n'), ((87376, 87429), 'tkinter.Text', 'tk.Text', (['new_window_isocenter_tab'], {'width': '(55)', 'height': '(7)'}), '(new_window_isocenter_tab, width=55, height=7)\n', (87383, 87429), True, 'import tkinter as tk\n'), ((88334, 88393), 'tkinter.Text', 'tk.Text', (['new_window_reference_point_tab'], {'width': '(55)', 'height': '(5)'}), '(new_window_reference_point_tab, width=55, height=5)\n', (88341, 88393), True, 'import tkinter as tk\n'), ((89215, 89249), 'tkinter.Frame', 'tk.Frame', (['new_window_isocenter_tab'], {}), '(new_window_isocenter_tab)\n', (89223, 89249), True, 'import tkinter as tk\n'), ((90132, 90172), 'tkinter.Frame', 'tk.Frame', (['new_window_reference_point_tab'], {}), '(new_window_reference_point_tab)\n', (90140, 90172), True, 'import tkinter as tk\n'), ((91097, 91156), 'tkinter.Text', 'tk.Text', (['new_window_reference_point_tab'], {'width': '(55)', 'height': '(3)'}), '(new_window_reference_point_tab, width=55, height=3)\n', (91104, 91156), True, 'import tkinter as tk\n'), ((91835, 91894), 'tkinter.Text', 'tk.Text', 
(['new_window_reference_point_tab'], {'width': '(12)', 'height': '(1)'}), '(new_window_reference_point_tab, width=12, height=1)\n', (91842, 91894), True, 'import tkinter as tk\n'), ((92336, 92394), 'tkinter.Text', 'tk.Text', (['new_window_reference_point_tab'], {'width': '(5)', 'height': '(1)'}), '(new_window_reference_point_tab, width=5, height=1)\n', (92343, 92394), True, 'import tkinter as tk\n'), ((92403, 92461), 'Globals.DVH_input_lateral_displacement.insert', 'Globals.DVH_input_lateral_displacement.insert', (['INSERT', '""" """'], {}), "(INSERT, ' ')\n", (92448, 92461), False, 'import Globals\n'), ((92470, 92592), 'Globals.DVH_input_lateral_displacement.config', 'Globals.DVH_input_lateral_displacement.config', ([], {'bg': '"""#E5f9ff"""', 'relief': 'GROOVE', 'bd': '(2)', 'state': 'NORMAL', 'font': "('calibri', '11')"}), "(bg='#E5f9ff', relief=GROOVE,\n bd=2, state=NORMAL, font=('calibri', '11'))\n", (92515, 92592), False, 'import Globals\n'), ((92597, 92690), 'Globals.DVH_input_lateral_displacement.grid', 'Globals.DVH_input_lateral_displacement.grid', ([], {'row': '(5)', 'column': '(3)', 'padx': '(0, 285)', 'pady': '(35, 0)'}), '(row=5, column=3, padx=(0, 285),\n pady=(35, 0))\n', (92640, 92690), False, 'import Globals\n'), ((92859, 92918), 'tkinter.Text', 'tk.Text', (['new_window_reference_point_tab'], {'width': '(12)', 'height': '(1)'}), '(new_window_reference_point_tab, width=12, height=1)\n', (92866, 92918), True, 'import tkinter as tk\n'), ((93363, 93421), 'tkinter.Text', 'tk.Text', (['new_window_reference_point_tab'], {'width': '(4)', 'height': '(1)'}), '(new_window_reference_point_tab, width=4, height=1)\n', (93370, 93421), True, 'import tkinter as tk\n'), ((93430, 93489), 'Globals.DVH_input_vertical_displacement.insert', 'Globals.DVH_input_vertical_displacement.insert', (['INSERT', '""" """'], {}), "(INSERT, ' ')\n", (93476, 93489), False, 'import Globals\n'), ((93498, 93621), 'Globals.DVH_input_vertical_displacement.config', 
'Globals.DVH_input_vertical_displacement.config', ([], {'bg': '"""#E5f9ff"""', 'relief': 'GROOVE', 'bd': '(2)', 'state': 'NORMAL', 'font': "('calibri', '11')"}), "(bg='#E5f9ff', relief=GROOVE,\n bd=2, state=NORMAL, font=('calibri', '11'))\n", (93544, 93621), False, 'import Globals\n'), ((93626, 93719), 'Globals.DVH_input_vertical_displacement.grid', 'Globals.DVH_input_vertical_displacement.grid', ([], {'row': '(5)', 'column': '(3)', 'padx': '(0, 25)', 'pady': '(35, 0)'}), '(row=5, column=3, padx=(0, 25),\n pady=(35, 0))\n', (93670, 93719), False, 'import Globals\n'), ((93887, 93946), 'tkinter.Text', 'tk.Text', (['new_window_reference_point_tab'], {'width': '(12)', 'height': '(1)'}), '(new_window_reference_point_tab, width=12, height=1)\n', (93894, 93946), True, 'import tkinter as tk\n'), ((94389, 94447), 'tkinter.Text', 'tk.Text', (['new_window_reference_point_tab'], {'width': '(5)', 'height': '(1)'}), '(new_window_reference_point_tab, width=5, height=1)\n', (94396, 94447), True, 'import tkinter as tk\n'), ((94456, 94519), 'Globals.DVH_input_longitudinal_displacement.insert', 'Globals.DVH_input_longitudinal_displacement.insert', (['INSERT', '""" """'], {}), "(INSERT, ' ')\n", (94506, 94519), False, 'import Globals\n'), ((94528, 94656), 'Globals.DVH_input_longitudinal_displacement.config', 'Globals.DVH_input_longitudinal_displacement.config', ([], {'bg': '"""#E5f9ff"""', 'relief': 'GROOVE', 'bd': '(2)', 'state': 'NORMAL', 'font': "('calibri', '11')"}), "(bg='#E5f9ff', relief=\n GROOVE, bd=2, state=NORMAL, font=('calibri', '11'))\n", (94578, 94656), False, 'import Globals\n'), ((94660, 94758), 'Globals.DVH_input_longitudinal_displacement.grid', 'Globals.DVH_input_longitudinal_displacement.grid', ([], {'row': '(5)', 'column': '(3)', 'padx': '(240, 0)', 'pady': '(35, 0)'}), '(row=5, column=3, padx=(240,\n 0), pady=(35, 0))\n', (94708, 94758), False, 'import Globals\n'), ((94938, 94991), 'tkinter.Text', 'tk.Text', (['new_window_isocenter_tab'], {'width': '(55)', 
'height': '(7)'}), '(new_window_isocenter_tab, width=55, height=7)\n', (94945, 94991), True, 'import tkinter as tk\n'), ((95830, 95889), 'tkinter.Text', 'tk.Text', (['new_window_reference_point_tab'], {'width': '(55)', 'height': '(5)'}), '(new_window_reference_point_tab, width=55, height=5)\n', (95837, 95889), True, 'import tkinter as tk\n'), ((96767, 96801), 'tkinter.Frame', 'tk.Frame', (['new_window_isocenter_tab'], {}), '(new_window_isocenter_tab)\n', (96775, 96801), True, 'import tkinter as tk\n'), ((97615, 97668), 'tkinter.Text', 'tk.Text', (['new_window_isocenter_tab'], {'width': '(25)', 'height': '(1)'}), '(new_window_isocenter_tab, width=25, height=1)\n', (97622, 97668), True, 'import tkinter as tk\n'), ((98081, 98133), 'tkinter.Text', 'tk.Text', (['new_window_isocenter_tab'], {'width': '(8)', 'height': '(1)'}), '(new_window_isocenter_tab, width=8, height=1)\n', (98088, 98133), True, 'import tkinter as tk\n'), ((98142, 98217), 'Globals.DVH_slice_offset.grid', 'Globals.DVH_slice_offset.grid', ([], {'row': '(9)', 'column': '(3)', 'padx': '(110, 10)', 'pady': '(0, 0)'}), '(row=9, column=3, padx=(110, 10), pady=(0, 0))\n', (98171, 98217), False, 'import Globals\n'), ((98224, 98268), 'Globals.DVH_slice_offset.insert', 'Globals.DVH_slice_offset.insert', (['INSERT', '""" """'], {}), "(INSERT, ' ')\n", (98255, 98268), False, 'import Globals\n'), ((98277, 98370), 'Globals.DVH_slice_offset.config', 'Globals.DVH_slice_offset.config', ([], {'state': 'NORMAL', 'font': "('calibri', '10')", 'bd': '(2)', 'bg': '"""#ffffff"""'}), "(state=NORMAL, font=('calibri', '10'), bd=2,\n bg='#ffffff')\n", (98308, 98370), False, 'import Globals\n'), ((98549, 98589), 'tkinter.Frame', 'tk.Frame', (['new_window_reference_point_tab'], {}), '(new_window_reference_point_tab)\n', (98557, 98589), True, 'import tkinter as tk\n'), ((115356, 115390), 'tkinter.Frame', 'tk.Frame', (['new_window_isocenter_tab'], {}), '(new_window_isocenter_tab)\n', (115364, 115390), True, 'import tkinter as tk\n'), 
((115905, 115957), 'Globals.DVH_done_button.pack', 'Globals.DVH_done_button.pack', ([], {'expand': '(True)', 'fill': 'BOTH'}), '(expand=True, fill=BOTH)\n', (115933, 115957), False, 'import Globals\n'), ((115966, 116092), 'Globals.DVH_done_button.config', 'Globals.DVH_done_button.config', ([], {'bg': '"""#ffffff"""', 'activebackground': '"""#ffffff"""', 'activeforeground': '"""#ffffff"""', 'highlightthickness': '(0)'}), "(bg='#ffffff', activebackground='#ffffff',\n activeforeground='#ffffff', highlightthickness=0)\n", (115996, 116092), False, 'import Globals\n'), ((116198, 116238), 'tkinter.Frame', 'tk.Frame', (['new_window_reference_point_tab'], {}), '(new_window_reference_point_tab)\n', (116206, 116238), True, 'import tkinter as tk\n'), ((116827, 116895), 'Globals.DVH_done_button_reference_point.pack', 'Globals.DVH_done_button_reference_point.pack', ([], {'expand': '(True)', 'fill': 'BOTH'}), '(expand=True, fill=BOTH)\n', (116871, 116895), False, 'import Globals\n'), ((116904, 117050), 'Globals.DVH_done_button_reference_point.config', 'Globals.DVH_done_button_reference_point.config', ([], {'bg': '"""#ffffff"""', 'activebackground': '"""#ffffff"""', 'activeforeground': '"""#ffffff"""', 'highlightthickness': '(0)'}), "(bg='#ffffff',\n activebackground='#ffffff', activeforeground='#ffffff',\n highlightthickness=0)\n", (116950, 117050), False, 'import Globals\n'), ((1482, 1603), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Could not read the vertical displacements\n (Code: displacements to integer)"""'], {}), '(\'Error\',\n """Could not read the vertical displacements\n (Code: displacements to integer)"""\n )\n', (1502, 1603), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((1691, 1811), 'tkinter.messagebox.showerror', 
'messagebox.showerror', (['"""Error"""', '"""Could not read the lateral displacements\n (Code: displacements to integer)"""'], {}), '(\'Error\',\n """Could not read the lateral displacements\n (Code: displacements to integer)"""\n )\n', (1711, 1811), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((1909, 2034), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Could not read the longitudinal displacements\n (Code: displacements to integer)"""'], {}), '(\'Error\',\n """Could not read the longitudinal displacements\n (Code: displacements to integer)"""\n )\n', (1929, 2034), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((3905, 3924), 'numpy.round', 'np.round', (['(iso_1 / 2)'], {}), '(iso_1 / 2)\n', (3913, 3924), True, 'import numpy as np\n'), ((3949, 3968), 'numpy.round', 'np.round', (['(iso_2 / 2)'], {}), '(iso_2 / 2)\n', (3957, 3968), True, 'import numpy as np\n'), ((3993, 4012), 'numpy.round', 'np.round', (['(iso_3 / 2)'], {}), '(iso_3 / 2)\n', (4001, 4012), True, 'import numpy as np\n'), ((4916, 4937), 'numpy.round', 'np.round', (['(lateral / 2)'], {}), '(lateral / 2)\n', (4924, 4937), True, 'import numpy as np\n'), ((4958, 4980), 'numpy.round', 'np.round', (['(vertical / 2)'], {}), '(vertical / 2)\n', (4966, 4980), True, 'import numpy as np\n'), ((4999, 5019), 'numpy.round', 'np.round', (['(longit / 2)'], {}), '(longit / 2)\n', (5007, 5019), True, 'import numpy as np\n'), ((5089, 5144), 'numpy.round', 'np.round', (['(Globals.DVH_doseplan_lateral_displacement / 2)'], {}), '(Globals.DVH_doseplan_lateral_displacement / 2)\n', 
(5097, 5144), True, 'import numpy as np\n'), ((5189, 5245), 'numpy.round', 'np.round', (['(Globals.DVH_doseplan_vertical_displacement / 2)'], {}), '(Globals.DVH_doseplan_vertical_displacement / 2)\n', (5197, 5245), True, 'import numpy as np\n'), ((5294, 5354), 'numpy.round', 'np.round', (['(Globals.DVH_doseplan_longitudianl_displacement / 2)'], {}), '(Globals.DVH_doseplan_longitudianl_displacement / 2)\n', (5302, 5354), True, 'import numpy as np\n'), ((5446, 5465), 'numpy.round', 'np.round', (['(iso_1 / 3)'], {}), '(iso_1 / 3)\n', (5454, 5465), True, 'import numpy as np\n'), ((5490, 5509), 'numpy.round', 'np.round', (['(iso_2 / 3)'], {}), '(iso_2 / 3)\n', (5498, 5509), True, 'import numpy as np\n'), ((5534, 5553), 'numpy.round', 'np.round', (['(iso_3 / 3)'], {}), '(iso_3 / 3)\n', (5542, 5553), True, 'import numpy as np\n'), ((6457, 6478), 'numpy.round', 'np.round', (['(lateral / 3)'], {}), '(lateral / 3)\n', (6465, 6478), True, 'import numpy as np\n'), ((6499, 6521), 'numpy.round', 'np.round', (['(vertical / 3)'], {}), '(vertical / 3)\n', (6507, 6521), True, 'import numpy as np\n'), ((6540, 6560), 'numpy.round', 'np.round', (['(longit / 3)'], {}), '(longit / 3)\n', (6548, 6560), True, 'import numpy as np\n'), ((6630, 6685), 'numpy.round', 'np.round', (['(Globals.DVH_doseplan_lateral_displacement / 3)'], {}), '(Globals.DVH_doseplan_lateral_displacement / 3)\n', (6638, 6685), True, 'import numpy as np\n'), ((6730, 6786), 'numpy.round', 'np.round', (['(Globals.DVH_doseplan_vertical_displacement / 3)'], {}), '(Globals.DVH_doseplan_vertical_displacement / 3)\n', (6738, 6786), True, 'import numpy as np\n'), ((6835, 6895), 'numpy.round', 'np.round', (['(Globals.DVH_doseplan_longitudianl_displacement / 3)'], {}), '(Globals.DVH_doseplan_longitudianl_displacement / 3)\n', (6843, 6895), True, 'import numpy as np\n'), ((10493, 10527), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (10525, 10527), False, 'import Globals\n'), ((10571, 
10630), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(0)', '(1)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 0, 1)\n', (10582, 10630), True, 'import numpy as np\n'), ((24937, 24996), 'cv2.resize', 'cv2.resize', (['img'], {'dsize': '(img.shape[1] * 5, img.shape[0] * 5)'}), '(img, dsize=(img.shape[1] * 5, img.shape[0] * 5))\n', (24947, 24996), False, 'import cv2\n'), ((29952, 29971), 'numpy.round', 'np.round', (['(iso_1 / 2)'], {}), '(iso_1 / 2)\n', (29960, 29971), True, 'import numpy as np\n'), ((30041, 30060), 'numpy.round', 'np.round', (['(iso_2 / 2)'], {}), '(iso_2 / 2)\n', (30049, 30060), True, 'import numpy as np\n'), ((30130, 30149), 'numpy.round', 'np.round', (['(iso_3 / 2)'], {}), '(iso_3 / 2)\n', (30138, 30149), True, 'import numpy as np\n'), ((31045, 31064), 'numpy.round', 'np.round', (['(iso_1 / 3)'], {}), '(iso_1 / 3)\n', (31053, 31064), True, 'import numpy as np\n'), ((31134, 31153), 'numpy.round', 'np.round', (['(iso_2 / 3)'], {}), '(iso_2 / 3)\n', (31142, 31153), True, 'import numpy as np\n'), ((31223, 31242), 'numpy.round', 'np.round', (['(iso_3 / 3)'], {}), '(iso_3 / 3)\n', (31231, 31242), True, 'import numpy as np\n'), ((32645, 32679), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (32677, 32679), False, 'import Globals\n'), ((32723, 32782), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(0)', '(1)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 0, 1)\n', (32734, 32782), True, 'import numpy as np\n'), ((40739, 40767), 'numpy.round', 'np.round', (['Globals.DVH_offset'], {}), '(Globals.DVH_offset)\n', (40747, 40767), True, 'import numpy as np\n'), ((46878, 46937), 'cv2.resize', 'cv2.resize', (['img'], {'dsize': '(img.shape[1] * 5, img.shape[0] * 5)'}), '(img, dsize=(img.shape[1] * 5, img.shape[0] * 5))\n', (46888, 46937), False, 'import cv2\n'), ((50238, 50300), 'tkinter.messagebox.showerror', 'messagebox.showerror', 
(['"""Error"""', '"""The file must be a *.dcm file"""'], {}), "('Error', 'The file must be a *.dcm file')\n", (50258, 50300), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((50535, 50672), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Could not upload the doseplan correctly. Try again or another file.\n (Code: dose summation)"""'], {}), '(\'Error\',\n """Could not upload the doseplan correctly. Try again or another file.\n (Code: dose summation)"""\n )\n', (50555, 50672), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((52147, 52254), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Resolution of the doseplans must be equal. \n(Code: UploadDoseplan)"""'], {}), '(\'Error\',\n """Resolution of the doseplans must be equal. \n(Code: UploadDoseplan)""")\n', (52167, 52254), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((52370, 52489), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Dose grid scaling of the doseplans must be equal. \n(Code: UploadDoseplan)"""'], {}), '(\'Error\',\n """Dose grid scaling of the doseplans must be equal. 
\n(Code: UploadDoseplan)"""\n )\n', (52390, 52489), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((53214, 53228), 'os.path.normpath', 'normpath', (['file'], {}), '(file)\n', (53222, 53228), False, 'from os.path import normpath, basename\n'), ((58410, 58470), 'tkinter.messagebox.showinfo', 'messagebox.showinfo', (['"""INFO"""', '"""No doseplan has been uploaded"""'], {}), "('INFO', 'No doseplan has been uploaded')\n", (58429, 58470), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((63748, 63810), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""The file must be a *.dcm file"""'], {}), "('Error', 'The file must be a *.dcm file')\n", (63768, 63810), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((64273, 64420), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Could not read the RT plan file. Try again or try another file.\n (Code: isocenter reading)"""'], {}), '(\'Error\',\n """Could not read the RT plan file. 
Try again or try another file.\n (Code: isocenter reading)"""\n )\n', (64293, 64420), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((64575, 64733), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Could not read the RT plan file. Try again or try another file. \n (Code: vertical table displacement)"""'], {}), '(\'Error\',\n """Could not read the RT plan file. Try again or try another file. \n (Code: vertical table displacement)"""\n )\n', (64595, 64733), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((64871, 65027), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Could not read the RT plan file. Try again or try another file-\n (Code: lateral table displacement)"""'], {}), '(\'Error\',\n """Could not read the RT plan file. Try again or try another file-\n (Code: lateral table displacement)"""\n )\n', (64891, 65027), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((65175, 65335), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Could not read the RT plan file. Try again or try another file\n (Code: longitudinal table displacement)"""'], {}), '(\'Error\',\n """Could not read the RT plan file. 
Try again or try another file\n (Code: longitudinal table displacement)"""\n )\n', (65195, 65335), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((65496, 65641), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Could not read the RT plan file. Try again or try another file\n (Code: Patient position)"""'], {}), '(\'Error\',\n """Could not read the RT plan file. Try again or try another file\n (Code: Patient position)"""\n )\n', (65516, 65641), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((69654, 69709), 'Globals.DVH_iscoenter_coords.append', 'Globals.DVH_iscoenter_coords.append', (['[event.x, event.y]'], {}), '([event.x, event.y])\n', (69689, 69709), False, 'import Globals\n'), ((75259, 75319), 'Globals.DVH_done_button_reference_point.config', 'Globals.DVH_done_button_reference_point.config', ([], {'state': 'ACTIVE'}), '(state=ACTIVE)\n', (75305, 75319), False, 'import Globals\n'), ((81126, 81235), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Something has gone wrong. Check that the filename does not contain Æ,Ø,Å"""'], {}), "('Error',\n 'Something has gone wrong. 
Check that the filename does not contain Æ,Ø,Å')\n", (81146, 81235), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((81952, 82028), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""The uploaded image need to be in RGB-format"""'], {}), "('Error', 'The uploaded image need to be in RGB-format')\n", (81972, 82028), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((82131, 82190), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Dpi in image has to be 127"""'], {}), "('Error', 'Dpi in image has to be 127')\n", (82151, 82190), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((99572, 99619), 'Globals.DVH_slice_offset.config', 'Globals.DVH_slice_offset.config', ([], {'state': 'DISABLED'}), '(state=DISABLED)\n', (99603, 99619), False, 'import Globals\n'), ((102285, 102314), 'tkinter.Frame', 'tk.Frame', (['choose_batch_window'], {}), '(choose_batch_window)\n', (102293, 102314), True, 'import tkinter as tk\n'), ((108381, 108409), 'tkinter.Frame', 'tk.Frame', (['choose_batch_frame'], {}), '(choose_batch_frame)\n', (108389, 108409), True, 'import tkinter as tk\n'), ((108748, 108920), 'tkinter.Button', 'tk.Button', (['set_batch_button_frame'], {'text': '"""OK"""', 'image': 'Globals.done_button_image', 'cursor': '"""hand2"""', 'font': "('calibri', '14')", 'relief': 'FLAT', 'state': 'ACTIVE', 'command': 
'set_batch'}), "(set_batch_button_frame, text='OK', image=Globals.\n done_button_image, cursor='hand2', font=('calibri', '14'), relief=FLAT,\n state=ACTIVE, command=set_batch)\n", (108757, 108920), True, 'import tkinter as tk\n'), ((109707, 109730), 'numpy.zeros', 'np.zeros', (['img_ROI.shape'], {}), '(img_ROI.shape)\n', (109715, 109730), True, 'import numpy as np\n'), ((109895, 109930), 'PIL.Image.fromarray', 'Image.fromarray', (['PIL_img_ROI', '"""RGB"""'], {}), "(PIL_img_ROI, 'RGB')\n", (109910, 109930), False, 'from PIL import Image, ImageTk\n'), ((110201, 110240), 'tkinter.Canvas', 'tk.Canvas', (['Globals.DVH_film_panedwindow'], {}), '(Globals.DVH_film_panedwindow)\n', (110210, 110240), True, 'import tkinter as tk\n'), ((110834, 110873), 'tkinter.Canvas', 'tk.Canvas', (['Globals.DVH_film_panedwindow'], {}), '(Globals.DVH_film_panedwindow)\n', (110843, 110873), True, 'import tkinter as tk\n'), ((111476, 111504), 'tkinter.Canvas', 'tk.Canvas', (['film_image_canvas'], {}), '(film_image_canvas)\n', (111485, 111504), True, 'import tkinter as tk\n'), ((111517, 111589), 'Globals.DVH_film_write_image.grid', 'Globals.DVH_film_write_image.grid', ([], {'row': '(0)', 'column': '(1)', 'sticky': '(N + S + W + E)'}), '(row=0, column=1, sticky=N + S + W + E)\n', (111550, 111589), False, 'import Globals\n'), ((111594, 111706), 'Globals.DVH_film_write_image.config', 'Globals.DVH_film_write_image.config', ([], {'bg': '"""#ffffff"""', 'relief': 'FLAT', 'highlightthickness': '(0)', 'width': 'wid', 'height': 'heig'}), "(bg='#ffffff', relief=FLAT,\n highlightthickness=0, width=wid, height=heig)\n", (111629, 111706), False, 'import Globals\n'), ((111764, 111791), 'tkinter.Canvas', 'tk.Canvas', (['film_dose_canvas'], {}), '(film_dose_canvas)\n', (111773, 111791), True, 'import tkinter as tk\n'), ((111804, 111881), 'Globals.DVH_film_dose_write_image.grid', 'Globals.DVH_film_dose_write_image.grid', ([], {'row': '(0)', 'column': '(1)', 'sticky': '(N + S + W + E)'}), '(row=0, column=1, 
sticky=N + S + W + E)\n', (111842, 111881), False, 'import Globals\n'), ((111886, 112003), 'Globals.DVH_film_dose_write_image.config', 'Globals.DVH_film_dose_write_image.config', ([], {'bg': '"""#ffffff"""', 'relief': 'FLAT', 'highlightthickness': '(0)', 'width': 'wid', 'height': 'heig'}), "(bg='#ffffff', relief=FLAT,\n highlightthickness=0, width=wid, height=heig)\n", (111926, 112003), False, 'import Globals\n'), ((112055, 112083), 'tkinter.Canvas', 'tk.Canvas', (['film_image_canvas'], {}), '(film_image_canvas)\n', (112064, 112083), True, 'import tkinter as tk\n'), ((112441, 112468), 'tkinter.Canvas', 'tk.Canvas', (['film_dose_canvas'], {}), '(film_dose_canvas)\n', (112450, 112468), True, 'import tkinter as tk\n'), ((112864, 112909), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', ([], {'image': 'scaled_image_visual'}), '(image=scaled_image_visual)\n', (112882, 112909), False, 'from PIL import Image, ImageTk\n'), ((112923, 112980), 'Globals.DVH_upload_button_doseplan.config', 'Globals.DVH_upload_button_doseplan.config', ([], {'state': 'DISABLED'}), '(state=DISABLED)\n', (112964, 112980), False, 'import Globals\n'), ((112993, 113046), 'Globals.DVH_upload_button_rtplan.config', 'Globals.DVH_upload_button_rtplan.config', ([], {'state': 'ACTIVE'}), '(state=ACTIVE)\n', (113032, 113046), False, 'import Globals\n'), ((113059, 113112), 'Globals.DVH_upload_button_film.config', 'Globals.DVH_upload_button_film.config', ([], {'state': 'DISABLED'}), '(state=DISABLED)\n', (113096, 113112), False, 'import Globals\n'), ((117178, 117240), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""The file must be a *.tif file"""'], {}), "('Error', 'The file must be a *.tif file')\n", (117198, 117240), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((2599, 
2655), 'numpy.round', 'np.round', (['Globals.DVH_distance_reference_point_ROI[0][0]'], {}), '(Globals.DVH_distance_reference_point_ROI[0][0])\n', (2607, 2655), True, 'import numpy as np\n'), ((2670, 2726), 'numpy.round', 'np.round', (['Globals.DVH_distance_reference_point_ROI[0][1]'], {}), '(Globals.DVH_distance_reference_point_ROI[0][1])\n', (2678, 2726), True, 'import numpy as np\n'), ((2789, 2845), 'numpy.round', 'np.round', (['Globals.DVH_distance_reference_point_ROI[1][0]'], {}), '(Globals.DVH_distance_reference_point_ROI[1][0])\n', (2797, 2845), True, 'import numpy as np\n'), ((2860, 2916), 'numpy.round', 'np.round', (['Globals.DVH_distance_reference_point_ROI[1][1]'], {}), '(Globals.DVH_distance_reference_point_ROI[1][1])\n', (2868, 2916), True, 'import numpy as np\n'), ((2979, 3035), 'numpy.round', 'np.round', (['Globals.DVH_distance_reference_point_ROI[2][0]'], {}), '(Globals.DVH_distance_reference_point_ROI[2][0])\n', (2987, 3035), True, 'import numpy as np\n'), ((3050, 3106), 'numpy.round', 'np.round', (['Globals.DVH_distance_reference_point_ROI[2][1]'], {}), '(Globals.DVH_distance_reference_point_ROI[2][1])\n', (3058, 3106), True, 'import numpy as np\n'), ((3169, 3225), 'numpy.round', 'np.round', (['Globals.DVH_distance_reference_point_ROI[3][0]'], {}), '(Globals.DVH_distance_reference_point_ROI[3][0])\n', (3177, 3225), True, 'import numpy as np\n'), ((3240, 3296), 'numpy.round', 'np.round', (['Globals.DVH_distance_reference_point_ROI[3][1]'], {}), '(Globals.DVH_distance_reference_point_ROI[3][1])\n', (3248, 3296), True, 'import numpy as np\n'), ((10779, 10813), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (10811, 10813), False, 'import Globals\n'), ((10858, 10917), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(0)', '(2)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 0, 2)\n', (10869, 10917), True, 'import numpy as np\n'), ((11578, 11612), 
'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (11610, 11612), False, 'import Globals\n'), ((25075, 25136), 'cv2.resize', 'cv2.resize', (['img'], {'dsize': '(img.shape[1] * 10, img.shape[0] * 10)'}), '(img, dsize=(img.shape[1] * 10, img.shape[0] * 10))\n', (25085, 25136), False, 'import cv2\n'), ((25164, 25225), 'cv2.resize', 'cv2.resize', (['img'], {'dsize': '(img.shape[1] * 15, img.shape[0] * 15)'}), '(img, dsize=(img.shape[1] * 15, img.shape[0] * 15))\n', (25174, 25225), False, 'import cv2\n'), ((26591, 26635), 'Globals.profiles_doseplan_text_image.width', 'Globals.profiles_doseplan_text_image.width', ([], {}), '()\n', (26633, 26635), False, 'import Globals\n'), ((26644, 26689), 'Globals.profiles_doseplan_text_image.height', 'Globals.profiles_doseplan_text_image.height', ([], {}), '()\n', (26687, 26689), False, 'import Globals\n'), ((27695, 27754), 'cv2.resize', 'cv2.resize', (['img'], {'dsize': '(img.shape[1] * 5, img.shape[0] * 5)'}), '(img, dsize=(img.shape[1] * 5, img.shape[0] * 5))\n', (27705, 27754), False, 'import cv2\n'), ((29208, 29258), 'numpy.round', 'np.round', (['Globals.DVH_distance_isocenter_ROI[0][0]'], {}), '(Globals.DVH_distance_isocenter_ROI[0][0])\n', (29216, 29258), True, 'import numpy as np\n'), ((29273, 29323), 'numpy.round', 'np.round', (['Globals.DVH_distance_isocenter_ROI[0][1]'], {}), '(Globals.DVH_distance_isocenter_ROI[0][1])\n', (29281, 29323), True, 'import numpy as np\n'), ((29386, 29436), 'numpy.round', 'np.round', (['Globals.DVH_distance_isocenter_ROI[1][0]'], {}), '(Globals.DVH_distance_isocenter_ROI[1][0])\n', (29394, 29436), True, 'import numpy as np\n'), ((29451, 29501), 'numpy.round', 'np.round', (['Globals.DVH_distance_isocenter_ROI[1][1]'], {}), '(Globals.DVH_distance_isocenter_ROI[1][1])\n', (29459, 29501), True, 'import numpy as np\n'), ((29564, 29614), 'numpy.round', 'np.round', (['Globals.DVH_distance_isocenter_ROI[2][0]'], {}), '(Globals.DVH_distance_isocenter_ROI[2][0])\n', 
(29572, 29614), True, 'import numpy as np\n'), ((29629, 29679), 'numpy.round', 'np.round', (['Globals.DVH_distance_isocenter_ROI[2][1]'], {}), '(Globals.DVH_distance_isocenter_ROI[2][1])\n', (29637, 29679), True, 'import numpy as np\n'), ((29742, 29792), 'numpy.round', 'np.round', (['Globals.DVH_distance_isocenter_ROI[3][0]'], {}), '(Globals.DVH_distance_isocenter_ROI[3][0])\n', (29750, 29792), True, 'import numpy as np\n'), ((29807, 29857), 'numpy.round', 'np.round', (['Globals.DVH_distance_isocenter_ROI[3][1]'], {}), '(Globals.DVH_distance_isocenter_ROI[3][1])\n', (29815, 29857), True, 'import numpy as np\n'), ((32931, 32965), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (32963, 32965), False, 'import Globals\n'), ((33010, 33069), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(0)', '(2)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 0, 2)\n', (33021, 33069), True, 'import numpy as np\n'), ((33697, 33731), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (33729, 33731), False, 'import Globals\n'), ((40923, 40955), 'numpy.round', 'np.round', (['(Globals.DVH_offset / 2)'], {}), '(Globals.DVH_offset / 2)\n', (40931, 40955), True, 'import numpy as np\n'), ((41057, 41089), 'numpy.round', 'np.round', (['(Globals.DVH_offset / 3)'], {}), '(Globals.DVH_offset / 3)\n', (41065, 41089), True, 'import numpy as np\n'), ((47016, 47077), 'cv2.resize', 'cv2.resize', (['img'], {'dsize': '(img.shape[1] * 10, img.shape[0] * 10)'}), '(img, dsize=(img.shape[1] * 10, img.shape[0] * 10))\n', (47026, 47077), False, 'import cv2\n'), ((47105, 47166), 'cv2.resize', 'cv2.resize', (['img'], {'dsize': '(img.shape[1] * 15, img.shape[0] * 15)'}), '(img, dsize=(img.shape[1] * 15, img.shape[0] * 15))\n', (47115, 47166), False, 'import cv2\n'), ((48588, 48632), 'Globals.profiles_doseplan_text_image.width', 'Globals.profiles_doseplan_text_image.width', ([], {}), '()\n', (48630, 
48632), False, 'import Globals\n'), ((48641, 48686), 'Globals.profiles_doseplan_text_image.height', 'Globals.profiles_doseplan_text_image.height', ([], {}), '()\n', (48684, 48686), False, 'import Globals\n'), ((49688, 49747), 'cv2.resize', 'cv2.resize', (['img'], {'dsize': '(img.shape[1] * 5, img.shape[0] * 5)'}), '(img, dsize=(img.shape[1] * 5, img.shape[0] * 5))\n', (49698, 49747), False, 'import cv2\n'), ((50107, 50129), 'os.path.splitext', 'os.path.splitext', (['file'], {}), '(file)\n', (50123, 50129), False, 'import os\n'), ((52969, 53067), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Something went wrong. Try again.\n (Code: processDoseplan)"""'], {}), '(\'Error\',\n """Something went wrong. Try again.\n (Code: processDoseplan)""")\n', (52989, 53067), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((60750, 60794), 'Globals.profiles_doseplan_text_image.width', 'Globals.profiles_doseplan_text_image.width', ([], {}), '()\n', (60792, 60794), False, 'import Globals\n'), ((60803, 60848), 'Globals.profiles_doseplan_text_image.height', 'Globals.profiles_doseplan_text_image.height', ([], {}), '()\n', (60846, 60848), False, 'import Globals\n'), ((63617, 63639), 'os.path.splitext', 'os.path.splitext', (['file'], {}), '(file)\n', (63633, 63639), False, 'import os\n'), ((69838, 69893), 'Globals.DVH_iscoenter_coords.append', 'Globals.DVH_iscoenter_coords.append', (['[event.x, event.y]'], {}), '([event.x, event.y])\n', (69873, 69893), False, 'import Globals\n'), ((70740, 70800), 'Globals.DVH_mark_isocenter_up_down_line.append', 'Globals.DVH_mark_isocenter_up_down_line.append', (['up_down_line'], {}), '(up_down_line)\n', (70786, 70800), False, 'import Globals\n'), ((70813, 70879), 'Globals.DVH_mark_isocenter_right_left_line.append', 
'Globals.DVH_mark_isocenter_right_left_line.append', (['right_left_line'], {}), '(right_left_line)\n', (70862, 70879), False, 'import Globals\n'), ((70892, 70936), 'Globals.DVH_mark_isocenter_oval.append', 'Globals.DVH_mark_isocenter_oval.append', (['oval'], {}), '(oval)\n', (70930, 70936), False, 'import Globals\n'), ((79624, 79684), 'Globals.DVH_done_button_reference_point.config', 'Globals.DVH_done_button_reference_point.config', ([], {'state': 'ACTIVE'}), '(state=ACTIVE)\n', (79670, 79684), False, 'import Globals\n'), ((79801, 79845), 'Globals.DVH_done_button.config', 'Globals.DVH_done_button.config', ([], {'state': 'ACTIVE'}), '(state=ACTIVE)\n', (79831, 79845), False, 'import Globals\n'), ((80460, 80510), 'Globals.DVH_film_factor_input.get', 'Globals.DVH_film_factor_input.get', (['"""1.0"""', '"""end-1c"""'], {}), "('1.0', 'end-1c')\n", (80493, 80510), False, 'import Globals\n'), ((80540, 80641), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Missing parameter"""', '"""Film factor invalid format. \n (Code: UploadFilm)"""'], {}), '(\'Missing parameter\',\n """Film factor invalid format. 
\n (Code: UploadFilm)""")\n', (80560, 80641), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((80705, 80727), 'os.path.splitext', 'os.path.splitext', (['file'], {}), '(file)\n', (80721, 80727), False, 'import os\n'), ((80983, 80997), 'os.path.normpath', 'normpath', (['file'], {}), '(file)\n', (80991, 80997), False, 'from os.path import normpath, basename\n'), ((81440, 81465), 'numpy.clip', 'np.clip', (['cv2Img', '(0)', '(65535)'], {}), '(cv2Img, 0, 65535)\n', (81447, 81465), True, 'import numpy as np\n'), ((81491, 81510), 'cv2.flip', 'cv2.flip', (['cv2Img', '(1)'], {}), '(cv2Img, 1)\n', (81499, 81510), False, 'import cv2\n'), ((81608, 81644), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', ([], {'image': 'img_scaled'}), '(image=img_scaled)\n', (81626, 81644), False, 'from PIL import Image, ImageTk\n'), ((81816, 81907), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""The resolution of the image is not consistent with dpi"""'], {}), "('Error',\n 'The resolution of the image is not consistent with dpi')\n", (81836, 81907), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((101224, 101286), 'Globals.DVH_input_vertical_displacement.config', 'Globals.DVH_input_vertical_displacement.config', ([], {'state': 'DISABLED'}), '(state=DISABLED)\n', (101270, 101286), False, 'import Globals\n'), ((101303, 101369), 'Globals.DVH_input_longitudinal_displacement.config', 'Globals.DVH_input_longitudinal_displacement.config', ([], {'state': 'DISABLED'}), '(state=DISABLED)\n', (101353, 101369), False, 'import Globals\n'), ((101386, 101447), 
'Globals.DVH_input_lateral_displacement.config', 'Globals.DVH_input_lateral_displacement.config', ([], {'state': 'DISABLED'}), '(state=DISABLED)\n', (101431, 101447), False, 'import Globals\n'), ((102011, 102054), 'tkinter.Toplevel', 'tk.Toplevel', (['new_window_reference_point_tab'], {}), '(new_window_reference_point_tab)\n', (102022, 102054), True, 'import tkinter as tk\n'), ((102111, 102148), 'tkinter.Toplevel', 'tk.Toplevel', (['new_window_isocenter_tab'], {}), '(new_window_isocenter_tab)\n', (102122, 102148), True, 'import tkinter as tk\n'), ((102814, 102861), 'tkinter.Text', 'tk.Text', (['choose_batch_frame'], {'width': '(10)', 'height': '(1)'}), '(choose_batch_frame, width=10, height=1)\n', (102821, 102861), True, 'import tkinter as tk\n'), ((103332, 103379), 'tkinter.Text', 'tk.Text', (['choose_batch_frame'], {'width': '(20)', 'height': '(1)'}), '(choose_batch_frame, width=20, height=1)\n', (103339, 103379), True, 'import tkinter as tk\n'), ((103833, 103879), 'tkinter.Text', 'tk.Text', (['choose_batch_frame'], {'width': '(8)', 'height': '(1)'}), '(choose_batch_frame, width=8, height=1)\n', (103840, 103879), True, 'import tkinter as tk\n'), ((104350, 104397), 'tkinter.Text', 'tk.Text', (['choose_batch_frame'], {'width': '(30)', 'height': '(1)'}), '(choose_batch_frame, width=30, height=1)\n', (104357, 104397), True, 'import tkinter as tk\n'), ((105813, 105930), 'numpy.zeros', 'np.zeros', (['(Globals.DVH_film_dataset_ROI_red_channel.shape[0], Globals.\n DVH_film_dataset_ROI_red_channel.shape[1])'], {}), '((Globals.DVH_film_dataset_ROI_red_channel.shape[0], Globals.\n DVH_film_dataset_ROI_red_channel.shape[1]))\n', (105821, 105930), True, 'import numpy as np\n'), ((106485, 106594), 'numpy.zeros', 'np.zeros', (['(Globals.DVH_film_dataset_red_channel.shape[0], Globals.\n DVH_film_dataset_red_channel.shape[1])'], {}), '((Globals.DVH_film_dataset_red_channel.shape[0], Globals.\n DVH_film_dataset_red_channel.shape[1]))\n', (106493, 106594), True, 'import numpy as 
np\n'), ((107089, 107180), 'Globals.DVH_film_write_image.create_image', 'Globals.DVH_film_write_image.create_image', (['(0)', '(0)'], {'image': 'scaled_image_visual', 'anchor': '"""nw"""'}), "(0, 0, image=scaled_image_visual,\n anchor='nw')\n", (107130, 107180), False, 'import Globals\n'), ((107273, 107326), 'numpy.max', 'np.max', (['Globals.DVH_film_dataset_ROI_red_channel_dose'], {}), '(Globals.DVH_film_dataset_ROI_red_channel_dose)\n', (107279, 107326), True, 'import numpy as np\n'), ((107623, 107661), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', ([], {'image': 'PIL_img_film'}), '(image=PIL_img_film)\n', (107641, 107661), False, 'from PIL import Image, ImageTk\n'), ((107678, 107780), 'Globals.DVH_film_dose_write_image.create_image', 'Globals.DVH_film_dose_write_image.create_image', (['(0)', '(0)'], {'image': 'scaled_image_visual_film', 'anchor': '"""nw"""'}), "(0, 0, image=\n scaled_image_visual_film, anchor='nw')\n", (107724, 107780), False, 'import Globals\n'), ((113348, 113559), 'Globals.DVH_distance_reference_point_ROI.append', 'Globals.DVH_distance_reference_point_ROI.append', (['[(Globals.DVH_film_reference_point[0] - Globals.DVH_ROI_coords[0][0]) * 0.2,\n (Globals.DVH_film_reference_point[1] - Globals.DVH_ROI_coords[0][1]) * 0.2]'], {}), '([(Globals.\n DVH_film_reference_point[0] - Globals.DVH_ROI_coords[0][0]) * 0.2, (\n Globals.DVH_film_reference_point[1] - Globals.DVH_ROI_coords[0][1]) * 0.2])\n', (113395, 113559), False, 'import Globals\n'), ((113581, 113792), 'Globals.DVH_distance_reference_point_ROI.append', 'Globals.DVH_distance_reference_point_ROI.append', (['[(Globals.DVH_film_reference_point[0] - Globals.DVH_ROI_coords[1][0]) * 0.2,\n (Globals.DVH_film_reference_point[1] - Globals.DVH_ROI_coords[1][1]) * 0.2]'], {}), '([(Globals.\n DVH_film_reference_point[0] - Globals.DVH_ROI_coords[1][0]) * 0.2, (\n Globals.DVH_film_reference_point[1] - Globals.DVH_ROI_coords[1][1]) * 0.2])\n', (113628, 113792), False, 'import Globals\n'), ((113816, 
114027), 'Globals.DVH_distance_reference_point_ROI.append', 'Globals.DVH_distance_reference_point_ROI.append', (['[(Globals.DVH_film_reference_point[0] - Globals.DVH_ROI_coords[2][0]) * 0.2,\n (Globals.DVH_film_reference_point[1] - Globals.DVH_ROI_coords[2][1]) * 0.2]'], {}), '([(Globals.\n DVH_film_reference_point[0] - Globals.DVH_ROI_coords[2][0]) * 0.2, (\n Globals.DVH_film_reference_point[1] - Globals.DVH_ROI_coords[2][1]) * 0.2])\n', (113863, 114027), False, 'import Globals\n'), ((114051, 114262), 'Globals.DVH_distance_reference_point_ROI.append', 'Globals.DVH_distance_reference_point_ROI.append', (['[(Globals.DVH_film_reference_point[0] - Globals.DVH_ROI_coords[3][0]) * 0.2,\n (Globals.DVH_film_reference_point[1] - Globals.DVH_ROI_coords[3][1]) * 0.2]'], {}), '([(Globals.\n DVH_film_reference_point[0] - Globals.DVH_ROI_coords[3][0]) * 0.2, (\n Globals.DVH_film_reference_point[1] - Globals.DVH_ROI_coords[3][1]) * 0.2])\n', (114098, 114262), False, 'import Globals\n'), ((114392, 114583), 'Globals.DVH_distance_isocenter_ROI.append', 'Globals.DVH_distance_isocenter_ROI.append', (['[(Globals.DVH_film_isocenter[0] - Globals.DVH_ROI_coords[0][0]) * 0.2, (\n Globals.DVH_film_isocenter[1] - Globals.DVH_ROI_coords[0][1]) * 0.2]'], {}), '([(Globals.DVH_film_isocenter[0] -\n Globals.DVH_ROI_coords[0][0]) * 0.2, (Globals.DVH_film_isocenter[1] -\n Globals.DVH_ROI_coords[0][1]) * 0.2])\n', (114433, 114583), False, 'import Globals\n'), ((114607, 114798), 'Globals.DVH_distance_isocenter_ROI.append', 'Globals.DVH_distance_isocenter_ROI.append', (['[(Globals.DVH_film_isocenter[0] - Globals.DVH_ROI_coords[1][0]) * 0.2, (\n Globals.DVH_film_isocenter[1] - Globals.DVH_ROI_coords[1][1]) * 0.2]'], {}), '([(Globals.DVH_film_isocenter[0] -\n Globals.DVH_ROI_coords[1][0]) * 0.2, (Globals.DVH_film_isocenter[1] -\n Globals.DVH_ROI_coords[1][1]) * 0.2])\n', (114648, 114798), False, 'import Globals\n'), ((114824, 115015), 'Globals.DVH_distance_isocenter_ROI.append', 
'Globals.DVH_distance_isocenter_ROI.append', (['[(Globals.DVH_film_isocenter[0] - Globals.DVH_ROI_coords[2][0]) * 0.2, (\n Globals.DVH_film_isocenter[1] - Globals.DVH_ROI_coords[2][1]) * 0.2]'], {}), '([(Globals.DVH_film_isocenter[0] -\n Globals.DVH_ROI_coords[2][0]) * 0.2, (Globals.DVH_film_isocenter[1] -\n Globals.DVH_ROI_coords[2][1]) * 0.2])\n', (114865, 115015), False, 'import Globals\n'), ((115041, 115232), 'Globals.DVH_distance_isocenter_ROI.append', 'Globals.DVH_distance_isocenter_ROI.append', (['[(Globals.DVH_film_isocenter[0] - Globals.DVH_ROI_coords[3][0]) * 0.2, (\n Globals.DVH_film_isocenter[1] - Globals.DVH_ROI_coords[3][1]) * 0.2]'], {}), '([(Globals.DVH_film_isocenter[0] -\n Globals.DVH_ROI_coords[3][0]) * 0.2, (Globals.DVH_film_isocenter[1] -\n Globals.DVH_ROI_coords[3][1]) * 0.2])\n', (115082, 115232), False, 'import Globals\n'), ((4141, 4201), 'numpy.round', 'np.round', (['(Globals.DVH_distance_reference_point_ROI[0][0] / 2)'], {}), '(Globals.DVH_distance_reference_point_ROI[0][0] / 2)\n', (4149, 4201), True, 'import numpy as np\n'), ((4216, 4276), 'numpy.round', 'np.round', (['(Globals.DVH_distance_reference_point_ROI[0][1] / 2)'], {}), '(Globals.DVH_distance_reference_point_ROI[0][1] / 2)\n', (4224, 4276), True, 'import numpy as np\n'), ((4339, 4399), 'numpy.round', 'np.round', (['(Globals.DVH_distance_reference_point_ROI[1][0] / 2)'], {}), '(Globals.DVH_distance_reference_point_ROI[1][0] / 2)\n', (4347, 4399), True, 'import numpy as np\n'), ((4414, 4474), 'numpy.round', 'np.round', (['(Globals.DVH_distance_reference_point_ROI[1][1] / 2)'], {}), '(Globals.DVH_distance_reference_point_ROI[1][1] / 2)\n', (4422, 4474), True, 'import numpy as np\n'), ((4537, 4597), 'numpy.round', 'np.round', (['(Globals.DVH_distance_reference_point_ROI[2][0] / 2)'], {}), '(Globals.DVH_distance_reference_point_ROI[2][0] / 2)\n', (4545, 4597), True, 'import numpy as np\n'), ((4612, 4672), 'numpy.round', 'np.round', (['(Globals.DVH_distance_reference_point_ROI[2][1] / 
2)'], {}), '(Globals.DVH_distance_reference_point_ROI[2][1] / 2)\n', (4620, 4672), True, 'import numpy as np\n'), ((4735, 4795), 'numpy.round', 'np.round', (['(Globals.DVH_distance_reference_point_ROI[3][0] / 2)'], {}), '(Globals.DVH_distance_reference_point_ROI[3][0] / 2)\n', (4743, 4795), True, 'import numpy as np\n'), ((4810, 4870), 'numpy.round', 'np.round', (['(Globals.DVH_distance_reference_point_ROI[3][1] / 2)'], {}), '(Globals.DVH_distance_reference_point_ROI[3][1] / 2)\n', (4818, 4870), True, 'import numpy as np\n'), ((5682, 5742), 'numpy.round', 'np.round', (['(Globals.DVH_distance_reference_point_ROI[0][0] / 3)'], {}), '(Globals.DVH_distance_reference_point_ROI[0][0] / 3)\n', (5690, 5742), True, 'import numpy as np\n'), ((5757, 5817), 'numpy.round', 'np.round', (['(Globals.DVH_distance_reference_point_ROI[0][1] / 3)'], {}), '(Globals.DVH_distance_reference_point_ROI[0][1] / 3)\n', (5765, 5817), True, 'import numpy as np\n'), ((5880, 5940), 'numpy.round', 'np.round', (['(Globals.DVH_distance_reference_point_ROI[1][0] / 3)'], {}), '(Globals.DVH_distance_reference_point_ROI[1][0] / 3)\n', (5888, 5940), True, 'import numpy as np\n'), ((5955, 6015), 'numpy.round', 'np.round', (['(Globals.DVH_distance_reference_point_ROI[1][1] / 3)'], {}), '(Globals.DVH_distance_reference_point_ROI[1][1] / 3)\n', (5963, 6015), True, 'import numpy as np\n'), ((6078, 6138), 'numpy.round', 'np.round', (['(Globals.DVH_distance_reference_point_ROI[2][0] / 3)'], {}), '(Globals.DVH_distance_reference_point_ROI[2][0] / 3)\n', (6086, 6138), True, 'import numpy as np\n'), ((6153, 6213), 'numpy.round', 'np.round', (['(Globals.DVH_distance_reference_point_ROI[2][1] / 3)'], {}), '(Globals.DVH_distance_reference_point_ROI[2][1] / 3)\n', (6161, 6213), True, 'import numpy as np\n'), ((6276, 6336), 'numpy.round', 'np.round', (['(Globals.DVH_distance_reference_point_ROI[3][0] / 3)'], {}), '(Globals.DVH_distance_reference_point_ROI[3][0] / 3)\n', (6284, 6336), True, 'import numpy as np\n'), 
((6351, 6411), 'numpy.round', 'np.round', (['(Globals.DVH_distance_reference_point_ROI[3][1] / 3)'], {}), '(Globals.DVH_distance_reference_point_ROI[3][1] / 3)\n', (6359, 6411), True, 'import numpy as np\n'), ((11066, 11100), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (11098, 11100), False, 'import Globals\n'), ((11209, 11272), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Something has gone wrong here."""'], {}), "('Error', 'Something has gone wrong here.')\n", (11229, 11272), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((11710, 11744), 'Globals.DCH_film_orientation.get', 'Globals.DCH_film_orientation.get', ([], {}), '()\n', (11742, 11744), False, 'import Globals\n'), ((11789, 11848), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(0)', '(2)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 0, 2)\n', (11800, 11848), True, 'import numpy as np\n'), ((12014, 12048), 'numpy.swapaxes', 'np.swapaxes', (['dataset_swapped', '(1)', '(2)'], {}), '(dataset_swapped, 1, 2)\n', (12025, 12048), True, 'import numpy as np\n'), ((12858, 12892), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (12890, 12892), False, 'import Globals\n'), ((12936, 12995), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(0)', '(2)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 0, 2)\n', (12947, 12995), True, 'import numpy as np\n'), ((13161, 13195), 'numpy.swapaxes', 'np.swapaxes', (['dataset_swapped', '(1)', '(2)'], {}), '(dataset_swapped, 1, 2)\n', (13172, 13195), True, 'import numpy as np\n'), ((25399, 25414), 'matplotlib.cm.viridis', 'cm.viridis', (['img'], {}), '(img)\n', (25409, 25414), False, 'from 
matplotlib import cm\n'), ((25715, 25760), 'Globals.profiles_doseplan_text_image.height', 'Globals.profiles_doseplan_text_image.height', ([], {}), '()\n', (25758, 25760), False, 'import Globals\n'), ((25793, 25837), 'Globals.profiles_doseplan_text_image.width', 'Globals.profiles_doseplan_text_image.width', ([], {}), '()\n', (25835, 25837), False, 'import Globals\n'), ((25950, 25995), 'Globals.profiles_doseplan_text_image.height', 'Globals.profiles_doseplan_text_image.height', ([], {}), '()\n', (25993, 25995), False, 'import Globals\n'), ((26028, 26072), 'Globals.profiles_doseplan_text_image.width', 'Globals.profiles_doseplan_text_image.width', ([], {}), '()\n', (26070, 26072), False, 'import Globals\n'), ((27859, 27920), 'cv2.resize', 'cv2.resize', (['img'], {'dsize': '(img.shape[1] * 10, img.shape[0] * 10)'}), '(img, dsize=(img.shape[1] * 10, img.shape[0] * 10))\n', (27869, 27920), False, 'import cv2\n'), ((27974, 28035), 'cv2.resize', 'cv2.resize', (['img'], {'dsize': '(img.shape[1] * 15, img.shape[0] * 15)'}), '(img, dsize=(img.shape[1] * 15, img.shape[0] * 15))\n', (27984, 28035), False, 'import cv2\n'), ((30324, 30378), 'numpy.round', 'np.round', (['(Globals.DVH_distance_isocenter_ROI[0][0] / 2)'], {}), '(Globals.DVH_distance_isocenter_ROI[0][0] / 2)\n', (30332, 30378), True, 'import numpy as np\n'), ((30393, 30447), 'numpy.round', 'np.round', (['(Globals.DVH_distance_isocenter_ROI[0][1] / 2)'], {}), '(Globals.DVH_distance_isocenter_ROI[0][1] / 2)\n', (30401, 30447), True, 'import numpy as np\n'), ((30510, 30564), 'numpy.round', 'np.round', (['(Globals.DVH_distance_isocenter_ROI[1][0] / 2)'], {}), '(Globals.DVH_distance_isocenter_ROI[1][0] / 2)\n', (30518, 30564), True, 'import numpy as np\n'), ((30579, 30633), 'numpy.round', 'np.round', (['(Globals.DVH_distance_isocenter_ROI[1][1] / 2)'], {}), '(Globals.DVH_distance_isocenter_ROI[1][1] / 2)\n', (30587, 30633), True, 'import numpy as np\n'), ((30696, 30750), 'numpy.round', 'np.round', 
(['(Globals.DVH_distance_isocenter_ROI[2][0] / 2)'], {}), '(Globals.DVH_distance_isocenter_ROI[2][0] / 2)\n', (30704, 30750), True, 'import numpy as np\n'), ((30765, 30819), 'numpy.round', 'np.round', (['(Globals.DVH_distance_isocenter_ROI[2][1] / 2)'], {}), '(Globals.DVH_distance_isocenter_ROI[2][1] / 2)\n', (30773, 30819), True, 'import numpy as np\n'), ((30882, 30936), 'numpy.round', 'np.round', (['(Globals.DVH_distance_isocenter_ROI[3][0] / 2)'], {}), '(Globals.DVH_distance_isocenter_ROI[3][0] / 2)\n', (30890, 30936), True, 'import numpy as np\n'), ((30951, 31005), 'numpy.round', 'np.round', (['(Globals.DVH_distance_isocenter_ROI[3][1] / 2)'], {}), '(Globals.DVH_distance_isocenter_ROI[3][1] / 2)\n', (30959, 31005), True, 'import numpy as np\n'), ((31409, 31463), 'numpy.round', 'np.round', (['(Globals.DVH_distance_isocenter_ROI[0][0] / 3)'], {}), '(Globals.DVH_distance_isocenter_ROI[0][0] / 3)\n', (31417, 31463), True, 'import numpy as np\n'), ((31478, 31532), 'numpy.round', 'np.round', (['(Globals.DVH_distance_isocenter_ROI[0][1] / 3)'], {}), '(Globals.DVH_distance_isocenter_ROI[0][1] / 3)\n', (31486, 31532), True, 'import numpy as np\n'), ((31595, 31649), 'numpy.round', 'np.round', (['(Globals.DVH_distance_isocenter_ROI[1][0] / 3)'], {}), '(Globals.DVH_distance_isocenter_ROI[1][0] / 3)\n', (31603, 31649), True, 'import numpy as np\n'), ((31664, 31718), 'numpy.round', 'np.round', (['(Globals.DVH_distance_isocenter_ROI[1][1] / 3)'], {}), '(Globals.DVH_distance_isocenter_ROI[1][1] / 3)\n', (31672, 31718), True, 'import numpy as np\n'), ((31781, 31835), 'numpy.round', 'np.round', (['(Globals.DVH_distance_isocenter_ROI[2][0] / 3)'], {}), '(Globals.DVH_distance_isocenter_ROI[2][0] / 3)\n', (31789, 31835), True, 'import numpy as np\n'), ((31850, 31904), 'numpy.round', 'np.round', (['(Globals.DVH_distance_isocenter_ROI[2][1] / 3)'], {}), '(Globals.DVH_distance_isocenter_ROI[2][1] / 3)\n', (31858, 31904), True, 'import numpy as np\n'), ((31967, 32021), 'numpy.round', 
'np.round', (['(Globals.DVH_distance_isocenter_ROI[3][0] / 3)'], {}), '(Globals.DVH_distance_isocenter_ROI[3][0] / 3)\n', (31975, 32021), True, 'import numpy as np\n'), ((32036, 32090), 'numpy.round', 'np.round', (['(Globals.DVH_distance_isocenter_ROI[3][1] / 3)'], {}), '(Globals.DVH_distance_isocenter_ROI[3][1] / 3)\n', (32044, 32090), True, 'import numpy as np\n'), ((33218, 33252), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (33250, 33252), False, 'import Globals\n'), ((33361, 33424), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Something has gone wrong here."""'], {}), "('Error', 'Something has gone wrong here.')\n", (33381, 33424), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((33829, 33863), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (33861, 33863), False, 'import Globals\n'), ((33908, 33967), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(0)', '(2)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 0, 2)\n', (33919, 33967), True, 'import numpy as np\n'), ((34133, 34167), 'numpy.swapaxes', 'np.swapaxes', (['dataset_swapped', '(1)', '(2)'], {}), '(dataset_swapped, 1, 2)\n', (34144, 34167), True, 'import numpy as np\n'), ((34944, 34978), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (34976, 34978), False, 'import Globals\n'), ((35022, 35081), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(0)', '(2)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 0, 2)\n', (35033, 35081), True, 'import numpy as np\n'), ((35247, 35281), 'numpy.swapaxes', 'np.swapaxes', (['dataset_swapped', '(1)', '(2)'], {}), '(dataset_swapped, 1, 2)\n', 
(35258, 35281), True, 'import numpy as np\n'), ((47396, 47411), 'matplotlib.cm.viridis', 'cm.viridis', (['img'], {}), '(img)\n', (47406, 47411), False, 'from matplotlib import cm\n'), ((47712, 47757), 'Globals.profiles_doseplan_text_image.height', 'Globals.profiles_doseplan_text_image.height', ([], {}), '()\n', (47755, 47757), False, 'import Globals\n'), ((47790, 47834), 'Globals.profiles_doseplan_text_image.width', 'Globals.profiles_doseplan_text_image.width', ([], {}), '()\n', (47832, 47834), False, 'import Globals\n'), ((47947, 47992), 'Globals.profiles_doseplan_text_image.height', 'Globals.profiles_doseplan_text_image.height', ([], {}), '()\n', (47990, 47992), False, 'import Globals\n'), ((48025, 48069), 'Globals.profiles_doseplan_text_image.width', 'Globals.profiles_doseplan_text_image.width', ([], {}), '()\n', (48067, 48069), False, 'import Globals\n'), ((49852, 49913), 'cv2.resize', 'cv2.resize', (['img'], {'dsize': '(img.shape[1] * 10, img.shape[0] * 10)'}), '(img, dsize=(img.shape[1] * 10, img.shape[0] * 10))\n', (49862, 49913), False, 'import cv2\n'), ((49967, 50028), 'cv2.resize', 'cv2.resize', (['img'], {'dsize': '(img.shape[1] * 15, img.shape[0] * 15)'}), '(img, dsize=(img.shape[1] * 15, img.shape[0] * 15))\n', (49977, 50028), False, 'import cv2\n'), ((59554, 59573), 'matplotlib.cm.viridis', 'cm.viridis', (['img_ROI'], {}), '(img_ROI)\n', (59564, 59573), False, 'from matplotlib import cm\n'), ((59874, 59919), 'Globals.profiles_doseplan_text_image.height', 'Globals.profiles_doseplan_text_image.height', ([], {}), '()\n', (59917, 59919), False, 'import Globals\n'), ((59952, 59996), 'Globals.profiles_doseplan_text_image.width', 'Globals.profiles_doseplan_text_image.width', ([], {}), '()\n', (59994, 59996), False, 'import Globals\n'), ((60109, 60154), 'Globals.profiles_doseplan_text_image.height', 'Globals.profiles_doseplan_text_image.height', ([], {}), '()\n', (60152, 60154), False, 'import Globals\n'), ((60187, 60231), 
'Globals.profiles_doseplan_text_image.width', 'Globals.profiles_doseplan_text_image.width', ([], {}), '()\n', (60229, 60231), False, 'import Globals\n'), ((71126, 71170), 'Globals.DVH_done_button.config', 'Globals.DVH_done_button.config', ([], {'state': 'ACTIVE'}), '(state=ACTIVE)\n', (71156, 71170), False, 'import Globals\n'), ((112287, 112337), 'Globals.profiles_scanned_image_text_image.height', 'Globals.profiles_scanned_image_text_image.height', ([], {}), '()\n', (112335, 112337), False, 'import Globals\n'), ((112345, 112394), 'Globals.profiles_scanned_image_text_image.width', 'Globals.profiles_scanned_image_text_image.width', ([], {}), '()\n', (112392, 112394), False, 'import Globals\n'), ((112674, 112724), 'Globals.profiles_film_dose_map_text_image.height', 'Globals.profiles_film_dose_map_text_image.height', ([], {}), '()\n', (112722, 112724), False, 'import Globals\n'), ((112732, 112781), 'Globals.profiles_film_dose_map_text_image.width', 'Globals.profiles_film_dose_map_text_image.width', ([], {}), '()\n', (112779, 112781), False, 'import Globals\n'), ((12197, 12231), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (12229, 12231), False, 'import Globals\n'), ((12273, 12332), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(0)', '(1)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 0, 1)\n', (12284, 12332), True, 'import numpy as np\n'), ((12494, 12552), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Something has gone wrong."""'], {}), "('Error', 'Something has gone wrong.')\n", (12514, 12552), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((13344, 13378), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', 
(13376, 13378), False, 'import Globals\n'), ((13423, 13482), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(0)', '(1)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 0, 1)\n', (13434, 13482), True, 'import numpy as np\n'), ((13648, 13682), 'numpy.swapaxes', 'np.swapaxes', (['dataset_swapped', '(1)', '(2)'], {}), '(dataset_swapped, 1, 2)\n', (13659, 13682), True, 'import numpy as np\n'), ((14492, 14526), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (14524, 14526), False, 'import Globals\n'), ((14570, 14629), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(0)', '(2)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 0, 2)\n', (14581, 14629), True, 'import numpy as np\n'), ((34316, 34350), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (34348, 34350), False, 'import Globals\n'), ((34392, 34451), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(0)', '(1)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 0, 1)\n', (34403, 34451), True, 'import numpy as np\n'), ((34613, 34671), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Something has gone wrong."""'], {}), "('Error', 'Something has gone wrong.')\n", (34633, 34671), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((35430, 35464), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (35462, 35464), False, 'import Globals\n'), ((35509, 35568), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(0)', '(1)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 0, 1)\n', (35520, 35568), True, 'import numpy as np\n'), ((35734, 35768), 
'numpy.swapaxes', 'np.swapaxes', (['dataset_swapped', '(1)', '(2)'], {}), '(dataset_swapped, 1, 2)\n', (35745, 35768), True, 'import numpy as np\n'), ((36545, 36579), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (36577, 36579), False, 'import Globals\n'), ((36623, 36682), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(0)', '(2)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 0, 2)\n', (36634, 36682), True, 'import numpy as np\n'), ((58848, 58950), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Invalid factor. Must be number.\n (Code: closeUploadDoseplans)"""'], {}), '(\'Error\',\n """Invalid factor. Must be number.\n (Code: closeUploadDoseplans)""")\n', (58868, 58950), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((99669, 99728), 'Globals.DVH_input_lateral_displacement.get', 'Globals.DVH_input_lateral_displacement.get', (['"""1.0"""', '"""end-1c"""'], {}), "('1.0', 'end-1c')\n", (99711, 99728), False, 'import Globals\n'), ((100179, 100243), 'Globals.DVH_input_longitudinal_displacement.get', 'Globals.DVH_input_longitudinal_displacement.get', (['"""1.0"""', '"""end-1c"""'], {}), "('1.0', 'end-1c')\n", (100226, 100243), False, 'import Globals\n'), ((100715, 100775), 'Globals.DVH_input_vertical_displacement.get', 'Globals.DVH_input_vertical_displacement.get', (['"""1.0"""', '"""end-1c"""'], {}), "('1.0', 'end-1c')\n", (100758, 100775), False, 'import Globals\n'), ((101489, 101534), 'Globals.DVH_slice_offset.get', 'Globals.DVH_slice_offset.get', (['"""1.0"""', '"""end-1c"""'], {}), "('1.0', 'end-1c')\n", (101517, 101534), False, 'import Globals\n'), ((104853, 105002), 'tkinter.Radiobutton', 'Radiobutton', (['choose_batch_frame'], {'text': '""""""', 
'bg': '"""#ffffff"""', 'cursor': '"""hand2"""', 'font': "('calibri', '14')", 'variable': 'Globals.DVH_film_batch', 'value': 'batch_cnt'}), "(choose_batch_frame, text='', bg='#ffffff', cursor='hand2', font\n =('calibri', '14'), variable=Globals.DVH_film_batch, value=batch_cnt)\n", (104864, 105002), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((110406, 110456), 'Globals.profiles_scanned_image_text_image.height', 'Globals.profiles_scanned_image_text_image.height', ([], {}), '()\n', (110454, 110456), False, 'import Globals\n'), ((110493, 110542), 'Globals.profiles_scanned_image_text_image.width', 'Globals.profiles_scanned_image_text_image.width', ([], {}), '()\n', (110540, 110542), False, 'import Globals\n'), ((110664, 110714), 'Globals.profiles_scanned_image_text_image.height', 'Globals.profiles_scanned_image_text_image.height', ([], {}), '()\n', (110712, 110714), False, 'import Globals\n'), ((110751, 110800), 'Globals.profiles_scanned_image_text_image.width', 'Globals.profiles_scanned_image_text_image.width', ([], {}), '()\n', (110798, 110800), False, 'import Globals\n'), ((111037, 111087), 'Globals.profiles_film_dose_map_text_image.height', 'Globals.profiles_film_dose_map_text_image.height', ([], {}), '()\n', (111085, 111087), False, 'import Globals\n'), ((111124, 111173), 'Globals.profiles_film_dose_map_text_image.width', 'Globals.profiles_film_dose_map_text_image.width', ([], {}), '()\n', (111171, 111173), False, 'import Globals\n'), ((111294, 111344), 'Globals.profiles_film_dose_map_text_image.height', 'Globals.profiles_film_dose_map_text_image.height', ([], {}), '()\n', (111342, 111344), False, 'import Globals\n'), ((111381, 111430), 'Globals.profiles_film_dose_map_text_image.width', 'Globals.profiles_film_dose_map_text_image.width', ([], {}), '()\n', 
(111428, 111430), False, 'import Globals\n'), ((13831, 13865), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (13863, 13865), False, 'import Globals\n'), ((13907, 13966), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(1)', '(2)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 1, 2)\n', (13918, 13966), True, 'import numpy as np\n'), ((14128, 14186), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Something has gone wrong."""'], {}), "('Error', 'Something has gone wrong.')\n", (14148, 14186), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((14778, 14812), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (14810, 14812), False, 'import Globals\n'), ((14857, 14916), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(1)', '(2)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 1, 2)\n', (14868, 14916), True, 'import numpy as np\n'), ((15926, 15960), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (15958, 15960), False, 'import Globals\n'), ((16004, 16063), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(1)', '(2)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 1, 2)\n', (16015, 16063), True, 'import numpy as np\n'), ((18395, 18453), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Something has gone wrong."""'], {}), "('Error', 'Something has gone wrong.')\n", (18415, 18453), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, 
VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((35917, 35951), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (35949, 35951), False, 'import Globals\n'), ((35993, 36052), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(1)', '(2)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 1, 2)\n', (36004, 36052), True, 'import numpy as np\n'), ((36214, 36272), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Something has gone wrong."""'], {}), "('Error', 'Something has gone wrong.')\n", (36234, 36272), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((36831, 36865), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (36863, 36865), False, 'import Globals\n'), ((36910, 36969), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(1)', '(2)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 1, 2)\n', (36921, 36969), True, 'import numpy as np\n'), ((37946, 37980), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (37978, 37980), False, 'import Globals\n'), ((38024, 38083), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(1)', '(2)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 1, 2)\n', (38035, 38083), True, 'import numpy as np\n'), ((40382, 40440), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Something has gone wrong."""'], {}), "('Error', 'Something has gone wrong.')\n", (40402, 40440), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, 
VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((99798, 99857), 'Globals.DVH_input_lateral_displacement.get', 'Globals.DVH_input_lateral_displacement.get', (['"""1.0"""', '"""end-1c"""'], {}), "('1.0', 'end-1c')\n", (99840, 99857), False, 'import Globals\n'), ((99961, 100065), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""The displacements must be numbers\n (Code: lateral displacement)"""'], {}), '(\'Error\',\n """The displacements must be numbers\n (Code: lateral displacement)""")\n', (99981, 100065), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((100313, 100377), 'Globals.DVH_input_longitudinal_displacement.get', 'Globals.DVH_input_longitudinal_displacement.get', (['"""1.0"""', '"""end-1c"""'], {}), "('1.0', 'end-1c')\n", (100360, 100377), False, 'import Globals\n'), ((100487, 100596), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""The displacements must be numbers\n (Code: longitudinal displacement)"""'], {}), '(\'Error\',\n """The displacements must be numbers\n (Code: longitudinal displacement)""")\n', (100507, 100596), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((100845, 100905), 'Globals.DVH_input_vertical_displacement.get', 'Globals.DVH_input_vertical_displacement.get', (['"""1.0"""', '"""end-1c"""'], {}), "('1.0', 'end-1c')\n", (100888, 100905), False, 'import Globals\n'), ((101011, 101116), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""The displacements must be numbers\n (Code: vertical displacement)"""'], {}), '(\'Error\',\n """The displacements must be 
numbers\n (Code: vertical displacement)""")\n', (101031, 101116), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((101604, 101649), 'Globals.DVH_slice_offset.get', 'Globals.DVH_slice_offset.get', (['"""1.0"""', '"""end-1c"""'], {}), "('1.0', 'end-1c')\n", (101632, 101649), False, 'import Globals\n'), ((101754, 101857), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Slice offset must be a number \n(Code: finishFilmMarkers(false)"""'], {}), '(\'Error\',\n """Slice offset must be a number \n(Code: finishFilmMarkers(false)""")\n', (101774, 101857), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((105510, 105538), 'Globals.DVH_film_batch.get', 'Globals.DVH_film_batch.get', ([], {}), '()\n', (105536, 105538), False, 'import Globals\n'), ((107552, 107572), 'matplotlib.cm.viridis', 'cm.viridis', (['img_film'], {}), '(img_film)\n', (107562, 107572), False, 'from matplotlib import cm\n'), ((15065, 15099), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (15097, 15099), False, 'import Globals\n'), ((15141, 15200), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(0)', '(1)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 0, 1)\n', (15152, 15200), True, 'import numpy as np\n'), ((15366, 15400), 'numpy.swapaxes', 'np.swapaxes', (['dataset_swapped', '(1)', '(2)'], {}), '(dataset_swapped, 1, 2)\n', (15377, 15400), True, 'import numpy as np\n'), ((15562, 15620), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Something has gone wrong."""'], 
{}), "('Error', 'Something has gone wrong.')\n", (15582, 15620), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((16212, 16246), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (16244, 16246), False, 'import Globals\n'), ((16291, 16350), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(0)', '(1)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 0, 1)\n', (16302, 16350), True, 'import numpy as np\n'), ((17360, 17394), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (17392, 17394), False, 'import Globals\n'), ((17438, 17497), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(0)', '(2)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 0, 2)\n', (17449, 17497), True, 'import numpy as np\n'), ((17663, 17697), 'numpy.swapaxes', 'np.swapaxes', (['dataset_swapped', '(0)', '(1)'], {}), '(dataset_swapped, 0, 1)\n', (17674, 17697), True, 'import numpy as np\n'), ((37118, 37152), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (37150, 37152), False, 'import Globals\n'), ((37194, 37253), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(0)', '(1)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 0, 1)\n', (37205, 37253), True, 'import numpy as np\n'), ((37419, 37453), 'numpy.swapaxes', 'np.swapaxes', (['dataset_swapped', '(1)', '(2)'], {}), '(dataset_swapped, 1, 2)\n', (37430, 37453), True, 'import numpy as np\n'), ((37615, 37673), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Something has gone wrong."""'], {}), "('Error', 'Something has gone wrong.')\n", (37635, 37673), False, 'from tkinter import filedialog, INSERT, 
DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((38232, 38266), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (38264, 38266), False, 'import Globals\n'), ((38311, 38370), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(0)', '(1)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 0, 1)\n', (38322, 38370), True, 'import numpy as np\n'), ((39347, 39381), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (39379, 39381), False, 'import Globals\n'), ((39425, 39484), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(0)', '(2)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 0, 2)\n', (39436, 39484), True, 'import numpy as np\n'), ((39650, 39684), 'numpy.swapaxes', 'np.swapaxes', (['dataset_swapped', '(0)', '(1)'], {}), '(dataset_swapped, 0, 1)\n', (39661, 39684), True, 'import numpy as np\n'), ((16499, 16533), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (16531, 16533), False, 'import Globals\n'), ((16575, 16634), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(0)', '(1)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 0, 1)\n', (16586, 16634), True, 'import numpy as np\n'), ((16800, 16834), 'numpy.swapaxes', 'np.swapaxes', (['dataset_swapped', '(0)', '(2)'], {}), '(dataset_swapped, 0, 2)\n', (16811, 16834), True, 'import numpy as np\n'), ((16996, 17054), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Something has gone wrong."""'], {}), "('Error', 'Something has gone wrong.')\n", (17016, 17054), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, 
GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((17846, 17880), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (17878, 17880), False, 'import Globals\n'), ((38519, 38553), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (38551, 38553), False, 'import Globals\n'), ((38595, 38654), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(0)', '(1)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 0, 1)\n', (38606, 38654), True, 'import numpy as np\n'), ((38820, 38854), 'numpy.swapaxes', 'np.swapaxes', (['dataset_swapped', '(0)', '(2)'], {}), '(dataset_swapped, 0, 2)\n', (38831, 38854), True, 'import numpy as np\n'), ((39016, 39074), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Something has gone wrong."""'], {}), "('Error', 'Something has gone wrong.')\n", (39036, 39074), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((39833, 39867), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (39865, 39867), False, 'import Globals\n'), ((17979, 18013), 'Globals.DCH_film_orientation.get', 'Globals.DCH_film_orientation.get', ([], {}), '()\n', (18011, 18013), False, 'import Globals\n'), ((18055, 18114), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(0)', '(2)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 0, 2)\n', (18066, 18114), True, 'import numpy as np\n'), ((18276, 18334), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Something has gone wrong."""'], {}), "('Error', 'Something has gone wrong.')\n", (18296, 18334), False, 'from tkinter import filedialog, INSERT, DISABLED, 
messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n'), ((39966, 40000), 'Globals.DVH_film_orientation.get', 'Globals.DVH_film_orientation.get', ([], {}), '()\n', (39998, 40000), False, 'import Globals\n'), ((40042, 40101), 'numpy.swapaxes', 'np.swapaxes', (['Globals.DVH_dataset_doseplan.pixel_array', '(0)', '(2)'], {}), '(Globals.DVH_dataset_doseplan.pixel_array, 0, 2)\n', (40053, 40101), True, 'import numpy as np\n'), ((40263, 40321), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Something has gone wrong."""'], {}), "('Error', 'Something has gone wrong.')\n", (40283, 40321), False, 'from tkinter import filedialog, INSERT, DISABLED, messagebox, NORMAL, simpledialog, PhotoImage, BOTH, Canvas, N, S, W, E, ALL, Frame, SUNKEN, Radiobutton, GROOVE, ACTIVE, FLAT, END, Scrollbar, HORIZONTAL, VERTICAL, ttk, TOP, RIGHT, LEFT, ttk\n')]
|
import numpy as np
import sys
import pandas as pd
import matplotlib.pyplot as plt
# read in csv as data frame
data_val = pd.read_csv('val_event.csv',delimiter=' ')
data_mu = pd.read_csv('ylength_mu_michel.csv',delimiter=' ')
data_pi = pd.read_csv('ylength_pi.csv',delimiter=' ')
mergedmu = data_val.merge(data_mu, on=['Subrun','Event','Type'])
mergedpi = data_val.merge(data_pi, on=['Subrun','Event','Type'])
# make histogram
plt.figure();
'''
Subtracting the probability from the prediction give a probability of being a muon from 0 to 1
I tried to match the color scheme of the hist you sent me, but you can change to whatever you want :)
'''
# first plot pions
# everything in [] are the conditions for how you select rows in pandas and the data.<whatever> is the column you select in data
plt.hist(np.abs(mergedpi.Pred[mergedpi.Type == 'Pion'] - mergedpi.Prob[mergedpi.Type == 'Pion']),bins=np.linspace(0,1,20),color='red',alpha=0.6,label='Pions');
# then plot muons
plt.hist(np.abs(mergedmu.Pred[mergedmu.Type == 'Muon'] - mergedmu.Prob[mergedmu.Type == 'Muon']),bins=np.linspace(0,1,20),color='blue',alpha=0.6,label='Muons');
plt.xlabel('Probability');
plt.ylabel('Event');
plt.legend(loc='upper center',frameon=False);
# uncomment to save
plt.savefig("probhist_primary.png")
|
[
"numpy.abs",
"pandas.read_csv",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.figure",
"numpy.linspace",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.savefig"
] |
[((123, 166), 'pandas.read_csv', 'pd.read_csv', (['"""val_event.csv"""'], {'delimiter': '""" """'}), "('val_event.csv', delimiter=' ')\n", (134, 166), True, 'import pandas as pd\n'), ((177, 228), 'pandas.read_csv', 'pd.read_csv', (['"""ylength_mu_michel.csv"""'], {'delimiter': '""" """'}), "('ylength_mu_michel.csv', delimiter=' ')\n", (188, 228), True, 'import pandas as pd\n'), ((239, 283), 'pandas.read_csv', 'pd.read_csv', (['"""ylength_pi.csv"""'], {'delimiter': '""" """'}), "('ylength_pi.csv', delimiter=' ')\n", (250, 283), True, 'import pandas as pd\n'), ((432, 444), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (442, 444), True, 'import matplotlib.pyplot as plt\n'), ((1140, 1165), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Probability"""'], {}), "('Probability')\n", (1150, 1165), True, 'import matplotlib.pyplot as plt\n'), ((1167, 1186), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Event"""'], {}), "('Event')\n", (1177, 1186), True, 'import matplotlib.pyplot as plt\n'), ((1188, 1233), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""upper center"""', 'frameon': '(False)'}), "(loc='upper center', frameon=False)\n", (1198, 1233), True, 'import matplotlib.pyplot as plt\n'), ((1255, 1290), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""probhist_primary.png"""'], {}), "('probhist_primary.png')\n", (1266, 1290), True, 'import matplotlib.pyplot as plt\n'), ((809, 900), 'numpy.abs', 'np.abs', (["(mergedpi.Pred[mergedpi.Type == 'Pion'] - mergedpi.Prob[mergedpi.Type ==\n 'Pion'])"], {}), "(mergedpi.Pred[mergedpi.Type == 'Pion'] - mergedpi.Prob[mergedpi.Type ==\n 'Pion'])\n", (815, 900), True, 'import numpy as np\n'), ((987, 1078), 'numpy.abs', 'np.abs', (["(mergedmu.Pred[mergedmu.Type == 'Muon'] - mergedmu.Prob[mergedmu.Type ==\n 'Muon'])"], {}), "(mergedmu.Pred[mergedmu.Type == 'Muon'] - mergedmu.Prob[mergedmu.Type ==\n 'Muon'])\n", (993, 1078), True, 'import numpy as np\n'), ((902, 923), 'numpy.linspace', 'np.linspace', (['(0)', 
'(1)', '(20)'], {}), '(0, 1, 20)\n', (913, 923), True, 'import numpy as np\n'), ((1080, 1101), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', '(20)'], {}), '(0, 1, 20)\n', (1091, 1101), True, 'import numpy as np\n')]
|
"""
Python Markdown
A Python implementation of <NAME>'s Markdown.
Documentation: https://python-markdown.github.io/
GitHub: https://github.com/Python-Markdown/markdown/
PyPI: https://pypi.org/project/Markdown/
Started by <NAME> (http://www.dwerg.net/).
Maintained for a few years by <NAME> (http://www.freewisdom.org).
Currently maintained by <NAME> (https://github.com/waylan),
<NAME> (https://github.com/mitya57) and <NAME> (https://github.com/facelessuser).
Copyright 2007-2019 The Python Markdown Project (v. 1.7 and later)
Copyright 2004, 2005, 2006 <NAME> (v. 0.2-1.6b)
Copyright 2004 <NAME> (the original version)
License: BSD (see LICENSE.md for details).
"""
from markdown.test_tools import TestCase
from markdown.extensions.toc import TocExtension
class TestTOC(TestCase):
# TODO: Move the rest of the TOC tests here.
def test_escaped_code(self):
self.assertMarkdownRenders(
self.dedent(
'''
[TOC]
# `<test>`
'''
),
self.dedent(
'''
<div class="toc">
<ul>
<li><a href="#test"><test></a></li>
</ul>
</div>
<h1 id="test"><code><test></code></h1>
'''
),
extensions=['toc']
)
def test_escaped_char_in_id(self):
self.assertMarkdownRenders(
r'# escaped\_character',
'<h1 id="escaped_character">escaped_character</h1>',
extensions=['toc']
)
def testAnchorLinkWithCustomClass(self):
self.assertMarkdownRenders(
self.dedent(
'''
# Header 1
## Header *2*
'''
),
self.dedent(
'''
<h1 id="header-1"><a class="custom" href="#header-1">Header 1</a></h1>
<h2 id="header-2"><a class="custom" href="#header-2">Header <em>2</em></a></h2>
'''
),
extensions=[TocExtension(anchorlink=True, anchorlink_class="custom")]
)
def testAnchorLinkWithCustomClasses(self):
self.assertMarkdownRenders(
self.dedent(
'''
# Header 1
## Header *2*
'''
),
self.dedent(
'''
<h1 id="header-1"><a class="custom1 custom2" href="#header-1">Header 1</a></h1>
<h2 id="header-2"><a class="custom1 custom2" href="#header-2">Header <em>2</em></a></h2>
'''
),
extensions=[TocExtension(anchorlink=True, anchorlink_class="custom1 custom2")]
)
def testPermalinkWithEmptyText(self):
self.assertMarkdownRenders(
'# Header',
'<h1 id="header">' # noqa
'Header' # noqa
'<a class="headerlink" href="#header" title="Permanent link"></a>' # noqa
'</h1>', # noqa
extensions=[TocExtension(permalink="")]
)
def testPermalinkWithCustomClass(self):
self.assertMarkdownRenders(
'# Header',
'<h1 id="header">' # noqa
'Header' # noqa
'<a class="custom" href="#header" title="Permanent link">¶</a>' # noqa
'</h1>', # noqa
extensions=[TocExtension(permalink=True, permalink_class="custom")]
)
def testPermalinkWithCustomClasses(self):
self.assertMarkdownRenders(
'# Header',
'<h1 id="header">' # noqa
'Header' # noqa
'<a class="custom1 custom2" href="#header" title="Permanent link">¶</a>' # noqa
'</h1>', # noqa
extensions=[TocExtension(permalink=True, permalink_class="custom1 custom2")]
)
def testPermalinkWithCustomTitle(self):
self.assertMarkdownRenders(
'# Header',
'<h1 id="header">' # noqa
'Header' # noqa
'<a class="headerlink" href="#header" title="custom">¶</a>' # noqa
'</h1>', # noqa
extensions=[TocExtension(permalink=True, permalink_title="custom")]
)
def testPermalinkWithEmptyTitle(self):
self.assertMarkdownRenders(
'# Header',
'<h1 id="header">' # noqa
'Header' # noqa
'<a class="headerlink" href="#header">¶</a>' # noqa
'</h1>', # noqa
extensions=[TocExtension(permalink=True, permalink_title="")]
)
def testPermalinkWithUnicodeInID(self):
from markdown.extensions.toc import slugify_unicode
self.assertMarkdownRenders(
'# Unicode ヘッダー',
'<h1 id="unicode-ヘッター">' # noqa
'Unicode ヘッダー' # noqa
'<a class="headerlink" href="#unicode-ヘッター" title="Permanent link">¶</a>' # noqa
'</h1>', # noqa
extensions=[TocExtension(permalink=True, slugify=slugify_unicode)]
)
def testPermalinkWithUnicodeTitle(self):
from markdown.extensions.toc import slugify_unicode
self.assertMarkdownRenders(
'# Unicode ヘッダー',
'<h1 id="unicode-ヘッター">' # noqa
'Unicode ヘッダー' # noqa
'<a class="headerlink" href="#unicode-ヘッター" title="パーマリンク">¶</a>' # noqa
'</h1>', # noqa
extensions=[TocExtension(permalink=True, permalink_title="パーマリンク", slugify=slugify_unicode)]
)
|
[
"markdown.extensions.toc.TocExtension"
] |
[((2107, 2163), 'markdown.extensions.toc.TocExtension', 'TocExtension', ([], {'anchorlink': '(True)', 'anchorlink_class': '"""custom"""'}), "(anchorlink=True, anchorlink_class='custom')\n", (2119, 2163), False, 'from markdown.extensions.toc import TocExtension\n'), ((2702, 2767), 'markdown.extensions.toc.TocExtension', 'TocExtension', ([], {'anchorlink': '(True)', 'anchorlink_class': '"""custom1 custom2"""'}), "(anchorlink=True, anchorlink_class='custom1 custom2')\n", (2714, 2767), False, 'from markdown.extensions.toc import TocExtension\n'), ((3270, 3296), 'markdown.extensions.toc.TocExtension', 'TocExtension', ([], {'permalink': '""""""'}), "(permalink='')\n", (3282, 3296), False, 'from markdown.extensions.toc import TocExtension\n'), ((3809, 3863), 'markdown.extensions.toc.TocExtension', 'TocExtension', ([], {'permalink': '(True)', 'permalink_class': '"""custom"""'}), "(permalink=True, permalink_class='custom')\n", (3821, 3863), False, 'from markdown.extensions.toc import TocExtension\n'), ((4414, 4477), 'markdown.extensions.toc.TocExtension', 'TocExtension', ([], {'permalink': '(True)', 'permalink_class': '"""custom1 custom2"""'}), "(permalink=True, permalink_class='custom1 custom2')\n", (4426, 4477), False, 'from markdown.extensions.toc import TocExtension\n'), ((4974, 5028), 'markdown.extensions.toc.TocExtension', 'TocExtension', ([], {'permalink': '(True)', 'permalink_title': '"""custom"""'}), "(permalink=True, permalink_title='custom')\n", (4986, 5028), False, 'from markdown.extensions.toc import TocExtension\n'), ((5524, 5572), 'markdown.extensions.toc.TocExtension', 'TocExtension', ([], {'permalink': '(True)', 'permalink_title': '""""""'}), "(permalink=True, permalink_title='')\n", (5536, 5572), False, 'from markdown.extensions.toc import TocExtension\n'), ((6195, 6248), 'markdown.extensions.toc.TocExtension', 'TocExtension', ([], {'permalink': '(True)', 'slugify': 'slugify_unicode'}), '(permalink=True, slugify=slugify_unicode)\n', (6207, 6248), False, 
'from markdown.extensions.toc import TocExtension\n'), ((6866, 6945), 'markdown.extensions.toc.TocExtension', 'TocExtension', ([], {'permalink': '(True)', 'permalink_title': '"""パーマリンク"""', 'slugify': 'slugify_unicode'}), "(permalink=True, permalink_title='パーマリンク', slugify=slugify_unicode)\n", (6878, 6945), False, 'from markdown.extensions.toc import TocExtension\n')]
|